1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
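/* Editor's note -- an illustrative sketch, not part of the original file:
   the glossary above in action on a typical x86_64 ELF target, using the
   .cfi_* directives this file emits when dwarf2out_do_cfi_asm () is true.
   The register names and the "CFA = SP + 8 at entry" rule are x86_64 ABI
   assumptions, not something this file requires.

	foo:
		.cfi_startproc			# opens a new FDE; CFA = %rsp + 8
		pushq	%rbp			# %rsp moves down by 8 ...
		.cfi_def_cfa_offset 16		# ... so now CFA = %rsp + 16
		.cfi_offset %rbp, -16		# %rbp is saved at CFA - 16
		movq	%rsp, %rbp
		.cfi_def_cfa_register %rbp	# CFA = %rbp + 16 from here on
		...
		.cfi_endproc  */
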
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types needs to be a
149 vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* Pointers to various DWARF2 sections. */
154 static GTY(()) section *debug_info_section;
155 static GTY(()) section *debug_skeleton_info_section;
156 static GTY(()) section *debug_abbrev_section;
157 static GTY(()) section *debug_skeleton_abbrev_section;
158 static GTY(()) section *debug_aranges_section;
159 static GTY(()) section *debug_addr_section;
160 static GTY(()) section *debug_macinfo_section;
161 static const char *debug_macinfo_section_name;
162 static unsigned macinfo_label_base = 1;
163 static GTY(()) section *debug_line_section;
164 static GTY(()) section *debug_skeleton_line_section;
165 static GTY(()) section *debug_loc_section;
166 static GTY(()) section *debug_pubnames_section;
167 static GTY(()) section *debug_pubtypes_section;
168 static GTY(()) section *debug_str_section;
169 static GTY(()) section *debug_line_str_section;
170 static GTY(()) section *debug_str_dwo_section;
171 static GTY(()) section *debug_str_offsets_section;
172 static GTY(()) section *debug_ranges_section;
173 static GTY(()) section *debug_frame_section;
174
175 /* Maximum size (in bytes) of an artificially generated label. */
176 #define MAX_ARTIFICIAL_LABEL_BYTES 40
177
178 /* According to the (draft) DWARF 3 specification, the initial length
179 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
180 bytes are 0xffffffff, followed by the length stored in the next 8
181 bytes.
182
183 However, the SGI/MIPS ABI uses an initial length which is equal to
184 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
185
186 #ifndef DWARF_INITIAL_LENGTH_SIZE
187 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
188 #endif
189
190 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
191 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
192 #endif
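
/* Editor's note, illustrative only: in 32-bit DWARF the initial length is a
   plain 4-byte value, while in 64-bit DWARF it is the 4-byte escape
   0xffffffff followed by an 8-byte length (.Lstart/.Lend below are
   placeholder labels):

	32-bit:	.4byte	.Lend - .Lstart
	64-bit:	.4byte	0xffffffff
		.8byte	.Lend - .Lstart

   This is why the frame output code below emits the 0xffffffff escape
   whenever DWARF_OFFSET_SIZE is 8.  */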
193
194 /* Round SIZE up to the nearest BOUNDARY. */
195 #define DWARF_ROUND(SIZE,BOUNDARY) \
196 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
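
/* Editor's sketch (#if 0, not compiled): a minimal check of how DWARF_ROUND
   behaves; gcc_assert comes from system.h above.  */
#if 0
static void
dwarf_round_example (void)
{
  gcc_assert (DWARF_ROUND (9, 4) == 12);  /* Rounds up to the next multiple.  */
  gcc_assert (DWARF_ROUND (8, 4) == 8);   /* Aligned values are unchanged.  */
  gcc_assert (DWARF_ROUND (0, 8) == 0);   /* Zero stays zero.  */
}
#endif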
197
198 /* CIE identifier. */
199 #if HOST_BITS_PER_WIDE_INT >= 64
200 #define DWARF_CIE_ID \
201 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
202 #else
203 #define DWARF_CIE_ID DW_CIE_ID
204 #endif
205
206
207 /* A vector for a table that contains frame description
208 information for each routine. */
209 #define NOT_INDEXED (-1U)
210 #define NO_INDEX_ASSIGNED (-2U)
211
212 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
213
214 struct GTY((for_user)) indirect_string_node {
215 const char *str;
216 unsigned int refcount;
217 enum dwarf_form form;
218 char *label;
219 unsigned int index;
220 };
221
222 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
223 {
224 typedef const char *compare_type;
225
226 static hashval_t hash (indirect_string_node *);
227 static bool equal (indirect_string_node *, const char *);
228 };
229
230 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
231
232 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
233
234 /* With split_debug_info, both the comp_dir and dwo_name go in the
235 main object file, rather than the dwo, similar to the force_direct
236 parameter elsewhere but with additional complications:
237
238 1) The string is needed in both the main object file and the dwo.
239 That is, the comp_dir and dwo_name will appear in both places.
240
241 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
242 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
243
244 3) GCC chooses the form to use late, depending on the size and
245 reference count.
246
247 Rather than forcing all the debug string handling functions and
248 callers to deal with these complications, simply use a separate,
249 special-cased string table for any attribute that should go in the
250 main object file. This limits the complexity to just the places
251 that need it. */
252
253 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
254
255 static GTY(()) int dw2_string_counter;
256
257 /* True if the compilation unit places functions in more than one section. */
258 static GTY(()) bool have_multiple_function_sections = false;
259
260 /* Whether the default text and cold text sections have been used at all. */
261 static GTY(()) bool text_section_used = false;
262 static GTY(()) bool cold_text_section_used = false;
263
264 /* The default cold text section. */
265 static GTY(()) section *cold_text_section;
266
267 /* The DIE for C++14 'auto' in a function return type. */
268 static GTY(()) dw_die_ref auto_die;
269
270 /* The DIE for C++14 'decltype(auto)' in a function return type. */
271 static GTY(()) dw_die_ref decltype_auto_die;
272
273 /* Forward declarations for functions defined in this file. */
274
275 static void output_call_frame_info (int);
276 static void dwarf2out_note_section_used (void);
277
278 /* Personality decl of current unit. Used only when assembler does not support
279 personality CFI. */
280 static GTY(()) rtx current_unit_personality;
281
282 /* Whether an eh_frame section is required. */
283 static GTY(()) bool do_eh_frame = false;
284
285 /* .debug_rnglists next index. */
286 static unsigned int rnglist_idx;
287
288 /* Data and reference forms for relocatable data. */
289 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
290 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
291
292 #ifndef DEBUG_FRAME_SECTION
293 #define DEBUG_FRAME_SECTION ".debug_frame"
294 #endif
295
296 #ifndef FUNC_BEGIN_LABEL
297 #define FUNC_BEGIN_LABEL "LFB"
298 #endif
299
300 #ifndef FUNC_END_LABEL
301 #define FUNC_END_LABEL "LFE"
302 #endif
303
304 #ifndef PROLOGUE_END_LABEL
305 #define PROLOGUE_END_LABEL "LPE"
306 #endif
307
308 #ifndef EPILOGUE_BEGIN_LABEL
309 #define EPILOGUE_BEGIN_LABEL "LEB"
310 #endif
311
312 #ifndef FRAME_BEGIN_LABEL
313 #define FRAME_BEGIN_LABEL "Lframe"
314 #endif
315 #define CIE_AFTER_SIZE_LABEL "LSCIE"
316 #define CIE_END_LABEL "LECIE"
317 #define FDE_LABEL "LSFDE"
318 #define FDE_AFTER_SIZE_LABEL "LASFDE"
319 #define FDE_END_LABEL "LEFDE"
320 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
321 #define LINE_NUMBER_END_LABEL "LELT"
322 #define LN_PROLOG_AS_LABEL "LASLTP"
323 #define LN_PROLOG_END_LABEL "LELTP"
324 #define DIE_LABEL_PREFIX "DW"
325 \f
326 /* Match the base name of a file to the base name of a compilation unit. */
327
328 static int
329 matches_main_base (const char *path)
330 {
331 /* Cache the last query. */
332 static const char *last_path = NULL;
333 static int last_match = 0;
334 if (path != last_path)
335 {
336 const char *base;
337 int length = base_of_path (path, &base);
338 last_path = path;
339 last_match = (length == main_input_baselength
340 && memcmp (base, main_input_basename, length) == 0);
341 }
342 return last_match;
343 }
344
345 #ifdef DEBUG_DEBUG_STRUCT
346
347 static int
348 dump_struct_debug (tree type, enum debug_info_usage usage,
349 enum debug_struct_file criterion, int generic,
350 int matches, int result)
351 {
352 /* Find the type name. */
353 tree type_decl = TYPE_STUB_DECL (type);
354 tree t = type_decl;
355 const char *name = 0;
356 if (TREE_CODE (t) == TYPE_DECL)
357 t = DECL_NAME (t);
358 if (t)
359 name = IDENTIFIER_POINTER (t);
360
361 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
362 criterion,
363 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
364 matches ? "bas" : "hdr",
365 generic ? "gen" : "ord",
366 usage == DINFO_USAGE_DFN ? ";" :
367 usage == DINFO_USAGE_DIR_USE ? "." : "*",
368 result,
369 (void*) type_decl, name);
370 return result;
371 }
372 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
373 dump_struct_debug (type, usage, criterion, generic, matches, result)
374
375 #else
376
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 (result)
379
380 #endif
381
382 /* Get the number of HOST_WIDE_INTs needed to represent the precision
383 of the number. Some constants have a large uniform precision, so
384 we get the precision needed for the actual value of the number. */
385
386 static unsigned int
387 get_full_len (const wide_int &op)
388 {
389 int prec = wi::min_precision (op, UNSIGNED);
390 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
391 / HOST_BITS_PER_WIDE_INT);
392 }
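
/* Editor's example (assuming a 64-bit HOST_WIDE_INT): a constant whose
   minimum precision is 64 bits needs one element, (64 + 63) / 64 == 1,
   while a 65-bit constant needs two, (65 + 63) / 64 == 2.  */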
393
394 static bool
395 should_emit_struct_debug (tree type, enum debug_info_usage usage)
396 {
397 enum debug_struct_file criterion;
398 tree type_decl;
399 bool generic = lang_hooks.types.generic_p (type);
400
401 if (generic)
402 criterion = debug_struct_generic[usage];
403 else
404 criterion = debug_struct_ordinary[usage];
405
406 if (criterion == DINFO_STRUCT_FILE_NONE)
407 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
408 if (criterion == DINFO_STRUCT_FILE_ANY)
409 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
410
411 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
412
413 if (type_decl != NULL)
414 {
415 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
419 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
420 }
421
422 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
423 }
424 \f
425 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
426 switch to the data section instead, and write out a synthetic start label
427 for collect2 the first time around. */
428
429 static void
430 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
431 {
432 if (eh_frame_section == 0)
433 {
434 int flags;
435
436 if (EH_TABLES_CAN_BE_READ_ONLY)
437 {
438 int fde_encoding;
439 int per_encoding;
440 int lsda_encoding;
441
442 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
443 /*global=*/0);
444 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
445 /*global=*/1);
446 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
447 /*global=*/0);
448 flags = ((! flag_pic
449 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
450 && (fde_encoding & 0x70) != DW_EH_PE_aligned
451 && (per_encoding & 0x70) != DW_EH_PE_absptr
452 && (per_encoding & 0x70) != DW_EH_PE_aligned
453 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
454 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
455 ? 0 : SECTION_WRITE);
456 }
457 else
458 flags = SECTION_WRITE;
459
460 #ifdef EH_FRAME_SECTION_NAME
461 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
462 #else
463 eh_frame_section = ((flags == SECTION_WRITE)
464 ? data_section : readonly_data_section);
465 #endif /* EH_FRAME_SECTION_NAME */
466 }
467
468 switch_to_section (eh_frame_section);
469
470 #ifdef EH_FRAME_THROUGH_COLLECT2
471 /* We have no special eh_frame section. Emit special labels to guide
472 collect2. */
473 if (!back)
474 {
475 tree label = get_file_function_name ("F");
476 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
477 targetm.asm_out.globalize_label (asm_out_file,
478 IDENTIFIER_POINTER (label));
479 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
480 }
481 #endif
482 }
483
484 /* Switch [BACK] to the eh or debug frame table section, depending on
485 FOR_EH. */
486
487 static void
488 switch_to_frame_table_section (int for_eh, bool back)
489 {
490 if (for_eh)
491 switch_to_eh_frame_section (back);
492 else
493 {
494 if (!debug_frame_section)
495 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
496 SECTION_DEBUG, NULL);
497 switch_to_section (debug_frame_section);
498 }
499 }
500
501 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
502
503 enum dw_cfi_oprnd_type
504 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
505 {
506 switch (cfi)
507 {
508 case DW_CFA_nop:
509 case DW_CFA_GNU_window_save:
510 case DW_CFA_remember_state:
511 case DW_CFA_restore_state:
512 return dw_cfi_oprnd_unused;
513
514 case DW_CFA_set_loc:
515 case DW_CFA_advance_loc1:
516 case DW_CFA_advance_loc2:
517 case DW_CFA_advance_loc4:
518 case DW_CFA_MIPS_advance_loc8:
519 return dw_cfi_oprnd_addr;
520
521 case DW_CFA_offset:
522 case DW_CFA_offset_extended:
523 case DW_CFA_def_cfa:
524 case DW_CFA_offset_extended_sf:
525 case DW_CFA_def_cfa_sf:
526 case DW_CFA_restore:
527 case DW_CFA_restore_extended:
528 case DW_CFA_undefined:
529 case DW_CFA_same_value:
530 case DW_CFA_def_cfa_register:
531 case DW_CFA_register:
532 case DW_CFA_expression:
533 case DW_CFA_val_expression:
534 return dw_cfi_oprnd_reg_num;
535
536 case DW_CFA_def_cfa_offset:
537 case DW_CFA_GNU_args_size:
538 case DW_CFA_def_cfa_offset_sf:
539 return dw_cfi_oprnd_offset;
540
541 case DW_CFA_def_cfa_expression:
542 return dw_cfi_oprnd_loc;
543
544 default:
545 gcc_unreachable ();
546 }
547 }
548
549 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
550
551 enum dw_cfi_oprnd_type
552 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
553 {
554 switch (cfi)
555 {
556 case DW_CFA_def_cfa:
557 case DW_CFA_def_cfa_sf:
558 case DW_CFA_offset:
559 case DW_CFA_offset_extended_sf:
560 case DW_CFA_offset_extended:
561 return dw_cfi_oprnd_offset;
562
563 case DW_CFA_register:
564 return dw_cfi_oprnd_reg_num;
565
566 case DW_CFA_expression:
567 case DW_CFA_val_expression:
568 return dw_cfi_oprnd_loc;
569
570 case DW_CFA_def_cfa_expression:
571 return dw_cfi_oprnd_cfa_loc;
572
573 default:
574 return dw_cfi_oprnd_unused;
575 }
576 }
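
/* Editor's sketch (#if 0, illustrative): how the two descriptors above
   combine for concrete instructions.  DW_CFA_offset carries a register
   number in operand 1 and an offset in operand 2, so the GTY machinery
   walks exactly those union members.  */
#if 0
static void
cfi_oprnd_desc_example (void)
{
  gcc_assert (dw_cfi_oprnd1_desc (DW_CFA_offset) == dw_cfi_oprnd_reg_num);
  gcc_assert (dw_cfi_oprnd2_desc (DW_CFA_offset) == dw_cfi_oprnd_offset);
  gcc_assert (dw_cfi_oprnd1_desc (DW_CFA_def_cfa_expression)
	      == dw_cfi_oprnd_loc);
  gcc_assert (dw_cfi_oprnd2_desc (DW_CFA_def_cfa_expression)
	      == dw_cfi_oprnd_cfa_loc);
}
#endif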
577
578 /* Output one FDE. */
579
580 static void
581 output_fde (dw_fde_ref fde, bool for_eh, bool second,
582 char *section_start_label, int fde_encoding, char *augmentation,
583 bool any_lsda_needed, int lsda_encoding)
584 {
585 const char *begin, *end;
586 static unsigned int j;
587 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
588
589 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
590 /* empty */ 0);
591 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
592 for_eh + j);
593 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
594 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
595 if (!XCOFF_DEBUGGING_INFO || for_eh)
596 {
597 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
598 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
599 " indicating 64-bit DWARF extension");
600 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
601 "FDE Length");
602 }
603 ASM_OUTPUT_LABEL (asm_out_file, l1);
604
605 if (for_eh)
606 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
607 else
608 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
609 debug_frame_section, "FDE CIE offset");
610
611 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
612 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
613
614 if (for_eh)
615 {
616 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
617 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
618 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
619 "FDE initial location");
620 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
621 end, begin, "FDE address range");
622 }
623 else
624 {
625 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
626 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
627 }
628
629 if (augmentation[0])
630 {
631 if (any_lsda_needed)
632 {
633 int size = size_of_encoded_value (lsda_encoding);
634
635 if (lsda_encoding == DW_EH_PE_aligned)
636 {
637 int offset = ( 4 /* Length */
638 + 4 /* CIE offset */
639 + 2 * size_of_encoded_value (fde_encoding)
640 + 1 /* Augmentation size */ );
641 int pad = -offset & (PTR_SIZE - 1);
642
643 size += pad;
644 gcc_assert (size_of_uleb128 (size) == 1);
645 }
646
647 dw2_asm_output_data_uleb128 (size, "Augmentation size");
648
649 if (fde->uses_eh_lsda)
650 {
651 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
652 fde->funcdef_number);
653 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
654 gen_rtx_SYMBOL_REF (Pmode, l1),
655 false,
656 "Language Specific Data Area");
657 }
658 else
659 {
660 if (lsda_encoding == DW_EH_PE_aligned)
661 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
662 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
663 "Language Specific Data Area (none)");
664 }
665 }
666 else
667 dw2_asm_output_data_uleb128 (0, "Augmentation size");
668 }
669
670 /* Loop through the Call Frame Instructions associated with this FDE. */
671 fde->dw_fde_current_label = begin;
672 {
673 size_t from, until, i;
674
675 from = 0;
676 until = vec_safe_length (fde->dw_fde_cfi);
677
678 if (fde->dw_fde_second_begin == NULL)
679 ;
680 else if (!second)
681 until = fde->dw_fde_switch_cfi_index;
682 else
683 from = fde->dw_fde_switch_cfi_index;
684
685 for (i = from; i < until; i++)
686 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
687 }
688
689 /* If we are to emit a ref/link from function bodies to their frame tables,
690 do it now. This is typically done to make sure that tables
691 associated with functions are dragged with them and not discarded by
692 link-time garbage collection. We need to do this on a per-function
693 basis to cope with -ffunction-sections. */
694
695 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
696 /* Switch to the function section, emit the ref to the tables, and
697 switch *back* into the table section. */
698 switch_to_section (function_section (fde->decl));
699 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
700 switch_to_frame_table_section (for_eh, true);
701 #endif
702
703 /* Pad the FDE out to an address sized boundary. */
704 ASM_OUTPUT_ALIGN (asm_out_file,
705 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
706 ASM_OUTPUT_LABEL (asm_out_file, l2);
707
708 j += 2;
709 }
710
711 /* Return true if frame description entry FDE is needed for EH. */
712
713 static bool
714 fde_needed_for_eh_p (dw_fde_ref fde)
715 {
716 if (flag_asynchronous_unwind_tables)
717 return true;
718
719 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
720 return true;
721
722 if (fde->uses_eh_lsda)
723 return true;
724
725 /* If exceptions are enabled, we have collected nothrow info. */
726 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
727 return false;
728
729 return true;
730 }
731
732 /* Output the call frame information: it records how the CFA is
733 calculated throughout each function and where the saved registers
734 are located. */
735
736 static void
737 output_call_frame_info (int for_eh)
738 {
739 unsigned int i;
740 dw_fde_ref fde;
741 dw_cfi_ref cfi;
742 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
743 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
744 bool any_lsda_needed = false;
745 char augmentation[6];
746 int augmentation_size;
747 int fde_encoding = DW_EH_PE_absptr;
748 int per_encoding = DW_EH_PE_absptr;
749 int lsda_encoding = DW_EH_PE_absptr;
750 int return_reg;
751 rtx personality = NULL;
752 int dw_cie_version;
753
754 /* Don't emit a CIE if there won't be any FDEs. */
755 if (!fde_vec)
756 return;
757
758 /* Nothing to do if the assembler's doing it all. */
759 if (dwarf2out_do_cfi_asm ())
760 return;
761
762 /* If we don't have any functions we'll want to unwind out of, don't emit
763 any EH unwind information. If we make FDEs linkonce, we may have to
764 emit an empty label for an FDE that wouldn't otherwise be emitted. We
765 want to avoid having an FDE kept around when the function it refers to
766 is discarded. Example where this matters: a primary function template
767 in C++ requires EH information, an explicit specialization doesn't. */
768 if (for_eh)
769 {
770 bool any_eh_needed = false;
771
772 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
773 {
774 if (fde->uses_eh_lsda)
775 any_eh_needed = any_lsda_needed = true;
776 else if (fde_needed_for_eh_p (fde))
777 any_eh_needed = true;
778 else if (TARGET_USES_WEAK_UNWIND_INFO)
779 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
780 }
781
782 if (!any_eh_needed)
783 return;
784 }
785
786 /* We're going to be generating comments, so turn on app. */
787 if (flag_debug_asm)
788 app_enable ();
789
790 /* Switch to the proper frame section, first time. */
791 switch_to_frame_table_section (for_eh, false);
792
793 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
794 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
795
796 /* Output the CIE. */
797 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
798 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
799 if (!XCOFF_DEBUGGING_INFO || for_eh)
800 {
801 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
802 dw2_asm_output_data (4, 0xffffffff,
803 "Initial length escape value indicating 64-bit DWARF extension");
804 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
805 "Length of Common Information Entry");
806 }
807 ASM_OUTPUT_LABEL (asm_out_file, l1);
808
809 /* Now that the CIE pointer is PC-relative for EH,
810 use 0 to identify the CIE. */
811 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
812 (for_eh ? 0 : DWARF_CIE_ID),
813 "CIE Identifier Tag");
814
815 /* Use CIE version 3 for DWARF 3; allow DWARF 2 to continue to
816 use CIE version 1, unless that would produce incorrect results
817 due to overflowing the return register column. */
818 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
819 dw_cie_version = 1;
820 if (return_reg >= 256 || dwarf_version > 2)
821 dw_cie_version = 3;
822 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
823
824 augmentation[0] = 0;
825 augmentation_size = 0;
826
827 personality = current_unit_personality;
828 if (for_eh)
829 {
830 char *p;
831
832 /* Augmentation:
833 z Indicates that a uleb128 is present to size the
834 augmentation section.
835 L Indicates the encoding (and thus presence) of
836 an LSDA pointer in the FDE augmentation.
837 R Indicates a non-default pointer encoding for
838 FDE code pointers.
839 P Indicates the presence of an encoding + language
840 personality routine in the CIE augmentation. */
841
842 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
843 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
844 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
845
846 p = augmentation + 1;
847 if (personality)
848 {
849 *p++ = 'P';
850 augmentation_size += 1 + size_of_encoded_value (per_encoding);
851 assemble_external_libcall (personality);
852 }
853 if (any_lsda_needed)
854 {
855 *p++ = 'L';
856 augmentation_size += 1;
857 }
858 if (fde_encoding != DW_EH_PE_absptr)
859 {
860 *p++ = 'R';
861 augmentation_size += 1;
862 }
863 if (p > augmentation + 1)
864 {
865 augmentation[0] = 'z';
866 *p = '\0';
867 }
868
869 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
870 if (personality && per_encoding == DW_EH_PE_aligned)
871 {
872 int offset = ( 4 /* Length */
873 + 4 /* CIE Id */
874 + 1 /* CIE version */
875 + strlen (augmentation) + 1 /* Augmentation */
876 + size_of_uleb128 (1) /* Code alignment */
877 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
878 + 1 /* RA column */
879 + 1 /* Augmentation size */
880 + 1 /* Personality encoding */ );
881 int pad = -offset & (PTR_SIZE - 1);
882
883 augmentation_size += pad;
884
885 /* Augmentations should be small, so there's scarce need to
886 iterate for a solution. Die if we exceed one uleb128 byte. */
887 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
888 }
889 }
890
891 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
892 if (dw_cie_version >= 4)
893 {
894 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
895 dw2_asm_output_data (1, 0, "CIE Segment Size");
896 }
897 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
898 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
899 "CIE Data Alignment Factor");
900
901 if (dw_cie_version == 1)
902 dw2_asm_output_data (1, return_reg, "CIE RA Column");
903 else
904 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
905
906 if (augmentation[0])
907 {
908 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
909 if (personality)
910 {
911 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
912 eh_data_format_name (per_encoding));
913 dw2_asm_output_encoded_addr_rtx (per_encoding,
914 personality,
915 true, NULL);
916 }
917
918 if (any_lsda_needed)
919 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
920 eh_data_format_name (lsda_encoding));
921
922 if (fde_encoding != DW_EH_PE_absptr)
923 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
924 eh_data_format_name (fde_encoding));
925 }
926
927 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
928 output_cfi (cfi, NULL, for_eh);
929
930 /* Pad the CIE out to an address sized boundary. */
931 ASM_OUTPUT_ALIGN (asm_out_file,
932 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
933 ASM_OUTPUT_LABEL (asm_out_file, l2);
934
935 /* Loop through all of the FDEs. */
936 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
937 {
938 unsigned int k;
939
940 /* Don't emit EH unwind info for leaf functions that don't need it. */
941 if (for_eh && !fde_needed_for_eh_p (fde))
942 continue;
943
944 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
945 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
946 augmentation, any_lsda_needed, lsda_encoding);
947 }
948
949 if (for_eh && targetm.terminate_dw2_eh_frame_info)
950 dw2_asm_output_data (4, 0, "End of Table");
951
952 /* Turn off app to make assembly quicker. */
953 if (flag_debug_asm)
954 app_disable ();
955 }
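
/* Editor's sketch (illustrative, not verbatim output): for a unit with one
   function, the two routines above lay out .debug_frame roughly as follows,
   using the label prefixes defined near the top of this file (the exact
   assembler spelling is target dependent).

	Lframe0:				# FRAME_BEGIN_LABEL
		.4byte	LECIE0-LSCIE0		# CIE length
	LSCIE0:					# CIE_AFTER_SIZE_LABEL
		...				# CIE id, version, augmentation,
						# alignment factors, RA column,
						# initial CFI, padding
	LECIE0:					# CIE_END_LABEL
	LSFDE0:					# FDE_LABEL
		.4byte	LEFDE0-LASFDE0		# FDE length
	LASFDE0:				# FDE_AFTER_SIZE_LABEL
		.4byte	Lframe0			# CIE pointer
		...				# initial location, address range,
						# per-FDE CFI, padding
	LEFDE0:					# FDE_END_LABEL

   The .eh_frame variant mainly differs in using label suffix 1, a zero CIE
   id and the pc-relative encodings chosen above.  */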
956
957 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
958
959 static void
960 dwarf2out_do_cfi_startproc (bool second)
961 {
962 int enc;
963 rtx ref;
964
965 fprintf (asm_out_file, "\t.cfi_startproc\n");
966
967 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
968 eh unwinders. */
969 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
970 return;
971
972 rtx personality = get_personality_function (current_function_decl);
973
974 if (personality)
975 {
976 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
977 ref = personality;
978
979 /* ??? The GAS support isn't entirely consistent. We have to
980 handle indirect support ourselves, but PC-relative is done
981 in the assembler. Further, the assembler can't handle any
982 of the weirder relocation types. */
983 if (enc & DW_EH_PE_indirect)
984 ref = dw2_force_const_mem (ref, true);
985
986 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
987 output_addr_const (asm_out_file, ref);
988 fputc ('\n', asm_out_file);
989 }
990
991 if (crtl->uses_eh_lsda)
992 {
993 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
994
995 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
996 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
997 current_function_funcdef_no);
998 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
999 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1000
1001 if (enc & DW_EH_PE_indirect)
1002 ref = dw2_force_const_mem (ref, true);
1003
1004 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1005 output_addr_const (asm_out_file, ref);
1006 fputc ('\n', asm_out_file);
1007 }
1008 }
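
/* Editor's illustration (assumed x86_64 ELF output, with the common
   encodings 0x9b = indirect|pcrel|sdata4 and 0x1b = pcrel|sdata4): for a
   function with a C++ personality and an LSDA, the routine above prints
   something like

	.cfi_startproc
	.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
	.cfi_lsda 0x1b,.LLSDA0

   leaving it to the assembler to build the CIE augmentation data.  */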
1009
1010 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1011 this allocation may be done before pass_final. */
1012
1013 dw_fde_ref
1014 dwarf2out_alloc_current_fde (void)
1015 {
1016 dw_fde_ref fde;
1017
1018 fde = ggc_cleared_alloc<dw_fde_node> ();
1019 fde->decl = current_function_decl;
1020 fde->funcdef_number = current_function_funcdef_no;
1021 fde->fde_index = vec_safe_length (fde_vec);
1022 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1023 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1024 fde->nothrow = crtl->nothrow;
1025 fde->drap_reg = INVALID_REGNUM;
1026 fde->vdrap_reg = INVALID_REGNUM;
1027
1028 /* Record the FDE associated with this function. */
1029 cfun->fde = fde;
1030 vec_safe_push (fde_vec, fde);
1031
1032 return fde;
1033 }
1034
1035 /* Output a marker (i.e. a label) for the beginning of a function, before
1036 the prologue. */
1037
1038 void
1039 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1040 unsigned int column ATTRIBUTE_UNUSED,
1041 const char *file ATTRIBUTE_UNUSED)
1042 {
1043 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1044 char * dup_label;
1045 dw_fde_ref fde;
1046 section *fnsec;
1047 bool do_frame;
1048
1049 current_function_func_begin_label = NULL;
1050
1051 do_frame = dwarf2out_do_frame ();
1052
1053 /* ??? current_function_func_begin_label is also used by except.c for
1054 call-site information. We must emit this label if it might be used. */
1055 if (!do_frame
1056 && (!flag_exceptions
1057 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1058 return;
1059
1060 fnsec = function_section (current_function_decl);
1061 switch_to_section (fnsec);
1062 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1063 current_function_funcdef_no);
1064 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1065 current_function_funcdef_no);
1066 dup_label = xstrdup (label);
1067 current_function_func_begin_label = dup_label;
1068
1069 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1070 if (!do_frame)
1071 return;
1072
1073 /* Unlike the debug version, the EH version of frame unwind info is a per-
1074 function setting so we need to record whether we need it for the unit. */
1075 do_eh_frame |= dwarf2out_do_eh_frame ();
1076
1077 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1078 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1079 would include pass_dwarf2_frame. If we've not created the FDE yet,
1080 do so now. */
1081 fde = cfun->fde;
1082 if (fde == NULL)
1083 fde = dwarf2out_alloc_current_fde ();
1084
1085 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1086 fde->dw_fde_begin = dup_label;
1087 fde->dw_fde_current_label = dup_label;
1088 fde->in_std_section = (fnsec == text_section
1089 || (cold_text_section && fnsec == cold_text_section));
1090
1091 /* We only want to output line number information for the genuine dwarf2
1092 prologue case, not the eh frame case. */
1093 #ifdef DWARF2_DEBUGGING_INFO
1094 if (file)
1095 dwarf2out_source_line (line, column, file, 0, true);
1096 #endif
1097
1098 if (dwarf2out_do_cfi_asm ())
1099 dwarf2out_do_cfi_startproc (false);
1100 else
1101 {
1102 rtx personality = get_personality_function (current_function_decl);
1103 if (!current_unit_personality)
1104 current_unit_personality = personality;
1105
1106 /* We cannot keep a per-function current personality because, without
1107 CFI asm, there is no current function anymore at the point where we
1108 emit the CFI data. */
1109 if (personality && current_unit_personality != personality)
1110 sorry ("multiple EH personalities are supported only with assemblers "
1111 "supporting .cfi_personality directive");
1112 }
1113 }
1114
1115 /* Output a marker (i.e. a label) for the end of the generated code
1116 for a function prologue. This gets called *after* the prologue code has
1117 been generated. */
1118
1119 void
1120 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1121 const char *file ATTRIBUTE_UNUSED)
1122 {
1123 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1124
1125 /* Output a label to mark the end of the prologue code generated for
1126 this function. */
1127 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1128 current_function_funcdef_no);
1129 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1130 current_function_funcdef_no);
1131 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1132 }
1133
1134 /* Output a marker (i.e. a label) for the beginning of the generated code
1135 for a function epilogue. This gets called *before* the epilogue code has
1136 been generated. */
1137
1138 void
1139 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1140 const char *file ATTRIBUTE_UNUSED)
1141 {
1142 dw_fde_ref fde = cfun->fde;
1143 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1144
1145 if (fde->dw_fde_vms_begin_epilogue)
1146 return;
1147
1148 /* Output a label to mark the start of the epilogue code generated for
1149 this function. */
1150 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1151 current_function_funcdef_no);
1152 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1153 current_function_funcdef_no);
1154 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1155 }
1156
1157 /* Output a marker (i.e. a label) for the absolute end of the generated code
1158 for a function definition. This gets called *after* the epilogue code has
1159 been generated. */
1160
1161 void
1162 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1163 const char *file ATTRIBUTE_UNUSED)
1164 {
1165 dw_fde_ref fde;
1166 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1167
1168 last_var_location_insn = NULL;
1169 cached_next_real_insn = NULL;
1170
1171 if (dwarf2out_do_cfi_asm ())
1172 fprintf (asm_out_file, "\t.cfi_endproc\n");
1173
1174 /* Output a label to mark the endpoint of the code generated for this
1175 function. */
1176 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1177 current_function_funcdef_no);
1178 ASM_OUTPUT_LABEL (asm_out_file, label);
1179 fde = cfun->fde;
1180 gcc_assert (fde != NULL);
1181 if (fde->dw_fde_second_begin == NULL)
1182 fde->dw_fde_end = xstrdup (label);
1183 }
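
/* Editor's sketch (illustrative; the ".L" prefix is target dependent): the
   begin/end hooks above bracket each function body with internal labels

	.LFB0:		# FUNC_BEGIN_LABEL, becomes fde->dw_fde_begin
		...	# prologue, body, epilogue
	.LFE0:		# FUNC_END_LABEL, becomes fde->dw_fde_end

   and the FDE emitted later covers exactly that [begin, end) range.  */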
1184
1185 void
1186 dwarf2out_frame_finish (void)
1187 {
1188 /* Output call frame information. */
1189 if (targetm.debug_unwind_info () == UI_DWARF2)
1190 output_call_frame_info (0);
1191
1192 /* Output another copy for the unwinder. */
1193 if (do_eh_frame)
1194 output_call_frame_info (1);
1195 }
1196
1197 /* Note that the current function section is being used for code. */
1198
1199 static void
1200 dwarf2out_note_section_used (void)
1201 {
1202 section *sec = current_function_section ();
1203 if (sec == text_section)
1204 text_section_used = true;
1205 else if (sec == cold_text_section)
1206 cold_text_section_used = true;
1207 }
1208
1209 static void var_location_switch_text_section (void);
1210 static void set_cur_line_info_table (section *);
1211
1212 void
1213 dwarf2out_switch_text_section (void)
1214 {
1215 section *sect;
1216 dw_fde_ref fde = cfun->fde;
1217
1218 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1219
1220 if (!in_cold_section_p)
1221 {
1222 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1223 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1224 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1225 }
1226 else
1227 {
1228 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1229 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1230 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1231 }
1232 have_multiple_function_sections = true;
1233
1234 /* There is no need to mark used sections when not debugging. */
1235 if (cold_text_section != NULL)
1236 dwarf2out_note_section_used ();
1237
1238 if (dwarf2out_do_cfi_asm ())
1239 fprintf (asm_out_file, "\t.cfi_endproc\n");
1240
1241 /* Now do the real section switch. */
1242 sect = current_function_section ();
1243 switch_to_section (sect);
1244
1245 fde->second_in_std_section
1246 = (sect == text_section
1247 || (cold_text_section && sect == cold_text_section));
1248
1249 if (dwarf2out_do_cfi_asm ())
1250 dwarf2out_do_cfi_startproc (true);
1251
1252 var_location_switch_text_section ();
1253
1254 if (cold_text_section != NULL)
1255 set_cur_line_info_table (sect);
1256 }
1257 \f
1258 /* And now, the subset of the debugging information support code necessary
1259 for emitting location expressions. */
1260
1261 /* Data about a single source file. */
1262 struct GTY((for_user)) dwarf_file_data {
1263 const char * filename;
1264 int emitted_number;
1265 };
1266
1267 /* Describe an entry into the .debug_addr section. */
1268
1269 enum ate_kind {
1270 ate_kind_rtx,
1271 ate_kind_rtx_dtprel,
1272 ate_kind_label
1273 };
1274
1275 struct GTY((for_user)) addr_table_entry {
1276 enum ate_kind kind;
1277 unsigned int refcount;
1278 unsigned int index;
1279 union addr_table_entry_struct_union
1280 {
1281 rtx GTY ((tag ("0"))) rtl;
1282 char * GTY ((tag ("1"))) label;
1283 }
1284 GTY ((desc ("%1.kind"))) addr;
1285 };
1286
1287 typedef unsigned int var_loc_view;
1288
1289 /* Location lists are ranges + location descriptions for that range,
1290 so you can track variables that are in different places over
1291 their entire life. */
1292 typedef struct GTY(()) dw_loc_list_struct {
1293 dw_loc_list_ref dw_loc_next;
1294 const char *begin; /* Label and addr_entry for start of range */
1295 addr_table_entry *begin_entry;
1296 const char *end; /* Label for end of range */
1297 char *ll_symbol; /* Label for beginning of location list.
1298 Only on head of list. */
1299 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1300 const char *section; /* Section this loclist is relative to */
1301 dw_loc_descr_ref expr;
1302 var_loc_view vbegin, vend;
1303 hashval_t hash;
1304 /* True if all addresses in this and subsequent lists are known to be
1305 resolved. */
1306 bool resolved_addr;
1307 /* True if this list has been replaced by dw_loc_next. */
1308 bool replaced;
1309 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1310 section. */
1311 unsigned char emitted : 1;
1312 /* True if hash field is index rather than hash value. */
1313 unsigned char num_assigned : 1;
1314 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1315 unsigned char offset_emitted : 1;
1316 /* True if note_variable_value_in_expr has been called on it. */
1317 unsigned char noted_variable_value : 1;
1318 /* True if the range should be emitted even if begin and end
1319 are the same. */
1320 bool force;
1321 } dw_loc_list_node;
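
/* Editor's illustration (made-up labels, not generated output): a variable
   that lives in a register early on and spills to the frame later would be
   described by a two-node list, roughly

	[.LVL0, .LVL1)	DW_OP_reg3
	[.LVL1, .LFE0)	DW_OP_fbreg -24

   i.e. each node pairs a half-open label range with one location
   expression, chained through dw_loc_next.  */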
1322
1323 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1324 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1325
1326 /* Convert a DWARF stack opcode into its string name. */
1327
1328 static const char *
1329 dwarf_stack_op_name (unsigned int op)
1330 {
1331 const char *name = get_DW_OP_name (op);
1332
1333 if (name != NULL)
1334 return name;
1335
1336 return "OP_<unknown>";
1337 }
1338
1339 /* Return TRUE iff we're to output location view lists as a separate
1340 attribute next to the location lists, as an extension compatible
1341 with DWARF 2 and above. */
1342
1343 static inline bool
1344 dwarf2out_locviews_in_attribute ()
1345 {
1346 return debug_variable_location_views == 1;
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as part of the
1350 location lists, as proposed for standardization after DWARF 5. */
1351
1352 static inline bool
1353 dwarf2out_locviews_in_loclist ()
1354 {
1355 #ifndef DW_LLE_view_pair
1356 return false;
1357 #else
1358 return debug_variable_location_views == -1;
1359 #endif
1360 }
1361
1362 /* Return a pointer to a newly allocated location description. Location
1363 descriptions are simple expression terms that can be strung
1364 together to form more complicated location (address) descriptions. */
1365
1366 static inline dw_loc_descr_ref
1367 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1368 unsigned HOST_WIDE_INT oprnd2)
1369 {
1370 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1371
1372 descr->dw_loc_opc = op;
1373 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1374 descr->dw_loc_oprnd1.val_entry = NULL;
1375 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1376 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1377 descr->dw_loc_oprnd2.val_entry = NULL;
1378 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1379
1380 return descr;
1381 }
1382
1383 /* Add a location description term to a location description expression. */
1384
1385 static inline void
1386 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1387 {
1388 dw_loc_descr_ref *d;
1389
1390 /* Find the end of the chain. */
1391 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1392 ;
1393
1394 *d = descr;
1395 }
1396
1397 /* Compare two location operands for exact equality. */
1398
1399 static bool
1400 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1401 {
1402 if (a->val_class != b->val_class)
1403 return false;
1404 switch (a->val_class)
1405 {
1406 case dw_val_class_none:
1407 return true;
1408 case dw_val_class_addr:
1409 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1410
1411 case dw_val_class_offset:
1412 case dw_val_class_unsigned_const:
1413 case dw_val_class_const:
1414 case dw_val_class_unsigned_const_implicit:
1415 case dw_val_class_const_implicit:
1416 case dw_val_class_range_list:
1417 /* These are all HOST_WIDE_INT, signed or unsigned. */
1418 return a->v.val_unsigned == b->v.val_unsigned;
1419
1420 case dw_val_class_loc:
1421 return a->v.val_loc == b->v.val_loc;
1422 case dw_val_class_loc_list:
1423 return a->v.val_loc_list == b->v.val_loc_list;
1424 case dw_val_class_view_list:
1425 return a->v.val_view_list == b->v.val_view_list;
1426 case dw_val_class_die_ref:
1427 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1428 case dw_val_class_fde_ref:
1429 return a->v.val_fde_index == b->v.val_fde_index;
1430 case dw_val_class_symview:
1431 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1432 case dw_val_class_lbl_id:
1433 case dw_val_class_lineptr:
1434 case dw_val_class_macptr:
1435 case dw_val_class_loclistsptr:
1436 case dw_val_class_high_pc:
1437 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1438 case dw_val_class_str:
1439 return a->v.val_str == b->v.val_str;
1440 case dw_val_class_flag:
1441 return a->v.val_flag == b->v.val_flag;
1442 case dw_val_class_file:
1443 case dw_val_class_file_implicit:
1444 return a->v.val_file == b->v.val_file;
1445 case dw_val_class_decl_ref:
1446 return a->v.val_decl_ref == b->v.val_decl_ref;
1447
1448 case dw_val_class_const_double:
1449 return (a->v.val_double.high == b->v.val_double.high
1450 && a->v.val_double.low == b->v.val_double.low);
1451
1452 case dw_val_class_wide_int:
1453 return *a->v.val_wide == *b->v.val_wide;
1454
1455 case dw_val_class_vec:
1456 {
1457 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1458 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1459
1460 return (a_len == b_len
1461 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1462 }
1463
1464 case dw_val_class_data8:
1465 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1466
1467 case dw_val_class_vms_delta:
1468 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1469 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1470
1471 case dw_val_class_discr_value:
1472 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1473 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1474 case dw_val_class_discr_list:
1475 /* It makes no sense comparing two discriminant value lists. */
1476 return false;
1477 }
1478 gcc_unreachable ();
1479 }
1480
1481 /* Compare two location atoms for exact equality. */
1482
1483 static bool
1484 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1485 {
1486 if (a->dw_loc_opc != b->dw_loc_opc)
1487 return false;
1488
1489 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1490 address size, but since we always allocate cleared storage it
1491 should be zero for other types of locations. */
1492 if (a->dtprel != b->dtprel)
1493 return false;
1494
1495 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1496 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1497 }
1498
1499 /* Compare two complete location expressions for exact equality. */
1500
1501 bool
1502 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1503 {
1504 while (1)
1505 {
1506 if (a == b)
1507 return true;
1508 if (a == NULL || b == NULL)
1509 return false;
1510 if (!loc_descr_equal_p_1 (a, b))
1511 return false;
1512
1513 a = a->dw_loc_next;
1514 b = b->dw_loc_next;
1515 }
1516 }
1517
1518
1519 /* Add a constant POLY_OFFSET to a location expression. */
1520
1521 static void
1522 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1523 {
1524 dw_loc_descr_ref loc;
1525 HOST_WIDE_INT *p;
1526
1527 gcc_assert (*list_head != NULL);
1528
1529 if (known_eq (poly_offset, 0))
1530 return;
1531
1532 /* Find the end of the chain. */
1533 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1534 ;
1535
1536 HOST_WIDE_INT offset;
1537 if (!poly_offset.is_constant (&offset))
1538 {
1539 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1540 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1541 return;
1542 }
1543
1544 p = NULL;
1545 if (loc->dw_loc_opc == DW_OP_fbreg
1546 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1547 p = &loc->dw_loc_oprnd1.v.val_int;
1548 else if (loc->dw_loc_opc == DW_OP_bregx)
1549 p = &loc->dw_loc_oprnd2.v.val_int;
1550
1551 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1552 offset. Don't optimize if a signed integer overflow would happen.
1553 if (p != NULL
1554 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1555 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1556 *p += offset;
1557
1558 else if (offset > 0)
1559 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1560
1561 else
1562 {
1563 loc->dw_loc_next
1564 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1565 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1566 }
1567 }
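
/* Editor's sketch (#if 0, illustrative): folding a constant into a trailing
   DW_OP_fbreg keeps the expression a single op instead of appending
   DW_OP_plus_uconst.  */
#if 0
static void
loc_descr_plus_const_example (void)
{
  dw_loc_descr_ref loc = new_loc_descr (DW_OP_fbreg, 0, 0);
  loc->dw_loc_oprnd1.val_class = dw_val_class_const;
  loc->dw_loc_oprnd1.v.val_int = -16;

  loc_descr_plus_const (&loc, 8);

  /* The offset was folded in place: still one DW_OP_fbreg, now -8.  */
  gcc_assert (loc->dw_loc_next == NULL
	      && loc->dw_loc_oprnd1.v.val_int == -8);
}
#endif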
1568
1569 /* Return a pointer to a newly allocated location description for
1570 REG and OFFSET. */
1571
1572 static inline dw_loc_descr_ref
1573 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1574 {
1575 HOST_WIDE_INT const_offset;
1576 if (offset.is_constant (&const_offset))
1577 {
1578 if (reg <= 31)
1579 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1580 const_offset, 0);
1581 else
1582 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1583 }
1584 else
1585 {
1586 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1587 loc_descr_plus_const (&ret, offset);
1588 return ret;
1589 }
1590 }
1591
1592 /* Add a constant OFFSET to a location list. */
1593
1594 static void
1595 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1596 {
1597 dw_loc_list_ref d;
1598 for (d = list_head; d != NULL; d = d->dw_loc_next)
1599 loc_descr_plus_const (&d->expr, offset);
1600 }
1601
1602 #define DWARF_REF_SIZE \
1603 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1604
1605 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1606 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1607 DW_FORM_data16 with 128 bits. */
1608 #define DWARF_LARGEST_DATA_FORM_BITS \
1609 (dwarf_version >= 5 ? 128 : 64)
1610
1611 /* Utility inline function for constructing ops that were GNU extensions
1612 before DWARF 5. */
1613 static inline enum dwarf_location_atom
1614 dwarf_OP (enum dwarf_location_atom op)
1615 {
1616 switch (op)
1617 {
1618 case DW_OP_implicit_pointer:
1619 if (dwarf_version < 5)
1620 return DW_OP_GNU_implicit_pointer;
1621 break;
1622
1623 case DW_OP_entry_value:
1624 if (dwarf_version < 5)
1625 return DW_OP_GNU_entry_value;
1626 break;
1627
1628 case DW_OP_const_type:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_const_type;
1631 break;
1632
1633 case DW_OP_regval_type:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_regval_type;
1636 break;
1637
1638 case DW_OP_deref_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_deref_type;
1641 break;
1642
1643 case DW_OP_convert:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_convert;
1646 break;
1647
1648 case DW_OP_reinterpret:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_reinterpret;
1651 break;
1652
1653 case DW_OP_addrx:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_addr_index;
1656 break;
1657
1658 case DW_OP_constx:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_const_index;
1661 break;
1662
1663 default:
1664 break;
1665 }
1666 return op;
1667 }
1668
1669 /* Similarly for attributes. */
1670 static inline enum dwarf_attribute
1671 dwarf_AT (enum dwarf_attribute at)
1672 {
1673 switch (at)
1674 {
1675 case DW_AT_call_return_pc:
1676 if (dwarf_version < 5)
1677 return DW_AT_low_pc;
1678 break;
1679
1680 case DW_AT_call_tail_call:
1681 if (dwarf_version < 5)
1682 return DW_AT_GNU_tail_call;
1683 break;
1684
1685 case DW_AT_call_origin:
1686 if (dwarf_version < 5)
1687 return DW_AT_abstract_origin;
1688 break;
1689
1690 case DW_AT_call_target:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_call_site_target;
1693 break;
1694
1695 case DW_AT_call_target_clobbered:
1696 if (dwarf_version < 5)
1697 return DW_AT_GNU_call_site_target_clobbered;
1698 break;
1699
1700 case DW_AT_call_parameter:
1701 if (dwarf_version < 5)
1702 return DW_AT_abstract_origin;
1703 break;
1704
1705 case DW_AT_call_value:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_value;
1708 break;
1709
1710 case DW_AT_call_data_value:
1711 if (dwarf_version < 5)
1712 return DW_AT_GNU_call_site_data_value;
1713 break;
1714
1715 case DW_AT_call_all_calls:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_all_call_sites;
1718 break;
1719
1720 case DW_AT_call_all_tail_calls:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_all_tail_call_sites;
1723 break;
1724
1725 case DW_AT_dwo_name:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_dwo_name;
1728 break;
1729
1730 case DW_AT_addr_base:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_addr_base;
1733 break;
1734
1735 default:
1736 break;
1737 }
1738 return at;
1739 }
1740
1741 /* And similarly for tags. */
1742 static inline enum dwarf_tag
1743 dwarf_TAG (enum dwarf_tag tag)
1744 {
1745 switch (tag)
1746 {
1747 case DW_TAG_call_site:
1748 if (dwarf_version < 5)
1749 return DW_TAG_GNU_call_site;
1750 break;
1751
1752 case DW_TAG_call_site_parameter:
1753 if (dwarf_version < 5)
1754 return DW_TAG_GNU_call_site_parameter;
1755 break;
1756
1757 default:
1758 break;
1759 }
1760 return tag;
1761 }
1762
1763 /* And similarly for forms. */
1764 static inline enum dwarf_form
1765 dwarf_FORM (enum dwarf_form form)
1766 {
1767 switch (form)
1768 {
1769 case DW_FORM_addrx:
1770 if (dwarf_version < 5)
1771 return DW_FORM_GNU_addr_index;
1772 break;
1773
1774 case DW_FORM_strx:
1775 if (dwarf_version < 5)
1776 return DW_FORM_GNU_str_index;
1777 break;
1778
1779 default:
1780 break;
1781 }
1782 return form;
1783 }
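
/* Editor's sketch (#if 0, illustrative): the dwarf_OP / dwarf_AT /
   dwarf_TAG / dwarf_FORM helpers above let the rest of this file use the
   DWARF 5 names unconditionally; the mapping to the older GNU extension
   codes happens only here.  */
#if 0
static void
dwarf5_fallback_example (void)
{
  /* With -gdwarf-4 these return DW_OP_GNU_entry_value and
     DW_TAG_GNU_call_site; with -gdwarf-5 the standard codes pass through
     unchanged.  */
  enum dwarf_location_atom op = dwarf_OP (DW_OP_entry_value);
  enum dwarf_tag tag = dwarf_TAG (DW_TAG_call_site);
  (void) op;
  (void) tag;
}
#endif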
1784
1785 static unsigned long int get_base_type_offset (dw_die_ref);
1786
1787 /* Return the size of a location descriptor. */
1788
1789 static unsigned long
1790 size_of_loc_descr (dw_loc_descr_ref loc)
1791 {
1792 unsigned long size = 1;
1793
1794 switch (loc->dw_loc_opc)
1795 {
1796 case DW_OP_addr:
1797 size += DWARF2_ADDR_SIZE;
1798 break;
1799 case DW_OP_GNU_addr_index:
1800 case DW_OP_addrx:
1801 case DW_OP_GNU_const_index:
1802 case DW_OP_constx:
1803 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1804 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1805 break;
1806 case DW_OP_const1u:
1807 case DW_OP_const1s:
1808 size += 1;
1809 break;
1810 case DW_OP_const2u:
1811 case DW_OP_const2s:
1812 size += 2;
1813 break;
1814 case DW_OP_const4u:
1815 case DW_OP_const4s:
1816 size += 4;
1817 break;
1818 case DW_OP_const8u:
1819 case DW_OP_const8s:
1820 size += 8;
1821 break;
1822 case DW_OP_constu:
1823 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1824 break;
1825 case DW_OP_consts:
1826 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1827 break;
1828 case DW_OP_pick:
1829 size += 1;
1830 break;
1831 case DW_OP_plus_uconst:
1832 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1833 break;
1834 case DW_OP_skip:
1835 case DW_OP_bra:
1836 size += 2;
1837 break;
1838 case DW_OP_breg0:
1839 case DW_OP_breg1:
1840 case DW_OP_breg2:
1841 case DW_OP_breg3:
1842 case DW_OP_breg4:
1843 case DW_OP_breg5:
1844 case DW_OP_breg6:
1845 case DW_OP_breg7:
1846 case DW_OP_breg8:
1847 case DW_OP_breg9:
1848 case DW_OP_breg10:
1849 case DW_OP_breg11:
1850 case DW_OP_breg12:
1851 case DW_OP_breg13:
1852 case DW_OP_breg14:
1853 case DW_OP_breg15:
1854 case DW_OP_breg16:
1855 case DW_OP_breg17:
1856 case DW_OP_breg18:
1857 case DW_OP_breg19:
1858 case DW_OP_breg20:
1859 case DW_OP_breg21:
1860 case DW_OP_breg22:
1861 case DW_OP_breg23:
1862 case DW_OP_breg24:
1863 case DW_OP_breg25:
1864 case DW_OP_breg26:
1865 case DW_OP_breg27:
1866 case DW_OP_breg28:
1867 case DW_OP_breg29:
1868 case DW_OP_breg30:
1869 case DW_OP_breg31:
1870 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1871 break;
1872 case DW_OP_regx:
1873 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1874 break;
1875 case DW_OP_fbreg:
1876 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1877 break;
1878 case DW_OP_bregx:
1879 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1880 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1881 break;
1882 case DW_OP_piece:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_bit_piece:
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1887 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1888 break;
1889 case DW_OP_deref_size:
1890 case DW_OP_xderef_size:
1891 size += 1;
1892 break;
1893 case DW_OP_call2:
1894 size += 2;
1895 break;
1896 case DW_OP_call4:
1897 size += 4;
1898 break;
1899 case DW_OP_call_ref:
1900 case DW_OP_GNU_variable_value:
1901 size += DWARF_REF_SIZE;
1902 break;
1903 case DW_OP_implicit_value:
1904 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1905 + loc->dw_loc_oprnd1.v.val_unsigned;
1906 break;
1907 case DW_OP_implicit_pointer:
1908 case DW_OP_GNU_implicit_pointer:
1909 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1910 break;
1911 case DW_OP_entry_value:
1912 case DW_OP_GNU_entry_value:
1913 {
1914 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1915 size += size_of_uleb128 (op_size) + op_size;
1916 break;
1917 }
1918 case DW_OP_const_type:
1919 case DW_OP_GNU_const_type:
1920 {
1921 unsigned long o
1922 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1923 size += size_of_uleb128 (o) + 1;
1924 switch (loc->dw_loc_oprnd2.val_class)
1925 {
1926 case dw_val_class_vec:
1927 size += loc->dw_loc_oprnd2.v.val_vec.length
1928 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1929 break;
1930 case dw_val_class_const:
1931 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1932 break;
1933 case dw_val_class_const_double:
1934 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1935 break;
1936 case dw_val_class_wide_int:
1937 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1938 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1939 break;
1940 default:
1941 gcc_unreachable ();
1942 }
1943 break;
1944 }
1945 case DW_OP_regval_type:
1946 case DW_OP_GNU_regval_type:
1947 {
1948 unsigned long o
1949 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1950 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1951 + size_of_uleb128 (o);
1952 }
1953 break;
1954 case DW_OP_deref_type:
1955 case DW_OP_GNU_deref_type:
1956 {
1957 unsigned long o
1958 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1959 size += 1 + size_of_uleb128 (o);
1960 }
1961 break;
1962 case DW_OP_convert:
1963 case DW_OP_reinterpret:
1964 case DW_OP_GNU_convert:
1965 case DW_OP_GNU_reinterpret:
1966 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1967 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1968 else
1969 {
1970 unsigned long o
1971 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1972 size += size_of_uleb128 (o);
1973 }
1974 break;
1975 case DW_OP_GNU_parameter_ref:
1976 size += 4;
1977 break;
1978 default:
1979 break;
1980 }
1981
1982 return size;
1983 }
1984
1985 /* Return the size of a series of location descriptors. */
1986
1987 unsigned long
1988 size_of_locs (dw_loc_descr_ref loc)
1989 {
1990 dw_loc_descr_ref l;
1991 unsigned long size;
1992
1993 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1994 field, to avoid writing to a PCH file. */
1995 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1996 {
1997 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1998 break;
1999 size += size_of_loc_descr (l);
2000 }
2001 if (! l)
2002 return size;
2003
2004 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2005 {
2006 l->dw_loc_addr = size;
2007 size += size_of_loc_descr (l);
2008 }
2009
2010 return size;
2011 }
2012
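/* Note: the second loop above records dw_loc_addr so that
   output_loc_operands can later emit the two-byte operand of
   DW_OP_skip and DW_OP_bra as
   target->dw_loc_addr - (branch->dw_loc_addr + 3), i.e. relative to
   the first byte after the branch opcode and its operand.  */
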
2013 /* Return the size of the value in a DW_AT_discr_value attribute. */
2014
2015 static int
2016 size_of_discr_value (dw_discr_value *discr_value)
2017 {
2018 if (discr_value->pos)
2019 return size_of_uleb128 (discr_value->v.uval);
2020 else
2021 return size_of_sleb128 (discr_value->v.sval);
2022 }
2023
2024 /* Return the size of the value in a DW_AT_discr_list attribute. */
2025
2026 static int
2027 size_of_discr_list (dw_discr_list_ref discr_list)
2028 {
2029 int size = 0;
2030
2031 for (dw_discr_list_ref list = discr_list;
2032 list != NULL;
2033 list = list->dw_discr_next)
2034 {
2035 /* One byte for the discriminant value descriptor, and then one or two
2036 LEB128 numbers, depending on whether it's a single case label or a
2037 range label. */
2038 size += 1;
2039 size += size_of_discr_value (&list->dw_discr_lower_bound);
2040 if (list->dw_discr_range != 0)
2041 size += size_of_discr_value (&list->dw_discr_upper_bound);
2042 }
2043 return size;
2044 }
2045
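/* For example (assuming unsigned discriminants small enough for
   one-byte LEB128s): a discriminant list with the single label 3
   followed by the range 5 .. 10 occupies (1 + 1) + (1 + 1 + 1) == 5
   bytes: one descriptor byte plus one LEB128 for the label, and one
   descriptor byte plus two LEB128s for the range.  */
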
2046 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2047 static void get_ref_die_offset_label (char *, dw_die_ref);
2048 static unsigned long int get_ref_die_offset (dw_die_ref);
2049
2050 /* Output location description stack opcode's operands (if any).
2051 The for_eh_or_skip parameter controls whether register numbers are
2052 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2053 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2054 info). This should be suppressed for the cases that have not been converted
2055 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2056
2057 static void
2058 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2059 {
2060 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2061 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2062
2063 switch (loc->dw_loc_opc)
2064 {
2065 #ifdef DWARF2_DEBUGGING_INFO
2066 case DW_OP_const2u:
2067 case DW_OP_const2s:
2068 dw2_asm_output_data (2, val1->v.val_int, NULL);
2069 break;
2070 case DW_OP_const4u:
2071 if (loc->dtprel)
2072 {
2073 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2074 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2075 val1->v.val_addr);
2076 fputc ('\n', asm_out_file);
2077 break;
2078 }
2079 /* FALLTHRU */
2080 case DW_OP_const4s:
2081 dw2_asm_output_data (4, val1->v.val_int, NULL);
2082 break;
2083 case DW_OP_const8u:
2084 if (loc->dtprel)
2085 {
2086 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2087 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2088 val1->v.val_addr);
2089 fputc ('\n', asm_out_file);
2090 break;
2091 }
2092 /* FALLTHRU */
2093 case DW_OP_const8s:
2094 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2095 dw2_asm_output_data (8, val1->v.val_int, NULL);
2096 break;
2097 case DW_OP_skip:
2098 case DW_OP_bra:
2099 {
2100 int offset;
2101
2102 gcc_assert (val1->val_class == dw_val_class_loc);
2103 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2104
2105 dw2_asm_output_data (2, offset, NULL);
2106 }
2107 break;
2108 case DW_OP_implicit_value:
2109 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2110 switch (val2->val_class)
2111 {
2112 case dw_val_class_const:
2113 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2114 break;
2115 case dw_val_class_vec:
2116 {
2117 unsigned int elt_size = val2->v.val_vec.elt_size;
2118 unsigned int len = val2->v.val_vec.length;
2119 unsigned int i;
2120 unsigned char *p;
2121
2122 if (elt_size > sizeof (HOST_WIDE_INT))
2123 {
2124 elt_size /= 2;
2125 len *= 2;
2126 }
2127 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2128 i < len;
2129 i++, p += elt_size)
2130 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2131 "fp or vector constant word %u", i);
2132 }
2133 break;
2134 case dw_val_class_const_double:
2135 {
2136 unsigned HOST_WIDE_INT first, second;
2137
2138 if (WORDS_BIG_ENDIAN)
2139 {
2140 first = val2->v.val_double.high;
2141 second = val2->v.val_double.low;
2142 }
2143 else
2144 {
2145 first = val2->v.val_double.low;
2146 second = val2->v.val_double.high;
2147 }
2148 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2149 first, NULL);
2150 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2151 second, NULL);
2152 }
2153 break;
2154 case dw_val_class_wide_int:
2155 {
2156 int i;
2157 int len = get_full_len (*val2->v.val_wide);
2158 if (WORDS_BIG_ENDIAN)
2159 for (i = len - 1; i >= 0; --i)
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 val2->v.val_wide->elt (i), NULL);
2162 else
2163 for (i = 0; i < len; ++i)
2164 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2165 val2->v.val_wide->elt (i), NULL);
2166 }
2167 break;
2168 case dw_val_class_addr:
2169 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2170 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2171 break;
2172 default:
2173 gcc_unreachable ();
2174 }
2175 break;
2176 #else
2177 case DW_OP_const2u:
2178 case DW_OP_const2s:
2179 case DW_OP_const4u:
2180 case DW_OP_const4s:
2181 case DW_OP_const8u:
2182 case DW_OP_const8s:
2183 case DW_OP_skip:
2184 case DW_OP_bra:
2185 case DW_OP_implicit_value:
2186 /* We currently don't make any attempt to make sure these are
2187 aligned properly like we do for the main unwind info, so
2188 don't support emitting things larger than a byte if we're
2189 only doing unwinding. */
2190 gcc_unreachable ();
2191 #endif
2192 case DW_OP_const1u:
2193 case DW_OP_const1s:
2194 dw2_asm_output_data (1, val1->v.val_int, NULL);
2195 break;
2196 case DW_OP_constu:
2197 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2198 break;
2199 case DW_OP_consts:
2200 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2201 break;
2202 case DW_OP_pick:
2203 dw2_asm_output_data (1, val1->v.val_int, NULL);
2204 break;
2205 case DW_OP_plus_uconst:
2206 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2207 break;
2208 case DW_OP_breg0:
2209 case DW_OP_breg1:
2210 case DW_OP_breg2:
2211 case DW_OP_breg3:
2212 case DW_OP_breg4:
2213 case DW_OP_breg5:
2214 case DW_OP_breg6:
2215 case DW_OP_breg7:
2216 case DW_OP_breg8:
2217 case DW_OP_breg9:
2218 case DW_OP_breg10:
2219 case DW_OP_breg11:
2220 case DW_OP_breg12:
2221 case DW_OP_breg13:
2222 case DW_OP_breg14:
2223 case DW_OP_breg15:
2224 case DW_OP_breg16:
2225 case DW_OP_breg17:
2226 case DW_OP_breg18:
2227 case DW_OP_breg19:
2228 case DW_OP_breg20:
2229 case DW_OP_breg21:
2230 case DW_OP_breg22:
2231 case DW_OP_breg23:
2232 case DW_OP_breg24:
2233 case DW_OP_breg25:
2234 case DW_OP_breg26:
2235 case DW_OP_breg27:
2236 case DW_OP_breg28:
2237 case DW_OP_breg29:
2238 case DW_OP_breg30:
2239 case DW_OP_breg31:
2240 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2241 break;
2242 case DW_OP_regx:
2243 {
2244 unsigned r = val1->v.val_unsigned;
2245 if (for_eh_or_skip >= 0)
2246 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2247 gcc_assert (size_of_uleb128 (r)
2248 == size_of_uleb128 (val1->v.val_unsigned));
2249 dw2_asm_output_data_uleb128 (r, NULL);
2250 }
2251 break;
2252 case DW_OP_fbreg:
2253 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2254 break;
2255 case DW_OP_bregx:
2256 {
2257 unsigned r = val1->v.val_unsigned;
2258 if (for_eh_or_skip >= 0)
2259 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2260 gcc_assert (size_of_uleb128 (r)
2261 == size_of_uleb128 (val1->v.val_unsigned));
2262 dw2_asm_output_data_uleb128 (r, NULL);
2263 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2264 }
2265 break;
2266 case DW_OP_piece:
2267 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2268 break;
2269 case DW_OP_bit_piece:
2270 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2271 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2272 break;
2273 case DW_OP_deref_size:
2274 case DW_OP_xderef_size:
2275 dw2_asm_output_data (1, val1->v.val_int, NULL);
2276 break;
2277
2278 case DW_OP_addr:
2279 if (loc->dtprel)
2280 {
2281 if (targetm.asm_out.output_dwarf_dtprel)
2282 {
2283 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2284 DWARF2_ADDR_SIZE,
2285 val1->v.val_addr);
2286 fputc ('\n', asm_out_file);
2287 }
2288 else
2289 gcc_unreachable ();
2290 }
2291 else
2292 {
2293 #ifdef DWARF2_DEBUGGING_INFO
2294 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2295 #else
2296 gcc_unreachable ();
2297 #endif
2298 }
2299 break;
2300
2301 case DW_OP_GNU_addr_index:
2302 case DW_OP_addrx:
2303 case DW_OP_GNU_const_index:
2304 case DW_OP_constx:
2305 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2306 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2307 "(index into .debug_addr)");
2308 break;
2309
2310 case DW_OP_call2:
2311 case DW_OP_call4:
2312 {
2313 unsigned long die_offset
2314 = get_ref_die_offset (val1->v.val_die_ref.die);
2315 /* Make sure the offset has been computed and that we can encode it as
2316 an operand. */
2317 gcc_assert (die_offset > 0
2318 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2319 ? 0xffff
2320 : 0xffffffff));
2321 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2322 die_offset, NULL);
2323 }
2324 break;
2325
2326 case DW_OP_call_ref:
2327 case DW_OP_GNU_variable_value:
2328 {
2329 char label[MAX_ARTIFICIAL_LABEL_BYTES
2330 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2331 gcc_assert (val1->val_class == dw_val_class_die_ref);
2332 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2333 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2334 }
2335 break;
2336
2337 case DW_OP_implicit_pointer:
2338 case DW_OP_GNU_implicit_pointer:
2339 {
2340 char label[MAX_ARTIFICIAL_LABEL_BYTES
2341 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2342 gcc_assert (val1->val_class == dw_val_class_die_ref);
2343 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2344 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2345 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2346 }
2347 break;
2348
2349 case DW_OP_entry_value:
2350 case DW_OP_GNU_entry_value:
2351 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2352 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2353 break;
2354
2355 case DW_OP_const_type:
2356 case DW_OP_GNU_const_type:
2357 {
2358 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2359 gcc_assert (o);
2360 dw2_asm_output_data_uleb128 (o, NULL);
2361 switch (val2->val_class)
2362 {
2363 case dw_val_class_const:
2364 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2365 dw2_asm_output_data (1, l, NULL);
2366 dw2_asm_output_data (l, val2->v.val_int, NULL);
2367 break;
2368 case dw_val_class_vec:
2369 {
2370 unsigned int elt_size = val2->v.val_vec.elt_size;
2371 unsigned int len = val2->v.val_vec.length;
2372 unsigned int i;
2373 unsigned char *p;
2374
2375 l = len * elt_size;
2376 dw2_asm_output_data (1, l, NULL);
2377 if (elt_size > sizeof (HOST_WIDE_INT))
2378 {
2379 elt_size /= 2;
2380 len *= 2;
2381 }
2382 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2383 i < len;
2384 i++, p += elt_size)
2385 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2386 "fp or vector constant word %u", i);
2387 }
2388 break;
2389 case dw_val_class_const_double:
2390 {
2391 unsigned HOST_WIDE_INT first, second;
2392 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2393
2394 dw2_asm_output_data (1, 2 * l, NULL);
2395 if (WORDS_BIG_ENDIAN)
2396 {
2397 first = val2->v.val_double.high;
2398 second = val2->v.val_double.low;
2399 }
2400 else
2401 {
2402 first = val2->v.val_double.low;
2403 second = val2->v.val_double.high;
2404 }
2405 dw2_asm_output_data (l, first, NULL);
2406 dw2_asm_output_data (l, second, NULL);
2407 }
2408 break;
2409 case dw_val_class_wide_int:
2410 {
2411 int i;
2412 int len = get_full_len (*val2->v.val_wide);
2413 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2414
2415 dw2_asm_output_data (1, len * l, NULL);
2416 if (WORDS_BIG_ENDIAN)
2417 for (i = len - 1; i >= 0; --i)
2418 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2419 else
2420 for (i = 0; i < len; ++i)
2421 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2422 }
2423 break;
2424 default:
2425 gcc_unreachable ();
2426 }
2427 }
2428 break;
2429 case DW_OP_regval_type:
2430 case DW_OP_GNU_regval_type:
2431 {
2432 unsigned r = val1->v.val_unsigned;
2433 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2434 gcc_assert (o);
2435 if (for_eh_or_skip >= 0)
2436 {
2437 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2438 gcc_assert (size_of_uleb128 (r)
2439 == size_of_uleb128 (val1->v.val_unsigned));
2440 }
2441 dw2_asm_output_data_uleb128 (r, NULL);
2442 dw2_asm_output_data_uleb128 (o, NULL);
2443 }
2444 break;
2445 case DW_OP_deref_type:
2446 case DW_OP_GNU_deref_type:
2447 {
2448 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2449 gcc_assert (o);
2450 dw2_asm_output_data (1, val1->v.val_int, NULL);
2451 dw2_asm_output_data_uleb128 (o, NULL);
2452 }
2453 break;
2454 case DW_OP_convert:
2455 case DW_OP_reinterpret:
2456 case DW_OP_GNU_convert:
2457 case DW_OP_GNU_reinterpret:
2458 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2459 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2460 else
2461 {
2462 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2463 gcc_assert (o);
2464 dw2_asm_output_data_uleb128 (o, NULL);
2465 }
2466 break;
2467
2468 case DW_OP_GNU_parameter_ref:
2469 {
2470 unsigned long o;
2471 gcc_assert (val1->val_class == dw_val_class_die_ref);
2472 o = get_ref_die_offset (val1->v.val_die_ref.die);
2473 dw2_asm_output_data (4, o, NULL);
2474 }
2475 break;
2476
2477 default:
2478 /* Other codes have no operands. */
2479 break;
2480 }
2481 }
2482
2483 /* Output a sequence of location operations.
2484 The for_eh_or_skip parameter controls whether register numbers are
2485 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2486 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2487 info). This should be suppressed for the cases that have not been converted
2488 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2489
2490 void
2491 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2492 {
2493 for (; loc != NULL; loc = loc->dw_loc_next)
2494 {
2495 enum dwarf_location_atom opc = loc->dw_loc_opc;
2496 /* Output the opcode. */
2497 if (for_eh_or_skip >= 0
2498 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2499 {
2500 unsigned r = (opc - DW_OP_breg0);
2501 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2502 gcc_assert (r <= 31);
2503 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2504 }
2505 else if (for_eh_or_skip >= 0
2506 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2507 {
2508 unsigned r = (opc - DW_OP_reg0);
2509 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2510 gcc_assert (r <= 31);
2511 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2512 }
2513
2514 dw2_asm_output_data (1, opc,
2515 "%s", dwarf_stack_op_name (opc));
2516
2517 /* Output the operand(s) (if any). */
2518 output_loc_operands (loc, for_eh_or_skip);
2519 }
2520 }
2521
2522 /* Output location description stack opcode's operands (if any).
2523 The output is single bytes on a line, suitable for .cfi_escape. */
2524
2525 static void
2526 output_loc_operands_raw (dw_loc_descr_ref loc)
2527 {
2528 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2529 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2530
2531 switch (loc->dw_loc_opc)
2532 {
2533 case DW_OP_addr:
2534 case DW_OP_GNU_addr_index:
2535 case DW_OP_addrx:
2536 case DW_OP_GNU_const_index:
2537 case DW_OP_constx:
2538 case DW_OP_implicit_value:
2539 /* We cannot output addresses in .cfi_escape, only bytes. */
2540 gcc_unreachable ();
2541
2542 case DW_OP_const1u:
2543 case DW_OP_const1s:
2544 case DW_OP_pick:
2545 case DW_OP_deref_size:
2546 case DW_OP_xderef_size:
2547 fputc (',', asm_out_file);
2548 dw2_asm_output_data_raw (1, val1->v.val_int);
2549 break;
2550
2551 case DW_OP_const2u:
2552 case DW_OP_const2s:
2553 fputc (',', asm_out_file);
2554 dw2_asm_output_data_raw (2, val1->v.val_int);
2555 break;
2556
2557 case DW_OP_const4u:
2558 case DW_OP_const4s:
2559 fputc (',', asm_out_file);
2560 dw2_asm_output_data_raw (4, val1->v.val_int);
2561 break;
2562
2563 case DW_OP_const8u:
2564 case DW_OP_const8s:
2565 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2566 fputc (',', asm_out_file);
2567 dw2_asm_output_data_raw (8, val1->v.val_int);
2568 break;
2569
2570 case DW_OP_skip:
2571 case DW_OP_bra:
2572 {
2573 int offset;
2574
2575 gcc_assert (val1->val_class == dw_val_class_loc);
2576 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2577
2578 fputc (',', asm_out_file);
2579 dw2_asm_output_data_raw (2, offset);
2580 }
2581 break;
2582
2583 case DW_OP_regx:
2584 {
2585 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2586 gcc_assert (size_of_uleb128 (r)
2587 == size_of_uleb128 (val1->v.val_unsigned));
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_uleb128_raw (r);
2590 }
2591 break;
2592
2593 case DW_OP_constu:
2594 case DW_OP_plus_uconst:
2595 case DW_OP_piece:
2596 fputc (',', asm_out_file);
2597 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2598 break;
2599
2600 case DW_OP_bit_piece:
2601 fputc (',', asm_out_file);
2602 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2603 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2604 break;
2605
2606 case DW_OP_consts:
2607 case DW_OP_breg0:
2608 case DW_OP_breg1:
2609 case DW_OP_breg2:
2610 case DW_OP_breg3:
2611 case DW_OP_breg4:
2612 case DW_OP_breg5:
2613 case DW_OP_breg6:
2614 case DW_OP_breg7:
2615 case DW_OP_breg8:
2616 case DW_OP_breg9:
2617 case DW_OP_breg10:
2618 case DW_OP_breg11:
2619 case DW_OP_breg12:
2620 case DW_OP_breg13:
2621 case DW_OP_breg14:
2622 case DW_OP_breg15:
2623 case DW_OP_breg16:
2624 case DW_OP_breg17:
2625 case DW_OP_breg18:
2626 case DW_OP_breg19:
2627 case DW_OP_breg20:
2628 case DW_OP_breg21:
2629 case DW_OP_breg22:
2630 case DW_OP_breg23:
2631 case DW_OP_breg24:
2632 case DW_OP_breg25:
2633 case DW_OP_breg26:
2634 case DW_OP_breg27:
2635 case DW_OP_breg28:
2636 case DW_OP_breg29:
2637 case DW_OP_breg30:
2638 case DW_OP_breg31:
2639 case DW_OP_fbreg:
2640 fputc (',', asm_out_file);
2641 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2642 break;
2643
2644 case DW_OP_bregx:
2645 {
2646 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2647 gcc_assert (size_of_uleb128 (r)
2648 == size_of_uleb128 (val1->v.val_unsigned));
2649 fputc (',', asm_out_file);
2650 dw2_asm_output_data_uleb128_raw (r);
2651 fputc (',', asm_out_file);
2652 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2653 }
2654 break;
2655
2656 case DW_OP_implicit_pointer:
2657 case DW_OP_entry_value:
2658 case DW_OP_const_type:
2659 case DW_OP_regval_type:
2660 case DW_OP_deref_type:
2661 case DW_OP_convert:
2662 case DW_OP_reinterpret:
2663 case DW_OP_GNU_implicit_pointer:
2664 case DW_OP_GNU_entry_value:
2665 case DW_OP_GNU_const_type:
2666 case DW_OP_GNU_regval_type:
2667 case DW_OP_GNU_deref_type:
2668 case DW_OP_GNU_convert:
2669 case DW_OP_GNU_reinterpret:
2670 case DW_OP_GNU_parameter_ref:
2671 gcc_unreachable ();
2672 break;
2673
2674 default:
2675 /* Other codes have no operands. */
2676 break;
2677 }
2678 }
2679
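/* Output a sequence of location operations in the raw,
   comma-separated byte form produced by output_loc_operands_raw,
   suitable for use in a .cfi_escape directive.  */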
2680 void
2681 output_loc_sequence_raw (dw_loc_descr_ref loc)
2682 {
2683 while (1)
2684 {
2685 enum dwarf_location_atom opc = loc->dw_loc_opc;
2686 /* Output the opcode. */
2687 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2688 {
2689 unsigned r = (opc - DW_OP_breg0);
2690 r = DWARF2_FRAME_REG_OUT (r, 1);
2691 gcc_assert (r <= 31);
2692 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2693 }
2694 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2695 {
2696 unsigned r = (opc - DW_OP_reg0);
2697 r = DWARF2_FRAME_REG_OUT (r, 1);
2698 gcc_assert (r <= 31);
2699 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2700 }
2701 /* Output the opcode. */
2702 fprintf (asm_out_file, "%#x", opc);
2703 output_loc_operands_raw (loc);
2704
2705 if (!loc->dw_loc_next)
2706 break;
2707 loc = loc->dw_loc_next;
2708
2709 fputc (',', asm_out_file);
2710 }
2711 }
2712
2713 /* This function builds a dwarf location descriptor sequence from a
2714 dw_cfa_location, adding the given OFFSET to the result of the
2715 expression. */
2716
2717 struct dw_loc_descr_node *
2718 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2719 {
2720 struct dw_loc_descr_node *head, *tmp;
2721
2722 offset += cfa->offset;
2723
2724 if (cfa->indirect)
2725 {
2726 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2727 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2728 head->dw_loc_oprnd1.val_entry = NULL;
2729 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2730 add_loc_descr (&head, tmp);
2731 loc_descr_plus_const (&head, offset);
2732 }
2733 else
2734 head = new_reg_loc_descr (cfa->reg, offset);
2735
2736 return head;
2737 }
2738
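/* For instance (a sketch; register numbers are target-specific): with
   a non-indirect CFA of reg 6 + 16 and OFFSET 8, build_cfa_loc yields
   the single descriptor DW_OP_breg6 24.  In the indirect case it
   instead yields DW_OP_breg6 <base_offset>; DW_OP_deref; followed by a
   constant adjustment of 24 added via loc_descr_plus_const.  */
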
2739 /* This function builds a dwarf location descriptor sequence for
2740 the address at OFFSET from the CFA when the stack is aligned to
2741 ALIGNMENT bytes. */
2742
2743 struct dw_loc_descr_node *
2744 build_cfa_aligned_loc (dw_cfa_location *cfa,
2745 poly_int64 offset, HOST_WIDE_INT alignment)
2746 {
2747 struct dw_loc_descr_node *head;
2748 unsigned int dwarf_fp
2749 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2750
2751 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2752 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2753 {
2754 head = new_reg_loc_descr (dwarf_fp, 0);
2755 add_loc_descr (&head, int_loc_descriptor (alignment));
2756 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2757 loc_descr_plus_const (&head, offset);
2758 }
2759 else
2760 head = new_reg_loc_descr (dwarf_fp, offset);
2761 return head;
2762 }
2763 \f
2764 /* And now, the support for symbolic debugging information. */
2765
2766 /* .debug_str support. */
2767
2768 static void dwarf2out_init (const char *);
2769 static void dwarf2out_finish (const char *);
2770 static void dwarf2out_early_finish (const char *);
2771 static void dwarf2out_assembly_start (void);
2772 static void dwarf2out_define (unsigned int, const char *);
2773 static void dwarf2out_undef (unsigned int, const char *);
2774 static void dwarf2out_start_source_file (unsigned, const char *);
2775 static void dwarf2out_end_source_file (unsigned);
2776 static void dwarf2out_function_decl (tree);
2777 static void dwarf2out_begin_block (unsigned, unsigned);
2778 static void dwarf2out_end_block (unsigned, unsigned);
2779 static bool dwarf2out_ignore_block (const_tree);
2780 static void dwarf2out_early_global_decl (tree);
2781 static void dwarf2out_late_global_decl (tree);
2782 static void dwarf2out_type_decl (tree, int);
2783 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2784 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2785 dw_die_ref);
2786 static void dwarf2out_abstract_function (tree);
2787 static void dwarf2out_var_location (rtx_insn *);
2788 static void dwarf2out_inline_entry (tree);
2789 static void dwarf2out_size_function (tree);
2790 static void dwarf2out_begin_function (tree);
2791 static void dwarf2out_end_function (unsigned int);
2792 static void dwarf2out_register_main_translation_unit (tree unit);
2793 static void dwarf2out_set_name (tree, tree);
2794 static void dwarf2out_register_external_die (tree decl, const char *sym,
2795 unsigned HOST_WIDE_INT off);
2796 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2797 unsigned HOST_WIDE_INT *off);
2798
2799 /* The debug hooks structure. */
2800
2801 const struct gcc_debug_hooks dwarf2_debug_hooks =
2802 {
2803 dwarf2out_init,
2804 dwarf2out_finish,
2805 dwarf2out_early_finish,
2806 dwarf2out_assembly_start,
2807 dwarf2out_define,
2808 dwarf2out_undef,
2809 dwarf2out_start_source_file,
2810 dwarf2out_end_source_file,
2811 dwarf2out_begin_block,
2812 dwarf2out_end_block,
2813 dwarf2out_ignore_block,
2814 dwarf2out_source_line,
2815 dwarf2out_begin_prologue,
2816 #if VMS_DEBUGGING_INFO
2817 dwarf2out_vms_end_prologue,
2818 dwarf2out_vms_begin_epilogue,
2819 #else
2820 debug_nothing_int_charstar,
2821 debug_nothing_int_charstar,
2822 #endif
2823 dwarf2out_end_epilogue,
2824 dwarf2out_begin_function,
2825 dwarf2out_end_function, /* end_function */
2826 dwarf2out_register_main_translation_unit,
2827 dwarf2out_function_decl, /* function_decl */
2828 dwarf2out_early_global_decl,
2829 dwarf2out_late_global_decl,
2830 dwarf2out_type_decl, /* type_decl */
2831 dwarf2out_imported_module_or_decl,
2832 dwarf2out_die_ref_for_decl,
2833 dwarf2out_register_external_die,
2834 debug_nothing_tree, /* deferred_inline_function */
2835 /* The DWARF 2 backend tries to reduce debugging bloat by not
2836 emitting the abstract description of inline functions until
2837 something tries to reference them. */
2838 dwarf2out_abstract_function, /* outlining_inline_function */
2839 debug_nothing_rtx_code_label, /* label */
2840 debug_nothing_int, /* handle_pch */
2841 dwarf2out_var_location,
2842 dwarf2out_inline_entry, /* inline_entry */
2843 dwarf2out_size_function, /* size_function */
2844 dwarf2out_switch_text_section,
2845 dwarf2out_set_name,
2846 1, /* start_end_main_source_file */
2847 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2848 };
2849
2850 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2851 {
2852 dwarf2out_init,
2853 debug_nothing_charstar,
2854 debug_nothing_charstar,
2855 dwarf2out_assembly_start,
2856 debug_nothing_int_charstar,
2857 debug_nothing_int_charstar,
2858 debug_nothing_int_charstar,
2859 debug_nothing_int,
2860 debug_nothing_int_int, /* begin_block */
2861 debug_nothing_int_int, /* end_block */
2862 debug_true_const_tree, /* ignore_block */
2863 dwarf2out_source_line, /* source_line */
2864 debug_nothing_int_int_charstar, /* begin_prologue */
2865 debug_nothing_int_charstar, /* end_prologue */
2866 debug_nothing_int_charstar, /* begin_epilogue */
2867 debug_nothing_int_charstar, /* end_epilogue */
2868 debug_nothing_tree, /* begin_function */
2869 debug_nothing_int, /* end_function */
2870 debug_nothing_tree, /* register_main_translation_unit */
2871 debug_nothing_tree, /* function_decl */
2872 debug_nothing_tree, /* early_global_decl */
2873 debug_nothing_tree, /* late_global_decl */
2874 debug_nothing_tree_int, /* type_decl */
2875 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2876 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2877 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2878 debug_nothing_tree, /* deferred_inline_function */
2879 debug_nothing_tree, /* outlining_inline_function */
2880 debug_nothing_rtx_code_label, /* label */
2881 debug_nothing_int, /* handle_pch */
2882 debug_nothing_rtx_insn, /* var_location */
2883 debug_nothing_tree, /* inline_entry */
2884 debug_nothing_tree, /* size_function */
2885 debug_nothing_void, /* switch_text_section */
2886 debug_nothing_tree_tree, /* set_name */
2887 0, /* start_end_main_source_file */
2888 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2889 };
2890 \f
2891 /* NOTE: In the comments in this file, many references are made to
2892 "Debugging Information Entries". This term is abbreviated as `DIE'
2893 throughout the remainder of this file. */
2894
2895 /* An internal representation of the DWARF output is built, and then
2896 walked to generate the DWARF debugging info. The walk of the internal
2897 representation is done after the entire program has been compiled.
2898 The types below are used to describe the internal representation. */
2899
2900 /* Whether to put type DIEs into their own section .debug_types instead
2901 of making them part of the .debug_info section. Only supported for
2902 Dwarf V4 or higher, and only if the user hasn't disabled it through
2903 -fno-debug-types-section. It is more efficient to put them in
2904 separate comdat sections, since the linker will then be able to
2905 remove duplicates. But not all tools support .debug_types sections
2906 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2907 such units use the DW_UT_type unit type in the .debug_info section. */
2908
2909 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2910
2911 /* Various DIE's use offsets relative to the beginning of the
2912 .debug_info section to refer to each other. */
2913
2914 typedef long int dw_offset;
2915
2916 struct comdat_type_node;
2917
2918 /* The entries in the line_info table more-or-less mirror the opcodes
2919 that are used in the real dwarf line table. Arrays of these entries
2920 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2921 supported. */
2922
2923 enum dw_line_info_opcode {
2924 /* Emit DW_LNE_set_address; the operand is the label index. */
2925 LI_set_address,
2926
2927 /* Emit a row to the matrix with the given line. This may be done
2928 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2929 special opcodes. */
2930 LI_set_line,
2931
2932 /* Emit a DW_LNS_set_file. */
2933 LI_set_file,
2934
2935 /* Emit a DW_LNS_set_column. */
2936 LI_set_column,
2937
2938 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2939 LI_negate_stmt,
2940
2941 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2942 LI_set_prologue_end,
2943 LI_set_epilogue_begin,
2944
2945 /* Emit a DW_LNE_set_discriminator. */
2946 LI_set_discriminator,
2947
2948 /* Output a Fixed Advance PC; the target PC is the label index; the
2949 base PC is the previous LI_adv_address or LI_set_address entry.
2950 We only use this when emitting debug views without assembler
2951 support, at explicit user request. Ideally, we should only use
2952 it when the offset might be zero but we can't tell: it's the only
2953 way to maybe change the PC without resetting the view number. */
2954 LI_adv_address
2955 };
2956
2957 typedef struct GTY(()) dw_line_info_struct {
2958 enum dw_line_info_opcode opcode;
2959 unsigned int val;
2960 } dw_line_info_entry;
2961
2962
2963 struct GTY(()) dw_line_info_table {
2964 /* The label that marks the end of this section. */
2965 const char *end_label;
2966
2967 /* The values for the last row of the matrix, as collected in the table.
2968 These are used to minimize the changes to the next row. */
2969 unsigned int file_num;
2970 unsigned int line_num;
2971 unsigned int column_num;
2972 int discrim_num;
2973 bool is_stmt;
2974 bool in_use;
2975
2976 /* This denotes the NEXT view number.
2977
2978 If it is 0, it is known that the NEXT view will be the first view
2979 at the given PC.
2980
2981 If it is -1, we're forcing the view number to be reset, e.g. at a
2982 function entry.
2983
2984 The meaning of other nonzero values depends on whether we're
2985 computing views internally or leaving it for the assembler to do
2986 so. If we're emitting them internally, view denotes the view
2987 number since the last known advance of PC. If we're leaving it
2988 for the assembler, it denotes the LVU label number that we're
2989 going to ask the assembler to assign. */
2990 var_loc_view view;
2991
2992 /* This counts the number of symbolic views emitted in this table
2993 since the latest view reset. Its max value, over all tables,
2994 sets symview_upper_bound. */
2995 var_loc_view symviews_since_reset;
2996
2997 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2998 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2999 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3000 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3001
3002 vec<dw_line_info_entry, va_gc> *entries;
3003 };
3004
3005 /* This is an upper bound for view numbers that the assembler may
3006 assign to symbolic views output in this translation. It is used to
3007 decide how big a field to use to represent view numbers in
3008 symview-classed attributes. */
3009
3010 static var_loc_view symview_upper_bound;
3011
3012 /* If we're keeping track of location views and their reset points, and
3013 INSN is a reset point (i.e., it necessarily advances the PC), mark
3014 the next view in TABLE as reset. */
3015
3016 static void
3017 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3018 {
3019 if (!debug_internal_reset_location_views)
3020 return;
3021
3022 /* Maybe turn (part of?) this test into a default target hook. */
3023 int reset = 0;
3024
3025 if (targetm.reset_location_view)
3026 reset = targetm.reset_location_view (insn);
3027
3028 if (reset)
3029 ;
3030 else if (JUMP_TABLE_DATA_P (insn))
3031 reset = 1;
3032 else if (GET_CODE (insn) == USE
3033 || GET_CODE (insn) == CLOBBER
3034 || GET_CODE (insn) == ASM_INPUT
3035 || asm_noperands (insn) >= 0)
3036 ;
3037 else if (get_attr_min_length (insn) > 0)
3038 reset = 1;
3039
3040 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3041 RESET_NEXT_VIEW (table->view);
3042 }
3043
3044 /* Each DIE attribute has a field specifying the attribute kind and
3045 an attribute value. Attributes are collected in a vector (die_attr)
3046 attached to the DIE they modify. */
3047
3048 typedef struct GTY(()) dw_attr_struct {
3049 enum dwarf_attribute dw_attr;
3050 dw_val_node dw_attr_val;
3051 }
3052 dw_attr_node;
3053
3054
3055 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3056 The children of each node form a circular list linked by
3057 die_sib. die_child points to the node *before* the "first" child node. */
3058
3059 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3060 union die_symbol_or_type_node
3061 {
3062 const char * GTY ((tag ("0"))) die_symbol;
3063 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3064 }
3065 GTY ((desc ("%0.comdat_type_p"))) die_id;
3066 vec<dw_attr_node, va_gc> *die_attr;
3067 dw_die_ref die_parent;
3068 dw_die_ref die_child;
3069 dw_die_ref die_sib;
3070 dw_die_ref die_definition; /* ref from a specification to its definition */
3071 dw_offset die_offset;
3072 unsigned long die_abbrev;
3073 int die_mark;
3074 unsigned int decl_id;
3075 enum dwarf_tag die_tag;
3076 /* Die is used and must not be pruned as unused. */
3077 BOOL_BITFIELD die_perennial_p : 1;
3078 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3079 /* For an external ref to die_symbol if die_offset contains an extra
3080 offset to that symbol. */
3081 BOOL_BITFIELD with_offset : 1;
3082 /* Whether this DIE was removed from the DIE tree, for example via
3083 prune_unused_types. We don't consider those present from the
3084 DIE lookup routines. */
3085 BOOL_BITFIELD removed : 1;
3086 /* Lots of spare bits. */
3087 }
3088 die_node;
3089
3090 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3091 static bool early_dwarf;
3092 static bool early_dwarf_finished;
3093 struct set_early_dwarf {
3094 bool saved;
3095 set_early_dwarf () : saved(early_dwarf)
3096 {
3097 gcc_assert (! early_dwarf_finished);
3098 early_dwarf = true;
3099 }
3100 ~set_early_dwarf () { early_dwarf = saved; }
3101 };
3102
3103 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3104 #define FOR_EACH_CHILD(die, c, expr) do { \
3105 c = die->die_child; \
3106 if (c) do { \
3107 c = c->die_sib; \
3108 expr; \
3109 } while (c != die->die_child); \
3110 } while (0)
3111
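/* For example, counting the immediate children of a DIE (an
   illustrative sketch):

     dw_die_ref c;
     int n_children = 0;
     FOR_EACH_CHILD (die, c, n_children++);

   The macro starts from die->die_child (the last child) and follows
   die_sib links until it wraps around, so EXPR runs once per child.  */
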
3112 /* The pubname structure */
3113
3114 typedef struct GTY(()) pubname_struct {
3115 dw_die_ref die;
3116 const char *name;
3117 }
3118 pubname_entry;
3119
3120
3121 struct GTY(()) dw_ranges {
3122 const char *label;
3123 /* If this is positive, it's a block number, otherwise it's a
3124 bitwise-negated index into dw_ranges_by_label. */
3125 int num;
3126 /* Index for the range list for DW_FORM_rnglistx. */
3127 unsigned int idx : 31;
3128 /* True if this range might possibly be in a different section
3129 from the previous entry. */
3130 unsigned int maybe_new_sec : 1;
3131 };
3132
3133 /* A structure to hold a macinfo entry. */
3134
3135 typedef struct GTY(()) macinfo_struct {
3136 unsigned char code;
3137 unsigned HOST_WIDE_INT lineno;
3138 const char *info;
3139 }
3140 macinfo_entry;
3141
3142
3143 struct GTY(()) dw_ranges_by_label {
3144 const char *begin;
3145 const char *end;
3146 };
3147
3148 /* The comdat type node structure. */
3149 struct GTY(()) comdat_type_node
3150 {
3151 dw_die_ref root_die;
3152 dw_die_ref type_die;
3153 dw_die_ref skeleton_die;
3154 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3155 comdat_type_node *next;
3156 };
3157
3158 /* A list of DIEs for which we can't determine ancestry (the die_parent
3159 field) just yet. Later in dwarf2out_finish we will fill in the
3160 missing bits. */
3161 typedef struct GTY(()) limbo_die_struct {
3162 dw_die_ref die;
3163 /* The tree for which this DIE was created. We use this to
3164 determine ancestry later. */
3165 tree created_for;
3166 struct limbo_die_struct *next;
3167 }
3168 limbo_die_node;
3169
3170 typedef struct skeleton_chain_struct
3171 {
3172 dw_die_ref old_die;
3173 dw_die_ref new_die;
3174 struct skeleton_chain_struct *parent;
3175 }
3176 skeleton_chain_node;
3177
3178 /* Define a macro which returns nonzero for a TYPE_DECL which was
3179 implicitly generated for a type.
3180
3181 Note that, unlike the C front-end (which generates a NULL-named
3182 TYPE_DECL node for each complete tagged type, each array type,
3183 and each function type node created), the C++ front-end generates
3184 a _named_ TYPE_DECL node for each tagged type node created.
3185 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3186 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3187 front-end, but for each type, tagged or not. */
3188
3189 #define TYPE_DECL_IS_STUB(decl) \
3190 (DECL_NAME (decl) == NULL_TREE \
3191 || (DECL_ARTIFICIAL (decl) \
3192 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3193 /* This is necessary for stub decls that \
3194 appear in nested inline functions. */ \
3195 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3196 && (decl_ultimate_origin (decl) \
3197 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3198
3199 /* Information concerning the compilation unit's programming
3200 language, and compiler version. */
3201
3202 /* Fixed size portion of the DWARF compilation unit header. */
3203 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3204 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3205 + (dwarf_version >= 5 ? 4 : 3))
3206
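/* For example, with 32-bit DWARF (DWARF_INITIAL_LENGTH_SIZE == 4,
   DWARF_OFFSET_SIZE == 4) this evaluates to 4 + 4 + 4 == 12 for DWARF 5,
   matching unit_length (4) + version (2) + unit_type (1) + address_size
   (1) + debug_abbrev_offset (4), and to 4 + 4 + 3 == 11 for DWARF 2-4,
   which lack the unit_type byte.  */
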
3207 /* Fixed size portion of the DWARF comdat type unit header. */
3208 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3209 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3210 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3211
3212 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3213 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3214 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3215
3216 /* Fixed size portion of public names info. */
3217 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3218
3219 /* Fixed size portion of the address range info. */
3220 #define DWARF_ARANGES_HEADER_SIZE \
3221 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3222 DWARF2_ADDR_SIZE * 2) \
3223 - DWARF_INITIAL_LENGTH_SIZE)
3224
3225 /* Size of padding portion in the address range info. It must be
3226 aligned to twice the pointer size. */
3227 #define DWARF_ARANGES_PAD_SIZE \
3228 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3229 DWARF2_ADDR_SIZE * 2) \
3230 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3231
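/* Worked example for one common configuration (32-bit DWARF, 8-byte
   addresses): the unrounded prefix is 4 + 4 + 4 == 12 bytes, which
   DWARF_ROUND brings up to 2 * 8 == 16, so DWARF_ARANGES_PAD_SIZE is 4
   and DWARF_ARANGES_HEADER_SIZE is 16 - 4 == 12.  */
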
3232 /* Use assembler line directives if available. */
3233 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3234 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3235 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3236 #else
3237 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3238 #endif
3239 #endif
3240
3241 /* Use assembler views in line directives if available. */
3242 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3243 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3244 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3245 #else
3246 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3247 #endif
3248 #endif
3249
3250 /* Return true if GCC configure detected assembler support for .loc. */
3251
3252 bool
3253 dwarf2out_default_as_loc_support (void)
3254 {
3255 return DWARF2_ASM_LINE_DEBUG_INFO;
3256 #if (GCC_VERSION >= 3000)
3257 # undef DWARF2_ASM_LINE_DEBUG_INFO
3258 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3259 #endif
3260 }
3261
3262 /* Return true if GCC configure detected assembler support for views
3263 in .loc directives. */
3264
3265 bool
3266 dwarf2out_default_as_locview_support (void)
3267 {
3268 return DWARF2_ASM_VIEW_DEBUG_INFO;
3269 #if (GCC_VERSION >= 3000)
3270 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3271 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3272 #endif
3273 }
3274
3275 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3276 view computation, and it refers to a view identifier for which we
3277 will not emit a label because it is known to map to view number
3278 zero. We won't allocate the bitmap if we're not using assembler
3279 support for location views, but we have to make the variable
3280 visible for GGC and for code that will be optimized out for lack of
3281 support but that's still parsed and compiled. We could abstract it
3282 out with macros, but it's not worth it. */
3283 static GTY(()) bitmap zero_view_p;
3284
3285 /* Evaluate to TRUE iff N is known to identify the first location view
3286 at its PC. When not using assembler location view computation,
3287 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3288 and the view label numbers recorded in it are the ones known to be
3289 zero. */
3290 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3291 || (N) == (var_loc_view)-1 \
3292 || (zero_view_p \
3293 && bitmap_bit_p (zero_view_p, (N))))
3294
3295 /* Return true iff we're to emit .loc directives for the assembler to
3296 generate line number sections.
3297
3298 When we're not emitting views, all we need from the assembler is
3299 support for .loc directives.
3300
3301 If we are emitting views, we can only use the assembler's .loc
3302 support if it also supports views.
3303
3304 When the compiler is emitting the line number programs and
3305 computing view numbers itself, it resets view numbers at known PC
3306 changes and counts from that, and then it emits view numbers as
3307 literal constants in locviewlists. There are cases in which the
3308 compiler is not sure about PC changes, e.g. when extra alignment is
3309 requested for a label. In these cases, the compiler may not reset
3310 the view counter, and the potential PC advance in the line number
3311 program will use an opcode that does not reset the view counter
3312 even if the PC actually changes, so that compiler and debug info
3313 consumer can keep view numbers in sync.
3314
3315 When the compiler defers view computation to the assembler, it
3316 emits symbolic view numbers in locviewlists, with the exception of
3317 views known to be zero (forced resets, or reset after
3318 compiler-visible PC changes): instead of emitting symbols for
3319 these, we emit literal zero and assert the assembler agrees with
3320 the compiler's assessment. We could use symbolic views everywhere,
3321 instead of special-casing zero views, but then we'd be unable to
3322 optimize out locviewlists that contain only zeros. */
3323
3324 static bool
3325 output_asm_line_debug_info (void)
3326 {
3327 return (dwarf2out_as_loc_support
3328 && (dwarf2out_as_locview_support
3329 || !debug_variable_location_views));
3330 }
3331
3332 /* Minimum line offset in a special line info. opcode.
3333 This value was chosen to give a reasonable range of values. */
3334 #define DWARF_LINE_BASE -10
3335
3336 /* First special line opcode - leave room for the standard opcodes. */
3337 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3338
3339 /* Range of line offsets in a special line info. opcode. */
3340 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
3341
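/* Per the DWARF line number program encoding, a special opcode packs a
   line increment and an operation advance into a single ubyte:
     opcode = (line_inc - DWARF_LINE_BASE)
              + DWARF_LINE_RANGE * op_advance + DWARF_LINE_OPCODE_BASE
   whenever that sum fits in 255.  For instance, with the values above
   (line base -10, opcode base DW_LNS_set_isa + 1 == 13), advancing the
   line by 2 without advancing the PC is emitted as (2 + 10) + 0 + 13 == 25.  */
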
3342 /* Flag that indicates the initial value of the is_stmt_start flag.
3343 In the present implementation, we do not mark any lines as
3344 the beginning of a source statement, because that information
3345 is not made available by the GCC front-end. */
3346 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3347
3348 /* Maximum number of operations per instruction bundle. */
3349 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3350 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3351 #endif
3352
3353 /* This location is used by calc_die_sizes() to keep track of
3354 the offset of each DIE within the .debug_info section. */
3355 static unsigned long next_die_offset;
3356
3357 /* Record the root of the DIE's built for the current compilation unit. */
3358 static GTY(()) dw_die_ref single_comp_unit_die;
3359
3360 /* A list of type DIEs that have been separated into comdat sections. */
3361 static GTY(()) comdat_type_node *comdat_type_list;
3362
3363 /* A list of CU DIEs that have been separated. */
3364 static GTY(()) limbo_die_node *cu_die_list;
3365
3366 /* A list of DIEs with a NULL parent waiting to be relocated. */
3367 static GTY(()) limbo_die_node *limbo_die_list;
3368
3369 /* A list of DIEs for which we may have to generate
3370 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3371 static GTY(()) limbo_die_node *deferred_asm_name;
3372
3373 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3374 {
3375 typedef const char *compare_type;
3376
3377 static hashval_t hash (dwarf_file_data *);
3378 static bool equal (dwarf_file_data *, const char *);
3379 };
3380
3381 /* Filenames referenced by this compilation unit. */
3382 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3383
3384 struct decl_die_hasher : ggc_ptr_hash<die_node>
3385 {
3386 typedef tree compare_type;
3387
3388 static hashval_t hash (die_node *);
3389 static bool equal (die_node *, tree);
3390 };
3391 /* A hash table of references to DIE's that describe declarations.
3392 The key is a DECL_UID() which is a unique number identifying each decl. */
3393 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3394
3395 struct GTY ((for_user)) variable_value_struct {
3396 unsigned int decl_id;
3397 vec<dw_die_ref, va_gc> *dies;
3398 };
3399
3400 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3401 {
3402 typedef tree compare_type;
3403
3404 static hashval_t hash (variable_value_struct *);
3405 static bool equal (variable_value_struct *, tree);
3406 };
3407 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3408 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3409 the DECL_CONTEXT of the referenced VAR_DECLs. */
3410 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3411
3412 struct block_die_hasher : ggc_ptr_hash<die_struct>
3413 {
3414 static hashval_t hash (die_struct *);
3415 static bool equal (die_struct *, die_struct *);
3416 };
3417
3418 /* A hash table of references to DIE's that describe COMMON blocks.
3419 The key is DECL_UID() ^ die_parent. */
3420 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3421
3422 typedef struct GTY(()) die_arg_entry_struct {
3423 dw_die_ref die;
3424 tree arg;
3425 } die_arg_entry;
3426
3427
3428 /* Node of the variable location list. */
3429 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3430 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3431 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3432 in mode of the EXPR_LIST node and first EXPR_LIST operand
3433 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3434 location or NULL for padding. For larger bitsizes,
3435 mode is 0 and first operand is a CONCAT with bitsize
3436 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3437 NULL as second operand. */
3438 rtx GTY (()) loc;
3439 const char * GTY (()) label;
3440 struct var_loc_node * GTY (()) next;
3441 var_loc_view view;
3442 };
3443
3444 /* Variable location list. */
3445 struct GTY ((for_user)) var_loc_list_def {
3446 struct var_loc_node * GTY (()) first;
3447
3448 /* Pointer to the last or last-but-one element of the
3449 chained list. If the list is empty, both first and
3450 last are NULL. If the list contains just one node,
3451 or if the last node is certainly not redundant, this points
3452 to the last node; otherwise it points to the last but one.
3453 Do not mark it for GC because it is marked through the chain. */
3454 struct var_loc_node * GTY ((skip ("%h"))) last;
3455
3456 /* Pointer to the last element before a section switch;
3457 if NULL, either sections weren't switched or first
3458 is after the section switch. */
3459 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3460
3461 /* DECL_UID of the variable decl. */
3462 unsigned int decl_id;
3463 };
3464 typedef struct var_loc_list_def var_loc_list;
3465
3466 /* Call argument location list. */
3467 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3468 rtx GTY (()) call_arg_loc_note;
3469 const char * GTY (()) label;
3470 tree GTY (()) block;
3471 bool tail_call_p;
3472 rtx GTY (()) symbol_ref;
3473 struct call_arg_loc_node * GTY (()) next;
3474 };
3475
3476
3477 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3478 {
3479 typedef const_tree compare_type;
3480
3481 static hashval_t hash (var_loc_list *);
3482 static bool equal (var_loc_list *, const_tree);
3483 };
3484
3485 /* Table of decl location linked lists. */
3486 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3487
3488 /* Head and tail of call_arg_loc chain. */
3489 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3490 static struct call_arg_loc_node *call_arg_loc_last;
3491
3492 /* Number of call sites in the current function. */
3493 static int call_site_count = -1;
3494 /* Number of tail call sites in the current function. */
3495 static int tail_call_site_count = -1;
3496
3497 /* A cached location list. */
3498 struct GTY ((for_user)) cached_dw_loc_list_def {
3499 /* The DECL_UID of the decl that this entry describes. */
3500 unsigned int decl_id;
3501
3502 /* The cached location list. */
3503 dw_loc_list_ref loc_list;
3504 };
3505 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3506
3507 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3508 {
3509
3510 typedef const_tree compare_type;
3511
3512 static hashval_t hash (cached_dw_loc_list *);
3513 static bool equal (cached_dw_loc_list *, const_tree);
3514 };
3515
3516 /* Table of cached location lists. */
3517 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3518
3519 /* A vector of references to DIE's that are uniquely identified by their tag,
3520 presence/absence of children DIE's, and list of attribute/value pairs. */
3521 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3522
3523 /* A hash map to remember the stack usage for DWARF procedures. The value
3524 stored is the difference in stack size between just before the DWARF
3525 procedure is invoked and just after it returns. In other words, for a
3526 DWARF procedure that consumes N stack slots and pushes M, this stores M - N. */
3527 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3528
3529 /* A global counter for generating labels for line number data. */
3530 static unsigned int line_info_label_num;
3531
3532 /* The current table to which we should emit line number information
3533 for the current function. This will be set up at the beginning of
3534 assembly for the function. */
3535 static GTY(()) dw_line_info_table *cur_line_info_table;
3536
3537 /* The two default tables of line number info. */
3538 static GTY(()) dw_line_info_table *text_section_line_info;
3539 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3540
3541 /* The set of all non-default tables of line number info. */
3542 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3543
3544 /* A flag to tell pubnames/types export if there is an info section to
3545 refer to. */
3546 static bool info_section_emitted;
3547
3548 /* A pointer to the base of a table that contains a list of publicly
3549 accessible names. */
3550 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3551
3552 /* A pointer to the base of a table that contains a list of publicly
3553 accessible types. */
3554 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3555
3556 /* A pointer to the base of a table that contains a list of macro
3557 defines/undefines (and file start/end markers). */
3558 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3559
3560 /* True if the .debug_macinfo or .debug_macro section is going to be
3561 emitted.  */
3562 #define have_macinfo \
3563 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3564 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3565 && !macinfo_table->is_empty ())
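/* For example, have_macinfo is true when compiling with -g3 (which raises
   debug_info_level to DINFO_LEVEL_VERBOSE) on a non-XCOFF target, once at
   least one #define or #undef has been recorded in macinfo_table.  */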
3566
3567 /* Vector of dies for which we should generate .debug_ranges info. */
3568 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3569
3570 /* Vector of pairs of labels referenced in ranges_table. */
3571 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3572
3573 /* Whether we have location lists that need outputting.  */
3574 static GTY(()) bool have_location_lists;
3575
3576 /* Unique label counter. */
3577 static GTY(()) unsigned int loclabel_num;
3578
3579 /* Unique label counter for point-of-call tables. */
3580 static GTY(()) unsigned int poc_label_num;
3581
3582 /* The last file entry emitted by maybe_emit_file(). */
3583 static GTY(()) struct dwarf_file_data * last_emitted_file;
3584
3585 /* Number of internal labels generated by gen_internal_sym(). */
3586 static GTY(()) int label_num;
3587
3588 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3589
3590 /* Instances of generic types for which we need to generate debug
3591 info that describe their generic parameters and arguments. That
3592 generation needs to happen once all types are properly laid out so
3593 we do it at the end of compilation. */
3594 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3595
3596 /* Offset from the "steady-state frame pointer" to the frame base,
3597 within the current function. */
3598 static poly_int64 frame_pointer_fb_offset;
3599 static bool frame_pointer_fb_offset_valid;
3600
3601 static vec<dw_die_ref> base_types;
3602
3603 /* Flags to represent a set of attribute classes for attributes that represent
3604 a scalar value (bounds, pointers, ...). */
3605 enum dw_scalar_form
3606 {
3607 dw_scalar_form_constant = 0x01,
3608 dw_scalar_form_exprloc = 0x02,
3609 dw_scalar_form_reference = 0x04
3610 };
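/* For example, an array bound can be emitted as a plain constant
   (dw_scalar_form_constant), as a DWARF expression computing the value
   (dw_scalar_form_exprloc), or as a reference to another DIE holding it
   (dw_scalar_form_reference); callers of add_scalar_info pass a mask of
   the forms they are willing to accept.  */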
3611
3612 /* Forward declarations for functions defined in this file. */
3613
3614 static int is_pseudo_reg (const_rtx);
3615 static tree type_main_variant (tree);
3616 static int is_tagged_type (const_tree);
3617 static const char *dwarf_tag_name (unsigned);
3618 static const char *dwarf_attr_name (unsigned);
3619 static const char *dwarf_form_name (unsigned);
3620 static tree decl_ultimate_origin (const_tree);
3621 static tree decl_class_context (tree);
3622 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3623 static inline enum dw_val_class AT_class (dw_attr_node *);
3624 static inline unsigned int AT_index (dw_attr_node *);
3625 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3626 static inline unsigned AT_flag (dw_attr_node *);
3627 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3628 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3629 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3630 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3631 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3632 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3633 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3634 unsigned int, unsigned char *);
3635 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3636 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3637 static inline const char *AT_string (dw_attr_node *);
3638 static enum dwarf_form AT_string_form (dw_attr_node *);
3639 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3640 static void add_AT_specification (dw_die_ref, dw_die_ref);
3641 static inline dw_die_ref AT_ref (dw_attr_node *);
3642 static inline int AT_ref_external (dw_attr_node *);
3643 static inline void set_AT_ref_external (dw_attr_node *, int);
3644 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3645 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3646 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3647 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3648 dw_loc_list_ref);
3649 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3650 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3652 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3653 static void remove_addr_table_entry (addr_table_entry *);
3654 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3655 static inline rtx AT_addr (dw_attr_node *);
3656 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3657 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3658 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3659 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3660 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3661 const char *);
3662 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3663 unsigned HOST_WIDE_INT);
3664 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3665 unsigned long, bool);
3666 static inline const char *AT_lbl (dw_attr_node *);
3667 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3668 static const char *get_AT_low_pc (dw_die_ref);
3669 static const char *get_AT_hi_pc (dw_die_ref);
3670 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3671 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3672 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3673 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3674 static bool is_cxx (void);
3675 static bool is_cxx (const_tree);
3676 static bool is_fortran (void);
3677 static bool is_ada (void);
3678 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3679 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3680 static void add_child_die (dw_die_ref, dw_die_ref);
3681 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3682 static dw_die_ref lookup_type_die (tree);
3683 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3684 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3685 static void equate_type_number_to_die (tree, dw_die_ref);
3686 static dw_die_ref lookup_decl_die (tree);
3687 static var_loc_list *lookup_decl_loc (const_tree);
3688 static void equate_decl_number_to_die (tree, dw_die_ref);
3689 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3690 static void print_spaces (FILE *);
3691 static void print_die (dw_die_ref, FILE *);
3692 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3693 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3694 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3695 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3696 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3697 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3698 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3699 struct md5_ctx *, int *);
3700 struct checksum_attributes;
3701 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3702 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3703 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3704 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3705 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3706 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3707 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3708 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3709 static int is_type_die (dw_die_ref);
3710 static int is_comdat_die (dw_die_ref);
3711 static inline bool is_template_instantiation (dw_die_ref);
3712 static int is_declaration_die (dw_die_ref);
3713 static int should_move_die_to_comdat (dw_die_ref);
3714 static dw_die_ref clone_as_declaration (dw_die_ref);
3715 static dw_die_ref clone_die (dw_die_ref);
3716 static dw_die_ref clone_tree (dw_die_ref);
3717 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3718 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3719 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3720 static dw_die_ref generate_skeleton (dw_die_ref);
3721 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3722 dw_die_ref,
3723 dw_die_ref);
3724 static void break_out_comdat_types (dw_die_ref);
3725 static void copy_decls_for_unworthy_types (dw_die_ref);
3726
3727 static void add_sibling_attributes (dw_die_ref);
3728 static void output_location_lists (dw_die_ref);
3729 static int constant_size (unsigned HOST_WIDE_INT);
3730 static unsigned long size_of_die (dw_die_ref);
3731 static void calc_die_sizes (dw_die_ref);
3732 static void calc_base_type_die_sizes (void);
3733 static void mark_dies (dw_die_ref);
3734 static void unmark_dies (dw_die_ref);
3735 static void unmark_all_dies (dw_die_ref);
3736 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3737 static unsigned long size_of_aranges (void);
3738 static enum dwarf_form value_format (dw_attr_node *);
3739 static void output_value_format (dw_attr_node *);
3740 static void output_abbrev_section (void);
3741 static void output_die_abbrevs (unsigned long, dw_die_ref);
3742 static void output_die (dw_die_ref);
3743 static void output_compilation_unit_header (enum dwarf_unit_type);
3744 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3745 static void output_comdat_type_unit (comdat_type_node *);
3746 static const char *dwarf2_name (tree, int);
3747 static void add_pubname (tree, dw_die_ref);
3748 static void add_enumerator_pubname (const char *, dw_die_ref);
3749 static void add_pubname_string (const char *, dw_die_ref);
3750 static void add_pubtype (tree, dw_die_ref);
3751 static void output_pubnames (vec<pubname_entry, va_gc> *);
3752 static void output_aranges (void);
3753 static unsigned int add_ranges (const_tree, bool = false);
3754 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3755 bool *, bool);
3756 static void output_ranges (void);
3757 static dw_line_info_table *new_line_info_table (void);
3758 static void output_line_info (bool);
3759 static void output_file_names (void);
3760 static dw_die_ref base_type_die (tree, bool);
3761 static int is_base_type (tree);
3762 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3763 static int decl_quals (const_tree);
3764 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3765 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3766 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3767 static int type_is_enum (const_tree);
3768 static unsigned int dbx_reg_number (const_rtx);
3769 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3770 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3771 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3772 enum var_init_status);
3773 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3774 enum var_init_status);
3775 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3776 enum var_init_status);
3777 static int is_based_loc (const_rtx);
3778 static bool resolve_one_addr (rtx *);
3779 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3780 enum var_init_status);
3781 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3782 enum var_init_status);
3783 struct loc_descr_context;
3784 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3785 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3786 static dw_loc_list_ref loc_list_from_tree (tree, int,
3787 struct loc_descr_context *);
3788 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3789 struct loc_descr_context *);
3790 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3791 static tree field_type (const_tree);
3792 static unsigned int simple_type_align_in_bits (const_tree);
3793 static unsigned int simple_decl_align_in_bits (const_tree);
3794 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3795 struct vlr_context;
3796 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3797 HOST_WIDE_INT *);
3798 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3799 dw_loc_list_ref);
3800 static void add_data_member_location_attribute (dw_die_ref, tree,
3801 struct vlr_context *);
3802 static bool add_const_value_attribute (dw_die_ref, rtx);
3803 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3804 static void insert_wide_int (const wide_int &, unsigned char *, int);
3805 static void insert_float (const_rtx, unsigned char *);
3806 static rtx rtl_for_decl_location (tree);
3807 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3808 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3809 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3810 static void add_name_attribute (dw_die_ref, const char *);
3811 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3812 static void add_comp_dir_attribute (dw_die_ref);
3813 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3814 struct loc_descr_context *);
3815 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3816 struct loc_descr_context *);
3817 static void add_subscript_info (dw_die_ref, tree, bool);
3818 static void add_byte_size_attribute (dw_die_ref, tree);
3819 static void add_alignment_attribute (dw_die_ref, tree);
3820 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3821 struct vlr_context *);
3822 static void add_bit_size_attribute (dw_die_ref, tree);
3823 static void add_prototyped_attribute (dw_die_ref, tree);
3824 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3825 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3826 static void add_src_coords_attributes (dw_die_ref, tree);
3827 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3828 static void add_discr_value (dw_die_ref, dw_discr_value *);
3829 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3830 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3831 static dw_die_ref scope_die_for (tree, dw_die_ref);
3832 static inline int local_scope_p (dw_die_ref);
3833 static inline int class_scope_p (dw_die_ref);
3834 static inline int class_or_namespace_scope_p (dw_die_ref);
3835 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3836 static void add_calling_convention_attribute (dw_die_ref, tree);
3837 static const char *type_tag (const_tree);
3838 static tree member_declared_type (const_tree);
3839 #if 0
3840 static const char *decl_start_label (tree);
3841 #endif
3842 static void gen_array_type_die (tree, dw_die_ref);
3843 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3844 #if 0
3845 static void gen_entry_point_die (tree, dw_die_ref);
3846 #endif
3847 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3848 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3849 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3850 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3851 static void gen_formal_types_die (tree, dw_die_ref);
3852 static void gen_subprogram_die (tree, dw_die_ref);
3853 static void gen_variable_die (tree, tree, dw_die_ref);
3854 static void gen_const_die (tree, dw_die_ref);
3855 static void gen_label_die (tree, dw_die_ref);
3856 static void gen_lexical_block_die (tree, dw_die_ref);
3857 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3858 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3859 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3860 static dw_die_ref gen_compile_unit_die (const char *);
3861 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3862 static void gen_member_die (tree, dw_die_ref);
3863 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3864 enum debug_info_usage);
3865 static void gen_subroutine_type_die (tree, dw_die_ref);
3866 static void gen_typedef_die (tree, dw_die_ref);
3867 static void gen_type_die (tree, dw_die_ref);
3868 static void gen_block_die (tree, dw_die_ref);
3869 static void decls_for_scope (tree, dw_die_ref);
3870 static bool is_naming_typedef_decl (const_tree);
3871 static inline dw_die_ref get_context_die (tree);
3872 static void gen_namespace_die (tree, dw_die_ref);
3873 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3874 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3875 static dw_die_ref force_decl_die (tree);
3876 static dw_die_ref force_type_die (tree);
3877 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3878 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3879 static struct dwarf_file_data * lookup_filename (const char *);
3880 static void retry_incomplete_types (void);
3881 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3882 static void gen_generic_params_dies (tree);
3883 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3884 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3885 static void splice_child_die (dw_die_ref, dw_die_ref);
3886 static int file_info_cmp (const void *, const void *);
3887 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3888 const char *, var_loc_view, const char *);
3889 static void output_loc_list (dw_loc_list_ref);
3890 static char *gen_internal_sym (const char *);
3891 static bool want_pubnames (void);
3892
3893 static void prune_unmark_dies (dw_die_ref);
3894 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3895 static void prune_unused_types_mark (dw_die_ref, int);
3896 static void prune_unused_types_walk (dw_die_ref);
3897 static void prune_unused_types_walk_attribs (dw_die_ref);
3898 static void prune_unused_types_prune (dw_die_ref);
3899 static void prune_unused_types (void);
3900 static int maybe_emit_file (struct dwarf_file_data *fd);
3901 static inline const char *AT_vms_delta1 (dw_attr_node *);
3902 static inline const char *AT_vms_delta2 (dw_attr_node *);
3903 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3904 const char *, const char *);
3905 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3906 static void gen_remaining_tmpl_value_param_die_attribute (void);
3907 static bool generic_type_p (tree);
3908 static void schedule_generic_params_dies_gen (tree t);
3909 static void gen_scheduled_generic_parms_dies (void);
3910 static void resolve_variable_values (void);
3911
3912 static const char *comp_dir_string (void);
3913
3914 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3915
3916 /* enum for tracking thread-local variables whose address is really an offset
3917 relative to the TLS pointer, which will need link-time relocation, but will
3918 not need relocation by the DWARF consumer. */
3919
3920 enum dtprel_bool
3921 {
3922 dtprel_false = 0,
3923 dtprel_true = 1
3924 };
3925
3926 /* Return the operator to use for an address of a variable. For dtprel_true, we
3927 use DW_OP_const*. For regular variables, which need both link-time
3928 relocation and consumer-level relocation (e.g., to account for shared objects
3929 loaded at a random address), we use DW_OP_addr*. */
3930
3931 static inline enum dwarf_location_atom
3932 dw_addr_op (enum dtprel_bool dtprel)
3933 {
3934 if (dtprel == dtprel_true)
3935 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3936 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3937 else
3938 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3939 }
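/* For example, without -gsplit-dwarf dw_addr_op (dtprel_false) is DW_OP_addr
   and dw_addr_op (dtprel_true) is DW_OP_const4u or DW_OP_const8u depending on
   DWARF2_ADDR_SIZE; with split debug info the indexed forms returned by
   dwarf_OP (DW_OP_addrx) resp. dwarf_OP (DW_OP_constx) are used instead.  */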
3940
3941 /* Return a pointer to a newly allocated address location description. If
3942 dwarf_split_debug_info is true, then record the address with the appropriate
3943 relocation. */
3944 static inline dw_loc_descr_ref
3945 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3946 {
3947 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3948
3949 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3950 ref->dw_loc_oprnd1.v.val_addr = addr;
3951 ref->dtprel = dtprel;
3952 if (dwarf_split_debug_info)
3953 ref->dw_loc_oprnd1.val_entry
3954 = add_addr_table_entry (addr,
3955 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3956 else
3957 ref->dw_loc_oprnd1.val_entry = NULL;
3958
3959 return ref;
3960 }
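/* Illustrative use (a sketch, assuming DECL_RTL (decl) is a MEM whose address
   should be emitted directly):

     dw_loc_descr_ref d
       = new_addr_loc_descr (XEXP (DECL_RTL (decl), 0), dtprel_false);

   This yields a DW_OP_addr operation, or DW_OP_addrx with a .debug_addr
   table entry when -gsplit-dwarf is in effect.  */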
3961
3962 /* Section names used to hold DWARF debugging information. */
3963
3964 #ifndef DEBUG_INFO_SECTION
3965 #define DEBUG_INFO_SECTION ".debug_info"
3966 #endif
3967 #ifndef DEBUG_DWO_INFO_SECTION
3968 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3969 #endif
3970 #ifndef DEBUG_LTO_INFO_SECTION
3971 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3972 #endif
3973 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3974 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3975 #endif
3976 #ifndef DEBUG_ABBREV_SECTION
3977 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3978 #endif
3979 #ifndef DEBUG_LTO_ABBREV_SECTION
3980 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3981 #endif
3982 #ifndef DEBUG_DWO_ABBREV_SECTION
3983 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3984 #endif
3985 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3986 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3987 #endif
3988 #ifndef DEBUG_ARANGES_SECTION
3989 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3990 #endif
3991 #ifndef DEBUG_ADDR_SECTION
3992 #define DEBUG_ADDR_SECTION ".debug_addr"
3993 #endif
3994 #ifndef DEBUG_MACINFO_SECTION
3995 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3996 #endif
3997 #ifndef DEBUG_LTO_MACINFO_SECTION
3998 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3999 #endif
4000 #ifndef DEBUG_DWO_MACINFO_SECTION
4001 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4002 #endif
4003 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4004 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4005 #endif
4006 #ifndef DEBUG_MACRO_SECTION
4007 #define DEBUG_MACRO_SECTION ".debug_macro"
4008 #endif
4009 #ifndef DEBUG_LTO_MACRO_SECTION
4010 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4011 #endif
4012 #ifndef DEBUG_DWO_MACRO_SECTION
4013 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4014 #endif
4015 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4016 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4017 #endif
4018 #ifndef DEBUG_LINE_SECTION
4019 #define DEBUG_LINE_SECTION ".debug_line"
4020 #endif
4021 #ifndef DEBUG_LTO_LINE_SECTION
4022 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4023 #endif
4024 #ifndef DEBUG_DWO_LINE_SECTION
4025 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4026 #endif
4027 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4028 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4029 #endif
4030 #ifndef DEBUG_LOC_SECTION
4031 #define DEBUG_LOC_SECTION ".debug_loc"
4032 #endif
4033 #ifndef DEBUG_DWO_LOC_SECTION
4034 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4035 #endif
4036 #ifndef DEBUG_LOCLISTS_SECTION
4037 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4038 #endif
4039 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4040 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4041 #endif
4042 #ifndef DEBUG_PUBNAMES_SECTION
4043 #define DEBUG_PUBNAMES_SECTION \
4044 ((debug_generate_pub_sections == 2) \
4045 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4046 #endif
4047 #ifndef DEBUG_PUBTYPES_SECTION
4048 #define DEBUG_PUBTYPES_SECTION \
4049 ((debug_generate_pub_sections == 2) \
4050 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4051 #endif
4052 #ifndef DEBUG_STR_OFFSETS_SECTION
4053 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4054 #endif
4055 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4056 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4057 #endif
4058 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4059 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4060 #endif
4061 #ifndef DEBUG_STR_SECTION
4062 #define DEBUG_STR_SECTION ".debug_str"
4063 #endif
4064 #ifndef DEBUG_LTO_STR_SECTION
4065 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4066 #endif
4067 #ifndef DEBUG_STR_DWO_SECTION
4068 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4069 #endif
4070 #ifndef DEBUG_LTO_STR_DWO_SECTION
4071 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4072 #endif
4073 #ifndef DEBUG_RANGES_SECTION
4074 #define DEBUG_RANGES_SECTION ".debug_ranges"
4075 #endif
4076 #ifndef DEBUG_RNGLISTS_SECTION
4077 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4078 #endif
4079 #ifndef DEBUG_LINE_STR_SECTION
4080 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4081 #endif
4082 #ifndef DEBUG_LTO_LINE_STR_SECTION
4083 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4084 #endif
4085
4086 /* Standard ELF section names for compiled code and data. */
4087 #ifndef TEXT_SECTION_NAME
4088 #define TEXT_SECTION_NAME ".text"
4089 #endif
4090
4091 /* Section flags for .debug_str section. */
4092 #define DEBUG_STR_SECTION_FLAGS \
4093 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4094 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4095 : SECTION_DEBUG)
4096
4097 /* Section flags for .debug_str.dwo section. */
4098 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4099
4100 /* Attribute used to refer to the macro section. */
4101 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4102 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
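/* For example, -gdwarf-5 selects DW_AT_macros, -gdwarf-4 together with
   -gstrict-dwarf falls back to DW_AT_macro_info, and plain -gdwarf-4 uses
   the GNU extension DW_AT_GNU_macros.  */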
4103
4104 /* Labels we insert at the beginning of sections, so we can reference them
4105 instead of the section names themselves.  */
4106
4107 #ifndef TEXT_SECTION_LABEL
4108 #define TEXT_SECTION_LABEL "Ltext"
4109 #endif
4110 #ifndef COLD_TEXT_SECTION_LABEL
4111 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4112 #endif
4113 #ifndef DEBUG_LINE_SECTION_LABEL
4114 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4115 #endif
4116 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4117 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4118 #endif
4119 #ifndef DEBUG_INFO_SECTION_LABEL
4120 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4121 #endif
4122 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4123 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4124 #endif
4125 #ifndef DEBUG_ABBREV_SECTION_LABEL
4126 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4127 #endif
4128 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4129 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4130 #endif
4131 #ifndef DEBUG_ADDR_SECTION_LABEL
4132 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4133 #endif
4134 #ifndef DEBUG_LOC_SECTION_LABEL
4135 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4136 #endif
4137 #ifndef DEBUG_RANGES_SECTION_LABEL
4138 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4139 #endif
4140 #ifndef DEBUG_MACINFO_SECTION_LABEL
4141 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4142 #endif
4143 #ifndef DEBUG_MACRO_SECTION_LABEL
4144 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4145 #endif
4146 #define SKELETON_COMP_DIE_ABBREV 1
4147 #define SKELETON_TYPE_DIE_ABBREV 2
4148
4149 /* Definitions of defaults for formats and names of various special
4150 (artificial) labels which may be generated within this file (when the -g
4151 option is used and DWARF2_DEBUGGING_INFO is in effect).
4152 If necessary, these may be overridden from within the tm.h file, but
4153 typically, overriding these defaults is unnecessary.  */
4154
4155 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4156 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4170
4171 #ifndef TEXT_END_LABEL
4172 #define TEXT_END_LABEL "Letext"
4173 #endif
4174 #ifndef COLD_END_LABEL
4175 #define COLD_END_LABEL "Letext_cold"
4176 #endif
4177 #ifndef BLOCK_BEGIN_LABEL
4178 #define BLOCK_BEGIN_LABEL "LBB"
4179 #endif
4180 #ifndef BLOCK_INLINE_ENTRY_LABEL
4181 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4182 #endif
4183 #ifndef BLOCK_END_LABEL
4184 #define BLOCK_END_LABEL "LBE"
4185 #endif
4186 #ifndef LINE_CODE_LABEL
4187 #define LINE_CODE_LABEL "LM"
4188 #endif
4189
4190 \f
4191 /* Return the root of the DIEs built for the current compilation unit.  */
4192 static dw_die_ref
4193 comp_unit_die (void)
4194 {
4195 if (!single_comp_unit_die)
4196 single_comp_unit_die = gen_compile_unit_die (NULL);
4197 return single_comp_unit_die;
4198 }
4199
4200 /* We allow a language front-end to designate a function that is to be
4201 called to "demangle" any name before it is put into a DIE. */
4202
4203 static const char *(*demangle_name_func) (const char *);
4204
4205 void
4206 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4207 {
4208 demangle_name_func = func;
4209 }
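/* Illustrative only: a front end can register its demangler from its own
   initialization code, e.g. with a hypothetical helper that returns a
   statically allocated string:

     dwarf2out_set_demangle_name_func (my_lang_demangle_name);  */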
4210
4211 /* Test if rtl node points to a pseudo register. */
4212
4213 static inline int
4214 is_pseudo_reg (const_rtx rtl)
4215 {
4216 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4217 || (GET_CODE (rtl) == SUBREG
4218 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4219 }
4220
4221 /* Return a reference to a type, with its const and volatile qualifiers
4222 removed. */
4223
4224 static inline tree
4225 type_main_variant (tree type)
4226 {
4227 type = TYPE_MAIN_VARIANT (type);
4228
4229 /* ??? There really should be only one main variant among any group of
4230 variants of a given type (and all of the MAIN_VARIANT values for all
4231 members of the group should point to that one type) but sometimes the C
4232 front-end messes this up for array types, so we work around that bug
4233 here. */
4234 if (TREE_CODE (type) == ARRAY_TYPE)
4235 while (type != TYPE_MAIN_VARIANT (type))
4236 type = TYPE_MAIN_VARIANT (type);
4237
4238 return type;
4239 }
4240
4241 /* Return nonzero if the given type node represents a tagged type. */
4242
4243 static inline int
4244 is_tagged_type (const_tree type)
4245 {
4246 enum tree_code code = TREE_CODE (type);
4247
4248 return (code == RECORD_TYPE || code == UNION_TYPE
4249 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4250 }
4251
4252 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4253
4254 static void
4255 get_ref_die_offset_label (char *label, dw_die_ref ref)
4256 {
4257 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4258 }
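/* For example, this produces strings of the form
   "<debug_info_section_label>+<die_offset>", such as "Ldebug_info0+291"
   (modulo the target's internal-label decoration).  */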
4259
4260 /* Return die_offset of a DIE reference to a base type. */
4261
4262 static unsigned long int
4263 get_base_type_offset (dw_die_ref ref)
4264 {
4265 if (ref->die_offset)
4266 return ref->die_offset;
4267 if (comp_unit_die ()->die_abbrev)
4268 {
4269 calc_base_type_die_sizes ();
4270 gcc_assert (ref->die_offset);
4271 }
4272 return ref->die_offset;
4273 }
4274
4275 /* Return die_offset of a DIE reference other than base type. */
4276
4277 static unsigned long int
4278 get_ref_die_offset (dw_die_ref ref)
4279 {
4280 gcc_assert (ref->die_offset);
4281 return ref->die_offset;
4282 }
4283
4284 /* Convert a DIE tag into its string name. */
4285
4286 static const char *
4287 dwarf_tag_name (unsigned int tag)
4288 {
4289 const char *name = get_DW_TAG_name (tag);
4290
4291 if (name != NULL)
4292 return name;
4293
4294 return "DW_TAG_<unknown>";
4295 }
4296
4297 /* Convert a DWARF attribute code into its string name. */
4298
4299 static const char *
4300 dwarf_attr_name (unsigned int attr)
4301 {
4302 const char *name;
4303
4304 switch (attr)
4305 {
4306 #if VMS_DEBUGGING_INFO
4307 case DW_AT_HP_prologue:
4308 return "DW_AT_HP_prologue";
4309 #else
4310 case DW_AT_MIPS_loop_unroll_factor:
4311 return "DW_AT_MIPS_loop_unroll_factor";
4312 #endif
4313
4314 #if VMS_DEBUGGING_INFO
4315 case DW_AT_HP_epilogue:
4316 return "DW_AT_HP_epilogue";
4317 #else
4318 case DW_AT_MIPS_stride:
4319 return "DW_AT_MIPS_stride";
4320 #endif
4321 }
4322
4323 name = get_DW_AT_name (attr);
4324
4325 if (name != NULL)
4326 return name;
4327
4328 return "DW_AT_<unknown>";
4329 }
4330
4331 /* Convert a DWARF value form code into its string name. */
4332
4333 static const char *
4334 dwarf_form_name (unsigned int form)
4335 {
4336 const char *name = get_DW_FORM_name (form);
4337
4338 if (name != NULL)
4339 return name;
4340
4341 return "DW_FORM_<unknown>";
4342 }
4343 \f
4344 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4345 instance of an inlined instance of a decl which is local to an inline
4346 function, so we have to trace all of the way back through the origin chain
4347 to find out what sort of node actually served as the original seed for the
4348 given block. */
4349
4350 static tree
4351 decl_ultimate_origin (const_tree decl)
4352 {
4353 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4354 return NULL_TREE;
4355
4356 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4357 we're trying to output the abstract instance of this function. */
4358 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4359 return NULL_TREE;
4360
4361 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4362 most distant ancestor, this should never happen. */
4363 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4364
4365 return DECL_ABSTRACT_ORIGIN (decl);
4366 }
4367
4368 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4369 of a virtual function may refer to a base class, so we check the 'this'
4370 parameter. */
4371
4372 static tree
4373 decl_class_context (tree decl)
4374 {
4375 tree context = NULL_TREE;
4376
4377 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4378 context = DECL_CONTEXT (decl);
4379 else
4380 context = TYPE_MAIN_VARIANT
4381 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4382
4383 if (context && !TYPE_P (context))
4384 context = NULL_TREE;
4385
4386 return context;
4387 }
4388 \f
4389 /* Add an attribute/value pair to a DIE. */
4390
4391 static inline void
4392 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4393 {
4394 /* Maybe this should be an assert? */
4395 if (die == NULL)
4396 return;
4397
4398 if (flag_checking)
4399 {
4400 /* Check we do not add duplicate attrs. Can't use get_AT here
4401 because that recurses to the specification/abstract origin DIE. */
4402 dw_attr_node *a;
4403 unsigned ix;
4404 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4405 gcc_assert (a->dw_attr != attr->dw_attr);
4406 }
4407
4408 vec_safe_reserve (die->die_attr, 1);
4409 vec_safe_push (die->die_attr, *attr);
4410 }
4411
4412 static inline enum dw_val_class
4413 AT_class (dw_attr_node *a)
4414 {
4415 return a->dw_attr_val.val_class;
4416 }
4417
4418 /* Return the index for any attribute that will be referenced with a
4419 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4420 indices are stored in dw_attr_val.v.val_str so that they can be
4421 reference counted for pruning.  */
4422
4423 static inline unsigned int
4424 AT_index (dw_attr_node *a)
4425 {
4426 if (AT_class (a) == dw_val_class_str)
4427 return a->dw_attr_val.v.val_str->index;
4428 else if (a->dw_attr_val.val_entry != NULL)
4429 return a->dw_attr_val.val_entry->index;
4430 return NOT_INDEXED;
4431 }
4432
4433 /* Add a flag value attribute to a DIE. */
4434
4435 static inline void
4436 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4437 {
4438 dw_attr_node attr;
4439
4440 attr.dw_attr = attr_kind;
4441 attr.dw_attr_val.val_class = dw_val_class_flag;
4442 attr.dw_attr_val.val_entry = NULL;
4443 attr.dw_attr_val.v.val_flag = flag;
4444 add_dwarf_attr (die, &attr);
4445 }
4446
4447 static inline unsigned
4448 AT_flag (dw_attr_node *a)
4449 {
4450 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4451 return a->dw_attr_val.v.val_flag;
4452 }
4453
4454 /* Add a signed integer attribute value to a DIE. */
4455
4456 static inline void
4457 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4458 {
4459 dw_attr_node attr;
4460
4461 attr.dw_attr = attr_kind;
4462 attr.dw_attr_val.val_class = dw_val_class_const;
4463 attr.dw_attr_val.val_entry = NULL;
4464 attr.dw_attr_val.v.val_int = int_val;
4465 add_dwarf_attr (die, &attr);
4466 }
4467
4468 static inline HOST_WIDE_INT
4469 AT_int (dw_attr_node *a)
4470 {
4471 gcc_assert (a && (AT_class (a) == dw_val_class_const
4472 || AT_class (a) == dw_val_class_const_implicit));
4473 return a->dw_attr_val.v.val_int;
4474 }
4475
4476 /* Add an unsigned integer attribute value to a DIE. */
4477
4478 static inline void
4479 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4480 unsigned HOST_WIDE_INT unsigned_val)
4481 {
4482 dw_attr_node attr;
4483
4484 attr.dw_attr = attr_kind;
4485 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4486 attr.dw_attr_val.val_entry = NULL;
4487 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4488 add_dwarf_attr (die, &attr);
4489 }
4490
4491 static inline unsigned HOST_WIDE_INT
4492 AT_unsigned (dw_attr_node *a)
4493 {
4494 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4495 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4496 return a->dw_attr_val.v.val_unsigned;
4497 }
4498
4499 /* Add an unsigned wide integer attribute value to a DIE. */
4500
4501 static inline void
4502 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4503 const wide_int& w)
4504 {
4505 dw_attr_node attr;
4506
4507 attr.dw_attr = attr_kind;
4508 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4509 attr.dw_attr_val.val_entry = NULL;
4510 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4511 *attr.dw_attr_val.v.val_wide = w;
4512 add_dwarf_attr (die, &attr);
4513 }
4514
4515 /* Add an unsigned double integer attribute value to a DIE. */
4516
4517 static inline void
4518 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4519 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4520 {
4521 dw_attr_node attr;
4522
4523 attr.dw_attr = attr_kind;
4524 attr.dw_attr_val.val_class = dw_val_class_const_double;
4525 attr.dw_attr_val.val_entry = NULL;
4526 attr.dw_attr_val.v.val_double.high = high;
4527 attr.dw_attr_val.v.val_double.low = low;
4528 add_dwarf_attr (die, &attr);
4529 }
4530
4531 /* Add a floating point or vector constant attribute value to a DIE.  */
4532
4533 static inline void
4534 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4535 unsigned int length, unsigned int elt_size, unsigned char *array)
4536 {
4537 dw_attr_node attr;
4538
4539 attr.dw_attr = attr_kind;
4540 attr.dw_attr_val.val_class = dw_val_class_vec;
4541 attr.dw_attr_val.val_entry = NULL;
4542 attr.dw_attr_val.v.val_vec.length = length;
4543 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4544 attr.dw_attr_val.v.val_vec.array = array;
4545 add_dwarf_attr (die, &attr);
4546 }
4547
4548 /* Add an 8-byte data attribute value to a DIE. */
4549
4550 static inline void
4551 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4552 unsigned char data8[8])
4553 {
4554 dw_attr_node attr;
4555
4556 attr.dw_attr = attr_kind;
4557 attr.dw_attr_val.val_class = dw_val_class_data8;
4558 attr.dw_attr_val.val_entry = NULL;
4559 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4560 add_dwarf_attr (die, &attr);
4561 }
4562
4563 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4564 dwarf_split_debug_info, address attributes in dies destined for the
4565 final executable have force_direct set to avoid using indexed
4566 references. */
4567
4568 static inline void
4569 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4570 bool force_direct)
4571 {
4572 dw_attr_node attr;
4573 char * lbl_id;
4574
4575 lbl_id = xstrdup (lbl_low);
4576 attr.dw_attr = DW_AT_low_pc;
4577 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4578 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4579 if (dwarf_split_debug_info && !force_direct)
4580 attr.dw_attr_val.val_entry
4581 = add_addr_table_entry (lbl_id, ate_kind_label);
4582 else
4583 attr.dw_attr_val.val_entry = NULL;
4584 add_dwarf_attr (die, &attr);
4585
4586 attr.dw_attr = DW_AT_high_pc;
4587 if (dwarf_version < 4)
4588 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4589 else
4590 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4591 lbl_id = xstrdup (lbl_high);
4592 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4593 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4594 && dwarf_split_debug_info && !force_direct)
4595 attr.dw_attr_val.val_entry
4596 = add_addr_table_entry (lbl_id, ate_kind_label);
4597 else
4598 attr.dw_attr_val.val_entry = NULL;
4599 add_dwarf_attr (die, &attr);
4600 }
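/* Illustrative use (the label names are hypothetical placeholders for a
   function's first and last code addresses):

     add_AT_low_high_pc (subr_die, "LFB42", "LFE42", false);

   For DWARF 4 and later the high-pc value is emitted as an offset from
   DW_AT_low_pc (dw_val_class_high_pc) rather than as a second address.  */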
4601
4602 /* Hash and equality functions for debug_str_hash. */
4603
4604 hashval_t
4605 indirect_string_hasher::hash (indirect_string_node *x)
4606 {
4607 return htab_hash_string (x->str);
4608 }
4609
4610 bool
4611 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4612 {
4613 return strcmp (x1->str, x2) == 0;
4614 }
4615
4616 /* Add STR to the given string hash table. */
4617
4618 static struct indirect_string_node *
4619 find_AT_string_in_table (const char *str,
4620 hash_table<indirect_string_hasher> *table)
4621 {
4622 struct indirect_string_node *node;
4623
4624 indirect_string_node **slot
4625 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4626 if (*slot == NULL)
4627 {
4628 node = ggc_cleared_alloc<indirect_string_node> ();
4629 node->str = ggc_strdup (str);
4630 *slot = node;
4631 }
4632 else
4633 node = *slot;
4634
4635 node->refcount++;
4636 return node;
4637 }
4638
4639 /* Add STR to the indirect string hash table. */
4640
4641 static struct indirect_string_node *
4642 find_AT_string (const char *str)
4643 {
4644 if (! debug_str_hash)
4645 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4646
4647 return find_AT_string_in_table (str, debug_str_hash);
4648 }
4649
4650 /* Add a string attribute value to a DIE. */
4651
4652 static inline void
4653 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4654 {
4655 dw_attr_node attr;
4656 struct indirect_string_node *node;
4657
4658 node = find_AT_string (str);
4659
4660 attr.dw_attr = attr_kind;
4661 attr.dw_attr_val.val_class = dw_val_class_str;
4662 attr.dw_attr_val.val_entry = NULL;
4663 attr.dw_attr_val.v.val_str = node;
4664 add_dwarf_attr (die, &attr);
4665 }
4666
4667 static inline const char *
4668 AT_string (dw_attr_node *a)
4669 {
4670 gcc_assert (a && AT_class (a) == dw_val_class_str);
4671 return a->dw_attr_val.v.val_str->str;
4672 }
4673
4674 /* Call this function directly to bypass AT_string_form's logic that would
4675 otherwise put the string inline in the DIE.  */
4676
4677 static void
4678 set_indirect_string (struct indirect_string_node *node)
4679 {
4680 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4681 /* Already indirect is a no op. */
4682 if (node->form == DW_FORM_strp
4683 || node->form == DW_FORM_line_strp
4684 || node->form == dwarf_FORM (DW_FORM_strx))
4685 {
4686 gcc_assert (node->label);
4687 return;
4688 }
4689 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4690 ++dw2_string_counter;
4691 node->label = xstrdup (label);
4692
4693 if (!dwarf_split_debug_info)
4694 {
4695 node->form = DW_FORM_strp;
4696 node->index = NOT_INDEXED;
4697 }
4698 else
4699 {
4700 node->form = dwarf_FORM (DW_FORM_strx);
4701 node->index = NO_INDEX_ASSIGNED;
4702 }
4703 }
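/* For example, strings forced indirect this way are labelled "LASF0",
   "LASF1", ... (modulo the target's local-label decoration) and are later
   emitted into .debug_str and referenced with DW_FORM_strp, or referenced
   through the string offsets table with DW_FORM_strx under -gsplit-dwarf.  */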
4704
4705 /* A helper function for dwarf2out_finish, called to reset indirect
4706 string decisions done for early LTO dwarf output before fat object
4707 dwarf output. */
4708
4709 int
4710 reset_indirect_string (indirect_string_node **h, void *)
4711 {
4712 struct indirect_string_node *node = *h;
4713 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4714 {
4715 free (node->label);
4716 node->label = NULL;
4717 node->form = (dwarf_form) 0;
4718 node->index = 0;
4719 }
4720 return 1;
4721 }
4722
4723 /* Find out whether a string should be output inline in DIE
4724 or out-of-line in .debug_str section. */
4725
4726 static enum dwarf_form
4727 find_string_form (struct indirect_string_node *node)
4728 {
4729 unsigned int len;
4730
4731 if (node->form)
4732 return node->form;
4733
4734 len = strlen (node->str) + 1;
4735
4736 /* If the string is shorter than or equal to the size of the reference, it is
4737 always better to put it inline. */
4738 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4739 return node->form = DW_FORM_string;
4740
4741 /* If we cannot expect the linker to merge strings in .debug_str
4742 section, only put it into .debug_str if it is worth even in this
4743 single module. */
4744 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4745 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4746 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4747 return node->form = DW_FORM_string;
4748
4749 set_indirect_string (node);
4750
4751 return node->form;
4752 }
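/* For example, with a 4-byte DWARF_OFFSET_SIZE the string "abc" (length 4
   counting the terminating NUL) is always emitted inline as DW_FORM_string,
   whereas a longer string referenced from several DIEs is shared through
   .debug_str once the SECTION_MERGE or refcount test above says that is
   profitable.  */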
4753
4754 /* Find out whether the string referenced from the attribute should be
4755 output inline in DIE or out-of-line in .debug_str section. */
4756
4757 static enum dwarf_form
4758 AT_string_form (dw_attr_node *a)
4759 {
4760 gcc_assert (a && AT_class (a) == dw_val_class_str);
4761 return find_string_form (a->dw_attr_val.v.val_str);
4762 }
4763
4764 /* Add a DIE reference attribute value to a DIE. */
4765
4766 static inline void
4767 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4768 {
4769 dw_attr_node attr;
4770 gcc_checking_assert (targ_die != NULL);
4771
4772 /* With LTO we can end up trying to reference something we didn't create
4773 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4774 if (targ_die == NULL)
4775 return;
4776
4777 attr.dw_attr = attr_kind;
4778 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4779 attr.dw_attr_val.val_entry = NULL;
4780 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4781 attr.dw_attr_val.v.val_die_ref.external = 0;
4782 add_dwarf_attr (die, &attr);
4783 }
4784
4785 /* Change DIE reference REF to point to NEW_DIE instead. */
4786
4787 static inline void
4788 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4789 {
4790 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4791 ref->dw_attr_val.v.val_die_ref.die = new_die;
4792 ref->dw_attr_val.v.val_die_ref.external = 0;
4793 }
4794
4795 /* Add an AT_specification attribute to a DIE, and also make the back
4796 pointer from the specification to the definition. */
4797
4798 static inline void
4799 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4800 {
4801 add_AT_die_ref (die, DW_AT_specification, targ_die);
4802 gcc_assert (!targ_die->die_definition);
4803 targ_die->die_definition = die;
4804 }
4805
4806 static inline dw_die_ref
4807 AT_ref (dw_attr_node *a)
4808 {
4809 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4810 return a->dw_attr_val.v.val_die_ref.die;
4811 }
4812
4813 static inline int
4814 AT_ref_external (dw_attr_node *a)
4815 {
4816 if (a && AT_class (a) == dw_val_class_die_ref)
4817 return a->dw_attr_val.v.val_die_ref.external;
4818
4819 return 0;
4820 }
4821
4822 static inline void
4823 set_AT_ref_external (dw_attr_node *a, int i)
4824 {
4825 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4826 a->dw_attr_val.v.val_die_ref.external = i;
4827 }
4828
4829 /* Add an FDE reference attribute value to a DIE. */
4830
4831 static inline void
4832 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4833 {
4834 dw_attr_node attr;
4835
4836 attr.dw_attr = attr_kind;
4837 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4838 attr.dw_attr_val.val_entry = NULL;
4839 attr.dw_attr_val.v.val_fde_index = targ_fde;
4840 add_dwarf_attr (die, &attr);
4841 }
4842
4843 /* Add a location description attribute value to a DIE. */
4844
4845 static inline void
4846 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4847 {
4848 dw_attr_node attr;
4849
4850 attr.dw_attr = attr_kind;
4851 attr.dw_attr_val.val_class = dw_val_class_loc;
4852 attr.dw_attr_val.val_entry = NULL;
4853 attr.dw_attr_val.v.val_loc = loc;
4854 add_dwarf_attr (die, &attr);
4855 }
4856
4857 static inline dw_loc_descr_ref
4858 AT_loc (dw_attr_node *a)
4859 {
4860 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4861 return a->dw_attr_val.v.val_loc;
4862 }
4863
4864 static inline void
4865 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4866 {
4867 dw_attr_node attr;
4868
4869 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4870 return;
4871
4872 attr.dw_attr = attr_kind;
4873 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4874 attr.dw_attr_val.val_entry = NULL;
4875 attr.dw_attr_val.v.val_loc_list = loc_list;
4876 add_dwarf_attr (die, &attr);
4877 have_location_lists = true;
4878 }
4879
4880 static inline dw_loc_list_ref
4881 AT_loc_list (dw_attr_node *a)
4882 {
4883 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4884 return a->dw_attr_val.v.val_loc_list;
4885 }
4886
4887 /* Add a view list attribute to DIE. It must have a DW_AT_location
4888 attribute, because the view list complements the location list. */
4889
4890 static inline void
4891 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4892 {
4893 dw_attr_node attr;
4894
4895 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4896 return;
4897
4898 attr.dw_attr = attr_kind;
4899 attr.dw_attr_val.val_class = dw_val_class_view_list;
4900 attr.dw_attr_val.val_entry = NULL;
4901 attr.dw_attr_val.v.val_view_list = die;
4902 add_dwarf_attr (die, &attr);
4903 gcc_checking_assert (get_AT (die, DW_AT_location));
4904 gcc_assert (have_location_lists);
4905 }
4906
4907 /* Return a pointer to the location list referenced by the attribute.
4908 If the named attribute is a view list, look up the corresponding
4909 DW_AT_location attribute and return its location list. */
4910
4911 static inline dw_loc_list_ref *
4912 AT_loc_list_ptr (dw_attr_node *a)
4913 {
4914 gcc_assert (a);
4915 switch (AT_class (a))
4916 {
4917 case dw_val_class_loc_list:
4918 return &a->dw_attr_val.v.val_loc_list;
4919 case dw_val_class_view_list:
4920 {
4921 dw_attr_node *l;
4922 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4923 if (!l)
4924 return NULL;
4925 gcc_checking_assert (l + 1 == a);
4926 return AT_loc_list_ptr (l);
4927 }
4928 default:
4929 gcc_unreachable ();
4930 }
4931 }
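/* The "l + 1 == a" check above relies on the view-list attribute having been
   added immediately after DW_AT_location, so that the two dw_attr_node
   entries are adjacent in the DIE's attribute vector.  */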
4932
4933 /* Return the location attribute value associated with a view list
4934 attribute value. */
4935
4936 static inline dw_val_node *
4937 view_list_to_loc_list_val_node (dw_val_node *val)
4938 {
4939 gcc_assert (val->val_class == dw_val_class_view_list);
4940 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4941 if (!loc)
4942 return NULL;
4943 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4944 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4945 return &loc->dw_attr_val;
4946 }
4947
4948 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4949 {
4950 static hashval_t hash (addr_table_entry *);
4951 static bool equal (addr_table_entry *, addr_table_entry *);
4952 };
4953
4954 /* Table of entries into the .debug_addr section. */
4955
4956 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4957
4958 /* Hash an address_table_entry. */
4959
4960 hashval_t
4961 addr_hasher::hash (addr_table_entry *a)
4962 {
4963 inchash::hash hstate;
4964 switch (a->kind)
4965 {
4966 case ate_kind_rtx:
4967 hstate.add_int (0);
4968 break;
4969 case ate_kind_rtx_dtprel:
4970 hstate.add_int (1);
4971 break;
4972 case ate_kind_label:
4973 return htab_hash_string (a->addr.label);
4974 default:
4975 gcc_unreachable ();
4976 }
4977 inchash::add_rtx (a->addr.rtl, hstate);
4978 return hstate.end ();
4979 }
4980
4981 /* Determine equality for two address_table_entries. */
4982
4983 bool
4984 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4985 {
4986 if (a1->kind != a2->kind)
4987 return 0;
4988 switch (a1->kind)
4989 {
4990 case ate_kind_rtx:
4991 case ate_kind_rtx_dtprel:
4992 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4993 case ate_kind_label:
4994 return strcmp (a1->addr.label, a2->addr.label) == 0;
4995 default:
4996 gcc_unreachable ();
4997 }
4998 }
4999
5000 /* Initialize an addr_table_entry. */
5001
5002 void
5003 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5004 {
5005 e->kind = kind;
5006 switch (kind)
5007 {
5008 case ate_kind_rtx:
5009 case ate_kind_rtx_dtprel:
5010 e->addr.rtl = (rtx) addr;
5011 break;
5012 case ate_kind_label:
5013 e->addr.label = (char *) addr;
5014 break;
5015 }
5016 e->refcount = 0;
5017 e->index = NO_INDEX_ASSIGNED;
5018 }
5019
5020 /* Add (or find) an address table entry for ADDR of kind KIND and bump its
5021 reference count.  Defer setting an index until output time.  */
5022
5023 static addr_table_entry *
5024 add_addr_table_entry (void *addr, enum ate_kind kind)
5025 {
5026 addr_table_entry *node;
5027 addr_table_entry finder;
5028
5029 gcc_assert (dwarf_split_debug_info);
5030 if (! addr_index_table)
5031 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5032 init_addr_table_entry (&finder, kind, addr);
5033 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5034
5035 if (*slot == HTAB_EMPTY_ENTRY)
5036 {
5037 node = ggc_cleared_alloc<addr_table_entry> ();
5038 init_addr_table_entry (node, kind, addr);
5039 *slot = node;
5040 }
5041 else
5042 node = *slot;
5043
5044 node->refcount++;
5045 return node;
5046 }
5047
5048 /* Remove an entry from the addr table by decrementing its refcount.
5049 Strictly, decrementing the refcount would be enough, but the
5050 assertion that the entry is actually in the table has found
5051 bugs. */
5052
5053 static void
5054 remove_addr_table_entry (addr_table_entry *entry)
5055 {
5056 gcc_assert (dwarf_split_debug_info && addr_index_table);
5057 /* After an index is assigned, the table is frozen. */
5058 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5059 entry->refcount--;
5060 }
5061
5062 /* Given a location list, remove all addresses it refers to from the
5063 address_table. */
5064
5065 static void
5066 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5067 {
5068 for (; descr; descr = descr->dw_loc_next)
5069 if (descr->dw_loc_oprnd1.val_entry != NULL)
5070 {
5071 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5072 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5073 }
5074 }
5075
5076 /* A helper function for dwarf2out_finish called through
5077 htab_traverse. Assign an addr_table_entry its index. All entries
5078 must be collected into the table when this function is called,
5079 because the indexing code relies on htab_traverse to traverse nodes
5080 in the same order for each run. */
5081
5082 int
5083 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5084 {
5085 addr_table_entry *node = *h;
5086
5087 /* Don't index unreferenced nodes. */
5088 if (node->refcount == 0)
5089 return 1;
5090
5091 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5092 node->index = *index;
5093 *index += 1;
5094
5095 return 1;
5096 }
5097
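/* Editorial sketch, not part of the original sources: the intended
   lifecycle of a .debug_addr entry as implied by the three routines
   above, assuming -gsplit-dwarf (dwarf_split_debug_info set).  The
   names E, LABEL and IDX below are hypothetical.

     addr_table_entry *e = add_addr_table_entry (label, ate_kind_label);
       ... the referencing attribute keeps E alive via its refcount ...
     remove_addr_table_entry (e);
       ... if the referencing DIE is later dropped ...

   At output time dwarf2out_finish walks addr_index_table and calls
   index_addr_table_entry on each entry with a shared counter IDX;
   entries whose refcount went back to zero are skipped, so they get
   no index and are not emitted.  */
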
5098 /* Add an address constant attribute value to a DIE. When using
5099 dwarf_split_debug_info, address attributes in dies destined for the
5100 final executable should be direct references--setting the parameter
5101 force_direct ensures this behavior. */
5102
5103 static inline void
5104 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5105 bool force_direct)
5106 {
5107 dw_attr_node attr;
5108
5109 attr.dw_attr = attr_kind;
5110 attr.dw_attr_val.val_class = dw_val_class_addr;
5111 attr.dw_attr_val.v.val_addr = addr;
5112 if (dwarf_split_debug_info && !force_direct)
5113 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5114 else
5115 attr.dw_attr_val.val_entry = NULL;
5116 add_dwarf_attr (die, &attr);
5117 }
5118
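/* Editorial sketch, not part of the original sources: how FORCE_DIRECT
   in add_AT_addr above is meant to be used.  DIE, ATTR_KIND and SYM are
   hypothetical stand-ins (SYM being some SYMBOL_REF rtx).

     add_AT_addr (die, attr_kind, sym, false);
       with -gsplit-dwarf the address is routed through
       add_addr_table_entry, so it is referenced through a .debug_addr
       slot rather than directly;

     add_AT_addr (die, attr_kind, sym, true);
       always stores a direct address, which is what attributes that end
       up in the final executable need.  */
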
5119 /* Get the RTX from an address DIE attribute. */
5120
5121 static inline rtx
5122 AT_addr (dw_attr_node *a)
5123 {
5124 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5125 return a->dw_attr_val.v.val_addr;
5126 }
5127
5128 /* Add a file attribute value to a DIE. */
5129
5130 static inline void
5131 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5132 struct dwarf_file_data *fd)
5133 {
5134 dw_attr_node attr;
5135
5136 attr.dw_attr = attr_kind;
5137 attr.dw_attr_val.val_class = dw_val_class_file;
5138 attr.dw_attr_val.val_entry = NULL;
5139 attr.dw_attr_val.v.val_file = fd;
5140 add_dwarf_attr (die, &attr);
5141 }
5142
5143 /* Get the dwarf_file_data from a file DIE attribute. */
5144
5145 static inline struct dwarf_file_data *
5146 AT_file (dw_attr_node *a)
5147 {
5148 gcc_assert (a && (AT_class (a) == dw_val_class_file
5149 || AT_class (a) == dw_val_class_file_implicit));
5150 return a->dw_attr_val.v.val_file;
5151 }
5152
5153 /* Add a vms delta attribute value to a DIE. */
5154
5155 static inline void
5156 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5157 const char *lbl1, const char *lbl2)
5158 {
5159 dw_attr_node attr;
5160
5161 attr.dw_attr = attr_kind;
5162 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5163 attr.dw_attr_val.val_entry = NULL;
5164 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5165 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5166 add_dwarf_attr (die, &attr);
5167 }
5168
5169 /* Add a symbolic view identifier attribute value to a DIE. */
5170
5171 static inline void
5172 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5173 const char *view_label)
5174 {
5175 dw_attr_node attr;
5176
5177 attr.dw_attr = attr_kind;
5178 attr.dw_attr_val.val_class = dw_val_class_symview;
5179 attr.dw_attr_val.val_entry = NULL;
5180 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5181 add_dwarf_attr (die, &attr);
5182 }
5183
5184 /* Add a label identifier attribute value to a DIE. */
5185
5186 static inline void
5187 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5188 const char *lbl_id)
5189 {
5190 dw_attr_node attr;
5191
5192 attr.dw_attr = attr_kind;
5193 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5194 attr.dw_attr_val.val_entry = NULL;
5195 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5196 if (dwarf_split_debug_info)
5197 attr.dw_attr_val.val_entry
5198 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5199 ate_kind_label);
5200 add_dwarf_attr (die, &attr);
5201 }
5202
5203 /* Add a section offset attribute value to a DIE, an offset into the
5204 debug_line section. */
5205
5206 static inline void
5207 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5208 const char *label)
5209 {
5210 dw_attr_node attr;
5211
5212 attr.dw_attr = attr_kind;
5213 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5214 attr.dw_attr_val.val_entry = NULL;
5215 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5216 add_dwarf_attr (die, &attr);
5217 }
5218
5219 /* Add a section offset attribute value to a DIE, an offset into the
5220 debug_loclists section. */
5221
5222 static inline void
5223 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5224 const char *label)
5225 {
5226 dw_attr_node attr;
5227
5228 attr.dw_attr = attr_kind;
5229 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5230 attr.dw_attr_val.val_entry = NULL;
5231 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5232 add_dwarf_attr (die, &attr);
5233 }
5234
5235 /* Add a section offset attribute value to a DIE, an offset into the
5236 debug_macinfo section. */
5237
5238 static inline void
5239 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5240 const char *label)
5241 {
5242 dw_attr_node attr;
5243
5244 attr.dw_attr = attr_kind;
5245 attr.dw_attr_val.val_class = dw_val_class_macptr;
5246 attr.dw_attr_val.val_entry = NULL;
5247 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5248 add_dwarf_attr (die, &attr);
5249 }
5250
5251 /* Add an offset attribute value to a DIE. */
5252
5253 static inline void
5254 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5255 unsigned HOST_WIDE_INT offset)
5256 {
5257 dw_attr_node attr;
5258
5259 attr.dw_attr = attr_kind;
5260 attr.dw_attr_val.val_class = dw_val_class_offset;
5261 attr.dw_attr_val.val_entry = NULL;
5262 attr.dw_attr_val.v.val_offset = offset;
5263 add_dwarf_attr (die, &attr);
5264 }
5265
5266 /* Add a range_list attribute value to a DIE. When using
5267 dwarf_split_debug_info, address attributes in dies destined for the
5268 final executable should be direct references--setting the parameter
5269 force_direct ensures this behavior. */
5270
5271 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5272 #define RELOCATED_OFFSET (NULL)
5273
5274 static void
5275 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5276 long unsigned int offset, bool force_direct)
5277 {
5278 dw_attr_node attr;
5279
5280 attr.dw_attr = attr_kind;
5281 attr.dw_attr_val.val_class = dw_val_class_range_list;
5282 /* For the range_list attribute, use val_entry to store whether the
5283 offset should follow split-debug-info or normal semantics. This
5284 value is read in output_range_list_offset. */
5285 if (dwarf_split_debug_info && !force_direct)
5286 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5287 else
5288 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5289 attr.dw_attr_val.v.val_offset = offset;
5290 add_dwarf_attr (die, &attr);
5291 }
5292
5293 /* Return the start label of a delta attribute. */
5294
5295 static inline const char *
5296 AT_vms_delta1 (dw_attr_node *a)
5297 {
5298 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5299 return a->dw_attr_val.v.val_vms_delta.lbl1;
5300 }
5301
5302 /* Return the end label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta2 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl2;
5309 }
5310
5311 static inline const char *
5312 AT_lbl (dw_attr_node *a)
5313 {
5314 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5315 || AT_class (a) == dw_val_class_lineptr
5316 || AT_class (a) == dw_val_class_macptr
5317 || AT_class (a) == dw_val_class_loclistsptr
5318 || AT_class (a) == dw_val_class_high_pc));
5319 return a->dw_attr_val.v.val_lbl_id;
5320 }
5321
5322 /* Get the attribute of type attr_kind. */
5323
5324 static dw_attr_node *
5325 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5326 {
5327 dw_attr_node *a;
5328 unsigned ix;
5329 dw_die_ref spec = NULL;
5330
5331 if (! die)
5332 return NULL;
5333
5334 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5335 if (a->dw_attr == attr_kind)
5336 return a;
5337 else if (a->dw_attr == DW_AT_specification
5338 || a->dw_attr == DW_AT_abstract_origin)
5339 spec = AT_ref (a);
5340
5341 if (spec)
5342 return get_AT (spec, attr_kind);
5343
5344 return NULL;
5345 }
5346
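/* Editorial sketch, not part of the original sources: get_AT above
   falls back to the DIE named by DW_AT_specification or
   DW_AT_abstract_origin when the attribute is not found directly.  For
   instance, if a subprogram definition DIE D only carries
   DW_AT_specification pointing at its in-class declaration DIE C, then

     get_AT (d, DW_AT_name)

   scans D, finds no DW_AT_name, and retries on C, so the name recorded
   on the declaration is returned.  D and C are hypothetical.  */
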
5347 /* Returns the parent of the declaration of DIE. */
5348
5349 static dw_die_ref
5350 get_die_parent (dw_die_ref die)
5351 {
5352 dw_die_ref t;
5353
5354 if (!die)
5355 return NULL;
5356
5357 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5358 || (t = get_AT_ref (die, DW_AT_specification)))
5359 die = t;
5360
5361 return die->die_parent;
5362 }
5363
5364 /* Return the "low pc" attribute value, typically associated with a subprogram
5365 DIE. Return null if the "low pc" attribute is either not present, or if it
5366 cannot be represented as an assembler label identifier. */
5367
5368 static inline const char *
5369 get_AT_low_pc (dw_die_ref die)
5370 {
5371 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5372
5373 return a ? AT_lbl (a) : NULL;
5374 }
5375
5376 /* Return the "high pc" attribute value, typically associated with a subprogram
5377 DIE. Return null if the "high pc" attribute is either not present, or if it
5378 cannot be represented as an assembler label identifier. */
5379
5380 static inline const char *
5381 get_AT_hi_pc (dw_die_ref die)
5382 {
5383 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5384
5385 return a ? AT_lbl (a) : NULL;
5386 }
5387
5388 /* Return the value of the string attribute designated by ATTR_KIND, or
5389 NULL if it is not present. */
5390
5391 static inline const char *
5392 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5393 {
5394 dw_attr_node *a = get_AT (die, attr_kind);
5395
5396 return a ? AT_string (a) : NULL;
5397 }
5398
5399 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5400 if it is not present. */
5401
5402 static inline int
5403 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5404 {
5405 dw_attr_node *a = get_AT (die, attr_kind);
5406
5407 return a ? AT_flag (a) : 0;
5408 }
5409
5410 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5411 if it is not present. */
5412
5413 static inline unsigned
5414 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5415 {
5416 dw_attr_node *a = get_AT (die, attr_kind);
5417
5418 return a ? AT_unsigned (a) : 0;
5419 }
5420
5421 static inline dw_die_ref
5422 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5423 {
5424 dw_attr_node *a = get_AT (die, attr_kind);
5425
5426 return a ? AT_ref (a) : NULL;
5427 }
5428
5429 static inline struct dwarf_file_data *
5430 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5431 {
5432 dw_attr_node *a = get_AT (die, attr_kind);
5433
5434 return a ? AT_file (a) : NULL;
5435 }
5436
5437 /* Return TRUE if the language is C++. */
5438
5439 static inline bool
5440 is_cxx (void)
5441 {
5442 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5443
5444 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5445 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5446 }
5447
5448 /* Return TRUE if DECL was created by the C++ frontend. */
5449
5450 static bool
5451 is_cxx (const_tree decl)
5452 {
5453 if (in_lto_p)
5454 {
5455 const_tree context = get_ultimate_context (decl);
5456 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5457 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5458 }
5459 return is_cxx ();
5460 }
5461
5462 /* Return TRUE if the language is Fortran. */
5463
5464 static inline bool
5465 is_fortran (void)
5466 {
5467 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5468
5469 return (lang == DW_LANG_Fortran77
5470 || lang == DW_LANG_Fortran90
5471 || lang == DW_LANG_Fortran95
5472 || lang == DW_LANG_Fortran03
5473 || lang == DW_LANG_Fortran08);
5474 }
5475
5476 static inline bool
5477 is_fortran (const_tree decl)
5478 {
5479 if (in_lto_p)
5480 {
5481 const_tree context = get_ultimate_context (decl);
5482 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5483 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5484 "GNU Fortran", 11) == 0
5485 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5486 "GNU F77") == 0);
5487 }
5488 return is_fortran ();
5489 }
5490
5491 /* Return TRUE if the language is Ada. */
5492
5493 static inline bool
5494 is_ada (void)
5495 {
5496 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5497
5498 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5499 }
5500
5501 /* Remove the specified attribute if present. Return TRUE if removal
5502 was successful. */
5503
5504 static bool
5505 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5506 {
5507 dw_attr_node *a;
5508 unsigned ix;
5509
5510 if (! die)
5511 return false;
5512
5513 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5514 if (a->dw_attr == attr_kind)
5515 {
5516 if (AT_class (a) == dw_val_class_str)
5517 if (a->dw_attr_val.v.val_str->refcount)
5518 a->dw_attr_val.v.val_str->refcount--;
5519
5520 /* vec::ordered_remove should help reduce the number of abbrevs
5521 that are needed. */
5522 die->die_attr->ordered_remove (ix);
5523 return true;
5524 }
5525 return false;
5526 }
5527
5528 /* Remove CHILD from its parent. PREV must have the property that
5529 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5530
5531 static void
5532 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5533 {
5534 gcc_assert (child->die_parent == prev->die_parent);
5535 gcc_assert (prev->die_sib == child);
5536 if (prev == child)
5537 {
5538 gcc_assert (child->die_parent->die_child == child);
5539 prev = NULL;
5540 }
5541 else
5542 prev->die_sib = child->die_sib;
5543 if (child->die_parent->die_child == child)
5544 child->die_parent->die_child = prev;
5545 child->die_sib = NULL;
5546 }
5547
5548 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5549 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5550
5551 static void
5552 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5553 {
5554 dw_die_ref parent = old_child->die_parent;
5555
5556 gcc_assert (parent == prev->die_parent);
5557 gcc_assert (prev->die_sib == old_child);
5558
5559 new_child->die_parent = parent;
5560 if (prev == old_child)
5561 {
5562 gcc_assert (parent->die_child == old_child);
5563 new_child->die_sib = new_child;
5564 }
5565 else
5566 {
5567 prev->die_sib = new_child;
5568 new_child->die_sib = old_child->die_sib;
5569 }
5570 if (old_child->die_parent->die_child == old_child)
5571 old_child->die_parent->die_child = new_child;
5572 old_child->die_sib = NULL;
5573 }
5574
5575 /* Move all children from OLD_PARENT to NEW_PARENT. */
5576
5577 static void
5578 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5579 {
5580 dw_die_ref c;
5581 new_parent->die_child = old_parent->die_child;
5582 old_parent->die_child = NULL;
5583 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5584 }
5585
5586 /* Remove any child DIEs of DIE whose die_tag is TAG. Do nothing if no
5587 child matches TAG. */
5588
5589 static void
5590 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5591 {
5592 dw_die_ref c;
5593
5594 c = die->die_child;
5595 if (c) do {
5596 dw_die_ref prev = c;
5597 c = c->die_sib;
5598 while (c->die_tag == tag)
5599 {
5600 remove_child_with_prev (c, prev);
5601 c->die_parent = NULL;
5602 /* Might have removed every child. */
5603 if (die->die_child == NULL)
5604 return;
5605 c = prev->die_sib;
5606 }
5607 } while (c != die->die_child);
5608 }
5609
5610 /* Add a CHILD_DIE as the last child of DIE. */
5611
5612 static void
5613 add_child_die (dw_die_ref die, dw_die_ref child_die)
5614 {
5615 /* FIXME this should probably be an assert. */
5616 if (! die || ! child_die)
5617 return;
5618 gcc_assert (die != child_die);
5619
5620 child_die->die_parent = die;
5621 if (die->die_child)
5622 {
5623 child_die->die_sib = die->die_child->die_sib;
5624 die->die_child->die_sib = child_die;
5625 }
5626 else
5627 child_die->die_sib = child_die;
5628 die->die_child = child_die;
5629 }
5630
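/* Editorial sketch, not part of the original sources: the children of a
   DIE form a circular singly-linked list through die_sib, with the
   parent's die_child pointing at the most recently added (last) child.
   With hypothetical DIEs A, B and C, after

     add_child_die (parent, a);
     add_child_die (parent, b);
     add_child_die (parent, c);

   we have parent->die_child == c, c->die_sib == a, a->die_sib == b and
   b->die_sib == c, so iterating with FOR_EACH_CHILD visits A, B, C in
   insertion order.  */
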
5631 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5632
5633 static void
5634 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5635 dw_die_ref after_die)
5636 {
5637 gcc_assert (die
5638 && child_die
5639 && after_die
5640 && die->die_child
5641 && die != child_die);
5642
5643 child_die->die_parent = die;
5644 child_die->die_sib = after_die->die_sib;
5645 after_die->die_sib = child_die;
5646 if (die->die_child == after_die)
5647 die->die_child = child_die;
5648 }
5649
5650 /* Unassociate CHILD from its parent, and make its parent be
5651 NEW_PARENT. */
5652
5653 static void
5654 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5655 {
5656 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5657 if (p->die_sib == child)
5658 {
5659 remove_child_with_prev (child, p);
5660 break;
5661 }
5662 add_child_die (new_parent, child);
5663 }
5664
5665 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5666 is the specification, to the end of PARENT's list of children.
5667 This is done by removing and re-adding it. */
5668
5669 static void
5670 splice_child_die (dw_die_ref parent, dw_die_ref child)
5671 {
5672 /* We want the declaration DIE from inside the class, not the
5673 specification DIE at toplevel. */
5674 if (child->die_parent != parent)
5675 {
5676 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5677
5678 if (tmp)
5679 child = tmp;
5680 }
5681
5682 gcc_assert (child->die_parent == parent
5683 || (child->die_parent
5684 == get_AT_ref (parent, DW_AT_specification)));
5685
5686 reparent_child (child, parent);
5687 }
5688
5689 /* Create and return a new die with TAG_VALUE as tag. */
5690
5691 static inline dw_die_ref
5692 new_die_raw (enum dwarf_tag tag_value)
5693 {
5694 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5695 die->die_tag = tag_value;
5696 return die;
5697 }
5698
5699 /* Create and return a new die with a parent of PARENT_DIE. If
5700 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5701 associated tree T must be supplied to determine parenthood
5702 later. */
5703
5704 static inline dw_die_ref
5705 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5706 {
5707 dw_die_ref die = new_die_raw (tag_value);
5708
5709 if (parent_die != NULL)
5710 add_child_die (parent_die, die);
5711 else
5712 {
5713 limbo_die_node *limbo_node;
5714
5715 /* No DIEs created after early dwarf should end up in limbo,
5716 because the limbo list should not persist past LTO
5717 streaming. */
5718 if (tag_value != DW_TAG_compile_unit
5719 /* These are allowed because they're generated while
5720 breaking out COMDAT units late. */
5721 && tag_value != DW_TAG_type_unit
5722 && tag_value != DW_TAG_skeleton_unit
5723 && !early_dwarf
5724 /* Allow nested functions to live in limbo because they will
5725 only temporarily live there, as decls_for_scope will fix
5726 them up. */
5727 && (TREE_CODE (t) != FUNCTION_DECL
5728 || !decl_function_context (t))
5729 /* Same as nested functions above but for types. Types that
5730 are local to a function will be fixed in
5731 decls_for_scope. */
5732 && (!RECORD_OR_UNION_TYPE_P (t)
5733 || !TYPE_CONTEXT (t)
5734 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5735 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5736 especially in the ltrans stage, but once we implement LTO
5737 dwarf streaming, we should remove this exception. */
5738 && !in_lto_p)
5739 {
5740 fprintf (stderr, "symbol ended up in limbo too late:");
5741 debug_generic_stmt (t);
5742 gcc_unreachable ();
5743 }
5744
5745 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5746 limbo_node->die = die;
5747 limbo_node->created_for = t;
5748 limbo_node->next = limbo_die_list;
5749 limbo_die_list = limbo_node;
5750 }
5751
5752 return die;
5753 }
5754
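/* Editorial sketch, not part of the original sources: a parentless call
   such as

     dw_die_ref die = new_die (DW_TAG_variable, NULL, decl);

   (DECL being a hypothetical declaration tree) pushes a limbo_die_node
   recording both the new DIE and DECL onto limbo_die_list, so the real
   parent can be determined later.  Per the checks above, this is only
   expected during early dwarf, for unit DIEs, for function-local
   entities that decls_for_scope will fix up, or under LTO.  */
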
5755 /* Return the DIE associated with the given type specifier. */
5756
5757 static inline dw_die_ref
5758 lookup_type_die (tree type)
5759 {
5760 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5761 if (die && die->removed)
5762 {
5763 TYPE_SYMTAB_DIE (type) = NULL;
5764 return NULL;
5765 }
5766 return die;
5767 }
5768
5769 /* Given TYPE_DIE representing the type TYPE, if TYPE is an anonymous
5770 type named by a typedef (so TYPE_DIE is the typedef's DIE), return
5771 the DIE of the anonymous type instead of the one of the naming typedef. */
5772
5773 static inline dw_die_ref
5774 strip_naming_typedef (tree type, dw_die_ref type_die)
5775 {
5776 if (type
5777 && TREE_CODE (type) == RECORD_TYPE
5778 && type_die
5779 && type_die->die_tag == DW_TAG_typedef
5780 && is_naming_typedef_decl (TYPE_NAME (type)))
5781 type_die = get_AT_ref (type_die, DW_AT_type);
5782 return type_die;
5783 }
5784
5785 /* Like lookup_type_die, but if TYPE is an anonymous type named by a
5786 typedef[1], return the DIE of the anonymous type instead of the one
5787 of the naming typedef. This is because gen_typedef_die equated the
5788 anonymous struct named by the typedef with the DIE of the naming
5789 typedef. So by default, lookup_type_die on an anonymous struct
5790 yields the DIE of the naming typedef.
5791
5792 [1]: Read the comment of is_naming_typedef_decl to learn about what
5793 a naming typedef is. */
5794
5795 static inline dw_die_ref
5796 lookup_type_die_strip_naming_typedef (tree type)
5797 {
5798 dw_die_ref die = lookup_type_die (type);
5799 return strip_naming_typedef (type, die);
5800 }
5801
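/* Editorial sketch, not part of the original sources: for C code like

     typedef struct { int x; } S;

   gen_typedef_die equates the anonymous struct type with the
   DW_TAG_typedef DIE for S, so a plain lookup_type_die on the struct
   type returns the typedef DIE.  The wrapper above then follows that
   DIE's DW_AT_type back to the DIE of the anonymous structure itself.  */
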
5802 /* Equate a DIE to a given type specifier. */
5803
5804 static inline void
5805 equate_type_number_to_die (tree type, dw_die_ref type_die)
5806 {
5807 TYPE_SYMTAB_DIE (type) = type_die;
5808 }
5809
5810 /* Returns a hash value for X (which really is a die_struct). */
5811
5812 inline hashval_t
5813 decl_die_hasher::hash (die_node *x)
5814 {
5815 return (hashval_t) x->decl_id;
5816 }
5817
5818 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5819
5820 inline bool
5821 decl_die_hasher::equal (die_node *x, tree y)
5822 {
5823 return (x->decl_id == DECL_UID (y));
5824 }
5825
5826 /* Return the DIE associated with a given declaration. */
5827
5828 static inline dw_die_ref
5829 lookup_decl_die (tree decl)
5830 {
5831 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5832 NO_INSERT);
5833 if (!die)
5834 return NULL;
5835 if ((*die)->removed)
5836 {
5837 decl_die_table->clear_slot (die);
5838 return NULL;
5839 }
5840 return *die;
5841 }
5842
5843
5844 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5845 style reference. Return true if we found one referring to a DIE for
5846 DECL, otherwise return false. */
5847
5848 static bool
5849 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5850 unsigned HOST_WIDE_INT *off)
5851 {
5852 dw_die_ref die;
5853
5854 if (in_lto_p && !decl_die_table)
5855 return false;
5856
5857 if (TREE_CODE (decl) == BLOCK)
5858 die = BLOCK_DIE (decl);
5859 else
5860 die = lookup_decl_die (decl);
5861 if (!die)
5862 return false;
5863
5864 /* During WPA stage and incremental linking we currently use DIEs
5865 to store the decl <-> label + offset map. That's quite inefficient
5866 but it works for now. */
5867 if (in_lto_p)
5868 {
5869 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5870 if (!ref)
5871 {
5872 gcc_assert (die == comp_unit_die ());
5873 return false;
5874 }
5875 *off = ref->die_offset;
5876 *sym = ref->die_id.die_symbol;
5877 return true;
5878 }
5879
5880 /* Similar to get_ref_die_offset_label, but using the "correct"
5881 label. */
5882 *off = die->die_offset;
5883 while (die->die_parent)
5884 die = die->die_parent;
5885 /* For the containing CU DIE we compute a die_symbol in
5886 compute_comp_unit_symbol. */
5887 gcc_assert (die->die_tag == DW_TAG_compile_unit
5888 && die->die_id.die_symbol != NULL);
5889 *sym = die->die_id.die_symbol;
5890 return true;
5891 }
5892
5893 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5894
5895 static void
5896 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5897 const char *symbol, HOST_WIDE_INT offset)
5898 {
5899 /* Create a fake DIE that contains the reference. Don't use
5900 new_die because we don't want to end up in the limbo list. */
5901 dw_die_ref ref = new_die_raw (die->die_tag);
5902 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5903 ref->die_offset = offset;
5904 ref->with_offset = 1;
5905 add_AT_die_ref (die, attr_kind, ref);
5906 }
5907
5908 /* Create a DIE for DECL if required and add a reference to a DIE
5909 at SYMBOL + OFFSET which contains attributes dumped early. */
5910
5911 static void
5912 dwarf2out_register_external_die (tree decl, const char *sym,
5913 unsigned HOST_WIDE_INT off)
5914 {
5915 if (debug_info_level == DINFO_LEVEL_NONE)
5916 return;
5917
5918 if ((flag_wpa
5919 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5920 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5921
5922 dw_die_ref die
5923 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5924 gcc_assert (!die);
5925
5926 tree ctx;
5927 dw_die_ref parent = NULL;
5928 /* Need to look up a DIE for the decl's context - the containing
5929 function or translation unit. */
5930 if (TREE_CODE (decl) == BLOCK)
5931 {
5932 ctx = BLOCK_SUPERCONTEXT (decl);
5933 /* ??? We do not output DIEs for all scopes, so skip as
5934 many DIEs as needed. */
5935 while (TREE_CODE (ctx) == BLOCK
5936 && !BLOCK_DIE (ctx))
5937 ctx = BLOCK_SUPERCONTEXT (ctx);
5938 }
5939 else
5940 ctx = DECL_CONTEXT (decl);
5941 /* Peel types in the context stack. */
5942 while (ctx && TYPE_P (ctx))
5943 ctx = TYPE_CONTEXT (ctx);
5944 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5945 if (debug_info_level <= DINFO_LEVEL_TERSE)
5946 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5947 ctx = DECL_CONTEXT (ctx);
5948 if (ctx)
5949 {
5950 if (TREE_CODE (ctx) == BLOCK)
5951 parent = BLOCK_DIE (ctx);
5952 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5953 /* Keep the 1:1 association during WPA. */
5954 && !flag_wpa
5955 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5956 /* Otherwise all late annotations go to the main CU which
5957 imports the original CUs. */
5958 parent = comp_unit_die ();
5959 else if (TREE_CODE (ctx) == FUNCTION_DECL
5960 && TREE_CODE (decl) != FUNCTION_DECL
5961 && TREE_CODE (decl) != PARM_DECL
5962 && TREE_CODE (decl) != RESULT_DECL
5963 && TREE_CODE (decl) != BLOCK)
5964 /* Leave parent determination for function-local entities to when
5965 we process scope vars. */
5966 ;
5967 else
5968 parent = lookup_decl_die (ctx);
5969 }
5970 else
5971 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5972 Handle this case gracefully by globalizing stuff. */
5973 parent = comp_unit_die ();
5974 /* Create a DIE "stub". */
5975 switch (TREE_CODE (decl))
5976 {
5977 case TRANSLATION_UNIT_DECL:
5978 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5979 {
5980 die = comp_unit_die ();
5981 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5982 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5983 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5984 to create a DIE for the original CUs. */
5985 return;
5986 }
5987 /* Keep the 1:1 association during WPA. */
5988 die = new_die (DW_TAG_compile_unit, NULL, decl);
5989 break;
5990 case NAMESPACE_DECL:
5991 if (is_fortran (decl))
5992 die = new_die (DW_TAG_module, parent, decl);
5993 else
5994 die = new_die (DW_TAG_namespace, parent, decl);
5995 break;
5996 case FUNCTION_DECL:
5997 die = new_die (DW_TAG_subprogram, parent, decl);
5998 break;
5999 case VAR_DECL:
6000 die = new_die (DW_TAG_variable, parent, decl);
6001 break;
6002 case RESULT_DECL:
6003 die = new_die (DW_TAG_variable, parent, decl);
6004 break;
6005 case PARM_DECL:
6006 die = new_die (DW_TAG_formal_parameter, parent, decl);
6007 break;
6008 case CONST_DECL:
6009 die = new_die (DW_TAG_constant, parent, decl);
6010 break;
6011 case LABEL_DECL:
6012 die = new_die (DW_TAG_label, parent, decl);
6013 break;
6014 case BLOCK:
6015 die = new_die (DW_TAG_lexical_block, parent, decl);
6016 break;
6017 default:
6018 gcc_unreachable ();
6019 }
6020 if (TREE_CODE (decl) == BLOCK)
6021 BLOCK_DIE (decl) = die;
6022 else
6023 equate_decl_number_to_die (decl, die);
6024
6025 /* Add a reference to the DIE providing early debug at $sym + off. */
6026 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6027 }
6028
6029 /* Returns a hash value for X (which really is a var_loc_list). */
6030
6031 inline hashval_t
6032 decl_loc_hasher::hash (var_loc_list *x)
6033 {
6034 return (hashval_t) x->decl_id;
6035 }
6036
6037 /* Return nonzero if decl_id of var_loc_list X is the same as
6038 UID of decl *Y. */
6039
6040 inline bool
6041 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6042 {
6043 return (x->decl_id == DECL_UID (y));
6044 }
6045
6046 /* Return the var_loc list associated with a given declaration. */
6047
6048 static inline var_loc_list *
6049 lookup_decl_loc (const_tree decl)
6050 {
6051 if (!decl_loc_table)
6052 return NULL;
6053 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6054 }
6055
6056 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6057
6058 inline hashval_t
6059 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6060 {
6061 return (hashval_t) x->decl_id;
6062 }
6063
6064 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6065 UID of decl *Y. */
6066
6067 inline bool
6068 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6069 {
6070 return (x->decl_id == DECL_UID (y));
6071 }
6072
6073 /* Equate a DIE to a particular declaration. */
6074
6075 static void
6076 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6077 {
6078 unsigned int decl_id = DECL_UID (decl);
6079
6080 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6081 decl_die->decl_id = decl_id;
6082 }
6083
6084 /* Return how many bits the PIECE EXPR_LIST covers. */
6085
6086 static HOST_WIDE_INT
6087 decl_piece_bitsize (rtx piece)
6088 {
6089 int ret = (int) GET_MODE (piece);
6090 if (ret)
6091 return ret;
6092 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6093 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6094 return INTVAL (XEXP (XEXP (piece, 0), 0));
6095 }
6096
6097 /* Return a pointer to the slot holding the location note in PIECE EXPR_LIST. */
6098
6099 static rtx *
6100 decl_piece_varloc_ptr (rtx piece)
6101 {
6102 if ((int) GET_MODE (piece))
6103 return &XEXP (piece, 0);
6104 else
6105 return &XEXP (XEXP (piece, 0), 1);
6106 }
6107
6108 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6109 Next is the chain of following piece nodes. */
6110
6111 static rtx_expr_list *
6112 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6113 {
6114 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6115 return alloc_EXPR_LIST (bitsize, loc_note, next);
6116 else
6117 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6118 GEN_INT (bitsize),
6119 loc_note), next);
6120 }
6121
6122 /* Return rtx that should be stored into loc field for
6123 LOC_NOTE and BITPOS/BITSIZE. */
6124
6125 static rtx
6126 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6127 HOST_WIDE_INT bitsize)
6128 {
6129 if (bitsize != -1)
6130 {
6131 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6132 if (bitpos != 0)
6133 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6134 }
6135 return loc_note;
6136 }
6137
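/* Editorial sketch, not part of the original sources: for a location
   note that describes only part of a variable, say BITPOS == 32 and
   BITSIZE == 32, construct_piece_list above builds a two-node EXPR_LIST
   chain

     [32-bit hole, no location] -> [32-bit piece, LOC_NOTE]

   where each node keeps its bit count in the EXPR_LIST mode field when
   it fits, and otherwise in a CONCAT (GEN_INT (bitsize), note) with
   mode 0 (see decl_piece_node and decl_piece_bitsize).  For
   BITSIZE == -1 the note is returned unchanged.  */
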
6138 /* This function either modifies location piece list *DEST in
6139 place (if SRC and INNER are NULL), or copies location piece list
6140 *SRC to *DEST while modifying it. The location at BITPOS is changed
6141 to contain LOC_NOTE; any pieces overlapping it are removed (or,
6142 when copying, not copied) and, if needed, some padding is added
6143 around it. When modifying in place, DEST should point to the
6144 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
6145 copying, SRC points to the start of the whole list and INNER points
6146 to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
6147
6148 static void
6149 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6150 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6151 HOST_WIDE_INT bitsize, rtx loc_note)
6152 {
6153 HOST_WIDE_INT diff;
6154 bool copy = inner != NULL;
6155
6156 if (copy)
6157 {
6158 /* First copy all nodes preceding the current bitpos. */
6159 while (src != inner)
6160 {
6161 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6162 decl_piece_bitsize (*src), NULL_RTX);
6163 dest = &XEXP (*dest, 1);
6164 src = &XEXP (*src, 1);
6165 }
6166 }
6167 /* Add padding if needed. */
6168 if (bitpos != piece_bitpos)
6169 {
6170 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6171 copy ? NULL_RTX : *dest);
6172 dest = &XEXP (*dest, 1);
6173 }
6174 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6175 {
6176 gcc_assert (!copy);
6177 /* A piece with the correct bitpos and bitsize already exists;
6178 just update the location for it and return. */
6179 *decl_piece_varloc_ptr (*dest) = loc_note;
6180 return;
6181 }
6182 /* Add the piece that changed. */
6183 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6184 dest = &XEXP (*dest, 1);
6185 /* Skip over pieces that overlap it. */
6186 diff = bitpos - piece_bitpos + bitsize;
6187 if (!copy)
6188 src = dest;
6189 while (diff > 0 && *src)
6190 {
6191 rtx piece = *src;
6192 diff -= decl_piece_bitsize (piece);
6193 if (copy)
6194 src = &XEXP (piece, 1);
6195 else
6196 {
6197 *src = XEXP (piece, 1);
6198 free_EXPR_LIST_node (piece);
6199 }
6200 }
6201 /* Add padding if needed. */
6202 if (diff < 0 && *src)
6203 {
6204 if (!copy)
6205 dest = src;
6206 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 }
6209 if (!copy)
6210 return;
6211 /* Finally copy all nodes following it. */
6212 while (*src)
6213 {
6214 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6215 decl_piece_bitsize (*src), NULL_RTX);
6216 dest = &XEXP (*dest, 1);
6217 src = &XEXP (*src, 1);
6218 }
6219 }
6220
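/* Editorial sketch, not part of the original sources: an in-place
   update (SRC == INNER == NULL) of adjust_piece_list above.  Suppose
   the existing piece list is

     [bits 0-15: note A] -> [bits 16-31: note B]

   and a new note C arrives for BITPOS == 16, BITSIZE == 16, with DEST
   pointing at the second node and PIECE_BITPOS == 16.  No padding is
   needed and the node at DEST already has the right size, so only its
   location is replaced, giving

     [bits 0-15: note A] -> [bits 16-31: note C]

   Had the new piece straddled the old piece boundaries, the overlapped
   nodes would have been unlinked and freed and padding inserted as
   needed.  */
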
6221 /* Add a variable location node to the linked list for DECL. */
6222
6223 static struct var_loc_node *
6224 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6225 {
6226 unsigned int decl_id;
6227 var_loc_list *temp;
6228 struct var_loc_node *loc = NULL;
6229 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6230
6231 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6232 {
6233 tree realdecl = DECL_DEBUG_EXPR (decl);
6234 if (handled_component_p (realdecl)
6235 || (TREE_CODE (realdecl) == MEM_REF
6236 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6237 {
6238 bool reverse;
6239 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6240 &bitsize, &reverse);
6241 if (!innerdecl
6242 || !DECL_P (innerdecl)
6243 || DECL_IGNORED_P (innerdecl)
6244 || TREE_STATIC (innerdecl)
6245 || bitsize == 0
6246 || bitpos + bitsize > 256)
6247 return NULL;
6248 decl = innerdecl;
6249 }
6250 }
6251
6252 decl_id = DECL_UID (decl);
6253 var_loc_list **slot
6254 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6255 if (*slot == NULL)
6256 {
6257 temp = ggc_cleared_alloc<var_loc_list> ();
6258 temp->decl_id = decl_id;
6259 *slot = temp;
6260 }
6261 else
6262 temp = *slot;
6263
6264 /* For PARM_DECLs try to keep around the original incoming value,
6265 even if that means we'll emit a zero-range .debug_loc entry. */
6266 if (temp->last
6267 && temp->first == temp->last
6268 && TREE_CODE (decl) == PARM_DECL
6269 && NOTE_P (temp->first->loc)
6270 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6271 && DECL_INCOMING_RTL (decl)
6272 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6273 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6274 == GET_CODE (DECL_INCOMING_RTL (decl))
6275 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6276 && (bitsize != -1
6277 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6278 NOTE_VAR_LOCATION_LOC (loc_note))
6279 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6280 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6281 {
6282 loc = ggc_cleared_alloc<var_loc_node> ();
6283 temp->first->next = loc;
6284 temp->last = loc;
6285 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6286 }
6287 else if (temp->last)
6288 {
6289 struct var_loc_node *last = temp->last, *unused = NULL;
6290 rtx *piece_loc = NULL, last_loc_note;
6291 HOST_WIDE_INT piece_bitpos = 0;
6292 if (last->next)
6293 {
6294 last = last->next;
6295 gcc_assert (last->next == NULL);
6296 }
6297 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6298 {
6299 piece_loc = &last->loc;
6300 do
6301 {
6302 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6303 if (piece_bitpos + cur_bitsize > bitpos)
6304 break;
6305 piece_bitpos += cur_bitsize;
6306 piece_loc = &XEXP (*piece_loc, 1);
6307 }
6308 while (*piece_loc);
6309 }
6310 /* TEMP->LAST here is a pointer to either the last-but-one or the
6311 last element in the chained list; LAST is a pointer to the
6312 last element. */
6313 if (label && strcmp (last->label, label) == 0 && last->view == view)
6314 {
6315 /* For SRA-optimized variables, if there weren't any real
6316 insns since the last note, just modify the last node. */
6317 if (piece_loc != NULL)
6318 {
6319 adjust_piece_list (piece_loc, NULL, NULL,
6320 bitpos, piece_bitpos, bitsize, loc_note);
6321 return NULL;
6322 }
6323 /* If the last note doesn't cover any instructions, remove it. */
6324 if (temp->last != last)
6325 {
6326 temp->last->next = NULL;
6327 unused = last;
6328 last = temp->last;
6329 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6330 }
6331 else
6332 {
6333 gcc_assert (temp->first == temp->last
6334 || (temp->first->next == temp->last
6335 && TREE_CODE (decl) == PARM_DECL));
6336 memset (temp->last, '\0', sizeof (*temp->last));
6337 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6338 return temp->last;
6339 }
6340 }
6341 if (bitsize == -1 && NOTE_P (last->loc))
6342 last_loc_note = last->loc;
6343 else if (piece_loc != NULL
6344 && *piece_loc != NULL_RTX
6345 && piece_bitpos == bitpos
6346 && decl_piece_bitsize (*piece_loc) == bitsize)
6347 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6348 else
6349 last_loc_note = NULL_RTX;
6350 /* If the current location is the same as the end of the list,
6351 and either both or neither of the locations is uninitialized,
6352 we have nothing to do. */
6353 if (last_loc_note == NULL_RTX
6354 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6355 NOTE_VAR_LOCATION_LOC (loc_note)))
6356 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6357 != NOTE_VAR_LOCATION_STATUS (loc_note))
6358 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6359 == VAR_INIT_STATUS_UNINITIALIZED)
6360 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6361 == VAR_INIT_STATUS_UNINITIALIZED))))
6362 {
6363 /* Add LOC to the end of the list and update LAST. If the last
6364 element of the list has been removed above, reuse its
6365 memory for the new node, otherwise allocate a new one. */
6366 if (unused)
6367 {
6368 loc = unused;
6369 memset (loc, '\0', sizeof (*loc));
6370 }
6371 else
6372 loc = ggc_cleared_alloc<var_loc_node> ();
6373 if (bitsize == -1 || piece_loc == NULL)
6374 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6375 else
6376 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6377 bitpos, piece_bitpos, bitsize, loc_note);
6378 last->next = loc;
6379 /* Ensure TEMP->LAST will point either to the new last but one
6380 element of the chain, or to the last element in it. */
6381 if (last != temp->last)
6382 temp->last = last;
6383 }
6384 else if (unused)
6385 ggc_free (unused);
6386 }
6387 else
6388 {
6389 loc = ggc_cleared_alloc<var_loc_node> ();
6390 temp->first = loc;
6391 temp->last = loc;
6392 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6393 }
6394 return loc;
6395 }
6396 \f
6397 /* Keep track of the number of spaces used to indent the
6398 output of the debugging routines that print the structure of
6399 the DIE internal representation. */
6400 static int print_indent;
6401
6402 /* Indent the line the number of spaces given by print_indent. */
6403
6404 static inline void
6405 print_spaces (FILE *outfile)
6406 {
6407 fprintf (outfile, "%*s", print_indent, "");
6408 }
6409
6410 /* Print a type signature in hex. */
6411
6412 static inline void
6413 print_signature (FILE *outfile, char *sig)
6414 {
6415 int i;
6416
6417 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6418 fprintf (outfile, "%02x", sig[i] & 0xff);
6419 }
6420
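/* Print the discriminant value DISCR_VALUE to OUTFILE. */
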
6421 static inline void
6422 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6423 {
6424 if (discr_value->pos)
6425 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6426 else
6427 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6428 }
6429
6430 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6431
6432 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6433 RECURSE, output location descriptor operations. */
6434
6435 static void
6436 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6437 {
6438 switch (val->val_class)
6439 {
6440 case dw_val_class_addr:
6441 fprintf (outfile, "address");
6442 break;
6443 case dw_val_class_offset:
6444 fprintf (outfile, "offset");
6445 break;
6446 case dw_val_class_loc:
6447 fprintf (outfile, "location descriptor");
6448 if (val->v.val_loc == NULL)
6449 fprintf (outfile, " -> <null>\n");
6450 else if (recurse)
6451 {
6452 fprintf (outfile, ":\n");
6453 print_indent += 4;
6454 print_loc_descr (val->v.val_loc, outfile);
6455 print_indent -= 4;
6456 }
6457 else
6458 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6459 break;
6460 case dw_val_class_loc_list:
6461 fprintf (outfile, "location list -> label:%s",
6462 val->v.val_loc_list->ll_symbol);
6463 break;
6464 case dw_val_class_view_list:
6465 val = view_list_to_loc_list_val_node (val);
6466 fprintf (outfile, "location list with views -> labels:%s and %s",
6467 val->v.val_loc_list->ll_symbol,
6468 val->v.val_loc_list->vl_symbol);
6469 break;
6470 case dw_val_class_range_list:
6471 fprintf (outfile, "range list");
6472 break;
6473 case dw_val_class_const:
6474 case dw_val_class_const_implicit:
6475 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6476 break;
6477 case dw_val_class_unsigned_const:
6478 case dw_val_class_unsigned_const_implicit:
6479 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6480 break;
6481 case dw_val_class_const_double:
6482 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6483 HOST_WIDE_INT_PRINT_UNSIGNED")",
6484 val->v.val_double.high,
6485 val->v.val_double.low);
6486 break;
6487 case dw_val_class_wide_int:
6488 {
6489 int i = val->v.val_wide->get_len ();
6490 fprintf (outfile, "constant (");
6491 gcc_assert (i > 0);
6492 if (val->v.val_wide->elt (i - 1) == 0)
6493 fprintf (outfile, "0x");
6494 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6495 val->v.val_wide->elt (--i));
6496 while (--i >= 0)
6497 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6498 val->v.val_wide->elt (i));
6499 fprintf (outfile, ")");
6500 break;
6501 }
6502 case dw_val_class_vec:
6503 fprintf (outfile, "floating-point or vector constant");
6504 break;
6505 case dw_val_class_flag:
6506 fprintf (outfile, "%u", val->v.val_flag);
6507 break;
6508 case dw_val_class_die_ref:
6509 if (val->v.val_die_ref.die != NULL)
6510 {
6511 dw_die_ref die = val->v.val_die_ref.die;
6512
6513 if (die->comdat_type_p)
6514 {
6515 fprintf (outfile, "die -> signature: ");
6516 print_signature (outfile,
6517 die->die_id.die_type_node->signature);
6518 }
6519 else if (die->die_id.die_symbol)
6520 {
6521 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6522 if (die->with_offset)
6523 fprintf (outfile, " + %ld", die->die_offset);
6524 }
6525 else
6526 fprintf (outfile, "die -> %ld", die->die_offset);
6527 fprintf (outfile, " (%p)", (void *) die);
6528 }
6529 else
6530 fprintf (outfile, "die -> <null>");
6531 break;
6532 case dw_val_class_vms_delta:
6533 fprintf (outfile, "delta: @slotcount(%s-%s)",
6534 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6535 break;
6536 case dw_val_class_symview:
6537 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6538 break;
6539 case dw_val_class_lbl_id:
6540 case dw_val_class_lineptr:
6541 case dw_val_class_macptr:
6542 case dw_val_class_loclistsptr:
6543 case dw_val_class_high_pc:
6544 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6545 break;
6546 case dw_val_class_str:
6547 if (val->v.val_str->str != NULL)
6548 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6549 else
6550 fprintf (outfile, "<null>");
6551 break;
6552 case dw_val_class_file:
6553 case dw_val_class_file_implicit:
6554 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6555 val->v.val_file->emitted_number);
6556 break;
6557 case dw_val_class_data8:
6558 {
6559 int i;
6560
6561 for (i = 0; i < 8; i++)
6562 fprintf (outfile, "%02x", val->v.val_data8[i]);
6563 break;
6564 }
6565 case dw_val_class_discr_value:
6566 print_discr_value (outfile, &val->v.val_discr_value);
6567 break;
6568 case dw_val_class_discr_list:
6569 for (dw_discr_list_ref node = val->v.val_discr_list;
6570 node != NULL;
6571 node = node->dw_discr_next)
6572 {
6573 if (node->dw_discr_range)
6574 {
6575 fprintf (outfile, " .. ");
6576 print_discr_value (outfile, &node->dw_discr_lower_bound);
6577 print_discr_value (outfile, &node->dw_discr_upper_bound);
6578 }
6579 else
6580 print_discr_value (outfile, &node->dw_discr_lower_bound);
6581
6582 if (node->dw_discr_next != NULL)
6583 fprintf (outfile, " | ");
6584 }
6585 default:
6586 break;
6587 }
6588 }
6589
6590 /* Likewise, for a DIE attribute. */
6591
6592 static void
6593 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6594 {
6595 print_dw_val (&a->dw_attr_val, recurse, outfile);
6596 }
6597
6598
6599 /* Print the list of operands in the LOC location description to OUTFILE. This
6600 routine is a debugging aid only. */
6601
6602 static void
6603 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6604 {
6605 dw_loc_descr_ref l = loc;
6606
6607 if (loc == NULL)
6608 {
6609 print_spaces (outfile);
6610 fprintf (outfile, "<null>\n");
6611 return;
6612 }
6613
6614 for (l = loc; l != NULL; l = l->dw_loc_next)
6615 {
6616 print_spaces (outfile);
6617 fprintf (outfile, "(%p) %s",
6618 (void *) l,
6619 dwarf_stack_op_name (l->dw_loc_opc));
6620 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6621 {
6622 fprintf (outfile, " ");
6623 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6624 }
6625 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6626 {
6627 fprintf (outfile, ", ");
6628 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6629 }
6630 fprintf (outfile, "\n");
6631 }
6632 }
6633
6634 /* Print the information associated with a given DIE, and its children.
6635 This routine is a debugging aid only. */
6636
6637 static void
6638 print_die (dw_die_ref die, FILE *outfile)
6639 {
6640 dw_attr_node *a;
6641 dw_die_ref c;
6642 unsigned ix;
6643
6644 print_spaces (outfile);
6645 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6646 die->die_offset, dwarf_tag_name (die->die_tag),
6647 (void*) die);
6648 print_spaces (outfile);
6649 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6650 fprintf (outfile, " offset: %ld", die->die_offset);
6651 fprintf (outfile, " mark: %d\n", die->die_mark);
6652
6653 if (die->comdat_type_p)
6654 {
6655 print_spaces (outfile);
6656 fprintf (outfile, " signature: ");
6657 print_signature (outfile, die->die_id.die_type_node->signature);
6658 fprintf (outfile, "\n");
6659 }
6660
6661 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6662 {
6663 print_spaces (outfile);
6664 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6665
6666 print_attribute (a, true, outfile);
6667 fprintf (outfile, "\n");
6668 }
6669
6670 if (die->die_child != NULL)
6671 {
6672 print_indent += 4;
6673 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6674 print_indent -= 4;
6675 }
6676 if (print_indent == 0)
6677 fprintf (outfile, "\n");
6678 }
6679
6680 /* Print the list of operations in the LOC location description. */
6681
6682 DEBUG_FUNCTION void
6683 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6684 {
6685 print_loc_descr (loc, stderr);
6686 }
6687
6688 /* Print the information collected for a given DIE. */
6689
6690 DEBUG_FUNCTION void
6691 debug_dwarf_die (dw_die_ref die)
6692 {
6693 print_die (die, stderr);
6694 }
6695
6696 DEBUG_FUNCTION void
6697 debug (die_struct &ref)
6698 {
6699 print_die (&ref, stderr);
6700 }
6701
6702 DEBUG_FUNCTION void
6703 debug (die_struct *ptr)
6704 {
6705 if (ptr)
6706 debug (*ptr);
6707 else
6708 fprintf (stderr, "<nil>\n");
6709 }
6710
6711
6712 /* Print all DWARF information collected for the compilation unit.
6713 This routine is a debugging aid only. */
6714
6715 DEBUG_FUNCTION void
6716 debug_dwarf (void)
6717 {
6718 print_indent = 0;
6719 print_die (comp_unit_die (), stderr);
6720 }
6721
6722 /* Verify the DIE tree structure. */
6723
6724 DEBUG_FUNCTION void
6725 verify_die (dw_die_ref die)
6726 {
6727 gcc_assert (!die->die_mark);
6728 if (die->die_parent == NULL
6729 && die->die_sib == NULL)
6730 return;
6731 /* Verify the die_sib list is cyclic. */
6732 dw_die_ref x = die;
6733 do
6734 {
6735 x->die_mark = 1;
6736 x = x->die_sib;
6737 }
6738 while (x && !x->die_mark);
6739 gcc_assert (x == die);
6740 x = die;
6741 do
6742 {
6743 /* Verify all dies have the same parent. */
6744 gcc_assert (x->die_parent == die->die_parent);
6745 if (x->die_child)
6746 {
6747 /* Verify the child has the proper parent and recurse. */
6748 gcc_assert (x->die_child->die_parent == x);
6749 verify_die (x->die_child);
6750 }
6751 x->die_mark = 0;
6752 x = x->die_sib;
6753 }
6754 while (x && x->die_mark);
6755 }
6756
6757 /* Sanity checks on DIEs. */
6758
6759 static void
6760 check_die (dw_die_ref die)
6761 {
6762 unsigned ix;
6763 dw_attr_node *a;
6764 bool inline_found = false;
6765 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6766 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6767 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6768 {
6769 switch (a->dw_attr)
6770 {
6771 case DW_AT_inline:
6772 if (a->dw_attr_val.v.val_unsigned)
6773 inline_found = true;
6774 break;
6775 case DW_AT_location:
6776 ++n_location;
6777 break;
6778 case DW_AT_low_pc:
6779 ++n_low_pc;
6780 break;
6781 case DW_AT_high_pc:
6782 ++n_high_pc;
6783 break;
6784 case DW_AT_artificial:
6785 ++n_artificial;
6786 break;
6787 case DW_AT_decl_column:
6788 ++n_decl_column;
6789 break;
6790 case DW_AT_decl_line:
6791 ++n_decl_line;
6792 break;
6793 case DW_AT_decl_file:
6794 ++n_decl_file;
6795 break;
6796 default:
6797 break;
6798 }
6799 }
6800 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6801 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6802 {
6803 fprintf (stderr, "Duplicate attributes in DIE:\n");
6804 debug_dwarf_die (die);
6805 gcc_unreachable ();
6806 }
6807 if (inline_found)
6808 {
6809 /* A debugging information entry that is a member of an abstract
6810 instance tree [that has DW_AT_inline] should not contain any
6811 attributes which describe aspects of the subroutine which vary
6812 between distinct inlined expansions or distinct out-of-line
6813 expansions. */
6814 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6815 gcc_assert (a->dw_attr != DW_AT_low_pc
6816 && a->dw_attr != DW_AT_high_pc
6817 && a->dw_attr != DW_AT_location
6818 && a->dw_attr != DW_AT_frame_base
6819 && a->dw_attr != DW_AT_call_all_calls
6820 && a->dw_attr != DW_AT_GNU_all_call_sites);
6821 }
6822 }
6823 \f
6824 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6825 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6826 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6827
6828 /* Calculate the checksum of a location expression. */
6829
6830 static inline void
6831 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6832 {
6833 int tem;
6834 inchash::hash hstate;
6835 hashval_t hash;
6836
6837 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6838 CHECKSUM (tem);
6839 hash_loc_operands (loc, hstate);
6840 hash = hstate.end();
6841 CHECKSUM (hash);
6842 }
6843
6844 /* Calculate the checksum of an attribute. */
6845
6846 static void
6847 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6848 {
6849 dw_loc_descr_ref loc;
6850 rtx r;
6851
6852 CHECKSUM (at->dw_attr);
6853
6854 /* We don't care that this was compiled with a different compiler
6855 snapshot; if the output is the same, that's what matters. */
6856 if (at->dw_attr == DW_AT_producer)
6857 return;
6858
6859 switch (AT_class (at))
6860 {
6861 case dw_val_class_const:
6862 case dw_val_class_const_implicit:
6863 CHECKSUM (at->dw_attr_val.v.val_int);
6864 break;
6865 case dw_val_class_unsigned_const:
6866 case dw_val_class_unsigned_const_implicit:
6867 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6868 break;
6869 case dw_val_class_const_double:
6870 CHECKSUM (at->dw_attr_val.v.val_double);
6871 break;
6872 case dw_val_class_wide_int:
6873 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6874 get_full_len (*at->dw_attr_val.v.val_wide)
6875 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6876 break;
6877 case dw_val_class_vec:
6878 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6879 (at->dw_attr_val.v.val_vec.length
6880 * at->dw_attr_val.v.val_vec.elt_size));
6881 break;
6882 case dw_val_class_flag:
6883 CHECKSUM (at->dw_attr_val.v.val_flag);
6884 break;
6885 case dw_val_class_str:
6886 CHECKSUM_STRING (AT_string (at));
6887 break;
6888
6889 case dw_val_class_addr:
6890 r = AT_addr (at);
6891 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6892 CHECKSUM_STRING (XSTR (r, 0));
6893 break;
6894
6895 case dw_val_class_offset:
6896 CHECKSUM (at->dw_attr_val.v.val_offset);
6897 break;
6898
6899 case dw_val_class_loc:
6900 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6901 loc_checksum (loc, ctx);
6902 break;
6903
6904 case dw_val_class_die_ref:
6905 die_checksum (AT_ref (at), ctx, mark);
6906 break;
6907
6908 case dw_val_class_fde_ref:
6909 case dw_val_class_vms_delta:
6910 case dw_val_class_symview:
6911 case dw_val_class_lbl_id:
6912 case dw_val_class_lineptr:
6913 case dw_val_class_macptr:
6914 case dw_val_class_loclistsptr:
6915 case dw_val_class_high_pc:
6916 break;
6917
6918 case dw_val_class_file:
6919 case dw_val_class_file_implicit:
6920 CHECKSUM_STRING (AT_file (at)->filename);
6921 break;
6922
6923 case dw_val_class_data8:
6924 CHECKSUM (at->dw_attr_val.v.val_data8);
6925 break;
6926
6927 default:
6928 break;
6929 }
6930 }
6931
6932 /* Calculate the checksum of a DIE. */
6933
6934 static void
6935 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6936 {
6937 dw_die_ref c;
6938 dw_attr_node *a;
6939 unsigned ix;
6940
6941 /* To avoid infinite recursion. */
6942 if (die->die_mark)
6943 {
6944 CHECKSUM (die->die_mark);
6945 return;
6946 }
6947 die->die_mark = ++(*mark);
6948
6949 CHECKSUM (die->die_tag);
6950
6951 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6952 attr_checksum (a, ctx, mark);
6953
6954 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6955 }
6956
6957 #undef CHECKSUM
6958 #undef CHECKSUM_BLOCK
6959 #undef CHECKSUM_STRING
6960
6961 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6962 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6963 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6964 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6965 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6966 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6967 #define CHECKSUM_ATTR(FOO) \
6968 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6969
6970 /* Calculate the checksum of a number in signed LEB128 format. */
6971
6972 static void
6973 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6974 {
6975 unsigned char byte;
6976 bool more;
6977
6978 while (1)
6979 {
6980 byte = (value & 0x7f);
6981 value >>= 7;
6982 more = !((value == 0 && (byte & 0x40) == 0)
6983 || (value == -1 && (byte & 0x40) != 0));
6984 if (more)
6985 byte |= 0x80;
6986 CHECKSUM (byte);
6987 if (!more)
6988 break;
6989 }
6990 }
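
/* As a worked illustration of the loop above (the values are examples
   only, nothing this file treats specially): 2 checksums as the single
   byte 0x02, -2 as the single byte 0x7e (bit 6 already carries the
   sign, so no continuation byte is needed), and -129 as the two bytes
   0xff 0x7e, i.e. the low seven bits plus a continuation flag, followed
   by the remaining bits and the sign.  */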
6991
6992 /* Calculate the checksum of a number in unsigned LEB128 format. */
6993
6994 static void
6995 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6996 {
6997 while (1)
6998 {
6999 unsigned char byte = (value & 0x7f);
7000 value >>= 7;
7001 if (value != 0)
7002 /* More bytes to follow. */
7003 byte |= 0x80;
7004 CHECKSUM (byte);
7005 if (value == 0)
7006 break;
7007 }
7008 }
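
/* For illustration only (the value is arbitrary): 624485 encodes as the
   three bytes 0xe5 0x8e 0x26, that is 0x65 and 0x0e each with the
   continuation bit set, then the final group 0x26.  Those are the bytes
   fed to CHECKSUM above.  */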
7009
7010 /* Checksum the context of the DIE. This adds the names of any
7011 surrounding namespaces or structures to the checksum. */
7012
7013 static void
7014 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7015 {
7016 const char *name;
7017 dw_die_ref spec;
7018 int tag = die->die_tag;
7019
7020 if (tag != DW_TAG_namespace
7021 && tag != DW_TAG_structure_type
7022 && tag != DW_TAG_class_type)
7023 return;
7024
7025 name = get_AT_string (die, DW_AT_name);
7026
7027 spec = get_AT_ref (die, DW_AT_specification);
7028 if (spec != NULL)
7029 die = spec;
7030
7031 if (die->die_parent != NULL)
7032 checksum_die_context (die->die_parent, ctx);
7033
7034 CHECKSUM_ULEB128 ('C');
7035 CHECKSUM_ULEB128 (tag);
7036 if (name != NULL)
7037 CHECKSUM_STRING (name);
7038 }
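
/* As a sketch of what the context contributes (the names below are
   purely illustrative): for a type written as

     namespace N { struct S { ... }; }

   checksumming the context of a DIE nested in S feeds 'C',
   DW_TAG_namespace, "N" and then 'C', DW_TAG_structure_type, "S" into
   the hash, so two translation units that see the same qualified name
   compute the same context checksum.  */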
7039
7040 /* Calculate the checksum of a location expression. */
7041
7042 static inline void
7043 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7044 {
7045 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7046 were emitted as a DW_FORM_sdata instead of a location expression. */
7047 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7048 {
7049 CHECKSUM_ULEB128 (DW_FORM_sdata);
7050 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7051 return;
7052 }
7053
7054 /* Otherwise, just checksum the raw location expression. */
7055 while (loc != NULL)
7056 {
7057 inchash::hash hstate;
7058 hashval_t hash;
7059
7060 CHECKSUM_ULEB128 (loc->dtprel);
7061 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7062 hash_loc_operands (loc, hstate);
7063 hash = hstate.end ();
7064 CHECKSUM (hash);
7065 loc = loc->dw_loc_next;
7066 }
7067 }
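
/* For example (purely illustrative): a member at byte offset 8 whose
   DW_AT_data_member_location is the lone operation DW_OP_plus_uconst 8
   is checksummed exactly as if the attribute had been emitted as the
   constant 8 in DW_FORM_sdata, so the type signature does not depend on
   whether a given compiler version picks the constant form or the
   location-expression form for that attribute.  */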
7068
7069 /* Calculate the checksum of an attribute. */
7070
7071 static void
7072 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7073 struct md5_ctx *ctx, int *mark)
7074 {
7075 dw_loc_descr_ref loc;
7076 rtx r;
7077
7078 if (AT_class (at) == dw_val_class_die_ref)
7079 {
7080 dw_die_ref target_die = AT_ref (at);
7081
7082 /* For pointer and reference types, we checksum only the (qualified)
7083 name of the target type (if there is a name). For friend entries,
7084 we checksum only the (qualified) name of the target type or function.
7085 This allows the checksum to remain the same whether the target type
7086 is complete or not. */
7087 if ((at->dw_attr == DW_AT_type
7088 && (tag == DW_TAG_pointer_type
7089 || tag == DW_TAG_reference_type
7090 || tag == DW_TAG_rvalue_reference_type
7091 || tag == DW_TAG_ptr_to_member_type))
7092 || (at->dw_attr == DW_AT_friend
7093 && tag == DW_TAG_friend))
7094 {
7095 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7096
7097 if (name_attr != NULL)
7098 {
7099 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7100
7101 if (decl == NULL)
7102 decl = target_die;
7103 CHECKSUM_ULEB128 ('N');
7104 CHECKSUM_ULEB128 (at->dw_attr);
7105 if (decl->die_parent != NULL)
7106 checksum_die_context (decl->die_parent, ctx);
7107 CHECKSUM_ULEB128 ('E');
7108 CHECKSUM_STRING (AT_string (name_attr));
7109 return;
7110 }
7111 }
7112
7113 /* For all other references to another DIE, we check to see if the
7114 target DIE has already been visited. If it has, we emit a
7115 backward reference; if not, we descend recursively. */
7116 if (target_die->die_mark > 0)
7117 {
7118 CHECKSUM_ULEB128 ('R');
7119 CHECKSUM_ULEB128 (at->dw_attr);
7120 CHECKSUM_ULEB128 (target_die->die_mark);
7121 }
7122 else
7123 {
7124 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7125
7126 if (decl == NULL)
7127 decl = target_die;
7128 target_die->die_mark = ++(*mark);
7129 CHECKSUM_ULEB128 ('T');
7130 CHECKSUM_ULEB128 (at->dw_attr);
7131 if (decl->die_parent != NULL)
7132 checksum_die_context (decl->die_parent, ctx);
7133 die_checksum_ordered (target_die, ctx, mark);
7134 }
7135 return;
7136 }
7137
7138 CHECKSUM_ULEB128 ('A');
7139 CHECKSUM_ULEB128 (at->dw_attr);
7140
7141 switch (AT_class (at))
7142 {
7143 case dw_val_class_const:
7144 case dw_val_class_const_implicit:
7145 CHECKSUM_ULEB128 (DW_FORM_sdata);
7146 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7147 break;
7148
7149 case dw_val_class_unsigned_const:
7150 case dw_val_class_unsigned_const_implicit:
7151 CHECKSUM_ULEB128 (DW_FORM_sdata);
7152 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7153 break;
7154
7155 case dw_val_class_const_double:
7156 CHECKSUM_ULEB128 (DW_FORM_block);
7157 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7158 CHECKSUM (at->dw_attr_val.v.val_double);
7159 break;
7160
7161 case dw_val_class_wide_int:
7162 CHECKSUM_ULEB128 (DW_FORM_block);
7163 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7164 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7165 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7166 get_full_len (*at->dw_attr_val.v.val_wide)
7167 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7168 break;
7169
7170 case dw_val_class_vec:
7171 CHECKSUM_ULEB128 (DW_FORM_block);
7172 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7173 * at->dw_attr_val.v.val_vec.elt_size);
7174 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7175 (at->dw_attr_val.v.val_vec.length
7176 * at->dw_attr_val.v.val_vec.elt_size));
7177 break;
7178
7179 case dw_val_class_flag:
7180 CHECKSUM_ULEB128 (DW_FORM_flag);
7181 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7182 break;
7183
7184 case dw_val_class_str:
7185 CHECKSUM_ULEB128 (DW_FORM_string);
7186 CHECKSUM_STRING (AT_string (at));
7187 break;
7188
7189 case dw_val_class_addr:
7190 r = AT_addr (at);
7191 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7192 CHECKSUM_ULEB128 (DW_FORM_string);
7193 CHECKSUM_STRING (XSTR (r, 0));
7194 break;
7195
7196 case dw_val_class_offset:
7197 CHECKSUM_ULEB128 (DW_FORM_sdata);
7198 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7199 break;
7200
7201 case dw_val_class_loc:
7202 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7203 loc_checksum_ordered (loc, ctx);
7204 break;
7205
7206 case dw_val_class_fde_ref:
7207 case dw_val_class_symview:
7208 case dw_val_class_lbl_id:
7209 case dw_val_class_lineptr:
7210 case dw_val_class_macptr:
7211 case dw_val_class_loclistsptr:
7212 case dw_val_class_high_pc:
7213 break;
7214
7215 case dw_val_class_file:
7216 case dw_val_class_file_implicit:
7217 CHECKSUM_ULEB128 (DW_FORM_string);
7218 CHECKSUM_STRING (AT_file (at)->filename);
7219 break;
7220
7221 case dw_val_class_data8:
7222 CHECKSUM (at->dw_attr_val.v.val_data8);
7223 break;
7224
7225 default:
7226 break;
7227 }
7228 }
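
/* To illustrate the markers used above (type names invented): a
   DW_AT_type on a DW_TAG_pointer_type that refers to "struct foo"
   contributes 'N', DW_AT_type, the checksum of foo's enclosing context,
   'E', "foo", the same bytes whether foo is complete or merely
   declared.  A reference to an already-visited DIE contributes 'R', the
   attribute and the target's mark; a first visit contributes 'T', the
   attribute, the context, and then the target's own ordered checksum.  */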
7229
7230 struct checksum_attributes
7231 {
7232 dw_attr_node *at_name;
7233 dw_attr_node *at_type;
7234 dw_attr_node *at_friend;
7235 dw_attr_node *at_accessibility;
7236 dw_attr_node *at_address_class;
7237 dw_attr_node *at_alignment;
7238 dw_attr_node *at_allocated;
7239 dw_attr_node *at_artificial;
7240 dw_attr_node *at_associated;
7241 dw_attr_node *at_binary_scale;
7242 dw_attr_node *at_bit_offset;
7243 dw_attr_node *at_bit_size;
7244 dw_attr_node *at_bit_stride;
7245 dw_attr_node *at_byte_size;
7246 dw_attr_node *at_byte_stride;
7247 dw_attr_node *at_const_value;
7248 dw_attr_node *at_containing_type;
7249 dw_attr_node *at_count;
7250 dw_attr_node *at_data_location;
7251 dw_attr_node *at_data_member_location;
7252 dw_attr_node *at_decimal_scale;
7253 dw_attr_node *at_decimal_sign;
7254 dw_attr_node *at_default_value;
7255 dw_attr_node *at_digit_count;
7256 dw_attr_node *at_discr;
7257 dw_attr_node *at_discr_list;
7258 dw_attr_node *at_discr_value;
7259 dw_attr_node *at_encoding;
7260 dw_attr_node *at_endianity;
7261 dw_attr_node *at_explicit;
7262 dw_attr_node *at_is_optional;
7263 dw_attr_node *at_location;
7264 dw_attr_node *at_lower_bound;
7265 dw_attr_node *at_mutable;
7266 dw_attr_node *at_ordering;
7267 dw_attr_node *at_picture_string;
7268 dw_attr_node *at_prototyped;
7269 dw_attr_node *at_small;
7270 dw_attr_node *at_segment;
7271 dw_attr_node *at_string_length;
7272 dw_attr_node *at_string_length_bit_size;
7273 dw_attr_node *at_string_length_byte_size;
7274 dw_attr_node *at_threads_scaled;
7275 dw_attr_node *at_upper_bound;
7276 dw_attr_node *at_use_location;
7277 dw_attr_node *at_use_UTF8;
7278 dw_attr_node *at_variable_parameter;
7279 dw_attr_node *at_virtuality;
7280 dw_attr_node *at_visibility;
7281 dw_attr_node *at_vtable_elem_location;
7282 };
7283
7284 /* Collect the attributes that we will want to use for the checksum. */
7285
7286 static void
7287 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7288 {
7289 dw_attr_node *a;
7290 unsigned ix;
7291
7292 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7293 {
7294 switch (a->dw_attr)
7295 {
7296 case DW_AT_name:
7297 attrs->at_name = a;
7298 break;
7299 case DW_AT_type:
7300 attrs->at_type = a;
7301 break;
7302 case DW_AT_friend:
7303 attrs->at_friend = a;
7304 break;
7305 case DW_AT_accessibility:
7306 attrs->at_accessibility = a;
7307 break;
7308 case DW_AT_address_class:
7309 attrs->at_address_class = a;
7310 break;
7311 case DW_AT_alignment:
7312 attrs->at_alignment = a;
7313 break;
7314 case DW_AT_allocated:
7315 attrs->at_allocated = a;
7316 break;
7317 case DW_AT_artificial:
7318 attrs->at_artificial = a;
7319 break;
7320 case DW_AT_associated:
7321 attrs->at_associated = a;
7322 break;
7323 case DW_AT_binary_scale:
7324 attrs->at_binary_scale = a;
7325 break;
7326 case DW_AT_bit_offset:
7327 attrs->at_bit_offset = a;
7328 break;
7329 case DW_AT_bit_size:
7330 attrs->at_bit_size = a;
7331 break;
7332 case DW_AT_bit_stride:
7333 attrs->at_bit_stride = a;
7334 break;
7335 case DW_AT_byte_size:
7336 attrs->at_byte_size = a;
7337 break;
7338 case DW_AT_byte_stride:
7339 attrs->at_byte_stride = a;
7340 break;
7341 case DW_AT_const_value:
7342 attrs->at_const_value = a;
7343 break;
7344 case DW_AT_containing_type:
7345 attrs->at_containing_type = a;
7346 break;
7347 case DW_AT_count:
7348 attrs->at_count = a;
7349 break;
7350 case DW_AT_data_location:
7351 attrs->at_data_location = a;
7352 break;
7353 case DW_AT_data_member_location:
7354 attrs->at_data_member_location = a;
7355 break;
7356 case DW_AT_decimal_scale:
7357 attrs->at_decimal_scale = a;
7358 break;
7359 case DW_AT_decimal_sign:
7360 attrs->at_decimal_sign = a;
7361 break;
7362 case DW_AT_default_value:
7363 attrs->at_default_value = a;
7364 break;
7365 case DW_AT_digit_count:
7366 attrs->at_digit_count = a;
7367 break;
7368 case DW_AT_discr:
7369 attrs->at_discr = a;
7370 break;
7371 case DW_AT_discr_list:
7372 attrs->at_discr_list = a;
7373 break;
7374 case DW_AT_discr_value:
7375 attrs->at_discr_value = a;
7376 break;
7377 case DW_AT_encoding:
7378 attrs->at_encoding = a;
7379 break;
7380 case DW_AT_endianity:
7381 attrs->at_endianity = a;
7382 break;
7383 case DW_AT_explicit:
7384 attrs->at_explicit = a;
7385 break;
7386 case DW_AT_is_optional:
7387 attrs->at_is_optional = a;
7388 break;
7389 case DW_AT_location:
7390 attrs->at_location = a;
7391 break;
7392 case DW_AT_lower_bound:
7393 attrs->at_lower_bound = a;
7394 break;
7395 case DW_AT_mutable:
7396 attrs->at_mutable = a;
7397 break;
7398 case DW_AT_ordering:
7399 attrs->at_ordering = a;
7400 break;
7401 case DW_AT_picture_string:
7402 attrs->at_picture_string = a;
7403 break;
7404 case DW_AT_prototyped:
7405 attrs->at_prototyped = a;
7406 break;
7407 case DW_AT_small:
7408 attrs->at_small = a;
7409 break;
7410 case DW_AT_segment:
7411 attrs->at_segment = a;
7412 break;
7413 case DW_AT_string_length:
7414 attrs->at_string_length = a;
7415 break;
7416 case DW_AT_string_length_bit_size:
7417 attrs->at_string_length_bit_size = a;
7418 break;
7419 case DW_AT_string_length_byte_size:
7420 attrs->at_string_length_byte_size = a;
7421 break;
7422 case DW_AT_threads_scaled:
7423 attrs->at_threads_scaled = a;
7424 break;
7425 case DW_AT_upper_bound:
7426 attrs->at_upper_bound = a;
7427 break;
7428 case DW_AT_use_location:
7429 attrs->at_use_location = a;
7430 break;
7431 case DW_AT_use_UTF8:
7432 attrs->at_use_UTF8 = a;
7433 break;
7434 case DW_AT_variable_parameter:
7435 attrs->at_variable_parameter = a;
7436 break;
7437 case DW_AT_virtuality:
7438 attrs->at_virtuality = a;
7439 break;
7440 case DW_AT_visibility:
7441 attrs->at_visibility = a;
7442 break;
7443 case DW_AT_vtable_elem_location:
7444 attrs->at_vtable_elem_location = a;
7445 break;
7446 default:
7447 break;
7448 }
7449 }
7450 }
7451
7452 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7453
7454 static void
7455 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7456 {
7457 dw_die_ref c;
7458 dw_die_ref decl;
7459 struct checksum_attributes attrs;
7460
7461 CHECKSUM_ULEB128 ('D');
7462 CHECKSUM_ULEB128 (die->die_tag);
7463
7464 memset (&attrs, 0, sizeof (attrs));
7465
7466 decl = get_AT_ref (die, DW_AT_specification);
7467 if (decl != NULL)
7468 collect_checksum_attributes (&attrs, decl);
7469 collect_checksum_attributes (&attrs, die);
7470
7471 CHECKSUM_ATTR (attrs.at_name);
7472 CHECKSUM_ATTR (attrs.at_accessibility);
7473 CHECKSUM_ATTR (attrs.at_address_class);
7474 CHECKSUM_ATTR (attrs.at_allocated);
7475 CHECKSUM_ATTR (attrs.at_artificial);
7476 CHECKSUM_ATTR (attrs.at_associated);
7477 CHECKSUM_ATTR (attrs.at_binary_scale);
7478 CHECKSUM_ATTR (attrs.at_bit_offset);
7479 CHECKSUM_ATTR (attrs.at_bit_size);
7480 CHECKSUM_ATTR (attrs.at_bit_stride);
7481 CHECKSUM_ATTR (attrs.at_byte_size);
7482 CHECKSUM_ATTR (attrs.at_byte_stride);
7483 CHECKSUM_ATTR (attrs.at_const_value);
7484 CHECKSUM_ATTR (attrs.at_containing_type);
7485 CHECKSUM_ATTR (attrs.at_count);
7486 CHECKSUM_ATTR (attrs.at_data_location);
7487 CHECKSUM_ATTR (attrs.at_data_member_location);
7488 CHECKSUM_ATTR (attrs.at_decimal_scale);
7489 CHECKSUM_ATTR (attrs.at_decimal_sign);
7490 CHECKSUM_ATTR (attrs.at_default_value);
7491 CHECKSUM_ATTR (attrs.at_digit_count);
7492 CHECKSUM_ATTR (attrs.at_discr);
7493 CHECKSUM_ATTR (attrs.at_discr_list);
7494 CHECKSUM_ATTR (attrs.at_discr_value);
7495 CHECKSUM_ATTR (attrs.at_encoding);
7496 CHECKSUM_ATTR (attrs.at_endianity);
7497 CHECKSUM_ATTR (attrs.at_explicit);
7498 CHECKSUM_ATTR (attrs.at_is_optional);
7499 CHECKSUM_ATTR (attrs.at_location);
7500 CHECKSUM_ATTR (attrs.at_lower_bound);
7501 CHECKSUM_ATTR (attrs.at_mutable);
7502 CHECKSUM_ATTR (attrs.at_ordering);
7503 CHECKSUM_ATTR (attrs.at_picture_string);
7504 CHECKSUM_ATTR (attrs.at_prototyped);
7505 CHECKSUM_ATTR (attrs.at_small);
7506 CHECKSUM_ATTR (attrs.at_segment);
7507 CHECKSUM_ATTR (attrs.at_string_length);
7508 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7509 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7510 CHECKSUM_ATTR (attrs.at_threads_scaled);
7511 CHECKSUM_ATTR (attrs.at_upper_bound);
7512 CHECKSUM_ATTR (attrs.at_use_location);
7513 CHECKSUM_ATTR (attrs.at_use_UTF8);
7514 CHECKSUM_ATTR (attrs.at_variable_parameter);
7515 CHECKSUM_ATTR (attrs.at_virtuality);
7516 CHECKSUM_ATTR (attrs.at_visibility);
7517 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7518 CHECKSUM_ATTR (attrs.at_type);
7519 CHECKSUM_ATTR (attrs.at_friend);
7520 CHECKSUM_ATTR (attrs.at_alignment);
7521
7522 /* Checksum the child DIEs. */
7523 c = die->die_child;
7524 if (c) do {
7525 dw_attr_node *name_attr;
7526
7527 c = c->die_sib;
7528 name_attr = get_AT (c, DW_AT_name);
7529 if (is_template_instantiation (c))
7530 {
7531 /* Ignore instantiations of member type and function templates. */
7532 }
7533 else if (name_attr != NULL
7534 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7535 {
7536 /* Use a shallow checksum for named nested types and member
7537 functions. */
7538 CHECKSUM_ULEB128 ('S');
7539 CHECKSUM_ULEB128 (c->die_tag);
7540 CHECKSUM_STRING (AT_string (name_attr));
7541 }
7542 else
7543 {
7544 /* Use a deep checksum for other children. */
7545 /* Mark this DIE so it gets processed when unmarking. */
7546 if (c->die_mark == 0)
7547 c->die_mark = -1;
7548 die_checksum_ordered (c, ctx, mark);
7549 }
7550 } while (c != die->die_child);
7551
7552 CHECKSUM_ULEB128 (0);
7553 }
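
/* A small example of the child handling above (the type is invented):
   for

     struct A { struct B { int i; }; void f (); int x; };

   the nested type B and the member function f are named, so each adds
   only the shallow 'S', tag, name triple; the data member x takes the
   deep path and is checksummed in full; and any template instantiations
   among the children are skipped entirely.  */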
7554
7555 /* Add a type name and tag to a hash. */
7556 static void
7557 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7558 {
7559 CHECKSUM_ULEB128 (tag);
7560 CHECKSUM_STRING (name);
7561 }
7562
7563 #undef CHECKSUM
7564 #undef CHECKSUM_STRING
7565 #undef CHECKSUM_ATTR
7566 #undef CHECKSUM_SLEB128
7567 #undef CHECKSUM_ULEB128
7568
7569 /* Generate the type signature for DIE. This is computed by generating an
7570 MD5 checksum over the DIE's tag, its relevant attributes, and its
7571 children. Attributes that are references to other DIEs are processed
7572 by recursion, using the MARK field to prevent infinite recursion.
7573 If the DIE is nested inside a namespace or another type, we also
7574 need to include that context in the signature. The lower 64 bits
7575 of the resulting MD5 checksum comprise the signature. */
7576
7577 static void
7578 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7579 {
7580 int mark;
7581 const char *name;
7582 unsigned char checksum[16];
7583 struct md5_ctx ctx;
7584 dw_die_ref decl;
7585 dw_die_ref parent;
7586
7587 name = get_AT_string (die, DW_AT_name);
7588 decl = get_AT_ref (die, DW_AT_specification);
7589 parent = get_die_parent (die);
7590
7591 /* First, compute a signature for just the type name (and its surrounding
7592 context, if any).  This is stored in the type unit DIE for link-time
7593 ODR (one-definition rule) checking. */
7594
7595 if (is_cxx () && name != NULL)
7596 {
7597 md5_init_ctx (&ctx);
7598
7599 /* Checksum the names of surrounding namespaces and structures. */
7600 if (parent != NULL)
7601 checksum_die_context (parent, &ctx);
7602
7603 /* Checksum the current DIE. */
7604 die_odr_checksum (die->die_tag, name, &ctx);
7605 md5_finish_ctx (&ctx, checksum);
7606
7607 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7608 }
7609
7610 /* Next, compute the complete type signature. */
7611
7612 md5_init_ctx (&ctx);
7613 mark = 1;
7614 die->die_mark = mark;
7615
7616 /* Checksum the names of surrounding namespaces and structures. */
7617 if (parent != NULL)
7618 checksum_die_context (parent, &ctx);
7619
7620 /* Checksum the DIE and its children. */
7621 die_checksum_ordered (die, &ctx, &mark);
7622 unmark_all_dies (die);
7623 md5_finish_ctx (&ctx, checksum);
7624
7625 /* Store the signature in the type node and link the type DIE and the
7626 type node together. */
7627 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7628 DWARF_TYPE_SIGNATURE_SIZE);
7629 die->comdat_type_p = true;
7630 die->die_id.die_type_node = type_node;
7631 type_node->type_die = die;
7632
7633 /* If the DIE is a specification, link its declaration to the type node
7634 as well. */
7635 if (decl != NULL)
7636 {
7637 decl->comdat_type_p = true;
7638 decl->die_id.die_type_node = type_node;
7639 }
7640 }
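
/* A concrete sketch of the two hashes computed above (names invented):
   for struct S inside namespace N, the ODR hash covers only the
   qualified name (the context of N plus DW_TAG_structure_type and "S"),
   while the full signature also covers the ordered attributes and
   children.  In both cases it is the trailing eight bytes of the
   16-byte MD5 digest that are kept, assuming the usual
   DWARF_TYPE_SIGNATURE_SIZE of 8.  */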
7641
7642 /* Do the location expressions look the same? */
7643 static inline int
7644 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7645 {
7646 return loc1->dw_loc_opc == loc2->dw_loc_opc
7647 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7648 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7649 }
7650
7651 /* Do the values look the same? */
7652 static int
7653 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7654 {
7655 dw_loc_descr_ref loc1, loc2;
7656 rtx r1, r2;
7657
7658 if (v1->val_class != v2->val_class)
7659 return 0;
7660
7661 switch (v1->val_class)
7662 {
7663 case dw_val_class_const:
7664 case dw_val_class_const_implicit:
7665 return v1->v.val_int == v2->v.val_int;
7666 case dw_val_class_unsigned_const:
7667 case dw_val_class_unsigned_const_implicit:
7668 return v1->v.val_unsigned == v2->v.val_unsigned;
7669 case dw_val_class_const_double:
7670 return v1->v.val_double.high == v2->v.val_double.high
7671 && v1->v.val_double.low == v2->v.val_double.low;
7672 case dw_val_class_wide_int:
7673 return *v1->v.val_wide == *v2->v.val_wide;
7674 case dw_val_class_vec:
7675 if (v1->v.val_vec.length != v2->v.val_vec.length
7676 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7677 return 0;
7678 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7679 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7680 return 0;
7681 return 1;
7682 case dw_val_class_flag:
7683 return v1->v.val_flag == v2->v.val_flag;
7684 case dw_val_class_str:
7685 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7686
7687 case dw_val_class_addr:
7688 r1 = v1->v.val_addr;
7689 r2 = v2->v.val_addr;
7690 if (GET_CODE (r1) != GET_CODE (r2))
7691 return 0;
7692 return rtx_equal_p (r1, r2);
7693
7694 case dw_val_class_offset:
7695 return v1->v.val_offset == v2->v.val_offset;
7696
7697 case dw_val_class_loc:
7698 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7699 loc1 && loc2;
7700 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7701 if (!same_loc_p (loc1, loc2, mark))
7702 return 0;
7703 return !loc1 && !loc2;
7704
7705 case dw_val_class_die_ref:
7706 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7707
7708 case dw_val_class_symview:
7709 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7710
7711 case dw_val_class_fde_ref:
7712 case dw_val_class_vms_delta:
7713 case dw_val_class_lbl_id:
7714 case dw_val_class_lineptr:
7715 case dw_val_class_macptr:
7716 case dw_val_class_loclistsptr:
7717 case dw_val_class_high_pc:
7718 return 1;
7719
7720 case dw_val_class_file:
7721 case dw_val_class_file_implicit:
7722 return v1->v.val_file == v2->v.val_file;
7723
7724 case dw_val_class_data8:
7725 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7726
7727 default:
7728 return 1;
7729 }
7730 }
7731
7732 /* Do the attributes look the same? */
7733
7734 static int
7735 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7736 {
7737 if (at1->dw_attr != at2->dw_attr)
7738 return 0;
7739
7740 /* We don't care that this was compiled with a different compiler
7741 snapshot; if the output is the same, that's what matters. */
7742 if (at1->dw_attr == DW_AT_producer)
7743 return 1;
7744
7745 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7746 }
7747
7748 /* Do the DIEs look the same? */
7749
7750 static int
7751 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7752 {
7753 dw_die_ref c1, c2;
7754 dw_attr_node *a1;
7755 unsigned ix;
7756
7757 /* To avoid infinite recursion. */
7758 if (die1->die_mark)
7759 return die1->die_mark == die2->die_mark;
7760 die1->die_mark = die2->die_mark = ++(*mark);
7761
7762 if (die1->die_tag != die2->die_tag)
7763 return 0;
7764
7765 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7766 return 0;
7767
7768 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7769 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7770 return 0;
7771
7772 c1 = die1->die_child;
7773 c2 = die2->die_child;
7774 if (! c1)
7775 {
7776 if (c2)
7777 return 0;
7778 }
7779 else
7780 for (;;)
7781 {
7782 if (!same_die_p (c1, c2, mark))
7783 return 0;
7784 c1 = c1->die_sib;
7785 c2 = c2->die_sib;
7786 if (c1 == die1->die_child)
7787 {
7788 if (c2 == die2->die_child)
7789 break;
7790 else
7791 return 0;
7792 }
7793 }
7794
7795 return 1;
7796 }
7797
7798 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7799 children, and set die_symbol. */
7800
7801 static void
7802 compute_comp_unit_symbol (dw_die_ref unit_die)
7803 {
7804 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7805 const char *base = die_name ? lbasename (die_name) : "anonymous";
7806 char *name = XALLOCAVEC (char, strlen (base) + 64);
7807 char *p;
7808 int i, mark;
7809 unsigned char checksum[16];
7810 struct md5_ctx ctx;
7811
7812 /* Compute the checksum of the DIE, then append part of it as hex digits to
7813 the base filename of the unit.  */
7814
7815 md5_init_ctx (&ctx);
7816 mark = 0;
7817 die_checksum (unit_die, &ctx, &mark);
7818 unmark_all_dies (unit_die);
7819 md5_finish_ctx (&ctx, checksum);
7820
7821 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7822 not start with a letter but with anything valid for filenames and
7823 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7824 character is not a letter. */
7825 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7826 clean_symbol_name (name);
7827
7828 p = name + strlen (name);
7829 for (i = 0; i < 4; i++)
7830 {
7831 sprintf (p, "%.2x", checksum[i]);
7832 p += 2;
7833 }
7834
7835 unit_die->die_id.die_symbol = xstrdup (name);
7836 }
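
/* For instance (an invented input): a unit whose DW_AT_name is "t.c"
   and whose checksum starts 0x12 0x34 0x56 0x78 ends up with a
   die_symbol along the lines of "t_c_12345678", i.e. the cleaned-up
   basename followed by the first four checksum bytes in hex.  */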
7837
7838 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7839
7840 static int
7841 is_type_die (dw_die_ref die)
7842 {
7843 switch (die->die_tag)
7844 {
7845 case DW_TAG_array_type:
7846 case DW_TAG_class_type:
7847 case DW_TAG_interface_type:
7848 case DW_TAG_enumeration_type:
7849 case DW_TAG_pointer_type:
7850 case DW_TAG_reference_type:
7851 case DW_TAG_rvalue_reference_type:
7852 case DW_TAG_string_type:
7853 case DW_TAG_structure_type:
7854 case DW_TAG_subroutine_type:
7855 case DW_TAG_union_type:
7856 case DW_TAG_ptr_to_member_type:
7857 case DW_TAG_set_type:
7858 case DW_TAG_subrange_type:
7859 case DW_TAG_base_type:
7860 case DW_TAG_const_type:
7861 case DW_TAG_file_type:
7862 case DW_TAG_packed_type:
7863 case DW_TAG_volatile_type:
7864 case DW_TAG_typedef:
7865 return 1;
7866 default:
7867 return 0;
7868 }
7869 }
7870
7871 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7872 Basically, we want to choose the bits that are likely to be shared between
7873 compilations (types) and leave out the bits that are specific to individual
7874 compilations (functions). */
7875
7876 static int
7877 is_comdat_die (dw_die_ref c)
7878 {
7879 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7880 we do for stabs. The advantage is a greater likelihood of sharing between
7881 objects that don't include headers in the same order (and therefore would
7882 put the base types in a different comdat). jason 8/28/00 */
7883
7884 if (c->die_tag == DW_TAG_base_type)
7885 return 0;
7886
7887 if (c->die_tag == DW_TAG_pointer_type
7888 || c->die_tag == DW_TAG_reference_type
7889 || c->die_tag == DW_TAG_rvalue_reference_type
7890 || c->die_tag == DW_TAG_const_type
7891 || c->die_tag == DW_TAG_volatile_type)
7892 {
7893 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7894
7895 return t ? is_comdat_die (t) : 0;
7896 }
7897
7898 return is_type_die (c);
7899 }
7900
7901 /* Returns true iff C is a compile-unit DIE. */
7902
7903 static inline bool
7904 is_cu_die (dw_die_ref c)
7905 {
7906 return c && (c->die_tag == DW_TAG_compile_unit
7907 || c->die_tag == DW_TAG_skeleton_unit);
7908 }
7909
7910 /* Returns true iff C is a unit DIE of some sort. */
7911
7912 static inline bool
7913 is_unit_die (dw_die_ref c)
7914 {
7915 return c && (c->die_tag == DW_TAG_compile_unit
7916 || c->die_tag == DW_TAG_partial_unit
7917 || c->die_tag == DW_TAG_type_unit
7918 || c->die_tag == DW_TAG_skeleton_unit);
7919 }
7920
7921 /* Returns true iff C is a namespace DIE. */
7922
7923 static inline bool
7924 is_namespace_die (dw_die_ref c)
7925 {
7926 return c && c->die_tag == DW_TAG_namespace;
7927 }
7928
7929 /* Returns true iff C is a class or structure DIE. */
7930
7931 static inline bool
7932 is_class_die (dw_die_ref c)
7933 {
7934 return c && (c->die_tag == DW_TAG_class_type
7935 || c->die_tag == DW_TAG_structure_type);
7936 }
7937
7938 /* Return non-zero if this DIE is a template parameter. */
7939
7940 static inline bool
7941 is_template_parameter (dw_die_ref die)
7942 {
7943 switch (die->die_tag)
7944 {
7945 case DW_TAG_template_type_param:
7946 case DW_TAG_template_value_param:
7947 case DW_TAG_GNU_template_template_param:
7948 case DW_TAG_GNU_template_parameter_pack:
7949 return true;
7950 default:
7951 return false;
7952 }
7953 }
7954
7955 /* Return non-zero if this DIE represents a template instantiation. */
7956
7957 static inline bool
7958 is_template_instantiation (dw_die_ref die)
7959 {
7960 dw_die_ref c;
7961
7962 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7963 return false;
7964 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7965 return false;
7966 }
7967
7968 static char *
7969 gen_internal_sym (const char *prefix)
7970 {
7971 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7972
7973 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7974 return xstrdup (buf);
7975 }
7976
7977 /* Return non-zero if this DIE is a declaration. */
7978
7979 static int
7980 is_declaration_die (dw_die_ref die)
7981 {
7982 dw_attr_node *a;
7983 unsigned ix;
7984
7985 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7986 if (a->dw_attr == DW_AT_declaration)
7987 return 1;
7988
7989 return 0;
7990 }
7991
7992 /* Return non-zero if this DIE is nested inside a subprogram. */
7993
7994 static int
7995 is_nested_in_subprogram (dw_die_ref die)
7996 {
7997 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7998
7999 if (decl == NULL)
8000 decl = die;
8001 return local_scope_p (decl);
8002 }
8003
8004 /* Return non-zero if this DIE contains a defining declaration of a
8005 subprogram. */
8006
8007 static int
8008 contains_subprogram_definition (dw_die_ref die)
8009 {
8010 dw_die_ref c;
8011
8012 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8013 return 1;
8014 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8015 return 0;
8016 }
8017
8018 /* Return non-zero if this is a type DIE that should be moved to a
8019 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8020 unit type. */
8021
8022 static int
8023 should_move_die_to_comdat (dw_die_ref die)
8024 {
8025 switch (die->die_tag)
8026 {
8027 case DW_TAG_class_type:
8028 case DW_TAG_structure_type:
8029 case DW_TAG_enumeration_type:
8030 case DW_TAG_union_type:
8031 /* Don't move declarations, inlined instances, types nested in a
8032 subprogram, or types that contain subprogram definitions. */
8033 if (is_declaration_die (die)
8034 || get_AT (die, DW_AT_abstract_origin)
8035 || is_nested_in_subprogram (die)
8036 || contains_subprogram_definition (die))
8037 return 0;
8038 return 1;
8039 case DW_TAG_array_type:
8040 case DW_TAG_interface_type:
8041 case DW_TAG_pointer_type:
8042 case DW_TAG_reference_type:
8043 case DW_TAG_rvalue_reference_type:
8044 case DW_TAG_string_type:
8045 case DW_TAG_subroutine_type:
8046 case DW_TAG_ptr_to_member_type:
8047 case DW_TAG_set_type:
8048 case DW_TAG_subrange_type:
8049 case DW_TAG_base_type:
8050 case DW_TAG_const_type:
8051 case DW_TAG_file_type:
8052 case DW_TAG_packed_type:
8053 case DW_TAG_volatile_type:
8054 case DW_TAG_typedef:
8055 default:
8056 return 0;
8057 }
8058 }
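
/* So, roughly: a class, struct, union or enumeration definition at
   namespace scope is broken out into its own type unit, while the same
   aggregate stays put if it is only a declaration, is an inlined
   instance, is nested inside a subprogram, or has a full subprogram
   definition among its children.  Everything else (base types,
   typedefs, pointer types and so on) always stays where it is.  */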
8059
8060 /* Make a clone of DIE. */
8061
8062 static dw_die_ref
8063 clone_die (dw_die_ref die)
8064 {
8065 dw_die_ref clone = new_die_raw (die->die_tag);
8066 dw_attr_node *a;
8067 unsigned ix;
8068
8069 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8070 add_dwarf_attr (clone, a);
8071
8072 return clone;
8073 }
8074
8075 /* Make a clone of the tree rooted at DIE. */
8076
8077 static dw_die_ref
8078 clone_tree (dw_die_ref die)
8079 {
8080 dw_die_ref c;
8081 dw_die_ref clone = clone_die (die);
8082
8083 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8084
8085 return clone;
8086 }
8087
8088 /* Make a clone of DIE as a declaration. */
8089
8090 static dw_die_ref
8091 clone_as_declaration (dw_die_ref die)
8092 {
8093 dw_die_ref clone;
8094 dw_die_ref decl;
8095 dw_attr_node *a;
8096 unsigned ix;
8097
8098 /* If the DIE is already a declaration, just clone it. */
8099 if (is_declaration_die (die))
8100 return clone_die (die);
8101
8102 /* If the DIE is a specification, just clone its declaration DIE. */
8103 decl = get_AT_ref (die, DW_AT_specification);
8104 if (decl != NULL)
8105 {
8106 clone = clone_die (decl);
8107 if (die->comdat_type_p)
8108 add_AT_die_ref (clone, DW_AT_signature, die);
8109 return clone;
8110 }
8111
8112 clone = new_die_raw (die->die_tag);
8113
8114 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8115 {
8116 /* We don't want to copy over all attributes.
8117 For example we don't want DW_AT_byte_size because otherwise we will no
8118 longer have a declaration and GDB will treat it as a definition. */
8119
8120 switch (a->dw_attr)
8121 {
8122 case DW_AT_abstract_origin:
8123 case DW_AT_artificial:
8124 case DW_AT_containing_type:
8125 case DW_AT_external:
8126 case DW_AT_name:
8127 case DW_AT_type:
8128 case DW_AT_virtuality:
8129 case DW_AT_linkage_name:
8130 case DW_AT_MIPS_linkage_name:
8131 add_dwarf_attr (clone, a);
8132 break;
8133 case DW_AT_byte_size:
8134 case DW_AT_alignment:
8135 default:
8136 break;
8137 }
8138 }
8139
8140 if (die->comdat_type_p)
8141 add_AT_die_ref (clone, DW_AT_signature, die);
8142
8143 add_AT_flag (clone, DW_AT_declaration, 1);
8144 return clone;
8145 }
8146
8147
8148 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8149
8150 struct decl_table_entry
8151 {
8152 dw_die_ref orig;
8153 dw_die_ref copy;
8154 };
8155
8156 /* Helpers to manipulate hash table of copied declarations. */
8157
8158 /* Hashtable helpers. */
8159
8160 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8161 {
8162 typedef die_struct *compare_type;
8163 static inline hashval_t hash (const decl_table_entry *);
8164 static inline bool equal (const decl_table_entry *, const die_struct *);
8165 };
8166
8167 inline hashval_t
8168 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8169 {
8170 return htab_hash_pointer (entry->orig);
8171 }
8172
8173 inline bool
8174 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8175 const die_struct *entry2)
8176 {
8177 return entry1->orig == entry2;
8178 }
8179
8180 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8181
8182 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8183 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8184 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8185 to check if the ancestor has already been copied into UNIT. */
8186
8187 static dw_die_ref
8188 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8189 decl_hash_type *decl_table)
8190 {
8191 dw_die_ref parent = die->die_parent;
8192 dw_die_ref new_parent = unit;
8193 dw_die_ref copy;
8194 decl_table_entry **slot = NULL;
8195 struct decl_table_entry *entry = NULL;
8196
8197 if (decl_table)
8198 {
8199 /* Check if the entry has already been copied to UNIT. */
8200 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8201 INSERT);
8202 if (*slot != HTAB_EMPTY_ENTRY)
8203 {
8204 entry = *slot;
8205 return entry->copy;
8206 }
8207
8208 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8209 entry = XCNEW (struct decl_table_entry);
8210 entry->orig = die;
8211 entry->copy = NULL;
8212 *slot = entry;
8213 }
8214
8215 if (parent != NULL)
8216 {
8217 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8218 if (spec != NULL)
8219 parent = spec;
8220 if (!is_unit_die (parent))
8221 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8222 }
8223
8224 copy = clone_as_declaration (die);
8225 add_child_die (new_parent, copy);
8226
8227 if (decl_table)
8228 {
8229 /* Record the pointer to the copy. */
8230 entry->copy = copy;
8231 }
8232
8233 return copy;
8234 }
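
/* A short example of copy_ancestor_tree's effect (the names are
   invented): for a DIE representing N::S::T, this creates
   declaration-only clones of namespace N, struct S and T itself inside
   UNIT, nested in the same order, so the new unit gets the qualified
   context without dragging the full definitions of N and S along.  */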
8235 /* Copy the declaration context to the new type unit DIE. This includes
8236 any surrounding namespace or type declarations. If the DIE has an
8237 AT_specification attribute, it also includes attributes and children
8238 attached to the specification, and returns a pointer to the original
8239 parent of the declaration DIE. Returns NULL otherwise. */
8240
8241 static dw_die_ref
8242 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8243 {
8244 dw_die_ref decl;
8245 dw_die_ref new_decl;
8246 dw_die_ref orig_parent = NULL;
8247
8248 decl = get_AT_ref (die, DW_AT_specification);
8249 if (decl == NULL)
8250 decl = die;
8251 else
8252 {
8253 unsigned ix;
8254 dw_die_ref c;
8255 dw_attr_node *a;
8256
8257 /* The original DIE will be changed to a declaration, and must
8258 be moved to be a child of the original declaration DIE. */
8259 orig_parent = decl->die_parent;
8260
8261 /* Copy the type node pointer from the new DIE to the original
8262 declaration DIE so we can forward references later. */
8263 decl->comdat_type_p = true;
8264 decl->die_id.die_type_node = die->die_id.die_type_node;
8265
8266 remove_AT (die, DW_AT_specification);
8267
8268 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8269 {
8270 if (a->dw_attr != DW_AT_name
8271 && a->dw_attr != DW_AT_declaration
8272 && a->dw_attr != DW_AT_external)
8273 add_dwarf_attr (die, a);
8274 }
8275
8276 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8277 }
8278
8279 if (decl->die_parent != NULL
8280 && !is_unit_die (decl->die_parent))
8281 {
8282 new_decl = copy_ancestor_tree (unit, decl, NULL);
8283 if (new_decl != NULL)
8284 {
8285 remove_AT (new_decl, DW_AT_signature);
8286 add_AT_specification (die, new_decl);
8287 }
8288 }
8289
8290 return orig_parent;
8291 }
8292
8293 /* Generate the skeleton ancestor tree for the given NODE, then clone
8294 the DIE and add the clone into the tree. */
8295
8296 static void
8297 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8298 {
8299 if (node->new_die != NULL)
8300 return;
8301
8302 node->new_die = clone_as_declaration (node->old_die);
8303
8304 if (node->parent != NULL)
8305 {
8306 generate_skeleton_ancestor_tree (node->parent);
8307 add_child_die (node->parent->new_die, node->new_die);
8308 }
8309 }
8310
8311 /* Generate a skeleton tree of DIEs containing any declarations that are
8312 found in the original tree. We traverse the tree looking for declaration
8313 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8314
8315 static void
8316 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8317 {
8318 skeleton_chain_node node;
8319 dw_die_ref c;
8320 dw_die_ref first;
8321 dw_die_ref prev = NULL;
8322 dw_die_ref next = NULL;
8323
8324 node.parent = parent;
8325
8326 first = c = parent->old_die->die_child;
8327 if (c)
8328 next = c->die_sib;
8329 if (c) do {
8330 if (prev == NULL || prev->die_sib == c)
8331 prev = c;
8332 c = next;
8333 next = (c == first ? NULL : c->die_sib);
8334 node.old_die = c;
8335 node.new_die = NULL;
8336 if (is_declaration_die (c))
8337 {
8338 if (is_template_instantiation (c))
8339 {
8340 /* Instantiated templates do not need to be cloned into the
8341 type unit. Just move the DIE and its children back to
8342 the skeleton tree (in the main CU). */
8343 remove_child_with_prev (c, prev);
8344 add_child_die (parent->new_die, c);
8345 c = prev;
8346 }
8347 else if (c->comdat_type_p)
8348 {
8349 /* This is the skeleton of a type broken out earlier by
8350 break_out_comdat_types.  Clone the existing DIE, but keep the
8351 children under the original (which is in the main CU). */
8352 dw_die_ref clone = clone_die (c);
8353
8354 replace_child (c, clone, prev);
8355 generate_skeleton_ancestor_tree (parent);
8356 add_child_die (parent->new_die, c);
8357 c = clone;
8358 continue;
8359 }
8360 else
8361 {
8362 /* Clone the existing DIE, move the original to the skeleton
8363 tree (which is in the main CU), and put the clone, with
8364 all the original's children, where the original came from
8365 (which is about to be moved to the type unit). */
8366 dw_die_ref clone = clone_die (c);
8367 move_all_children (c, clone);
8368
8369 /* If the original has a DW_AT_object_pointer attribute,
8370 it would now point to a child DIE just moved to the
8371 cloned tree, so we need to remove that attribute from
8372 the original. */
8373 remove_AT (c, DW_AT_object_pointer);
8374
8375 replace_child (c, clone, prev);
8376 generate_skeleton_ancestor_tree (parent);
8377 add_child_die (parent->new_die, c);
8378 node.old_die = clone;
8379 node.new_die = c;
8380 c = clone;
8381 }
8382 }
8383 generate_skeleton_bottom_up (&node);
8384 } while (next != NULL);
8385 }
8386
8387 /* Wrapper function for generate_skeleton_bottom_up. */
8388
8389 static dw_die_ref
8390 generate_skeleton (dw_die_ref die)
8391 {
8392 skeleton_chain_node node;
8393
8394 node.old_die = die;
8395 node.new_die = NULL;
8396 node.parent = NULL;
8397
8398 /* If this type definition is nested inside another type,
8399 and is not an instantiation of a template, always leave
8400 at least a declaration in its place. */
8401 if (die->die_parent != NULL
8402 && is_type_die (die->die_parent)
8403 && !is_template_instantiation (die))
8404 node.new_die = clone_as_declaration (die);
8405
8406 generate_skeleton_bottom_up (&node);
8407 return node.new_die;
8408 }
8409
8410 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8411 declaration. The original DIE is moved to a new compile unit so that
8412 existing references to it follow it to the new location. If any of the
8413 original DIE's descendants is a declaration, we need to replace the
8414 original DIE with a skeleton tree and move the declarations back into the
8415 skeleton tree. */
8416
8417 static dw_die_ref
8418 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8419 dw_die_ref prev)
8420 {
8421 dw_die_ref skeleton, orig_parent;
8422
8423 /* Copy the declaration context to the type unit DIE. If the returned
8424 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8425 that DIE. */
8426 orig_parent = copy_declaration_context (unit, child);
8427
8428 skeleton = generate_skeleton (child);
8429 if (skeleton == NULL)
8430 remove_child_with_prev (child, prev);
8431 else
8432 {
8433 skeleton->comdat_type_p = true;
8434 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8435
8436 /* If the original DIE was a specification, we need to put
8437 the skeleton under the parent DIE of the declaration.
8438 This leaves the original declaration in the tree, but
8439 it will be pruned later since there are no longer any
8440 references to it. */
8441 if (orig_parent != NULL)
8442 {
8443 remove_child_with_prev (child, prev);
8444 add_child_die (orig_parent, skeleton);
8445 }
8446 else
8447 replace_child (child, skeleton, prev);
8448 }
8449
8450 return skeleton;
8451 }
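
/* Putting the pieces above together with a small, purely illustrative
   C++ example:

     struct S { void f (); };
     void S::f () { ... }

   When S is broken out, the full definition of S moves to the type
   unit, a skeleton DW_TAG_structure_type for S is left behind in the
   main CU holding the original declaration of f, and the out-of-line
   DIE for S::f in the main CU keeps pointing at that declaration via
   DW_AT_specification, while the copy of S in the type unit gets a
   clone of the declaration instead.  */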
8452
8453 static void
8454 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8455 comdat_type_node *type_node,
8456 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8457
8458 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8459 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8460 DWARF procedure references in the DW_AT_location attribute. */
8461
8462 static dw_die_ref
8463 copy_dwarf_procedure (dw_die_ref die,
8464 comdat_type_node *type_node,
8465 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8466 {
8467 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8468
8469 /* DWARF procedures are not supposed to have children... */
8470 gcc_assert (die->die_child == NULL);
8471
8472 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8473 gcc_assert (vec_safe_length (die->die_attr) == 1
8474 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8475
8476 /* Do not copy DWARF procedures more than once. */
8477 bool existed;
8478 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8479 if (existed)
8480 return die_copy;
8481
8482 die_copy = clone_die (die);
8483 add_child_die (type_node->root_die, die_copy);
8484 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8485 return die_copy;
8486 }
8487
8488 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8489 procedures in DIE's attributes. */
8490
8491 static void
8492 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8493 comdat_type_node *type_node,
8494 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8495 {
8496 dw_attr_node *a;
8497 unsigned i;
8498
8499 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8500 {
8501 dw_loc_descr_ref loc;
8502
8503 if (a->dw_attr_val.val_class != dw_val_class_loc)
8504 continue;
8505
8506 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8507 {
8508 switch (loc->dw_loc_opc)
8509 {
8510 case DW_OP_call2:
8511 case DW_OP_call4:
8512 case DW_OP_call_ref:
8513 gcc_assert (loc->dw_loc_oprnd1.val_class
8514 == dw_val_class_die_ref);
8515 loc->dw_loc_oprnd1.v.val_die_ref.die
8516 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8517 type_node,
8518 copied_dwarf_procs);
8519 break;
8520 default:
8521 break;
8522 }
8523 }
8524 }
8525 }
8526
8527 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE
8528 and rewrite references to point to the copies.
8529
8530 References are looked for in DIE's attributes and, recursively, in the
8531 attributes of all its children that are location descriptions.
8532 COPIED_DWARF_PROCS is a mapping from old DWARF procedures to their copies;
8533 it is used to avoid copying the same DWARF procedure twice under TYPE_NODE. */
8534
8535 static void
8536 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8537 comdat_type_node *type_node,
8538 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8539 {
8540 dw_die_ref c;
8541
8542 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8543 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8544 type_node,
8545 copied_dwarf_procs));
8546 }
8547
8548 /* Traverse the DIE and set up additional .debug_types or .debug_info
8549 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8550 section. */
8551
8552 static void
8553 break_out_comdat_types (dw_die_ref die)
8554 {
8555 dw_die_ref c;
8556 dw_die_ref first;
8557 dw_die_ref prev = NULL;
8558 dw_die_ref next = NULL;
8559 dw_die_ref unit = NULL;
8560
8561 first = c = die->die_child;
8562 if (c)
8563 next = c->die_sib;
8564 if (c) do {
8565 if (prev == NULL || prev->die_sib == c)
8566 prev = c;
8567 c = next;
8568 next = (c == first ? NULL : c->die_sib);
8569 if (should_move_die_to_comdat (c))
8570 {
8571 dw_die_ref replacement;
8572 comdat_type_node *type_node;
8573
8574 /* Break out nested types into their own type units. */
8575 break_out_comdat_types (c);
8576
8577 /* Create a new type unit DIE as the root for the new tree, and
8578 add it to the list of comdat types. */
8579 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8580 add_AT_unsigned (unit, DW_AT_language,
8581 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8582 type_node = ggc_cleared_alloc<comdat_type_node> ();
8583 type_node->root_die = unit;
8584 type_node->next = comdat_type_list;
8585 comdat_type_list = type_node;
8586
8587 /* Generate the type signature. */
8588 generate_type_signature (c, type_node);
8589
8590 /* Copy the declaration context, attributes, and children of the
8591 declaration into the new type unit DIE, then remove this DIE
8592 from the main CU (or replace it with a skeleton if necessary). */
8593 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8594 type_node->skeleton_die = replacement;
8595
8596 /* Add the DIE to the new compunit. */
8597 add_child_die (unit, c);
8598
8599 /* Types can reference DWARF procedures for type size or data location
8600 expressions. Calls in DWARF expressions cannot target procedures
8601 that are not in the same section. So we must copy DWARF procedures
8602 along with this type and then rewrite references to them. */
8603 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8604 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8605
8606 if (replacement != NULL)
8607 c = replacement;
8608 }
8609 else if (c->die_tag == DW_TAG_namespace
8610 || c->die_tag == DW_TAG_class_type
8611 || c->die_tag == DW_TAG_structure_type
8612 || c->die_tag == DW_TAG_union_type)
8613 {
8614 /* Look for nested types that can be broken out. */
8615 break_out_comdat_types (c);
8616 }
8617 } while (next != NULL);
8618 }
8619
8620 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8621 Enter all the cloned children into the hash table decl_table. */
8622
8623 static dw_die_ref
8624 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8625 {
8626 dw_die_ref c;
8627 dw_die_ref clone;
8628 struct decl_table_entry *entry;
8629 decl_table_entry **slot;
8630
8631 if (die->die_tag == DW_TAG_subprogram)
8632 clone = clone_as_declaration (die);
8633 else
8634 clone = clone_die (die);
8635
8636 slot = decl_table->find_slot_with_hash (die,
8637 htab_hash_pointer (die), INSERT);
8638
8639 /* Assert that DIE isn't in the hash table yet. If it were already there,
8640 its ancestors would necessarily be there as well, and in that case
8641 clone_tree_partial wouldn't have been called. */
8642 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8643
8644 entry = XCNEW (struct decl_table_entry);
8645 entry->orig = die;
8646 entry->copy = clone;
8647 *slot = entry;
8648
8649 if (die->die_tag != DW_TAG_subprogram)
8650 FOR_EACH_CHILD (die, c,
8651 add_child_die (clone, clone_tree_partial (c, decl_table)));
8652
8653 return clone;
8654 }
8655
8656 /* Walk the DIE and its children, looking for references to incomplete
8657 or trivial types that are unmarked (i.e., that are not in the current
8658 type_unit). */
8659
8660 static void
8661 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8662 {
8663 dw_die_ref c;
8664 dw_attr_node *a;
8665 unsigned ix;
8666
8667 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8668 {
8669 if (AT_class (a) == dw_val_class_die_ref)
8670 {
8671 dw_die_ref targ = AT_ref (a);
8672 decl_table_entry **slot;
8673 struct decl_table_entry *entry;
8674
8675 if (targ->die_mark != 0 || targ->comdat_type_p)
8676 continue;
8677
8678 slot = decl_table->find_slot_with_hash (targ,
8679 htab_hash_pointer (targ),
8680 INSERT);
8681
8682 if (*slot != HTAB_EMPTY_ENTRY)
8683 {
8684 /* TARG has already been copied, so we just need to
8685 modify the reference to point to the copy. */
8686 entry = *slot;
8687 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8688 }
8689 else
8690 {
8691 dw_die_ref parent = unit;
8692 dw_die_ref copy = clone_die (targ);
8693
8694 /* Record in DECL_TABLE that TARG has been copied.
8695 Need to do this now, before the recursive call,
8696 because DECL_TABLE may be expanded and SLOT
8697 would no longer be a valid pointer. */
8698 entry = XCNEW (struct decl_table_entry);
8699 entry->orig = targ;
8700 entry->copy = copy;
8701 *slot = entry;
8702
8703 /* If TARG is not a declaration DIE, we need to copy its
8704 children. */
8705 if (!is_declaration_die (targ))
8706 {
8707 FOR_EACH_CHILD (
8708 targ, c,
8709 add_child_die (copy,
8710 clone_tree_partial (c, decl_table)));
8711 }
8712
8713 /* Make sure the cloned tree is marked as part of the
8714 type unit. */
8715 mark_dies (copy);
8716
8717 /* If TARG has surrounding context, copy its ancestor tree
8718 into the new type unit. */
8719 if (targ->die_parent != NULL
8720 && !is_unit_die (targ->die_parent))
8721 parent = copy_ancestor_tree (unit, targ->die_parent,
8722 decl_table);
8723
8724 add_child_die (parent, copy);
8725 a->dw_attr_val.v.val_die_ref.die = copy;
8726
8727 /* Make sure the newly-copied DIE is walked. If it was
8728 installed in a previously-added context, it won't
8729 get visited otherwise. */
8730 if (parent != unit)
8731 {
8732 /* Find the highest point of the newly-added tree,
8733 mark each node along the way, and walk from there. */
8734 parent->die_mark = 1;
8735 while (parent->die_parent
8736 && parent->die_parent->die_mark == 0)
8737 {
8738 parent = parent->die_parent;
8739 parent->die_mark = 1;
8740 }
8741 copy_decls_walk (unit, parent, decl_table);
8742 }
8743 }
8744 }
8745 }
8746
8747 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8748 }
8749
8750 /* Copy declarations for "unworthy" types into the new comdat section.
8751 Incomplete types, modified types, and certain other types aren't broken
8752 out into comdat sections of their own, so they don't have a signature,
8753 and we need to copy the declaration into the same section so that we
8754 don't have an external reference. */
8755
8756 static void
8757 copy_decls_for_unworthy_types (dw_die_ref unit)
8758 {
8759 mark_dies (unit);
8760 decl_hash_type decl_table (10);
8761 copy_decls_walk (unit, unit, &decl_table);
8762 unmark_dies (unit);
8763 }
8764
8765 /* Traverse the DIE and add a sibling attribute if it may have the
8766 effect of speeding up access to siblings. To save some space,
8767 avoid generating sibling attributes for DIEs without children. */
8768
8769 static void
8770 add_sibling_attributes (dw_die_ref die)
8771 {
8772 dw_die_ref c;
8773
8774 if (! die->die_child)
8775 return;
8776
8777 if (die->die_parent && die != die->die_parent->die_child)
8778 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8779
8780 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8781 }
8782
8783 /* Output all location lists for the DIE and its children. */
8784
8785 static void
8786 output_location_lists (dw_die_ref die)
8787 {
8788 dw_die_ref c;
8789 dw_attr_node *a;
8790 unsigned ix;
8791
8792 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8793 if (AT_class (a) == dw_val_class_loc_list)
8794 output_loc_list (AT_loc_list (a));
8795
8796 FOR_EACH_CHILD (die, c, output_location_lists (c));
8797 }
8798
8799 /* During assign_location_list_indexes and output_loclists_offsets this
8800 holds the current index; afterwards it is the number of assigned indexes
8801 (i.e. how large the .debug_loclists* offset table should be). */
8802 static unsigned int loc_list_idx;
8803
8804 /* Output all location list offsets for the DIE and its children. */
8805
8806 static void
8807 output_loclists_offsets (dw_die_ref die)
8808 {
8809 dw_die_ref c;
8810 dw_attr_node *a;
8811 unsigned ix;
8812
8813 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8814 if (AT_class (a) == dw_val_class_loc_list)
8815 {
8816 dw_loc_list_ref l = AT_loc_list (a);
8817 if (l->offset_emitted)
8818 continue;
8819 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8820 loc_section_label, NULL);
8821 gcc_assert (l->hash == loc_list_idx);
8822 loc_list_idx++;
8823 l->offset_emitted = true;
8824 }
8825
8826 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8827 }
8828
8829 /* Recursively set indexes of location lists. */
8830
8831 static void
8832 assign_location_list_indexes (dw_die_ref die)
8833 {
8834 dw_die_ref c;
8835 dw_attr_node *a;
8836 unsigned ix;
8837
8838 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8839 if (AT_class (a) == dw_val_class_loc_list)
8840 {
8841 dw_loc_list_ref list = AT_loc_list (a);
8842 if (!list->num_assigned)
8843 {
8844 list->num_assigned = true;
8845 list->hash = loc_list_idx++;
8846 }
8847 }
8848
8849 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8850 }
8851
8852 /* We want to limit the number of external references, because they are
8853 larger than local references: a relocation takes multiple words, and
8854 even a sig8 reference is always eight bytes, whereas a local reference
8855 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8856 So if we encounter multiple external references to the same type DIE, we
8857 make a local typedef stub for it and redirect all references there.
8858
8859 This is the element of the hash table for keeping track of these
8860 references. */
8861
8862 struct external_ref
8863 {
8864 dw_die_ref type;
8865 dw_die_ref stub;
8866 unsigned n_refs;
8867 };
8868
8869 /* Hashtable helpers. */
8870
8871 struct external_ref_hasher : free_ptr_hash <external_ref>
8872 {
8873 static inline hashval_t hash (const external_ref *);
8874 static inline bool equal (const external_ref *, const external_ref *);
8875 };
8876
8877 inline hashval_t
8878 external_ref_hasher::hash (const external_ref *r)
8879 {
8880 dw_die_ref die = r->type;
8881 hashval_t h = 0;
8882
8883 /* We can't use the address of the DIE for hashing, because
8884 that will make the order of the stub DIEs non-deterministic. */
8885 if (! die->comdat_type_p)
8886 /* We have a symbol; use it to compute a hash. */
8887 h = htab_hash_string (die->die_id.die_symbol);
8888 else
8889 {
8890 /* We have a type signature; use a subset of the bits as the hash.
8891 The 8-byte signature is at least as large as hashval_t. */
8892 comdat_type_node *type_node = die->die_id.die_type_node;
8893 memcpy (&h, type_node->signature, sizeof (h));
8894 }
8895 return h;
8896 }
8897
8898 inline bool
8899 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8900 {
8901 return r1->type == r2->type;
8902 }
8903
8904 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8905
8906 /* Return a pointer to the external_ref for references to DIE. */
8907
8908 static struct external_ref *
8909 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8910 {
8911 struct external_ref ref, *ref_p;
8912 external_ref **slot;
8913
8914 ref.type = die;
8915 slot = map->find_slot (&ref, INSERT);
8916 if (*slot != HTAB_EMPTY_ENTRY)
8917 return *slot;
8918
8919 ref_p = XCNEW (struct external_ref);
8920 ref_p->type = die;
8921 *slot = ref_p;
8922 return ref_p;
8923 }
8924
8925 /* Subroutine of optimize_external_refs, below.
8926
8927 If we see a type skeleton, record it as our stub. If we see external
8928 references, remember how many we've seen. */
8929
8930 static void
8931 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8932 {
8933 dw_die_ref c;
8934 dw_attr_node *a;
8935 unsigned ix;
8936 struct external_ref *ref_p;
8937
8938 if (is_type_die (die)
8939 && (c = get_AT_ref (die, DW_AT_signature)))
8940 {
8941 /* This is a local skeleton; use it for local references. */
8942 ref_p = lookup_external_ref (map, c);
8943 ref_p->stub = die;
8944 }
8945
8946 /* Scan the DIE references, and remember any that refer to DIEs from
8947 other CUs (i.e. those which are not marked). */
8948 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8949 if (AT_class (a) == dw_val_class_die_ref
8950 && (c = AT_ref (a))->die_mark == 0
8951 && is_type_die (c))
8952 {
8953 ref_p = lookup_external_ref (map, c);
8954 ref_p->n_refs++;
8955 }
8956
8957 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8958 }
8959
8960 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8961 points to an external_ref, DATA is the CU we're processing. If we don't
8962 already have a local stub, and we have multiple refs, build a stub. */
8963
8964 int
8965 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8966 {
8967 struct external_ref *ref_p = *slot;
8968
8969 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8970 {
8971 /* We have multiple references to this type, so build a small stub.
8972 Both of these forms are a bit dodgy from the perspective of the
8973 DWARF standard, since technically they should have names. */
8974 dw_die_ref cu = data;
8975 dw_die_ref type = ref_p->type;
8976 dw_die_ref stub = NULL;
8977
8978 if (type->comdat_type_p)
8979 {
8980 /* If we refer to this type via sig8, use AT_signature. */
8981 stub = new_die (type->die_tag, cu, NULL_TREE);
8982 add_AT_die_ref (stub, DW_AT_signature, type);
8983 }
8984 else
8985 {
8986 /* Otherwise, use a typedef with no name. */
8987 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8988 add_AT_die_ref (stub, DW_AT_type, type);
8989 }
8990
8991 stub->die_mark++;
8992 ref_p->stub = stub;
8993 }
8994 return 1;
8995 }
8996
8997 /* DIE is a unit; look through all the DIE references to see if there are
8998 any external references to types, and if so, create local stubs for
8999 them which will be applied in build_abbrev_table. This is useful because
9000 references to local DIEs are smaller. */
9001
9002 static external_ref_hash_type *
9003 optimize_external_refs (dw_die_ref die)
9004 {
9005 external_ref_hash_type *map = new external_ref_hash_type (10);
9006 optimize_external_refs_1 (die, map);
9007 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9008 return map;
9009 }
9010
9011 /* The following 4 variables are temporaries that are computed only during the
9012 build_abbrev_table call and used and released during the following
9013 optimize_abbrev_table call. */
9014
9015 /* First abbrev_id that can be optimized based on usage. */
9016 static unsigned int abbrev_opt_start;
9017
9018 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9019 abbrev_id smaller than this, because they must already be sized
9020 during build_abbrev_table). */
9021 static unsigned int abbrev_opt_base_type_end;
9022
9023 /* Vector of usage counts during build_abbrev_table. Indexed by
9024 abbrev_id - abbrev_opt_start. */
9025 static vec<unsigned int> abbrev_usage_count;
9026
9027 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9028 static vec<dw_die_ref> sorted_abbrev_dies;
9029
9030 /* The format of each DIE (and its attribute value pairs) is encoded in an
9031 abbreviation table. This routine builds the abbreviation table and assigns
9032 a unique abbreviation id for each abbreviation entry. The children of each
9033 die are visited recursively. */
9034
9035 static void
9036 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9037 {
9038 unsigned int abbrev_id = 0;
9039 dw_die_ref c;
9040 dw_attr_node *a;
9041 unsigned ix;
9042 dw_die_ref abbrev;
9043
9044 /* Scan the DIE references, and replace any that refer to
9045 DIEs from other CUs (i.e. those which are not marked) with
9046 the local stubs we built in optimize_external_refs. */
9047 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9048 if (AT_class (a) == dw_val_class_die_ref
9049 && (c = AT_ref (a))->die_mark == 0)
9050 {
9051 struct external_ref *ref_p;
9052 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9053
9054 ref_p = lookup_external_ref (extern_map, c);
9055 if (ref_p->stub && ref_p->stub != die)
9056 change_AT_die_ref (a, ref_p->stub);
9057 else
9058 /* We aren't changing this reference, so mark it external. */
9059 set_AT_ref_external (a, 1);
9060 }
9061
9062 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9063 {
9064 dw_attr_node *die_a, *abbrev_a;
9065 unsigned ix;
9066 bool ok = true;
9067
9068 if (abbrev_id == 0)
9069 continue;
9070 if (abbrev->die_tag != die->die_tag)
9071 continue;
9072 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9073 continue;
9074
9075 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9076 continue;
9077
9078 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9079 {
9080 abbrev_a = &(*abbrev->die_attr)[ix];
9081 if ((abbrev_a->dw_attr != die_a->dw_attr)
9082 || (value_format (abbrev_a) != value_format (die_a)))
9083 {
9084 ok = false;
9085 break;
9086 }
9087 }
9088 if (ok)
9089 break;
9090 }
9091
9092 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9093 {
9094 vec_safe_push (abbrev_die_table, die);
9095 if (abbrev_opt_start)
9096 abbrev_usage_count.safe_push (0);
9097 }
9098 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9099 {
9100 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9101 sorted_abbrev_dies.safe_push (die);
9102 }
9103
9104 die->die_abbrev = abbrev_id;
9105 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9106 }
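/* Illustration only (not emitted by GCC as such): two DIEs share an
   abbreviation entry exactly when their tag, has-children flag and
   (attribute, form) lists all match.  E.g. two local variables

     DW_TAG_variable  DW_AT_name (DW_FORM_string)  DW_AT_type (DW_FORM_ref)

   get the same abbrev code, so each DIE in .debug_info repeats only the
   uleb128 code and the attribute values, never the attribute/form pairs.  */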
9107
9108 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9109 by die_abbrev's usage count, from the most commonly used
9110 abbreviation to the least. */
9111
9112 static int
9113 die_abbrev_cmp (const void *p1, const void *p2)
9114 {
9115 dw_die_ref die1 = *(const dw_die_ref *) p1;
9116 dw_die_ref die2 = *(const dw_die_ref *) p2;
9117
9118 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9119 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9120
9121 if (die1->die_abbrev >= abbrev_opt_base_type_end
9122 && die2->die_abbrev >= abbrev_opt_base_type_end)
9123 {
9124 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9125 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9126 return -1;
9127 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9128 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9129 return 1;
9130 }
9131
9132 /* Stabilize the sort. */
9133 if (die1->die_abbrev < die2->die_abbrev)
9134 return -1;
9135 if (die1->die_abbrev > die2->die_abbrev)
9136 return 1;
9137
9138 return 0;
9139 }
9140
9141 /* Convert dw_val_class_const, dw_val_class_unsigned_const and
9142 dw_val_class_file class attributes of the DIEs between
9143 sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1] into their
9144 dw_val_class_*_implicit counterparts.  */
9145
9146 static void
9147 optimize_implicit_const (unsigned int first_id, unsigned int end,
9148 vec<bool> &implicit_consts)
9149 {
9150 /* It never makes sense if there is just one DIE using the abbreviation. */
9151 if (end < first_id + 2)
9152 return;
9153
9154 dw_attr_node *a;
9155 unsigned ix, i;
9156 dw_die_ref die = sorted_abbrev_dies[first_id];
9157 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9158 if (implicit_consts[ix])
9159 {
9160 enum dw_val_class new_class = dw_val_class_none;
9161 switch (AT_class (a))
9162 {
9163 case dw_val_class_unsigned_const:
9164 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9165 continue;
9166
9167 /* The .debug_abbrev section will grow by
9168 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9169 in all the DIEs using that abbreviation. */
9170 if (constant_size (AT_unsigned (a)) * (end - first_id)
9171 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9172 continue;
9173
9174 new_class = dw_val_class_unsigned_const_implicit;
9175 break;
9176
9177 case dw_val_class_const:
9178 new_class = dw_val_class_const_implicit;
9179 break;
9180
9181 case dw_val_class_file:
9182 new_class = dw_val_class_file_implicit;
9183 break;
9184
9185 default:
9186 continue;
9187 }
9188 for (i = first_id; i < end; i++)
9189 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9190 = new_class;
9191 }
9192 }
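/* Worked example with made-up numbers: suppose 10 DIEs share an abbrev whose
   DW_AT_byte_size is always 0x120.  As a plain dw_val_class_unsigned_const
   that costs constant_size (0x120) == 2 bytes in each DIE, i.e. 20 bytes of
   .debug_info.  Turning it into DW_FORM_implicit_const moves the value into
   .debug_abbrev once, for size_of_sleb128 (0x120) == 2 bytes, so the check
   above (2 * 10 > 2) accepts the conversion and saves 18 bytes overall.  */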
9193
9194 /* Attempt to optimize the abbreviation table, considering only the
9195 abbreviations numbered abbrev_opt_start and above.  */
9196
9197 static void
9198 optimize_abbrev_table (void)
9199 {
9200 if (abbrev_opt_start
9201 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9202 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9203 {
9204 auto_vec<bool, 32> implicit_consts;
9205 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9206
9207 unsigned int abbrev_id = abbrev_opt_start - 1;
9208 unsigned int first_id = ~0U;
9209 unsigned int last_abbrev_id = 0;
9210 unsigned int i;
9211 dw_die_ref die;
9212 if (abbrev_opt_base_type_end > abbrev_opt_start)
9213 abbrev_id = abbrev_opt_base_type_end - 1;
9214 /* Reassign abbreviation ids from abbrev_opt_start onwards, so that the
9215 most commonly used abbreviations come first.  */
9216 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9217 {
9218 dw_attr_node *a;
9219 unsigned ix;
9220
9221 /* If calc_base_type_die_sizes has been called, the CU and
9222 base types after it can't be optimized, because we've already
9223 calculated their DIE offsets. We've sorted them first. */
9224 if (die->die_abbrev < abbrev_opt_base_type_end)
9225 continue;
9226 if (die->die_abbrev != last_abbrev_id)
9227 {
9228 last_abbrev_id = die->die_abbrev;
9229 if (dwarf_version >= 5 && first_id != ~0U)
9230 optimize_implicit_const (first_id, i, implicit_consts);
9231 abbrev_id++;
9232 (*abbrev_die_table)[abbrev_id] = die;
9233 if (dwarf_version >= 5)
9234 {
9235 first_id = i;
9236 implicit_consts.truncate (0);
9237
9238 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9239 switch (AT_class (a))
9240 {
9241 case dw_val_class_const:
9242 case dw_val_class_unsigned_const:
9243 case dw_val_class_file:
9244 implicit_consts.safe_push (true);
9245 break;
9246 default:
9247 implicit_consts.safe_push (false);
9248 break;
9249 }
9250 }
9251 }
9252 else if (dwarf_version >= 5)
9253 {
9254 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9255 if (!implicit_consts[ix])
9256 continue;
9257 else
9258 {
9259 dw_attr_node *other_a
9260 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9261 if (!dw_val_equal_p (&a->dw_attr_val,
9262 &other_a->dw_attr_val))
9263 implicit_consts[ix] = false;
9264 }
9265 }
9266 die->die_abbrev = abbrev_id;
9267 }
9268 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9269 if (dwarf_version >= 5 && first_id != ~0U)
9270 optimize_implicit_const (first_id, i, implicit_consts);
9271 }
9272
9273 abbrev_opt_start = 0;
9274 abbrev_opt_base_type_end = 0;
9275 abbrev_usage_count.release ();
9276 sorted_abbrev_dies.release ();
9277 }
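/* The payoff of the reordering above comes from the uleb128 encoding of
   abbrev codes: codes 1..127 take one byte per DIE, 128..16383 take two,
   and so on.  A minimal standalone sketch of that size computation, for
   illustration only (GCC uses size_of_uleb128 elsewhere in this file):  */
#if 0
static unsigned int
example_uleb128_size (unsigned HOST_WIDE_INT value)
{
  /* Each uleb128 byte carries 7 value bits.  */
  unsigned int size = 0;
  do
    {
      value >>= 7;
      size++;
    }
  while (value != 0);
  return size;  /* 127 -> 1, 128 -> 2, 16384 -> 3, ...  */
}
#endif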
9278 \f
9279 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9280
9281 static int
9282 constant_size (unsigned HOST_WIDE_INT value)
9283 {
9284 int log;
9285
9286 if (value == 0)
9287 log = 0;
9288 else
9289 log = floor_log2 (value);
9290
9291 log = log / 8;
9292 log = 1 << (floor_log2 (log) + 1);
9293
9294 return log;
9295 }
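/* For illustration only (not used anywhere): constant_size rounds the number
   of significant bytes in VALUE up to 1, 2, 4 or 8, matching the fixed-size
   data forms DW_FORM_data1/2/4/8.  An equivalent standalone sketch:  */
#if 0
static int
example_constant_size (unsigned HOST_WIDE_INT value)
{
  int bytes = 1;
  while (bytes < 8 && (value >> (bytes * 8)) != 0)
    bytes *= 2;
  return bytes;  /* 0 -> 1, 0xff -> 1, 0x100 -> 2, 0x10000 -> 4, ...  */
}
#endif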
9296
9297 /* Return the size of a DIE as it is represented in the
9298 .debug_info section. */
9299
9300 static unsigned long
9301 size_of_die (dw_die_ref die)
9302 {
9303 unsigned long size = 0;
9304 dw_attr_node *a;
9305 unsigned ix;
9306 enum dwarf_form form;
9307
9308 size += size_of_uleb128 (die->die_abbrev);
9309 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9310 {
9311 switch (AT_class (a))
9312 {
9313 case dw_val_class_addr:
9314 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9315 {
9316 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9317 size += size_of_uleb128 (AT_index (a));
9318 }
9319 else
9320 size += DWARF2_ADDR_SIZE;
9321 break;
9322 case dw_val_class_offset:
9323 size += DWARF_OFFSET_SIZE;
9324 break;
9325 case dw_val_class_loc:
9326 {
9327 unsigned long lsize = size_of_locs (AT_loc (a));
9328
9329 /* Block length. */
9330 if (dwarf_version >= 4)
9331 size += size_of_uleb128 (lsize);
9332 else
9333 size += constant_size (lsize);
9334 size += lsize;
9335 }
9336 break;
9337 case dw_val_class_loc_list:
9338 case dw_val_class_view_list:
9339 if (dwarf_split_debug_info && dwarf_version >= 5)
9340 {
9341 gcc_assert (AT_loc_list (a)->num_assigned);
9342 size += size_of_uleb128 (AT_loc_list (a)->hash);
9343 }
9344 else
9345 size += DWARF_OFFSET_SIZE;
9346 break;
9347 case dw_val_class_range_list:
9348 if (value_format (a) == DW_FORM_rnglistx)
9349 {
9350 gcc_assert (rnglist_idx);
9351 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9352 size += size_of_uleb128 (r->idx);
9353 }
9354 else
9355 size += DWARF_OFFSET_SIZE;
9356 break;
9357 case dw_val_class_const:
9358 size += size_of_sleb128 (AT_int (a));
9359 break;
9360 case dw_val_class_unsigned_const:
9361 {
9362 int csize = constant_size (AT_unsigned (a));
9363 if (dwarf_version == 3
9364 && a->dw_attr == DW_AT_data_member_location
9365 && csize >= 4)
9366 size += size_of_uleb128 (AT_unsigned (a));
9367 else
9368 size += csize;
9369 }
9370 break;
9371 case dw_val_class_symview:
9372 if (symview_upper_bound <= 0xff)
9373 size += 1;
9374 else if (symview_upper_bound <= 0xffff)
9375 size += 2;
9376 else if (symview_upper_bound <= 0xffffffff)
9377 size += 4;
9378 else
9379 size += 8;
9380 break;
9381 case dw_val_class_const_implicit:
9382 case dw_val_class_unsigned_const_implicit:
9383 case dw_val_class_file_implicit:
9384 /* These occupy no size in the DIE, just an extra sleb128 in
9385 .debug_abbrev. */
9386 break;
9387 case dw_val_class_const_double:
9388 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9389 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9390 size++; /* block */
9391 break;
9392 case dw_val_class_wide_int:
9393 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9394 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9395 if (get_full_len (*a->dw_attr_val.v.val_wide)
9396 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9397 size++; /* block */
9398 break;
9399 case dw_val_class_vec:
9400 size += constant_size (a->dw_attr_val.v.val_vec.length
9401 * a->dw_attr_val.v.val_vec.elt_size)
9402 + a->dw_attr_val.v.val_vec.length
9403 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9404 break;
9405 case dw_val_class_flag:
9406 if (dwarf_version >= 4)
9407 /* Currently all add_AT_flag calls pass in 1 as last argument,
9408 so DW_FORM_flag_present can be used. If that ever changes,
9409 we'll need to use DW_FORM_flag and have some optimization
9410 in build_abbrev_table that will change those to
9411 DW_FORM_flag_present if it is set to 1 in all DIEs using
9412 the same abbrev entry. */
9413 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9414 else
9415 size += 1;
9416 break;
9417 case dw_val_class_die_ref:
9418 if (AT_ref_external (a))
9419 {
9420 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9421 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9422 is sized by target address length, whereas in DWARF3
9423 it's always sized as an offset. */
9424 if (use_debug_types)
9425 size += DWARF_TYPE_SIGNATURE_SIZE;
9426 else if (dwarf_version == 2)
9427 size += DWARF2_ADDR_SIZE;
9428 else
9429 size += DWARF_OFFSET_SIZE;
9430 }
9431 else
9432 size += DWARF_OFFSET_SIZE;
9433 break;
9434 case dw_val_class_fde_ref:
9435 size += DWARF_OFFSET_SIZE;
9436 break;
9437 case dw_val_class_lbl_id:
9438 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9439 {
9440 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9441 size += size_of_uleb128 (AT_index (a));
9442 }
9443 else
9444 size += DWARF2_ADDR_SIZE;
9445 break;
9446 case dw_val_class_lineptr:
9447 case dw_val_class_macptr:
9448 case dw_val_class_loclistsptr:
9449 size += DWARF_OFFSET_SIZE;
9450 break;
9451 case dw_val_class_str:
9452 form = AT_string_form (a);
9453 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9454 size += DWARF_OFFSET_SIZE;
9455 else if (form == dwarf_FORM (DW_FORM_strx))
9456 size += size_of_uleb128 (AT_index (a));
9457 else
9458 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9459 break;
9460 case dw_val_class_file:
9461 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9462 break;
9463 case dw_val_class_data8:
9464 size += 8;
9465 break;
9466 case dw_val_class_vms_delta:
9467 size += DWARF_OFFSET_SIZE;
9468 break;
9469 case dw_val_class_high_pc:
9470 size += DWARF2_ADDR_SIZE;
9471 break;
9472 case dw_val_class_discr_value:
9473 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9474 break;
9475 case dw_val_class_discr_list:
9476 {
9477 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9478
9479 /* This is a block, so we have the block length and then its
9480 data. */
9481 size += constant_size (block_size) + block_size;
9482 }
9483 break;
9484 default:
9485 gcc_unreachable ();
9486 }
9487 }
9488
9489 return size;
9490 }
9491
9492 /* Size the debugging information associated with a given DIE.  Visits the
9493 DIE's children recursively.  Updates the global variable next_die_offset
9494 each time through.  Uses the current value of next_die_offset to update
9495 the die_offset field in each DIE.  */
9496
9497 static void
9498 calc_die_sizes (dw_die_ref die)
9499 {
9500 dw_die_ref c;
9501
9502 gcc_assert (die->die_offset == 0
9503 || (unsigned long int) die->die_offset == next_die_offset);
9504 die->die_offset = next_die_offset;
9505 next_die_offset += size_of_die (die);
9506
9507 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9508
9509 if (die->die_child != NULL)
9510 /* Count the null byte used to terminate sibling lists. */
9511 next_die_offset += 1;
9512 }
9513
9514 /* Size just the base type children at the start of the CU.
9515 This is needed because build_abbrev_table needs to size location
9516 descriptions, and sizing of type-based stack ops needs to know the
9517 die_offset values of the base types.  */
9518
9519 static void
9520 calc_base_type_die_sizes (void)
9521 {
9522 unsigned long die_offset = (dwarf_split_debug_info
9523 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9524 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9525 unsigned int i;
9526 dw_die_ref base_type;
9527 #if ENABLE_ASSERT_CHECKING
9528 dw_die_ref prev = comp_unit_die ()->die_child;
9529 #endif
9530
9531 die_offset += size_of_die (comp_unit_die ());
9532 for (i = 0; base_types.iterate (i, &base_type); i++)
9533 {
9534 #if ENABLE_ASSERT_CHECKING
9535 gcc_assert (base_type->die_offset == 0
9536 && prev->die_sib == base_type
9537 && base_type->die_child == NULL
9538 && base_type->die_abbrev);
9539 prev = base_type;
9540 #endif
9541 if (abbrev_opt_start
9542 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9543 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9544 base_type->die_offset = die_offset;
9545 die_offset += size_of_die (base_type);
9546 }
9547 }
9548
9549 /* Set the marks for a die and its children. We do this so
9550 that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9551 DIEs in the same CU will be marked. We used to clear out the offset
9552 and use that as the flag, but ran into ordering problems. */
9553
9554 static void
9555 mark_dies (dw_die_ref die)
9556 {
9557 dw_die_ref c;
9558
9559 gcc_assert (!die->die_mark);
9560
9561 die->die_mark = 1;
9562 FOR_EACH_CHILD (die, c, mark_dies (c));
9563 }
9564
9565 /* Clear the marks for a die and its children. */
9566
9567 static void
9568 unmark_dies (dw_die_ref die)
9569 {
9570 dw_die_ref c;
9571
9572 if (! use_debug_types)
9573 gcc_assert (die->die_mark);
9574
9575 die->die_mark = 0;
9576 FOR_EACH_CHILD (die, c, unmark_dies (c));
9577 }
9578
9579 /* Clear the marks for a die, its children and referred dies. */
9580
9581 static void
9582 unmark_all_dies (dw_die_ref die)
9583 {
9584 dw_die_ref c;
9585 dw_attr_node *a;
9586 unsigned ix;
9587
9588 if (!die->die_mark)
9589 return;
9590 die->die_mark = 0;
9591
9592 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9593
9594 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9595 if (AT_class (a) == dw_val_class_die_ref)
9596 unmark_all_dies (AT_ref (a));
9597 }
9598
9599 /* Calculate whether the entry should appear in the final output file.  It
9600 may be from a pruned type.  */
9601
9602 static bool
9603 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9604 {
9605 /* By limiting gnu pubnames to definitions only, gold can generate a
9606 gdb index without entries for declarations, which don't include
9607 enough information to be useful. */
9608 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9609 return false;
9610
9611 if (table == pubname_table)
9612 {
9613 /* Enumerator names are part of the pubname table, but the
9614 parent DW_TAG_enumeration_type die may have been pruned.
9615 Don't output them if that is the case. */
9616 if (p->die->die_tag == DW_TAG_enumerator &&
9617 (p->die->die_parent == NULL
9618 || !p->die->die_parent->die_perennial_p))
9619 return false;
9620
9621 /* Everything else in the pubname table is included. */
9622 return true;
9623 }
9624
9625 /* The pubtypes table shouldn't include types that have been
9626 pruned. */
9627 return (p->die->die_offset != 0
9628 || !flag_eliminate_unused_debug_types);
9629 }
9630
9631 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9632 generated for the compilation unit. */
9633
9634 static unsigned long
9635 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9636 {
9637 unsigned long size;
9638 unsigned i;
9639 pubname_entry *p;
9640 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9641
9642 size = DWARF_PUBNAMES_HEADER_SIZE;
9643 FOR_EACH_VEC_ELT (*names, i, p)
9644 if (include_pubname_in_output (names, p))
9645 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9646
9647 size += DWARF_OFFSET_SIZE;
9648 return size;
9649 }
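/* Worked example (illustrative, assuming DWARF_OFFSET_SIZE == 4): with
   -ggnu-pubnames a single entry for "foo" contributes
   4 (DIE offset) + strlen ("foo") + 1 + 1 (flag byte) = 9 bytes, on top of
   the header and the terminating zero offset word counted above.  */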
9650
9651 /* Return the size of the information in the .debug_aranges section. */
9652
9653 static unsigned long
9654 size_of_aranges (void)
9655 {
9656 unsigned long size;
9657
9658 size = DWARF_ARANGES_HEADER_SIZE;
9659
9660 /* Count the address/length pair for this compilation unit. */
9661 if (text_section_used)
9662 size += 2 * DWARF2_ADDR_SIZE;
9663 if (cold_text_section_used)
9664 size += 2 * DWARF2_ADDR_SIZE;
9665 if (have_multiple_function_sections)
9666 {
9667 unsigned fde_idx;
9668 dw_fde_ref fde;
9669
9670 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9671 {
9672 if (DECL_IGNORED_P (fde->decl))
9673 continue;
9674 if (!fde->in_std_section)
9675 size += 2 * DWARF2_ADDR_SIZE;
9676 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9677 size += 2 * DWARF2_ADDR_SIZE;
9678 }
9679 }
9680
9681 /* Count the two zero words used to terminate the address range table.  */
9682 size += 2 * DWARF2_ADDR_SIZE;
9683 return size;
9684 }
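/* Worked example (illustrative, 64-bit target with DWARF2_ADDR_SIZE == 8): a
   CU whose code is all in .text contributes one address/length pair (16
   bytes) plus the 16-byte terminator, i.e. DWARF_ARANGES_HEADER_SIZE + 32
   bytes of .debug_aranges.  Each additional out-of-line function section
   adds 16 more.  */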
9685 \f
9686 /* Select the encoding of an attribute value. */
9687
9688 static enum dwarf_form
9689 value_format (dw_attr_node *a)
9690 {
9691 switch (AT_class (a))
9692 {
9693 case dw_val_class_addr:
9694 /* Only very few attributes allow DW_FORM_addr. */
9695 switch (a->dw_attr)
9696 {
9697 case DW_AT_low_pc:
9698 case DW_AT_high_pc:
9699 case DW_AT_entry_pc:
9700 case DW_AT_trampoline:
9701 return (AT_index (a) == NOT_INDEXED
9702 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9703 default:
9704 break;
9705 }
9706 switch (DWARF2_ADDR_SIZE)
9707 {
9708 case 1:
9709 return DW_FORM_data1;
9710 case 2:
9711 return DW_FORM_data2;
9712 case 4:
9713 return DW_FORM_data4;
9714 case 8:
9715 return DW_FORM_data8;
9716 default:
9717 gcc_unreachable ();
9718 }
9719 case dw_val_class_loc_list:
9720 case dw_val_class_view_list:
9721 if (dwarf_split_debug_info
9722 && dwarf_version >= 5
9723 && AT_loc_list (a)->num_assigned)
9724 return DW_FORM_loclistx;
9725 /* FALLTHRU */
9726 case dw_val_class_range_list:
9727 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9728 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9729 care about sizes of .debug* sections in shared libraries and
9730 executables and don't take into account relocations that affect just
9731 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9732 table in the .debug_rnglists section.  */
9733 if (dwarf_split_debug_info
9734 && dwarf_version >= 5
9735 && AT_class (a) == dw_val_class_range_list
9736 && rnglist_idx
9737 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9738 return DW_FORM_rnglistx;
9739 if (dwarf_version >= 4)
9740 return DW_FORM_sec_offset;
9741 /* FALLTHRU */
9742 case dw_val_class_vms_delta:
9743 case dw_val_class_offset:
9744 switch (DWARF_OFFSET_SIZE)
9745 {
9746 case 4:
9747 return DW_FORM_data4;
9748 case 8:
9749 return DW_FORM_data8;
9750 default:
9751 gcc_unreachable ();
9752 }
9753 case dw_val_class_loc:
9754 if (dwarf_version >= 4)
9755 return DW_FORM_exprloc;
9756 switch (constant_size (size_of_locs (AT_loc (a))))
9757 {
9758 case 1:
9759 return DW_FORM_block1;
9760 case 2:
9761 return DW_FORM_block2;
9762 case 4:
9763 return DW_FORM_block4;
9764 default:
9765 gcc_unreachable ();
9766 }
9767 case dw_val_class_const:
9768 return DW_FORM_sdata;
9769 case dw_val_class_unsigned_const:
9770 switch (constant_size (AT_unsigned (a)))
9771 {
9772 case 1:
9773 return DW_FORM_data1;
9774 case 2:
9775 return DW_FORM_data2;
9776 case 4:
9777 /* In DWARF3 DW_AT_data_member_location with
9778 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9779 constant, so we need to use DW_FORM_udata if we need
9780 a large constant. */
9781 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9782 return DW_FORM_udata;
9783 return DW_FORM_data4;
9784 case 8:
9785 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9786 return DW_FORM_udata;
9787 return DW_FORM_data8;
9788 default:
9789 gcc_unreachable ();
9790 }
9791 case dw_val_class_const_implicit:
9792 case dw_val_class_unsigned_const_implicit:
9793 case dw_val_class_file_implicit:
9794 return DW_FORM_implicit_const;
9795 case dw_val_class_const_double:
9796 switch (HOST_BITS_PER_WIDE_INT)
9797 {
9798 case 8:
9799 return DW_FORM_data2;
9800 case 16:
9801 return DW_FORM_data4;
9802 case 32:
9803 return DW_FORM_data8;
9804 case 64:
9805 if (dwarf_version >= 5)
9806 return DW_FORM_data16;
9807 /* FALLTHRU */
9808 default:
9809 return DW_FORM_block1;
9810 }
9811 case dw_val_class_wide_int:
9812 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9813 {
9814 case 8:
9815 return DW_FORM_data1;
9816 case 16:
9817 return DW_FORM_data2;
9818 case 32:
9819 return DW_FORM_data4;
9820 case 64:
9821 return DW_FORM_data8;
9822 case 128:
9823 if (dwarf_version >= 5)
9824 return DW_FORM_data16;
9825 /* FALLTHRU */
9826 default:
9827 return DW_FORM_block1;
9828 }
9829 case dw_val_class_symview:
9830 /* ??? We might use uleb128, but then we'd have to compute
9831 .debug_info offsets in the assembler. */
9832 if (symview_upper_bound <= 0xff)
9833 return DW_FORM_data1;
9834 else if (symview_upper_bound <= 0xffff)
9835 return DW_FORM_data2;
9836 else if (symview_upper_bound <= 0xffffffff)
9837 return DW_FORM_data4;
9838 else
9839 return DW_FORM_data8;
9840 case dw_val_class_vec:
9841 switch (constant_size (a->dw_attr_val.v.val_vec.length
9842 * a->dw_attr_val.v.val_vec.elt_size))
9843 {
9844 case 1:
9845 return DW_FORM_block1;
9846 case 2:
9847 return DW_FORM_block2;
9848 case 4:
9849 return DW_FORM_block4;
9850 default:
9851 gcc_unreachable ();
9852 }
9853 case dw_val_class_flag:
9854 if (dwarf_version >= 4)
9855 {
9856 /* Currently all add_AT_flag calls pass in 1 as last argument,
9857 so DW_FORM_flag_present can be used. If that ever changes,
9858 we'll need to use DW_FORM_flag and have some optimization
9859 in build_abbrev_table that will change those to
9860 DW_FORM_flag_present if it is set to 1 in all DIEs using
9861 the same abbrev entry. */
9862 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9863 return DW_FORM_flag_present;
9864 }
9865 return DW_FORM_flag;
9866 case dw_val_class_die_ref:
9867 if (AT_ref_external (a))
9868 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9869 else
9870 return DW_FORM_ref;
9871 case dw_val_class_fde_ref:
9872 return DW_FORM_data;
9873 case dw_val_class_lbl_id:
9874 return (AT_index (a) == NOT_INDEXED
9875 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9876 case dw_val_class_lineptr:
9877 case dw_val_class_macptr:
9878 case dw_val_class_loclistsptr:
9879 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9880 case dw_val_class_str:
9881 return AT_string_form (a);
9882 case dw_val_class_file:
9883 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9884 {
9885 case 1:
9886 return DW_FORM_data1;
9887 case 2:
9888 return DW_FORM_data2;
9889 case 4:
9890 return DW_FORM_data4;
9891 default:
9892 gcc_unreachable ();
9893 }
9894
9895 case dw_val_class_data8:
9896 return DW_FORM_data8;
9897
9898 case dw_val_class_high_pc:
9899 switch (DWARF2_ADDR_SIZE)
9900 {
9901 case 1:
9902 return DW_FORM_data1;
9903 case 2:
9904 return DW_FORM_data2;
9905 case 4:
9906 return DW_FORM_data4;
9907 case 8:
9908 return DW_FORM_data8;
9909 default:
9910 gcc_unreachable ();
9911 }
9912
9913 case dw_val_class_discr_value:
9914 return (a->dw_attr_val.v.val_discr_value.pos
9915 ? DW_FORM_udata
9916 : DW_FORM_sdata);
9917 case dw_val_class_discr_list:
9918 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9919 {
9920 case 1:
9921 return DW_FORM_block1;
9922 case 2:
9923 return DW_FORM_block2;
9924 case 4:
9925 return DW_FORM_block4;
9926 default:
9927 gcc_unreachable ();
9928 }
9929
9930 default:
9931 gcc_unreachable ();
9932 }
9933 }
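/* Illustrative example of the DWARF 3 special case above: a
   DW_AT_data_member_location of 70000 has constant_size 4, but emitting it
   as DW_FORM_data4 would make DWARF 3 consumers read it as a loclistptr, so
   value_format picks DW_FORM_udata (3 uleb128 bytes) instead.  */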
9934
9935 /* Output the encoding of an attribute value. */
9936
9937 static void
9938 output_value_format (dw_attr_node *a)
9939 {
9940 enum dwarf_form form = value_format (a);
9941
9942 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9943 }
9944
9945 /* Given a die and id, produce the appropriate abbreviations. */
9946
9947 static void
9948 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9949 {
9950 unsigned ix;
9951 dw_attr_node *a_attr;
9952
9953 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9954 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9955 dwarf_tag_name (abbrev->die_tag));
9956
9957 if (abbrev->die_child != NULL)
9958 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9959 else
9960 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9961
9962 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9963 {
9964 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9965 dwarf_attr_name (a_attr->dw_attr));
9966 output_value_format (a_attr);
9967 if (value_format (a_attr) == DW_FORM_implicit_const)
9968 {
9969 if (AT_class (a_attr) == dw_val_class_file_implicit)
9970 {
9971 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9972 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9973 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9974 }
9975 else
9976 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9977 }
9978 }
9979
9980 dw2_asm_output_data (1, 0, NULL);
9981 dw2_asm_output_data (1, 0, NULL);
9982 }
9983
9984
9985 /* Output the .debug_abbrev section which defines the DIE abbreviation
9986 table. */
9987
9988 static void
9989 output_abbrev_section (void)
9990 {
9991 unsigned int abbrev_id;
9992 dw_die_ref abbrev;
9993
9994 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9995 if (abbrev_id != 0)
9996 output_die_abbrevs (abbrev_id, abbrev);
9997
9998 /* Terminate the table. */
9999 dw2_asm_output_data (1, 0, NULL);
10000 }
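/* For illustration only, the assembly emitted for one abbreviation might
   look roughly like this (the numeric values are standard DWARF constants;
   the exact directives and comments depend on the target and on -dA):

	.uleb128 0x1	# (abbrev code)
	.uleb128 0x11	# (TAG: DW_TAG_compile_unit)
	.byte	0x1	# DW_children_yes
	.uleb128 0x25	# (DW_AT_producer)
	.uleb128 0xe	# (DW_FORM_strp)
	...more attribute/form pairs...
	.byte	0	# two zero bytes end the attribute/form list
	.byte	0

   A final .byte 0 after the last abbreviation terminates the table.  */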
10001
10002 /* Return a new location list, given the begin and end range, and the
10003 expression. */
10004
10005 static inline dw_loc_list_ref
10006 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10007 const char *end, var_loc_view vend,
10008 const char *section)
10009 {
10010 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10011
10012 retlist->begin = begin;
10013 retlist->begin_entry = NULL;
10014 retlist->end = end;
10015 retlist->expr = expr;
10016 retlist->section = section;
10017 retlist->vbegin = vbegin;
10018 retlist->vend = vend;
10019
10020 return retlist;
10021 }
10022
10023 /* Return true iff there's any nonzero view number in the loc list.
10024
10025 ??? When views are not enabled, we'll often extend a single range
10026 to the entire function, so that we emit a single location
10027 expression rather than a location list. With views, even with a
10028 single range, we'll output a list if start or end have a nonzero
10029 view. If we change this, we may want to stop splitting a single
10030 range in dw_loc_list just because of a nonzero view, even if it
10031 straddles across hot/cold partitions. */
10032
10033 static bool
10034 loc_list_has_views (dw_loc_list_ref list)
10035 {
10036 if (!debug_variable_location_views)
10037 return false;
10038
10039 for (dw_loc_list_ref loc = list;
10040 loc != NULL; loc = loc->dw_loc_next)
10041 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10042 return true;
10043
10044 return false;
10045 }
10046
10047 /* Generate a new internal symbol for this location list node, if it
10048 hasn't got one yet. */
10049
10050 static inline void
10051 gen_llsym (dw_loc_list_ref list)
10052 {
10053 gcc_assert (!list->ll_symbol);
10054 list->ll_symbol = gen_internal_sym ("LLST");
10055
10056 if (!loc_list_has_views (list))
10057 return;
10058
10059 if (dwarf2out_locviews_in_attribute ())
10060 {
10061 /* Use the same label_num for the view list. */
10062 label_num--;
10063 list->vl_symbol = gen_internal_sym ("LVUS");
10064 }
10065 else
10066 list->vl_symbol = list->ll_symbol;
10067 }
10068
10069 /* Generate a symbol for the list, but only if we really want to emit
10070 it as a list. */
10071
10072 static inline void
10073 maybe_gen_llsym (dw_loc_list_ref list)
10074 {
10075 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10076 return;
10077
10078 gen_llsym (list);
10079 }
10080
10081 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10082 NULL, don't consider the size of the location expression.  If we're not
10083 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10084 representation in *SIZEP. */
10085
10086 static bool
10087 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10088 {
10089 /* Don't output an entry that starts and ends at the same address. */
10090 if (strcmp (curr->begin, curr->end) == 0
10091 && curr->vbegin == curr->vend && !curr->force)
10092 return true;
10093
10094 if (!sizep)
10095 return false;
10096
10097 unsigned long size = size_of_locs (curr->expr);
10098
10099 /* If the expression is too large, drop it on the floor. We could
10100 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10101 in the expression, but >= 64KB expressions for a single value
10102 in a single range are unlikely to be very useful.  */
10103 if (dwarf_version < 5 && size > 0xffff)
10104 return true;
10105
10106 *sizep = size;
10107
10108 return false;
10109 }
10110
10111 /* Output a view pair loclist entry for CURR, if it requires one. */
10112
10113 static void
10114 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10115 {
10116 if (!dwarf2out_locviews_in_loclist ())
10117 return;
10118
10119 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10120 return;
10121
10122 #ifdef DW_LLE_view_pair
10123 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10124
10125 if (dwarf2out_as_locview_support)
10126 {
10127 if (ZERO_VIEW_P (curr->vbegin))
10128 dw2_asm_output_data_uleb128 (0, "Location view begin");
10129 else
10130 {
10131 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10132 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10133 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10134 }
10135
10136 if (ZERO_VIEW_P (curr->vend))
10137 dw2_asm_output_data_uleb128 (0, "Location view end");
10138 else
10139 {
10140 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10141 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10142 dw2_asm_output_symname_uleb128 (label, "Location view end");
10143 }
10144 }
10145 else
10146 {
10147 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10148 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10149 }
10150 #endif /* DW_LLE_view_pair */
10151
10152 return;
10153 }
10154
10155 /* Output the location list given to us. */
10156
10157 static void
10158 output_loc_list (dw_loc_list_ref list_head)
10159 {
10160 int vcount = 0, lcount = 0;
10161
10162 if (list_head->emitted)
10163 return;
10164 list_head->emitted = true;
10165
10166 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10167 {
10168 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10169
10170 for (dw_loc_list_ref curr = list_head; curr != NULL;
10171 curr = curr->dw_loc_next)
10172 {
10173 unsigned long size;
10174
10175 if (skip_loc_list_entry (curr, &size))
10176 continue;
10177
10178 vcount++;
10179
10180 /* ?? dwarf_split_debug_info? */
10181 if (dwarf2out_as_locview_support)
10182 {
10183 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10184
10185 if (!ZERO_VIEW_P (curr->vbegin))
10186 {
10187 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10188 dw2_asm_output_symname_uleb128 (label,
10189 "View list begin (%s)",
10190 list_head->vl_symbol);
10191 }
10192 else
10193 dw2_asm_output_data_uleb128 (0,
10194 "View list begin (%s)",
10195 list_head->vl_symbol);
10196
10197 if (!ZERO_VIEW_P (curr->vend))
10198 {
10199 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10200 dw2_asm_output_symname_uleb128 (label,
10201 "View list end (%s)",
10202 list_head->vl_symbol);
10203 }
10204 else
10205 dw2_asm_output_data_uleb128 (0,
10206 "View list end (%s)",
10207 list_head->vl_symbol);
10208 }
10209 else
10210 {
10211 dw2_asm_output_data_uleb128 (curr->vbegin,
10212 "View list begin (%s)",
10213 list_head->vl_symbol);
10214 dw2_asm_output_data_uleb128 (curr->vend,
10215 "View list end (%s)",
10216 list_head->vl_symbol);
10217 }
10218 }
10219 }
10220
10221 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10222
10223 const char *last_section = NULL;
10224 const char *base_label = NULL;
10225
10226 /* Walk the location list, and output each range + expression. */
10227 for (dw_loc_list_ref curr = list_head; curr != NULL;
10228 curr = curr->dw_loc_next)
10229 {
10230 unsigned long size;
10231
10232 /* Skip this entry? If we skip it here, we must skip it in the
10233 view list above as well. */
10234 if (skip_loc_list_entry (curr, &size))
10235 continue;
10236
10237 lcount++;
10238
10239 if (dwarf_version >= 5)
10240 {
10241 if (dwarf_split_debug_info)
10242 {
10243 dwarf2out_maybe_output_loclist_view_pair (curr);
10244 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
10245 uleb128 index into .debug_addr and a uleb128 length.  */
10246 dw2_asm_output_data (1, DW_LLE_startx_length,
10247 "DW_LLE_startx_length (%s)",
10248 list_head->ll_symbol);
10249 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10250 "Location list range start index "
10251 "(%s)", curr->begin);
10252 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10253 For that case we probably need to emit DW_LLE_startx_endx,
10254 but we'd need 2 .debug_addr entries rather than just one. */
10255 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10256 "Location list length (%s)",
10257 list_head->ll_symbol);
10258 }
10259 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10260 {
10261 dwarf2out_maybe_output_loclist_view_pair (curr);
10262 /* If all code is in .text section, the base address is
10263 already provided by the CU attributes. Use
10264 DW_LLE_offset_pair where both addresses are uleb128 encoded
10265 offsets against that base. */
10266 dw2_asm_output_data (1, DW_LLE_offset_pair,
10267 "DW_LLE_offset_pair (%s)",
10268 list_head->ll_symbol);
10269 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10270 "Location list begin address (%s)",
10271 list_head->ll_symbol);
10272 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10273 "Location list end address (%s)",
10274 list_head->ll_symbol);
10275 }
10276 else if (HAVE_AS_LEB128)
10277 {
10278 /* Otherwise, find out how many consecutive entries could share
10279 the same base entry. If just one, emit DW_LLE_start_length,
10280 otherwise emit DW_LLE_base_address for the base address
10281 followed by a series of DW_LLE_offset_pair. */
10282 if (last_section == NULL || curr->section != last_section)
10283 {
10284 dw_loc_list_ref curr2;
10285 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10286 curr2 = curr2->dw_loc_next)
10287 {
10288 if (strcmp (curr2->begin, curr2->end) == 0
10289 && !curr2->force)
10290 continue;
10291 break;
10292 }
10293 if (curr2 == NULL || curr->section != curr2->section)
10294 last_section = NULL;
10295 else
10296 {
10297 last_section = curr->section;
10298 base_label = curr->begin;
10299 dw2_asm_output_data (1, DW_LLE_base_address,
10300 "DW_LLE_base_address (%s)",
10301 list_head->ll_symbol);
10302 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10303 "Base address (%s)",
10304 list_head->ll_symbol);
10305 }
10306 }
10307 /* Only one entry with the same base address. Use
10308 DW_LLE_start_length with absolute address and uleb128
10309 length. */
10310 if (last_section == NULL)
10311 {
10312 dwarf2out_maybe_output_loclist_view_pair (curr);
10313 dw2_asm_output_data (1, DW_LLE_start_length,
10314 "DW_LLE_start_length (%s)",
10315 list_head->ll_symbol);
10316 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10317 "Location list begin address (%s)",
10318 list_head->ll_symbol);
10319 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10320 "Location list length "
10321 "(%s)", list_head->ll_symbol);
10322 }
10323 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10324 DW_LLE_base_address. */
10325 else
10326 {
10327 dwarf2out_maybe_output_loclist_view_pair (curr);
10328 dw2_asm_output_data (1, DW_LLE_offset_pair,
10329 "DW_LLE_offset_pair (%s)",
10330 list_head->ll_symbol);
10331 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10332 "Location list begin address "
10333 "(%s)", list_head->ll_symbol);
10334 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10335 "Location list end address "
10336 "(%s)", list_head->ll_symbol);
10337 }
10338 }
10339 /* The assembler does not support the .uleb128 directive.  Emit
10340 DW_LLE_start_end with a pair of absolute addresses. */
10341 else
10342 {
10343 dwarf2out_maybe_output_loclist_view_pair (curr);
10344 dw2_asm_output_data (1, DW_LLE_start_end,
10345 "DW_LLE_start_end (%s)",
10346 list_head->ll_symbol);
10347 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10348 "Location list begin address (%s)",
10349 list_head->ll_symbol);
10350 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10351 "Location list end address (%s)",
10352 list_head->ll_symbol);
10353 }
10354 }
10355 else if (dwarf_split_debug_info)
10356 {
10357 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10358 and 4 byte length. */
10359 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10360 "Location list start/length entry (%s)",
10361 list_head->ll_symbol);
10362 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10363 "Location list range start index (%s)",
10364 curr->begin);
10365 /* The length field is 4 bytes. If we ever need to support
10366 an 8-byte length, we can add a new DW_LLE code or fall back
10367 to DW_LLE_GNU_start_end_entry. */
10368 dw2_asm_output_delta (4, curr->end, curr->begin,
10369 "Location list range length (%s)",
10370 list_head->ll_symbol);
10371 }
10372 else if (!have_multiple_function_sections)
10373 {
10374 /* Pair of relative addresses against start of text section. */
10375 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10376 "Location list begin address (%s)",
10377 list_head->ll_symbol);
10378 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10379 "Location list end address (%s)",
10380 list_head->ll_symbol);
10381 }
10382 else
10383 {
10384 /* Pair of absolute addresses. */
10385 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10386 "Location list begin address (%s)",
10387 list_head->ll_symbol);
10388 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10389 "Location list end address (%s)",
10390 list_head->ll_symbol);
10391 }
10392
10393 /* Output the block length for this list of location operations. */
10394 if (dwarf_version >= 5)
10395 dw2_asm_output_data_uleb128 (size, "Location expression size");
10396 else
10397 {
10398 gcc_assert (size <= 0xffff);
10399 dw2_asm_output_data (2, size, "Location expression size");
10400 }
10401
10402 output_loc_sequence (curr->expr, -1);
10403 }
10404
10405 /* And finally list termination. */
10406 if (dwarf_version >= 5)
10407 dw2_asm_output_data (1, DW_LLE_end_of_list,
10408 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10409 else if (dwarf_split_debug_info)
10410 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10411 "Location list terminator (%s)",
10412 list_head->ll_symbol);
10413 else
10414 {
10415 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10416 "Location list terminator begin (%s)",
10417 list_head->ll_symbol);
10418 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10419 "Location list terminator end (%s)",
10420 list_head->ll_symbol);
10421 }
10422
10423 gcc_assert (!list_head->vl_symbol
10424 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10425 }
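/* For illustration only: with -gdwarf-5, all code in a single .text section
   and a leb128-capable assembler, one range of the list is encoded via
   DW_LLE_offset_pair as

     .byte 0x4                  DW_LLE_offset_pair
     .uleb128 begin - .text     start, relative to the CU base address
     .uleb128 end - .text       end, relative to the CU base address
     .uleb128 <expr size>       location expression size
     <location expression bytes>

   and the whole list is closed by a single 0x0 byte (DW_LLE_end_of_list).  */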
10426
10427 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10428 section. Emit a relocated reference if val_entry is NULL, otherwise,
10429 emit an indirect reference. */
10430
10431 static void
10432 output_range_list_offset (dw_attr_node *a)
10433 {
10434 const char *name = dwarf_attr_name (a->dw_attr);
10435
10436 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10437 {
10438 if (dwarf_version >= 5)
10439 {
10440 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10441 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10442 debug_ranges_section, "%s", name);
10443 }
10444 else
10445 {
10446 char *p = strchr (ranges_section_label, '\0');
10447 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10448 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10449 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10450 debug_ranges_section, "%s", name);
10451 *p = '\0';
10452 }
10453 }
10454 else if (dwarf_version >= 5)
10455 {
10456 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10457 gcc_assert (rnglist_idx);
10458 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10459 }
10460 else
10461 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10462 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10463 "%s (offset from %s)", name, ranges_section_label);
10464 }
10465
10466 /* Output the offset into the debug_loc section. */
10467
10468 static void
10469 output_loc_list_offset (dw_attr_node *a)
10470 {
10471 char *sym = AT_loc_list (a)->ll_symbol;
10472
10473 gcc_assert (sym);
10474 if (!dwarf_split_debug_info)
10475 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10476 "%s", dwarf_attr_name (a->dw_attr));
10477 else if (dwarf_version >= 5)
10478 {
10479 gcc_assert (AT_loc_list (a)->num_assigned);
10480 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10481 dwarf_attr_name (a->dw_attr),
10482 sym);
10483 }
10484 else
10485 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10486 "%s", dwarf_attr_name (a->dw_attr));
10487 }
10488
10489 /* Output the view list offset into the debug_loc section.  */
10490
10491 static void
10492 output_view_list_offset (dw_attr_node *a)
10493 {
10494 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10495
10496 gcc_assert (sym);
10497 if (dwarf_split_debug_info)
10498 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10499 "%s", dwarf_attr_name (a->dw_attr));
10500 else
10501 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10502 "%s", dwarf_attr_name (a->dw_attr));
10503 }
10504
10505 /* Output an attribute's index or value appropriately. */
10506
10507 static void
10508 output_attr_index_or_value (dw_attr_node *a)
10509 {
10510 const char *name = dwarf_attr_name (a->dw_attr);
10511
10512 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10513 {
10514 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10515 return;
10516 }
10517 switch (AT_class (a))
10518 {
10519 case dw_val_class_addr:
10520 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10521 break;
10522 case dw_val_class_high_pc:
10523 case dw_val_class_lbl_id:
10524 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10525 break;
10526 default:
10527 gcc_unreachable ();
10528 }
10529 }
10530
10531 /* Output a type signature. */
10532
10533 static inline void
10534 output_signature (const char *sig, const char *name)
10535 {
10536 int i;
10537
10538 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10539 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10540 }
10541
10542 /* Output a discriminant value. */
10543
10544 static inline void
10545 output_discr_value (dw_discr_value *discr_value, const char *name)
10546 {
10547 if (discr_value->pos)
10548 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10549 else
10550 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10551 }
10552
10553 /* Output the DIE and its attributes. Called recursively to generate
10554 the definitions of each child DIE. */
10555
10556 static void
10557 output_die (dw_die_ref die)
10558 {
10559 dw_attr_node *a;
10560 dw_die_ref c;
10561 unsigned long size;
10562 unsigned ix;
10563
10564 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10565 (unsigned long)die->die_offset,
10566 dwarf_tag_name (die->die_tag));
10567
10568 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10569 {
10570 const char *name = dwarf_attr_name (a->dw_attr);
10571
10572 switch (AT_class (a))
10573 {
10574 case dw_val_class_addr:
10575 output_attr_index_or_value (a);
10576 break;
10577
10578 case dw_val_class_offset:
10579 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10580 "%s", name);
10581 break;
10582
10583 case dw_val_class_range_list:
10584 output_range_list_offset (a);
10585 break;
10586
10587 case dw_val_class_loc:
10588 size = size_of_locs (AT_loc (a));
10589
10590 /* Output the block length for this list of location operations. */
10591 if (dwarf_version >= 4)
10592 dw2_asm_output_data_uleb128 (size, "%s", name);
10593 else
10594 dw2_asm_output_data (constant_size (size), size, "%s", name);
10595
10596 output_loc_sequence (AT_loc (a), -1);
10597 break;
10598
10599 case dw_val_class_const:
10600 /* ??? It would be slightly more efficient to use a scheme like the one
10601 used for unsigned constants below, but gdb 4.x does not sign
10602 extend. Gdb 5.x does sign extend. */
10603 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10604 break;
10605
10606 case dw_val_class_unsigned_const:
10607 {
10608 int csize = constant_size (AT_unsigned (a));
10609 if (dwarf_version == 3
10610 && a->dw_attr == DW_AT_data_member_location
10611 && csize >= 4)
10612 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10613 else
10614 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10615 }
10616 break;
10617
10618 case dw_val_class_symview:
10619 {
10620 int vsize;
10621 if (symview_upper_bound <= 0xff)
10622 vsize = 1;
10623 else if (symview_upper_bound <= 0xffff)
10624 vsize = 2;
10625 else if (symview_upper_bound <= 0xffffffff)
10626 vsize = 4;
10627 else
10628 vsize = 8;
10629 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10630 "%s", name);
10631 }
10632 break;
10633
10634 case dw_val_class_const_implicit:
10635 if (flag_debug_asm)
10636 fprintf (asm_out_file, "\t\t\t%s %s ("
10637 HOST_WIDE_INT_PRINT_DEC ")\n",
10638 ASM_COMMENT_START, name, AT_int (a));
10639 break;
10640
10641 case dw_val_class_unsigned_const_implicit:
10642 if (flag_debug_asm)
10643 fprintf (asm_out_file, "\t\t\t%s %s ("
10644 HOST_WIDE_INT_PRINT_HEX ")\n",
10645 ASM_COMMENT_START, name, AT_unsigned (a));
10646 break;
10647
10648 case dw_val_class_const_double:
10649 {
10650 unsigned HOST_WIDE_INT first, second;
10651
10652 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10653 dw2_asm_output_data (1,
10654 HOST_BITS_PER_DOUBLE_INT
10655 / HOST_BITS_PER_CHAR,
10656 NULL);
10657
10658 if (WORDS_BIG_ENDIAN)
10659 {
10660 first = a->dw_attr_val.v.val_double.high;
10661 second = a->dw_attr_val.v.val_double.low;
10662 }
10663 else
10664 {
10665 first = a->dw_attr_val.v.val_double.low;
10666 second = a->dw_attr_val.v.val_double.high;
10667 }
10668
10669 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10670 first, "%s", name);
10671 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10672 second, NULL);
10673 }
10674 break;
10675
10676 case dw_val_class_wide_int:
10677 {
10678 int i;
10679 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10680 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10681 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10682 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10683 * l, NULL);
10684
10685 if (WORDS_BIG_ENDIAN)
10686 for (i = len - 1; i >= 0; --i)
10687 {
10688 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10689 "%s", name);
10690 name = "";
10691 }
10692 else
10693 for (i = 0; i < len; ++i)
10694 {
10695 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10696 "%s", name);
10697 name = "";
10698 }
10699 }
10700 break;
10701
10702 case dw_val_class_vec:
10703 {
10704 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10705 unsigned int len = a->dw_attr_val.v.val_vec.length;
10706 unsigned int i;
10707 unsigned char *p;
10708
10709 dw2_asm_output_data (constant_size (len * elt_size),
10710 len * elt_size, "%s", name);
10711 if (elt_size > sizeof (HOST_WIDE_INT))
10712 {
10713 elt_size /= 2;
10714 len *= 2;
10715 }
10716 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10717 i < len;
10718 i++, p += elt_size)
10719 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10720 "fp or vector constant word %u", i);
10721 break;
10722 }
10723
10724 case dw_val_class_flag:
10725 if (dwarf_version >= 4)
10726 {
10727 /* Currently all add_AT_flag calls pass in 1 as last argument,
10728 so DW_FORM_flag_present can be used. If that ever changes,
10729 we'll need to use DW_FORM_flag and have some optimization
10730 in build_abbrev_table that will change those to
10731 DW_FORM_flag_present if it is set to 1 in all DIEs using
10732 the same abbrev entry. */
10733 gcc_assert (AT_flag (a) == 1);
10734 if (flag_debug_asm)
10735 fprintf (asm_out_file, "\t\t\t%s %s\n",
10736 ASM_COMMENT_START, name);
10737 break;
10738 }
10739 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10740 break;
10741
10742 case dw_val_class_loc_list:
10743 output_loc_list_offset (a);
10744 break;
10745
10746 case dw_val_class_view_list:
10747 output_view_list_offset (a);
10748 break;
10749
10750 case dw_val_class_die_ref:
10751 if (AT_ref_external (a))
10752 {
10753 if (AT_ref (a)->comdat_type_p)
10754 {
10755 comdat_type_node *type_node
10756 = AT_ref (a)->die_id.die_type_node;
10757
10758 gcc_assert (type_node);
10759 output_signature (type_node->signature, name);
10760 }
10761 else
10762 {
10763 const char *sym = AT_ref (a)->die_id.die_symbol;
10764 int size;
10765
10766 gcc_assert (sym);
10767 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10768 length, whereas in DWARF3 it's always sized as an
10769 offset. */
10770 if (dwarf_version == 2)
10771 size = DWARF2_ADDR_SIZE;
10772 else
10773 size = DWARF_OFFSET_SIZE;
10774 /* ??? We cannot unconditionally output die_offset if
10775 non-zero - others might create references to those
10776 DIEs via symbols.
10777 And we do not clear its DIE offset after outputting it
10778 (and the label refers to the actual DIEs, not the
10779 		     DWARF CU unit header, which is where using label + offset
10780 		     would be the correct thing to do).
10781 ??? This is the reason for the with_offset flag. */
10782 if (AT_ref (a)->with_offset)
10783 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10784 debug_info_section, "%s", name);
10785 else
10786 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10787 name);
10788 }
10789 }
10790 else
10791 {
10792 gcc_assert (AT_ref (a)->die_offset);
10793 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10794 "%s", name);
10795 }
10796 break;
10797
10798 case dw_val_class_fde_ref:
10799 {
10800 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10801
10802 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10803 a->dw_attr_val.v.val_fde_index * 2);
10804 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10805 "%s", name);
10806 }
10807 break;
10808
10809 case dw_val_class_vms_delta:
10810 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10811 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10812 AT_vms_delta2 (a), AT_vms_delta1 (a),
10813 "%s", name);
10814 #else
10815 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10816 AT_vms_delta2 (a), AT_vms_delta1 (a),
10817 "%s", name);
10818 #endif
10819 break;
10820
10821 case dw_val_class_lbl_id:
10822 output_attr_index_or_value (a);
10823 break;
10824
10825 case dw_val_class_lineptr:
10826 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10827 debug_line_section, "%s", name);
10828 break;
10829
10830 case dw_val_class_macptr:
10831 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10832 debug_macinfo_section, "%s", name);
10833 break;
10834
10835 case dw_val_class_loclistsptr:
10836 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10837 debug_loc_section, "%s", name);
10838 break;
10839
10840 case dw_val_class_str:
10841 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10842 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10843 a->dw_attr_val.v.val_str->label,
10844 debug_str_section,
10845 "%s: \"%s\"", name, AT_string (a));
10846 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10847 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10848 a->dw_attr_val.v.val_str->label,
10849 debug_line_str_section,
10850 "%s: \"%s\"", name, AT_string (a));
10851 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10852 dw2_asm_output_data_uleb128 (AT_index (a),
10853 "%s: \"%s\"", name, AT_string (a));
10854 else
10855 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10856 break;
10857
10858 case dw_val_class_file:
10859 {
10860 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10861
10862 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10863 a->dw_attr_val.v.val_file->filename);
10864 break;
10865 }
10866
10867 case dw_val_class_file_implicit:
10868 if (flag_debug_asm)
10869 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10870 ASM_COMMENT_START, name,
10871 maybe_emit_file (a->dw_attr_val.v.val_file),
10872 a->dw_attr_val.v.val_file->filename);
10873 break;
10874
10875 case dw_val_class_data8:
10876 {
10877 int i;
10878
10879 for (i = 0; i < 8; i++)
10880 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10881 i == 0 ? "%s" : NULL, name);
10882 break;
10883 }
10884
10885 case dw_val_class_high_pc:
10886 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10887 get_AT_low_pc (die), "DW_AT_high_pc");
10888 break;
10889
10890 case dw_val_class_discr_value:
10891 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10892 break;
10893
10894 case dw_val_class_discr_list:
10895 {
10896 dw_discr_list_ref list = AT_discr_list (a);
10897 const int size = size_of_discr_list (list);
10898
10899 /* This is a block, so output its length first. */
10900 dw2_asm_output_data (constant_size (size), size,
10901 "%s: block size", name);
10902
10903 for (; list != NULL; list = list->dw_discr_next)
10904 {
10905 /* One byte for the discriminant value descriptor, and then as
10906 many LEB128 numbers as required. */
10907 if (list->dw_discr_range)
10908 dw2_asm_output_data (1, DW_DSC_range,
10909 "%s: DW_DSC_range", name);
10910 else
10911 dw2_asm_output_data (1, DW_DSC_label,
10912 "%s: DW_DSC_label", name);
10913
10914 output_discr_value (&list->dw_discr_lower_bound, name);
10915 if (list->dw_discr_range)
10916 output_discr_value (&list->dw_discr_upper_bound, name);
10917 }
10918 break;
10919 }
10920
10921 default:
10922 gcc_unreachable ();
10923 }
10924 }
10925
10926 FOR_EACH_CHILD (die, c, output_die (c));
10927
10928 /* Add null byte to terminate sibling list. */
10929 if (die->die_child != NULL)
10930 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10931 (unsigned long) die->die_offset);
10932 }
10933
10934 /* Output the dwarf version number. */
10935
10936 static void
10937 output_dwarf_version ()
10938 {
10939 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10940 views in loclist. That will change eventually. */
10941 if (dwarf_version == 6)
10942 {
10943 static bool once;
10944 if (!once)
10945 {
10946 warning (0,
10947 "-gdwarf-6 is output as version 5 with incompatibilities");
10948 once = true;
10949 }
10950 dw2_asm_output_data (2, 5, "DWARF version number");
10951 }
10952 else
10953 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10954 }
10955
10956 /* Output the compilation unit that appears at the beginning of the
10957 .debug_info section, and precedes the DIE descriptions. */
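/* For reference, after the initial length the header emitted below is:
   a 2-byte version number, then for DWARF 2-4 the .debug_abbrev offset
   followed by the address size, and for DWARF 5 the unit type, the
   address size and then the .debug_abbrev offset.  Skeleton and split
   units additionally carry an 8-byte DWO id, emitted by the callers of
   this function.  */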
10958
10959 static void
10960 output_compilation_unit_header (enum dwarf_unit_type ut)
10961 {
10962 if (!XCOFF_DEBUGGING_INFO)
10963 {
10964 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10965 dw2_asm_output_data (4, 0xffffffff,
10966 "Initial length escape value indicating 64-bit DWARF extension");
10967 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10968 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10969 "Length of Compilation Unit Info");
10970 }
10971
10972 output_dwarf_version ();
10973 if (dwarf_version >= 5)
10974 {
10975 const char *name;
10976 switch (ut)
10977 {
10978 case DW_UT_compile: name = "DW_UT_compile"; break;
10979 case DW_UT_type: name = "DW_UT_type"; break;
10980 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10981 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10982 default: gcc_unreachable ();
10983 }
10984 dw2_asm_output_data (1, ut, "%s", name);
10985 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10986 }
10987 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10988 debug_abbrev_section,
10989 "Offset Into Abbrev. Section");
10990 if (dwarf_version < 5)
10991 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10992 }
10993
10994 /* Output the compilation unit DIE and its children. */
10995
10996 static void
10997 output_comp_unit (dw_die_ref die, int output_if_empty,
10998 const unsigned char *dwo_id)
10999 {
11000 const char *secname, *oldsym;
11001 char *tmp;
11002
11003   /* Unless we are outputting the main CU, we may throw away empty ones.  */
11004 if (!output_if_empty && die->die_child == NULL)
11005 return;
11006
11007 /* Even if there are no children of this DIE, we must output the information
11008 about the compilation unit. Otherwise, on an empty translation unit, we
11009 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11010 will then complain when examining the file. First mark all the DIEs in
11011 this CU so we know which get local refs. */
11012 mark_dies (die);
11013
11014 external_ref_hash_type *extern_map = optimize_external_refs (die);
11015
11016   /* For now, optimize only the main CU; to optimize the rest
11017 we'd need to see all of them earlier. Leave the rest for post-linking
11018 tools like DWZ. */
11019 if (die == comp_unit_die ())
11020 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11021
11022 build_abbrev_table (die, extern_map);
11023
11024 optimize_abbrev_table ();
11025
11026 delete extern_map;
11027
11028 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11029 next_die_offset = (dwo_id
11030 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11031 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11032 calc_die_sizes (die);
11033
11034 oldsym = die->die_id.die_symbol;
11035 if (oldsym && die->comdat_type_p)
11036 {
11037 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11038
11039 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11040 secname = tmp;
11041 die->die_id.die_symbol = NULL;
11042 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11043 }
11044 else
11045 {
11046 switch_to_section (debug_info_section);
11047 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11048 info_section_emitted = true;
11049 }
11050
11051 /* For LTO cross unit DIE refs we want a symbol on the start of the
11052 debuginfo section, not on the CU DIE. */
11053 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11054 {
11055 /* ??? No way to get visibility assembled without a decl. */
11056 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11057 get_identifier (oldsym), char_type_node);
11058 TREE_PUBLIC (decl) = true;
11059 TREE_STATIC (decl) = true;
11060 DECL_ARTIFICIAL (decl) = true;
11061 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11062 DECL_VISIBILITY_SPECIFIED (decl) = true;
11063 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11064 #ifdef ASM_WEAKEN_LABEL
11065 /* We prefer a .weak because that handles duplicates from duplicate
11066 archive members in a graceful way. */
11067 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11068 #else
11069 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11070 #endif
11071 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11072 }
11073
11074 /* Output debugging information. */
11075 output_compilation_unit_header (dwo_id
11076 ? DW_UT_split_compile : DW_UT_compile);
11077 if (dwarf_version >= 5)
11078 {
11079 if (dwo_id != NULL)
11080 for (int i = 0; i < 8; i++)
11081 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11082 }
11083 output_die (die);
11084
11085 /* Leave the marks on the main CU, so we can check them in
11086 output_pubnames. */
11087 if (oldsym)
11088 {
11089 unmark_dies (die);
11090 die->die_id.die_symbol = oldsym;
11091 }
11092 }
11093
11094 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11095 and .debug_pubtypes. This is configured per-target, but can be
11096 overridden by the -gpubnames or -gno-pubnames options. */
11097
11098 static inline bool
11099 want_pubnames (void)
11100 {
11101 if (debug_info_level <= DINFO_LEVEL_TERSE)
11102 return false;
11103 if (debug_generate_pub_sections != -1)
11104 return debug_generate_pub_sections;
11105 return targetm.want_debug_pub_sections;
11106 }
11107
11108 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11109
11110 static void
11111 add_AT_pubnames (dw_die_ref die)
11112 {
11113 if (want_pubnames ())
11114 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11115 }
11116
11117 /* Add a string attribute value to a skeleton DIE. */
11118
11119 static inline void
11120 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11121 const char *str)
11122 {
11123 dw_attr_node attr;
11124 struct indirect_string_node *node;
11125
11126 if (! skeleton_debug_str_hash)
11127 skeleton_debug_str_hash
11128 = hash_table<indirect_string_hasher>::create_ggc (10);
11129
11130 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11131 find_string_form (node);
11132 if (node->form == dwarf_FORM (DW_FORM_strx))
11133 node->form = DW_FORM_strp;
11134
11135 attr.dw_attr = attr_kind;
11136 attr.dw_attr_val.val_class = dw_val_class_str;
11137 attr.dw_attr_val.val_entry = NULL;
11138 attr.dw_attr_val.v.val_str = node;
11139 add_dwarf_attr (die, &attr);
11140 }
11141
11142 /* Helper function to generate top-level dies for skeleton debug_info and
11143 debug_types. */
11144
11145 static void
11146 add_top_level_skeleton_die_attrs (dw_die_ref die)
11147 {
11148 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11149 const char *comp_dir = comp_dir_string ();
11150
11151 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11152 if (comp_dir != NULL)
11153 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11154 add_AT_pubnames (die);
11155 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11156 }
11157
11158 /* Output skeleton debug sections that point to the dwo file. */
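/* With -gsplit-dwarf the DIEs proper go to the .dwo file; the linked
   executable keeps only this small skeleton unit (DW_UT_skeleton in
   DWARF 5), carrying the DWO file name, the compilation directory,
   the DWO id and the DW_AT_addr_base needed to interpret the split
   debug info.  */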
11159
11160 static void
11161 output_skeleton_debug_sections (dw_die_ref comp_unit,
11162 const unsigned char *dwo_id)
11163 {
11164 /* These attributes will be found in the full debug_info section. */
11165 remove_AT (comp_unit, DW_AT_producer);
11166 remove_AT (comp_unit, DW_AT_language);
11167
11168 switch_to_section (debug_skeleton_info_section);
11169 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11170
11171 /* Produce the skeleton compilation-unit header. This one differs enough from
11172      a normal CU header that it's better not to call
11173      output_compilation_unit_header.  */
11174 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11175 dw2_asm_output_data (4, 0xffffffff,
11176 "Initial length escape value indicating 64-bit "
11177 "DWARF extension");
11178
11179 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11180 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11181 - DWARF_INITIAL_LENGTH_SIZE
11182 + size_of_die (comp_unit),
11183 "Length of Compilation Unit Info");
11184 output_dwarf_version ();
11185 if (dwarf_version >= 5)
11186 {
11187 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11188 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11189 }
11190 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11191 debug_skeleton_abbrev_section,
11192 "Offset Into Abbrev. Section");
11193 if (dwarf_version < 5)
11194 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11195 else
11196 for (int i = 0; i < 8; i++)
11197 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11198
11199 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11200 output_die (comp_unit);
11201
11202 /* Build the skeleton debug_abbrev section. */
11203 switch_to_section (debug_skeleton_abbrev_section);
11204 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11205
11206 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11207
11208 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11209 }
11210
11211 /* Output a comdat type unit DIE and its children. */
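/* A type unit header mirrors the compilation unit header but is
   followed by the 8-byte type signature and the offset of the type
   DIE within the unit.  In DWARF 5 type units live in .debug_info
   (unit types DW_UT_type and DW_UT_split_type); before that they go
   into .debug_types.  */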
11212
11213 static void
11214 output_comdat_type_unit (comdat_type_node *node)
11215 {
11216 const char *secname;
11217 char *tmp;
11218 int i;
11219 #if defined (OBJECT_FORMAT_ELF)
11220 tree comdat_key;
11221 #endif
11222
11223 /* First mark all the DIEs in this CU so we know which get local refs. */
11224 mark_dies (node->root_die);
11225
11226 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11227
11228 build_abbrev_table (node->root_die, extern_map);
11229
11230 delete extern_map;
11231 extern_map = NULL;
11232
11233 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11234 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11235 calc_die_sizes (node->root_die);
11236
11237 #if defined (OBJECT_FORMAT_ELF)
11238 if (dwarf_version >= 5)
11239 {
11240 if (!dwarf_split_debug_info)
11241 secname = ".debug_info";
11242 else
11243 secname = ".debug_info.dwo";
11244 }
11245 else if (!dwarf_split_debug_info)
11246 secname = ".debug_types";
11247 else
11248 secname = ".debug_types.dwo";
11249
11250 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11251 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11252 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11253 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11254 comdat_key = get_identifier (tmp);
11255 targetm.asm_out.named_section (secname,
11256 SECTION_DEBUG | SECTION_LINKONCE,
11257 comdat_key);
11258 #else
11259 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11260 sprintf (tmp, (dwarf_version >= 5
11261 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11262 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11263 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11264 secname = tmp;
11265 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11266 #endif
11267
11268 /* Output debugging information. */
11269 output_compilation_unit_header (dwarf_split_debug_info
11270 ? DW_UT_split_type : DW_UT_type);
11271 output_signature (node->signature, "Type Signature");
11272 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11273 "Offset to Type DIE");
11274 output_die (node->root_die);
11275
11276 unmark_dies (node->root_die);
11277 }
11278
11279 /* Return the DWARF2/3 pubname associated with a decl. */
11280
11281 static const char *
11282 dwarf2_name (tree decl, int scope)
11283 {
11284 if (DECL_NAMELESS (decl))
11285 return NULL;
11286 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11287 }
11288
11289 /* Add a new entry to .debug_pubnames if appropriate. */
11290
11291 static void
11292 add_pubname_string (const char *str, dw_die_ref die)
11293 {
11294 pubname_entry e;
11295
11296 e.die = die;
11297 e.name = xstrdup (str);
11298 vec_safe_push (pubname_table, e);
11299 }
11300
11301 static void
11302 add_pubname (tree decl, dw_die_ref die)
11303 {
11304 if (!want_pubnames ())
11305 return;
11306
11307 /* Don't add items to the table when we expect that the consumer will have
11308 just read the enclosing die. For example, if the consumer is looking at a
11309 class_member, it will either be inside the class already, or will have just
11310 looked up the class to find the member. Either way, searching the class is
11311 faster than searching the index. */
11312 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11313 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11314 {
11315 const char *name = dwarf2_name (decl, 1);
11316
11317 if (name)
11318 add_pubname_string (name, die);
11319 }
11320 }
11321
11322 /* Add an enumerator to the pubnames section. */
11323
11324 static void
11325 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11326 {
11327 pubname_entry e;
11328
11329 gcc_assert (scope_name);
11330 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11331 e.die = die;
11332 vec_safe_push (pubname_table, e);
11333 }
11334
11335 /* Add a new entry to .debug_pubtypes if appropriate. */
11336
11337 static void
11338 add_pubtype (tree decl, dw_die_ref die)
11339 {
11340 pubname_entry e;
11341
11342 if (!want_pubnames ())
11343 return;
11344
11345 if ((TREE_PUBLIC (decl)
11346 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11347 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11348 {
11349 tree scope = NULL;
11350 const char *scope_name = "";
11351 const char *sep = is_cxx () ? "::" : ".";
11352 const char *name;
11353
11354 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11355 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11356 {
11357 scope_name = lang_hooks.dwarf_name (scope, 1);
11358 if (scope_name != NULL && scope_name[0] != '\0')
11359 scope_name = concat (scope_name, sep, NULL);
11360 else
11361 scope_name = "";
11362 }
11363
11364 if (TYPE_P (decl))
11365 name = type_tag (decl);
11366 else
11367 name = lang_hooks.dwarf_name (decl, 1);
11368
11369 /* If we don't have a name for the type, there's no point in adding
11370 it to the table. */
11371 if (name != NULL && name[0] != '\0')
11372 {
11373 e.die = die;
11374 e.name = concat (scope_name, name, NULL);
11375 vec_safe_push (pubtype_table, e);
11376 }
11377
11378 /* Although it might be more consistent to add the pubinfo for the
11379 enumerators as their dies are created, they should only be added if the
11380 enum type meets the criteria above. So rather than re-check the parent
11381 enum type whenever an enumerator die is created, just output them all
11382 here. This isn't protected by the name conditional because anonymous
11383 enums don't have names. */
11384 if (die->die_tag == DW_TAG_enumeration_type)
11385 {
11386 dw_die_ref c;
11387
11388 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11389 }
11390 }
11391 }
11392
11393 /* Output a single entry in the pubnames table. */
11394
11395 static void
11396 output_pubname (dw_offset die_offset, pubname_entry *entry)
11397 {
11398 dw_die_ref die = entry->die;
11399 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11400
11401 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11402
11403 if (debug_generate_pub_sections == 2)
11404 {
11405 /* This logic follows gdb's method for determining the value of the flag
11406 byte. */
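      /* The GDB_INDEX_* macros (see include/gdb/gdb-index.h) pack the
	 symbol kind and the "static" bit into the bits above the CU
	 index of a 32-bit CU vector word; the shift by
	 GDB_INDEX_CU_BITSIZE below keeps just that top byte.  */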
11407 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11408 switch (die->die_tag)
11409 {
11410 case DW_TAG_typedef:
11411 case DW_TAG_base_type:
11412 case DW_TAG_subrange_type:
11413 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11414 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11415 break;
11416 case DW_TAG_enumerator:
11417 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11418 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11419 if (!is_cxx ())
11420 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11421 break;
11422 case DW_TAG_subprogram:
11423 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11424 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11425 if (!is_ada ())
11426 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11427 break;
11428 case DW_TAG_constant:
11429 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11430 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11431 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11432 break;
11433 case DW_TAG_variable:
11434 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11435 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11436 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11437 break;
11438 case DW_TAG_namespace:
11439 case DW_TAG_imported_declaration:
11440 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11441 break;
11442 case DW_TAG_class_type:
11443 case DW_TAG_interface_type:
11444 case DW_TAG_structure_type:
11445 case DW_TAG_union_type:
11446 case DW_TAG_enumeration_type:
11447 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11448 if (!is_cxx ())
11449 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11450 break;
11451 default:
11452 /* An unusual tag. Leave the flag-byte empty. */
11453 break;
11454 }
11455 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11456 "GDB-index flags");
11457 }
11458
11459 dw2_asm_output_nstring (entry->name, -1, "external name");
11460 }
11461
11462
11463 /* Output the public names table used to speed up access to externally
11464 visible names; or the public types table used to find type definitions. */
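/* Both tables share one layout: a length field, a 2-byte version (2),
   the offset and length of the corresponding compilation unit in
   .debug_info, then (DIE offset, null-terminated name) pairs ended by
   a zero DIE offset.  When GNU-style pubnames are requested for the
   GDB index, an extra flag byte precedes each name.  */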
11465
11466 static void
11467 output_pubnames (vec<pubname_entry, va_gc> *names)
11468 {
11469 unsigned i;
11470 unsigned long pubnames_length = size_of_pubnames (names);
11471 pubname_entry *pub;
11472
11473 if (!XCOFF_DEBUGGING_INFO)
11474 {
11475 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11476 dw2_asm_output_data (4, 0xffffffff,
11477 "Initial length escape value indicating 64-bit DWARF extension");
11478 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11479 "Pub Info Length");
11480 }
11481
11482 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11483 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11484
11485 if (dwarf_split_debug_info)
11486 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11487 debug_skeleton_info_section,
11488 "Offset of Compilation Unit Info");
11489 else
11490 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11491 debug_info_section,
11492 "Offset of Compilation Unit Info");
11493 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11494 "Compilation Unit Length");
11495
11496 FOR_EACH_VEC_ELT (*names, i, pub)
11497 {
11498 if (include_pubname_in_output (names, pub))
11499 {
11500 dw_offset die_offset = pub->die->die_offset;
11501
11502 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11503 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11504 gcc_assert (pub->die->die_mark);
11505
11506 /* If we're putting types in their own .debug_types sections,
11507 the .debug_pubtypes table will still point to the compile
11508 unit (not the type unit), so we want to use the offset of
11509 the skeleton DIE (if there is one). */
11510 if (pub->die->comdat_type_p && names == pubtype_table)
11511 {
11512 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11513
11514 if (type_node != NULL)
11515 die_offset = (type_node->skeleton_die != NULL
11516 ? type_node->skeleton_die->die_offset
11517 : comp_unit_die ()->die_offset);
11518 }
11519
11520 output_pubname (die_offset, pub);
11521 }
11522 }
11523
11524 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11525 }
11526
11527 /* Output public names and types tables if necessary. */
11528
11529 static void
11530 output_pubtables (void)
11531 {
11532 if (!want_pubnames () || !info_section_emitted)
11533 return;
11534
11535 switch_to_section (debug_pubnames_section);
11536 output_pubnames (pubname_table);
11537 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11538 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11539 simply won't look for the section. */
11540 switch_to_section (debug_pubtypes_section);
11541 output_pubnames (pubtype_table);
11542 }
11543
11544
11545 /* Output the information that goes into the .debug_aranges table.
11546 Namely, define the beginning and ending address range of the
11547 text section generated for this compilation unit. */
11548
11549 static void
11550 output_aranges (void)
11551 {
11552 unsigned i;
11553 unsigned long aranges_length = size_of_aranges ();
11554
11555 if (!XCOFF_DEBUGGING_INFO)
11556 {
11557 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11558 dw2_asm_output_data (4, 0xffffffff,
11559 "Initial length escape value indicating 64-bit DWARF extension");
11560 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11561 "Length of Address Ranges Info");
11562 }
11563
11564 /* Version number for aranges is still 2, even up to DWARF5. */
11565 dw2_asm_output_data (2, 2, "DWARF aranges version");
11566 if (dwarf_split_debug_info)
11567 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11568 debug_skeleton_info_section,
11569 "Offset of Compilation Unit Info");
11570 else
11571 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11572 debug_info_section,
11573 "Offset of Compilation Unit Info");
11574 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11575 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11576
11577 /* We need to align to twice the pointer size here. */
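  /* For example, with 32-bit DWARF offsets and 8-byte addresses the
     header so far is 4 (length) + 2 (version) + 4 (CU offset) + 1 + 1
     = 12 bytes, so two 2-byte pad words round it up to the required
     16-byte (2 * DWARF2_ADDR_SIZE) boundary.  */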
11578 if (DWARF_ARANGES_PAD_SIZE)
11579 {
11580       /* Pad using 2-byte words so that the padding is correct for any
11581 pointer size. */
11582 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11583 2 * DWARF2_ADDR_SIZE);
11584 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11585 dw2_asm_output_data (2, 0, NULL);
11586 }
11587
11588   /* It is necessary not to output these entries if the sections were
11589      not used; in that case the length will be 0 and
11590      the address may end up as 0 if the section is discarded by ld
11591      --gc-sections, leaving an invalid (0, 0) entry that can be
11592      confused with the terminator.  */
11593 if (text_section_used)
11594 {
11595 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11596 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11597 text_section_label, "Length");
11598 }
11599 if (cold_text_section_used)
11600 {
11601 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11602 "Address");
11603 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11604 cold_text_section_label, "Length");
11605 }
11606
11607 if (have_multiple_function_sections)
11608 {
11609 unsigned fde_idx;
11610 dw_fde_ref fde;
11611
11612 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11613 {
11614 if (DECL_IGNORED_P (fde->decl))
11615 continue;
11616 if (!fde->in_std_section)
11617 {
11618 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11619 "Address");
11620 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11621 fde->dw_fde_begin, "Length");
11622 }
11623 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11624 {
11625 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11626 "Address");
11627 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11628 fde->dw_fde_second_begin, "Length");
11629 }
11630 }
11631 }
11632
11633 /* Output the terminator words. */
11634 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11635 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11636 }
11637
11638 /* Add a new entry to .debug_ranges. Return its index into
11639 ranges_table vector. */
11640
11641 static unsigned int
11642 add_ranges_num (int num, bool maybe_new_sec)
11643 {
11644 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11645 vec_safe_push (ranges_table, r);
11646 return vec_safe_length (ranges_table) - 1;
11647 }
11648
11649 /* Add a new entry to .debug_ranges corresponding to a block, or a
11650 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11651 this entry might be in a different section from previous range. */
11652
11653 static unsigned int
11654 add_ranges (const_tree block, bool maybe_new_sec)
11655 {
11656 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11657 }
11658
11659 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11660    chain, or a middle entry of a chain that will be directly referred to.  */
11661
11662 static void
11663 note_rnglist_head (unsigned int offset)
11664 {
11665 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11666 return;
11667 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11668 }
11669
11670 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11671 When using dwarf_split_debug_info, address attributes in dies destined
11672 for the final executable should be direct references--setting the
11673 parameter force_direct ensures this behavior. */
11674
11675 static void
11676 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11677 bool *added, bool force_direct)
11678 {
11679 unsigned int in_use = vec_safe_length (ranges_by_label);
11680 unsigned int offset;
11681 dw_ranges_by_label rbl = { begin, end };
11682 vec_safe_push (ranges_by_label, rbl);
11683 offset = add_ranges_num (-(int)in_use - 1, true);
11684 if (!*added)
11685 {
11686 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11687 *added = true;
11688 note_rnglist_head (offset);
11689 }
11690 }
11691
11692 /* Emit .debug_ranges section. */
11693
11694 static void
11695 output_ranges (void)
11696 {
11697 unsigned i;
11698 static const char *const start_fmt = "Offset %#x";
11699 const char *fmt = start_fmt;
11700 dw_ranges *r;
11701
11702 switch_to_section (debug_ranges_section);
11703 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11704 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11705 {
11706 int block_num = r->num;
11707
11708 if (block_num > 0)
11709 {
11710 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11711 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11712
11713 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11714 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11715
11716 /* If all code is in the text section, then the compilation
11717 unit base address defaults to DW_AT_low_pc, which is the
11718 base of the text section. */
11719 if (!have_multiple_function_sections)
11720 {
11721 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11722 text_section_label,
11723 fmt, i * 2 * DWARF2_ADDR_SIZE);
11724 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11725 text_section_label, NULL);
11726 }
11727
11728 /* Otherwise, the compilation unit base address is zero,
11729 which allows us to use absolute addresses, and not worry
11730 about whether the target supports cross-section
11731 arithmetic. */
11732 else
11733 {
11734 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11735 fmt, i * 2 * DWARF2_ADDR_SIZE);
11736 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11737 }
11738
11739 fmt = NULL;
11740 }
11741
11742 /* Negative block_num stands for an index into ranges_by_label. */
11743 else if (block_num < 0)
11744 {
11745 int lab_idx = - block_num - 1;
11746
11747 if (!have_multiple_function_sections)
11748 {
11749 gcc_unreachable ();
11750 #if 0
11751 /* If we ever use add_ranges_by_labels () for a single
11752 function section, all we have to do is to take out
11753 the #if 0 above. */
11754 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11755 (*ranges_by_label)[lab_idx].begin,
11756 text_section_label,
11757 fmt, i * 2 * DWARF2_ADDR_SIZE);
11758 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11759 (*ranges_by_label)[lab_idx].end,
11760 text_section_label, NULL);
11761 #endif
11762 }
11763 else
11764 {
11765 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11766 (*ranges_by_label)[lab_idx].begin,
11767 fmt, i * 2 * DWARF2_ADDR_SIZE);
11768 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11769 (*ranges_by_label)[lab_idx].end,
11770 NULL);
11771 }
11772 }
11773 else
11774 {
11775 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11776 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11777 fmt = start_fmt;
11778 }
11779 }
11780 }
11781
11782 /* Non-zero if .debug_line_str should be used for .debug_line section
11783 strings or strings that are likely shareable with those. */
11784 #define DWARF5_USE_DEBUG_LINE_STR \
11785 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11786 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11787 /* FIXME: there is no .debug_line_str.dwo section, \
11788 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11789 && !dwarf_split_debug_info)
11790
11791 /* Assign .debug_rnglists indexes. */
11792
11793 static void
11794 index_rnglists (void)
11795 {
11796 unsigned i;
11797 dw_ranges *r;
11798
11799 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11800 if (r->label)
11801 r->idx = rnglist_idx++;
11802 }
11803
11804 /* Emit .debug_rnglists section. */
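/* The DWARF 5 range list entry kinds used below are: DW_RLE_offset_pair
   (two uleb128 offsets relative to the current base address),
   DW_RLE_base_address (sets that base to a full address),
   DW_RLE_start_length (a full start address plus a uleb128 length),
   DW_RLE_start_end (two full addresses) and DW_RLE_end_of_list.  */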
11805
11806 static void
11807 output_rnglists (unsigned generation)
11808 {
11809 unsigned i;
11810 dw_ranges *r;
11811 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11812 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11813 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11814
11815 switch_to_section (debug_ranges_section);
11816 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11817 /* There are up to 4 unique ranges labels per generation.
11818 See also init_sections_and_labels. */
11819 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11820 2 + generation * 4);
11821 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11822 3 + generation * 4);
11823 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11824 dw2_asm_output_data (4, 0xffffffff,
11825 "Initial length escape value indicating "
11826 "64-bit DWARF extension");
11827 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11828 "Length of Range Lists");
11829 ASM_OUTPUT_LABEL (asm_out_file, l1);
11830 output_dwarf_version ();
11831 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11832 dw2_asm_output_data (1, 0, "Segment Size");
11833 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11834 about relocation sizes and primarily care about the size of .debug*
11835 sections in linked shared libraries and executables, then
11836 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11837 into it are usually larger than just DW_FORM_sec_offset offsets
11838 into the .debug_rnglists section. */
11839 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11840 "Offset Entry Count");
11841 if (dwarf_split_debug_info)
11842 {
11843 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11844 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11845 if (r->label)
11846 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11847 ranges_base_label, NULL);
11848 }
11849
11850 const char *lab = "";
11851 unsigned int len = vec_safe_length (ranges_table);
11852 const char *base = NULL;
11853 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11854 {
11855 int block_num = r->num;
11856
11857 if (r->label)
11858 {
11859 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11860 lab = r->label;
11861 }
11862 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11863 base = NULL;
11864 if (block_num > 0)
11865 {
11866 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11867 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11868
11869 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11870 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11871
11872 if (HAVE_AS_LEB128)
11873 {
11874 /* If all code is in the text section, then the compilation
11875 unit base address defaults to DW_AT_low_pc, which is the
11876 base of the text section. */
11877 if (!have_multiple_function_sections)
11878 {
11879 dw2_asm_output_data (1, DW_RLE_offset_pair,
11880 "DW_RLE_offset_pair (%s)", lab);
11881 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11882 "Range begin address (%s)", lab);
11883 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11884 "Range end address (%s)", lab);
11885 continue;
11886 }
11887 if (base == NULL)
11888 {
11889 dw_ranges *r2 = NULL;
11890 if (i < len - 1)
11891 r2 = &(*ranges_table)[i + 1];
11892 if (r2
11893 && r2->num != 0
11894 && r2->label == NULL
11895 && !r2->maybe_new_sec)
11896 {
11897 dw2_asm_output_data (1, DW_RLE_base_address,
11898 "DW_RLE_base_address (%s)", lab);
11899 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11900 "Base address (%s)", lab);
11901 strcpy (basebuf, blabel);
11902 base = basebuf;
11903 }
11904 }
11905 if (base)
11906 {
11907 dw2_asm_output_data (1, DW_RLE_offset_pair,
11908 "DW_RLE_offset_pair (%s)", lab);
11909 dw2_asm_output_delta_uleb128 (blabel, base,
11910 "Range begin address (%s)", lab);
11911 dw2_asm_output_delta_uleb128 (elabel, base,
11912 "Range end address (%s)", lab);
11913 continue;
11914 }
11915 dw2_asm_output_data (1, DW_RLE_start_length,
11916 "DW_RLE_start_length (%s)", lab);
11917 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11918 "Range begin address (%s)", lab);
11919 dw2_asm_output_delta_uleb128 (elabel, blabel,
11920 "Range length (%s)", lab);
11921 }
11922 else
11923 {
11924 dw2_asm_output_data (1, DW_RLE_start_end,
11925 "DW_RLE_start_end (%s)", lab);
11926 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11927 "Range begin address (%s)", lab);
11928 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11929 "Range end address (%s)", lab);
11930 }
11931 }
11932
11933 /* Negative block_num stands for an index into ranges_by_label. */
11934 else if (block_num < 0)
11935 {
11936 int lab_idx = - block_num - 1;
11937 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11938 const char *elabel = (*ranges_by_label)[lab_idx].end;
11939
11940 if (!have_multiple_function_sections)
11941 gcc_unreachable ();
11942 if (HAVE_AS_LEB128)
11943 {
11944 dw2_asm_output_data (1, DW_RLE_start_length,
11945 "DW_RLE_start_length (%s)", lab);
11946 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11947 "Range begin address (%s)", lab);
11948 dw2_asm_output_delta_uleb128 (elabel, blabel,
11949 "Range length (%s)", lab);
11950 }
11951 else
11952 {
11953 dw2_asm_output_data (1, DW_RLE_start_end,
11954 "DW_RLE_start_end (%s)", lab);
11955 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11956 "Range begin address (%s)", lab);
11957 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11958 "Range end address (%s)", lab);
11959 }
11960 }
11961 else
11962 dw2_asm_output_data (1, DW_RLE_end_of_list,
11963 "DW_RLE_end_of_list (%s)", lab);
11964 }
11965 ASM_OUTPUT_LABEL (asm_out_file, l2);
11966 }
11967
11968 /* Data structure containing information about input files. */
11969 struct file_info
11970 {
11971 const char *path; /* Complete file name. */
11972 const char *fname; /* File name part. */
11973 int length; /* Length of entire string. */
11974 struct dwarf_file_data * file_idx; /* Index in input file table. */
11975 int dir_idx; /* Index in directory table. */
11976 };
11977
11978 /* Data structure containing information about directories with source
11979 files. */
11980 struct dir_info
11981 {
11982 const char *path; /* Path including directory name. */
11983 int length; /* Path length. */
11984 int prefix; /* Index of directory entry which is a prefix. */
11985 int count; /* Number of files in this directory. */
11986 int dir_idx; /* Index of directory used as base. */
11987 };
11988
11989 /* Callback function for file_info comparison. We sort by looking at
11990 the directories in the path. */
11991
11992 static int
11993 file_info_cmp (const void *p1, const void *p2)
11994 {
11995 const struct file_info *const s1 = (const struct file_info *) p1;
11996 const struct file_info *const s2 = (const struct file_info *) p2;
11997 const unsigned char *cp1;
11998 const unsigned char *cp2;
11999
12000 /* Take care of file names without directories. We need to make sure that
12001 we return consistent values to qsort since some will get confused if
12002 we return the same value when identical operands are passed in opposite
12003 orders. So if neither has a directory, return 0 and otherwise return
12004 1 or -1 depending on which one has the directory. We want the one with
12005      the directory to sort after the one without, so all no-directory files
12006      are at the start (normally only the compilation unit file).  */
12007 if ((s1->path == s1->fname || s2->path == s2->fname))
12008 return (s2->path == s2->fname) - (s1->path == s1->fname);
12009
12010 cp1 = (const unsigned char *) s1->path;
12011 cp2 = (const unsigned char *) s2->path;
12012
12013 while (1)
12014 {
12015 ++cp1;
12016 ++cp2;
12017 /* Reached the end of the first path? If so, handle like above,
12018 	 but now we want longer directory prefixes before shorter ones.  */
12019 if ((cp1 == (const unsigned char *) s1->fname)
12020 || (cp2 == (const unsigned char *) s2->fname))
12021 return ((cp1 == (const unsigned char *) s1->fname)
12022 - (cp2 == (const unsigned char *) s2->fname));
12023
12024 /* Character of current path component the same? */
12025 else if (*cp1 != *cp2)
12026 return *cp1 - *cp2;
12027 }
12028 }
12029
12030 struct file_name_acquire_data
12031 {
12032 struct file_info *files;
12033 int used_files;
12034 int max_files;
12035 };
12036
12037 /* Traversal function for the hash table. */
12038
12039 int
12040 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12041 {
12042 struct dwarf_file_data *d = *slot;
12043 struct file_info *fi;
12044 const char *f;
12045
12046 gcc_assert (fnad->max_files >= d->emitted_number);
12047
12048 if (! d->emitted_number)
12049 return 1;
12050
12051 gcc_assert (fnad->max_files != fnad->used_files);
12052
12053 fi = fnad->files + fnad->used_files++;
12054
12055 /* Skip all leading "./". */
12056 f = d->filename;
12057 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12058 f += 2;
12059
12060 /* Create a new array entry. */
12061 fi->path = f;
12062 fi->length = strlen (f);
12063 fi->file_idx = d;
12064
12065 /* Search for the file name part. */
12066 f = strrchr (f, DIR_SEPARATOR);
12067 #if defined (DIR_SEPARATOR_2)
12068 {
12069 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12070
12071 if (g != NULL)
12072 {
12073 if (f == NULL || f < g)
12074 f = g;
12075 }
12076 }
12077 #endif
12078
12079 fi->fname = f == NULL ? fi->path : f + 1;
12080 return 1;
12081 }
12082
12083 /* Helper function for output_file_names. Emit a FORM encoded
12084 string STR, with assembly comment start ENTRY_KIND and
12085 index IDX */
12086
12087 static void
12088 output_line_string (enum dwarf_form form, const char *str,
12089 const char *entry_kind, unsigned int idx)
12090 {
12091 switch (form)
12092 {
12093 case DW_FORM_string:
12094 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12095 break;
12096 case DW_FORM_line_strp:
12097 if (!debug_line_str_hash)
12098 debug_line_str_hash
12099 = hash_table<indirect_string_hasher>::create_ggc (10);
12100
12101 struct indirect_string_node *node;
12102 node = find_AT_string_in_table (str, debug_line_str_hash);
12103 set_indirect_string (node);
12104 node->form = form;
12105 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12106 debug_line_str_section, "%s: %#x: \"%s\"",
12107 entry_kind, 0, node->str);
12108 break;
12109 default:
12110 gcc_unreachable ();
12111 }
12112 }
12113
12114 /* Output the directory table and the file name table. We try to minimize
12115 the total amount of memory needed. A heuristic is used to avoid large
12116 slowdowns with many input files. */
12117
12118 static void
12119 output_file_names (void)
12120 {
12121 struct file_name_acquire_data fnad;
12122 int numfiles;
12123 struct file_info *files;
12124 struct dir_info *dirs;
12125 int *saved;
12126 int *savehere;
12127 int *backmap;
12128 int ndirs;
12129 int idx_offset;
12130 int i;
12131
12132 if (!last_emitted_file)
12133 {
12134 if (dwarf_version >= 5)
12135 {
12136 dw2_asm_output_data (1, 0, "Directory entry format count");
12137 dw2_asm_output_data_uleb128 (0, "Directories count");
12138 dw2_asm_output_data (1, 0, "File name entry format count");
12139 dw2_asm_output_data_uleb128 (0, "File names count");
12140 }
12141 else
12142 {
12143 dw2_asm_output_data (1, 0, "End directory table");
12144 dw2_asm_output_data (1, 0, "End file name table");
12145 }
12146 return;
12147 }
12148
12149 numfiles = last_emitted_file->emitted_number;
12150
12151 /* Allocate the various arrays we need. */
12152 files = XALLOCAVEC (struct file_info, numfiles);
12153 dirs = XALLOCAVEC (struct dir_info, numfiles);
12154
12155 fnad.files = files;
12156 fnad.used_files = 0;
12157 fnad.max_files = numfiles;
12158 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12159 gcc_assert (fnad.used_files == fnad.max_files);
12160
12161 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12162
12163 /* Find all the different directories used. */
12164 dirs[0].path = files[0].path;
12165 dirs[0].length = files[0].fname - files[0].path;
12166 dirs[0].prefix = -1;
12167 dirs[0].count = 1;
12168 dirs[0].dir_idx = 0;
12169 files[0].dir_idx = 0;
12170 ndirs = 1;
12171
12172 for (i = 1; i < numfiles; i++)
12173 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12174 && memcmp (dirs[ndirs - 1].path, files[i].path,
12175 dirs[ndirs - 1].length) == 0)
12176 {
12177 /* Same directory as last entry. */
12178 files[i].dir_idx = ndirs - 1;
12179 ++dirs[ndirs - 1].count;
12180 }
12181 else
12182 {
12183 int j;
12184
12185 /* This is a new directory. */
12186 dirs[ndirs].path = files[i].path;
12187 dirs[ndirs].length = files[i].fname - files[i].path;
12188 dirs[ndirs].count = 1;
12189 dirs[ndirs].dir_idx = ndirs;
12190 files[i].dir_idx = ndirs;
12191
12192 /* Search for a prefix. */
12193 dirs[ndirs].prefix = -1;
12194 for (j = 0; j < ndirs; j++)
12195 if (dirs[j].length < dirs[ndirs].length
12196 && dirs[j].length > 1
12197 && (dirs[ndirs].prefix == -1
12198 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12199 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12200 dirs[ndirs].prefix = j;
12201
12202 ++ndirs;
12203 }
12204
12205   /* Now to the actual work.  We have to find a subset of the directories
12206      which allows expressing the file names using references to the
12207      directory table with the fewest characters.  We do not do an exhaustive
12208      search where we would have to check out every combination of every
12209      single possible prefix.  Instead we use a heuristic which provides
12210      nearly optimal results in most cases and is never far off.  */
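  /* As a rough illustration: if ten files live under "/usr/include/bits/"
     and "/usr/include/" already has a directory entry, recording
     "/usr/include/bits/" as its own entry additionally saves the "bits/"
     part of each of those ten file names, which outweighs the one-time
     cost of storing the longer directory string.  */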
12211 saved = XALLOCAVEC (int, ndirs);
12212 savehere = XALLOCAVEC (int, ndirs);
12213
12214 memset (saved, '\0', ndirs * sizeof (saved[0]));
12215 for (i = 0; i < ndirs; i++)
12216 {
12217 int j;
12218 int total;
12219
12220 /* We can always save some space for the current directory. But this
12221 does not mean it will be enough to justify adding the directory. */
12222 savehere[i] = dirs[i].length;
12223 total = (savehere[i] - saved[i]) * dirs[i].count;
12224
12225 for (j = i + 1; j < ndirs; j++)
12226 {
12227 savehere[j] = 0;
12228 if (saved[j] < dirs[i].length)
12229 {
12230 /* Determine whether the dirs[i] path is a prefix of the
12231 dirs[j] path. */
12232 int k;
12233
12234 k = dirs[j].prefix;
12235 while (k != -1 && k != (int) i)
12236 k = dirs[k].prefix;
12237
12238 if (k == (int) i)
12239 {
12240 /* Yes it is. We can possibly save some memory by
12241 writing the filenames in dirs[j] relative to
12242 dirs[i]. */
12243 savehere[j] = dirs[i].length;
12244 total += (savehere[j] - saved[j]) * dirs[j].count;
12245 }
12246 }
12247 }
12248
12249 /* Check whether we can save enough to justify adding the dirs[i]
12250 directory. */
12251 if (total > dirs[i].length + 1)
12252 {
12253 /* It's worthwhile adding. */
12254 for (j = i; j < ndirs; j++)
12255 if (savehere[j] > 0)
12256 {
12257 /* Remember how much we saved for this directory so far. */
12258 saved[j] = savehere[j];
12259
12260 /* Remember the prefix directory. */
12261 dirs[j].dir_idx = i;
12262 }
12263 }
12264 }
12265
12266 /* Emit the directory name table. */
12267 idx_offset = dirs[0].length > 0 ? 1 : 0;
12268 enum dwarf_form str_form = DW_FORM_string;
12269 enum dwarf_form idx_form = DW_FORM_udata;
12270 if (dwarf_version >= 5)
12271 {
12272 const char *comp_dir = comp_dir_string ();
12273 if (comp_dir == NULL)
12274 comp_dir = "";
12275 dw2_asm_output_data (1, 1, "Directory entry format count");
12276 if (DWARF5_USE_DEBUG_LINE_STR)
12277 str_form = DW_FORM_line_strp;
12278 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12279 dw2_asm_output_data_uleb128 (str_form, "%s",
12280 get_DW_FORM_name (str_form));
12281 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12282 if (str_form == DW_FORM_string)
12283 {
12284 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12285 for (i = 1 - idx_offset; i < ndirs; i++)
12286 dw2_asm_output_nstring (dirs[i].path,
12287 dirs[i].length
12288 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12289 "Directory Entry: %#x", i + idx_offset);
12290 }
12291 else
12292 {
12293 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12294 for (i = 1 - idx_offset; i < ndirs; i++)
12295 {
12296 const char *str
12297 = ggc_alloc_string (dirs[i].path,
12298 dirs[i].length
12299 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12300 output_line_string (str_form, str, "Directory Entry",
12301 (unsigned) i + idx_offset);
12302 }
12303 }
12304 }
12305 else
12306 {
12307 for (i = 1 - idx_offset; i < ndirs; i++)
12308 dw2_asm_output_nstring (dirs[i].path,
12309 dirs[i].length
12310 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12311 "Directory Entry: %#x", i + idx_offset);
12312
12313 dw2_asm_output_data (1, 0, "End directory table");
12314 }
12315
12316 /* We have to emit them in the order of emitted_number since that's
12317 used in the debug info generation. To do this efficiently we
12318 generate a back-mapping of the indices first. */
12319 backmap = XALLOCAVEC (int, numfiles);
12320 for (i = 0; i < numfiles; i++)
12321 backmap[files[i].file_idx->emitted_number - 1] = i;
12322
12323 if (dwarf_version >= 5)
12324 {
12325 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12326 if (filename0 == NULL)
12327 filename0 = "";
12328 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12329 DW_FORM_data2. Choose one based on the number of directories
12330 	 and how much space they would occupy in each encoding.
12331 If we have at most 256 directories, all indexes fit into
12332 a single byte, so DW_FORM_data1 is most compact (if there
12333 	 are at most 128 directories, DW_FORM_udata would be as
12334 	 compact as that, but no shorter, and slower to decode).  */
12335 if (ndirs + idx_offset <= 256)
12336 idx_form = DW_FORM_data1;
12337 /* If there are more than 65536 directories, we have to use
12338 	 DW_FORM_udata; DW_FORM_data2 can't refer to them.
12339 	 Otherwise, compute how much space the indexes would occupy if they
12340 	 all used DW_FORM_udata (sum), compare that to how large the
12341 	 DW_FORM_data2 encoding would be, and pick the more efficient one.  */
12342 else if (ndirs + idx_offset <= 65536)
12343 {
12344 unsigned HOST_WIDE_INT sum = 1;
12345 for (i = 0; i < numfiles; i++)
12346 {
12347 int file_idx = backmap[i];
12348 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12349 sum += size_of_uleb128 (dir_idx);
12350 }
12351 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12352 idx_form = DW_FORM_data2;
12353 }
12354 #ifdef VMS_DEBUGGING_INFO
12355 dw2_asm_output_data (1, 4, "File name entry format count");
12356 #else
12357 dw2_asm_output_data (1, 2, "File name entry format count");
12358 #endif
12359 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12360 dw2_asm_output_data_uleb128 (str_form, "%s",
12361 get_DW_FORM_name (str_form));
12362 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12363 "DW_LNCT_directory_index");
12364 dw2_asm_output_data_uleb128 (idx_form, "%s",
12365 get_DW_FORM_name (idx_form));
12366 #ifdef VMS_DEBUGGING_INFO
12367 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12368 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12369 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12370 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12371 #endif
12372 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12373
12374 output_line_string (str_form, filename0, "File Entry", 0);
12375
12376 /* Include directory index. */
12377 if (idx_form != DW_FORM_udata)
12378 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12379 0, NULL);
12380 else
12381 dw2_asm_output_data_uleb128 (0, NULL);
12382
12383 #ifdef VMS_DEBUGGING_INFO
12384 dw2_asm_output_data_uleb128 (0, NULL);
12385 dw2_asm_output_data_uleb128 (0, NULL);
12386 #endif
12387 }
12388
12389 /* Now write all the file names. */
12390 for (i = 0; i < numfiles; i++)
12391 {
12392 int file_idx = backmap[i];
12393 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12394
12395 #ifdef VMS_DEBUGGING_INFO
12396 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12397
12398 /* Setting these fields can lead to debugger miscomparisons,
12399 but VMS Debug requires them to be set correctly. */
12400
12401 int ver;
12402 long long cdt;
12403 long siz;
12404 int maxfilelen = (strlen (files[file_idx].path)
12405 + dirs[dir_idx].length
12406 + MAX_VMS_VERSION_LEN + 1);
12407 char *filebuf = XALLOCAVEC (char, maxfilelen);
12408
12409 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12410 snprintf (filebuf, maxfilelen, "%s;%d",
12411 files[file_idx].path + dirs[dir_idx].length, ver);
12412
12413 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12414
12415 /* Include directory index. */
12416 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12417 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12418 dir_idx + idx_offset, NULL);
12419 else
12420 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12421
12422 /* Modification time. */
12423 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12424 &cdt, 0, 0, 0) == 0)
12425 ? cdt : 0, NULL);
12426
12427 /* File length in bytes. */
12428 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12429 0, &siz, 0, 0) == 0)
12430 ? siz : 0, NULL);
12431 #else
12432 output_line_string (str_form,
12433 files[file_idx].path + dirs[dir_idx].length,
12434 "File Entry", (unsigned) i + 1);
12435
12436 /* Include directory index. */
12437 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12438 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12439 dir_idx + idx_offset, NULL);
12440 else
12441 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12442
12443 if (dwarf_version >= 5)
12444 continue;
12445
12446 /* Modification time. */
12447 dw2_asm_output_data_uleb128 (0, NULL);
12448
12449 /* File length in bytes. */
12450 dw2_asm_output_data_uleb128 (0, NULL);
12451 #endif /* VMS_DEBUGGING_INFO */
12452 }
12453
12454 if (dwarf_version < 5)
12455 dw2_asm_output_data (1, 0, "End file name table");
12456 }
12457
12458
12459 /* Output one line number table into the .debug_line section. */
12460
12461 static void
12462 output_one_line_info_table (dw_line_info_table *table)
12463 {
12464 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12465 unsigned int current_line = 1;
12466 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12467 dw_line_info_entry *ent, *prev_addr;
12468 size_t i;
12469 unsigned int view;
12470
12471 view = 0;
12472
12473 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12474 {
12475 switch (ent->opcode)
12476 {
12477 case LI_set_address:
12478 /* ??? Unfortunately, we have little choice here currently, and
12479 must always use the most general form. GCC does not know the
12480 address delta itself, so we can't use DW_LNS_advance_pc. Many
12481 ports do have length attributes which will give an upper bound
12482 on the address range. We could perhaps use length attributes
12483 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
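/* For reference, the extended-opcode sequence emitted below is, byte for
   byte (assuming, say, an 8-byte DWARF2_ADDR_SIZE): 0x00 (extended opcode
   escape), ULEB128 length 9, DW_LNE_set_address (0x02), then the 8-byte
   address of LINE_LABEL.  */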
12484 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12485
12486 view = 0;
12487
12488 /* This can handle any delta. This takes
12489 4+DWARF2_ADDR_SIZE bytes. */
12490 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12491 debug_variable_location_views
12492 ? ", reset view to 0" : "");
12493 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12494 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12495 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12496
12497 prev_addr = ent;
12498 break;
12499
12500 case LI_adv_address:
12501 {
12502 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12503 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12504 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12505
12506 view++;
12507
12508 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12509 dw2_asm_output_delta (2, line_label, prev_label,
12510 "from %s to %s", prev_label, line_label);
12511
12512 prev_addr = ent;
12513 break;
12514 }
12515
12516 case LI_set_line:
12517 if (ent->val == current_line)
12518 {
12519 /* We still need to start a new row, so output a copy insn. */
12520 dw2_asm_output_data (1, DW_LNS_copy,
12521 "copy line %u", current_line);
12522 }
12523 else
12524 {
12525 int line_offset = ent->val - current_line;
12526 int line_delta = line_offset - DWARF_LINE_BASE;
12527
12528 current_line = ent->val;
12529 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12530 {
12531 /* This can handle deltas from -10 to 234, using the current
12532 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12533 This takes 1 byte. */
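/* Concretely, the special opcode emitted here is
   DWARF_LINE_OPCODE_BASE + line_delta, which a consumer decodes as
   "advance the line register by line_offset and append a row"; the address
   advance encoded in the opcode is zero, since any address change was
   already emitted separately above.  */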
12534 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12535 "line %u", current_line);
12536 }
12537 else
12538 {
12539 /* This can handle any delta. This takes at least 4 bytes,
12540 depending on the value being encoded. */
12541 dw2_asm_output_data (1, DW_LNS_advance_line,
12542 "advance to line %u", current_line);
12543 dw2_asm_output_data_sleb128 (line_offset, NULL);
12544 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12545 }
12546 }
12547 break;
12548
12549 case LI_set_file:
12550 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12551 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12552 break;
12553
12554 case LI_set_column:
12555 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12556 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12557 break;
12558
12559 case LI_negate_stmt:
12560 current_is_stmt = !current_is_stmt;
12561 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12562 "is_stmt %d", current_is_stmt);
12563 break;
12564
12565 case LI_set_prologue_end:
12566 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12567 "set prologue end");
12568 break;
12569
12570 case LI_set_epilogue_begin:
12571 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12572 "set epilogue begin");
12573 break;
12574
12575 case LI_set_discriminator:
12576 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12577 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12578 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12579 dw2_asm_output_data_uleb128 (ent->val, NULL);
12580 break;
12581 }
12582 }
12583
12584 /* Emit debug info for the address of the end of the table. */
12585 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12586 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12587 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12588 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12589
12590 dw2_asm_output_data (1, 0, "end sequence");
12591 dw2_asm_output_data_uleb128 (1, NULL);
12592 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12593 }
12594
12595 /* Output the source line number correspondence information. This
12596 information goes into the .debug_line section. */
12597
12598 static void
12599 output_line_info (bool prologue_only)
12600 {
12601 static unsigned int generation;
12602 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12603 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12604 bool saw_one = false;
12605 int opc;
12606
12607 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12608 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12609 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12610 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12611
12612 if (!XCOFF_DEBUGGING_INFO)
12613 {
12614 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12615 dw2_asm_output_data (4, 0xffffffff,
12616 "Initial length escape value indicating 64-bit DWARF extension");
12617 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12618 "Length of Source Line Info");
12619 }
12620
12621 ASM_OUTPUT_LABEL (asm_out_file, l1);
12622
12623 output_dwarf_version ();
12624 if (dwarf_version >= 5)
12625 {
12626 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12627 dw2_asm_output_data (1, 0, "Segment Size");
12628 }
12629 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12630 ASM_OUTPUT_LABEL (asm_out_file, p1);
12631
12632 /* Define the architecture-dependent minimum instruction length (in bytes).
12633 In this implementation of DWARF, this field is used for information
12634 purposes only. Since GCC generates assembly language, we have no
12635 a priori knowledge of how many instruction bytes are generated for each
12636 source line, and therefore can use only the DW_LNE_set_address and
12637 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12638 this as '1', which is "correct enough" for all architectures,
12639 and don't let the target override. */
12640 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12641
12642 if (dwarf_version >= 4)
12643 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12644 "Maximum Operations Per Instruction");
12645 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12646 "Default is_stmt_start flag");
12647 dw2_asm_output_data (1, DWARF_LINE_BASE,
12648 "Line Base Value (Special Opcodes)");
12649 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12650 "Line Range Value (Special Opcodes)");
12651 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12652 "Special Opcode Base");
12653
12654 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12655 {
12656 int n_op_args;
12657 switch (opc)
12658 {
12659 case DW_LNS_advance_pc:
12660 case DW_LNS_advance_line:
12661 case DW_LNS_set_file:
12662 case DW_LNS_set_column:
12663 case DW_LNS_fixed_advance_pc:
12664 case DW_LNS_set_isa:
12665 n_op_args = 1;
12666 break;
12667 default:
12668 n_op_args = 0;
12669 break;
12670 }
12671
12672 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12673 opc, n_op_args);
12674 }
12675
12676 /* Write out the information about the files we use. */
12677 output_file_names ();
12678 ASM_OUTPUT_LABEL (asm_out_file, p2);
12679 if (prologue_only)
12680 {
12681 /* Output the marker for the end of the line number info. */
12682 ASM_OUTPUT_LABEL (asm_out_file, l2);
12683 return;
12684 }
12685
12686 if (separate_line_info)
12687 {
12688 dw_line_info_table *table;
12689 size_t i;
12690
12691 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12692 if (table->in_use)
12693 {
12694 output_one_line_info_table (table);
12695 saw_one = true;
12696 }
12697 }
12698 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12699 {
12700 output_one_line_info_table (cold_text_section_line_info);
12701 saw_one = true;
12702 }
12703
12704 /* ??? Some Darwin linkers crash on a .debug_line section with no
12705 sequences. Further, merely a DW_LNE_end_sequence entry is not
12706 sufficient -- the address column must also be initialized.
12707 Make sure to output at least one set_address/end_sequence pair,
12708 choosing .text since that section is always present. */
12709 if (text_section_line_info->in_use || !saw_one)
12710 output_one_line_info_table (text_section_line_info);
12711
12712 /* Output the marker for the end of the line number info. */
12713 ASM_OUTPUT_LABEL (asm_out_file, l2);
12714 }
12715 \f
12716 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12717
12718 static inline bool
12719 need_endianity_attribute_p (bool reverse)
12720 {
12721 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12722 }
12723
12724 /* Given a pointer to a tree node for some base type, return a pointer to
12725 a DIE that describes the given type. REVERSE is true if the type is
12726 to be interpreted in the reverse storage order wrt the target order.
12727
12728 This routine must only be called for GCC type nodes that correspond to
12729 Dwarf base (fundamental) types. */
12730
12731 static dw_die_ref
12732 base_type_die (tree type, bool reverse)
12733 {
12734 dw_die_ref base_type_result;
12735 enum dwarf_type encoding;
12736 bool fpt_used = false;
12737 struct fixed_point_type_info fpt_info;
12738 tree type_bias = NULL_TREE;
12739
12740 /* If this is a subtype that should not be emitted as a subrange type,
12741 use the base type. See subrange_type_for_debug_p. */
12742 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12743 type = TREE_TYPE (type);
12744
12745 switch (TREE_CODE (type))
12746 {
12747 case INTEGER_TYPE:
12748 if ((dwarf_version >= 4 || !dwarf_strict)
12749 && TYPE_NAME (type)
12750 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12751 && DECL_IS_BUILTIN (TYPE_NAME (type))
12752 && DECL_NAME (TYPE_NAME (type)))
12753 {
12754 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12755 if (strcmp (name, "char16_t") == 0
12756 || strcmp (name, "char32_t") == 0)
12757 {
12758 encoding = DW_ATE_UTF;
12759 break;
12760 }
12761 }
12762 if ((dwarf_version >= 3 || !dwarf_strict)
12763 && lang_hooks.types.get_fixed_point_type_info)
12764 {
12765 memset (&fpt_info, 0, sizeof (fpt_info));
12766 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12767 {
12768 fpt_used = true;
12769 encoding = ((TYPE_UNSIGNED (type))
12770 ? DW_ATE_unsigned_fixed
12771 : DW_ATE_signed_fixed);
12772 break;
12773 }
12774 }
12775 if (TYPE_STRING_FLAG (type))
12776 {
12777 if (TYPE_UNSIGNED (type))
12778 encoding = DW_ATE_unsigned_char;
12779 else
12780 encoding = DW_ATE_signed_char;
12781 }
12782 else if (TYPE_UNSIGNED (type))
12783 encoding = DW_ATE_unsigned;
12784 else
12785 encoding = DW_ATE_signed;
12786
12787 if (!dwarf_strict
12788 && lang_hooks.types.get_type_bias)
12789 type_bias = lang_hooks.types.get_type_bias (type);
12790 break;
12791
12792 case REAL_TYPE:
12793 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12794 {
12795 if (dwarf_version >= 3 || !dwarf_strict)
12796 encoding = DW_ATE_decimal_float;
12797 else
12798 encoding = DW_ATE_lo_user;
12799 }
12800 else
12801 encoding = DW_ATE_float;
12802 break;
12803
12804 case FIXED_POINT_TYPE:
12805 if (!(dwarf_version >= 3 || !dwarf_strict))
12806 encoding = DW_ATE_lo_user;
12807 else if (TYPE_UNSIGNED (type))
12808 encoding = DW_ATE_unsigned_fixed;
12809 else
12810 encoding = DW_ATE_signed_fixed;
12811 break;
12812
12813 /* Dwarf2 doesn't know anything about complex ints, so use
12814 a user-defined type for them. */
12815 case COMPLEX_TYPE:
12816 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12817 encoding = DW_ATE_complex_float;
12818 else
12819 encoding = DW_ATE_lo_user;
12820 break;
12821
12822 case BOOLEAN_TYPE:
12823 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12824 encoding = DW_ATE_boolean;
12825 break;
12826
12827 default:
12828 /* No other TREE_CODEs are Dwarf fundamental types. */
12829 gcc_unreachable ();
12830 }
12831
12832 base_type_result = new_die_raw (DW_TAG_base_type);
12833
12834 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12835 int_size_in_bytes (type));
12836 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12837
12838 if (need_endianity_attribute_p (reverse))
12839 add_AT_unsigned (base_type_result, DW_AT_endianity,
12840 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12841
12842 add_alignment_attribute (base_type_result, type);
12843
12844 if (fpt_used)
12845 {
12846 switch (fpt_info.scale_factor_kind)
12847 {
12848 case fixed_point_scale_factor_binary:
12849 add_AT_int (base_type_result, DW_AT_binary_scale,
12850 fpt_info.scale_factor.binary);
12851 break;
12852
12853 case fixed_point_scale_factor_decimal:
12854 add_AT_int (base_type_result, DW_AT_decimal_scale,
12855 fpt_info.scale_factor.decimal);
12856 break;
12857
12858 case fixed_point_scale_factor_arbitrary:
12859 /* Arbitrary scale factors cannot be described in standard DWARF,
12860 yet. */
12861 if (!dwarf_strict)
12862 {
12863 /* Describe the scale factor as a rational constant. */
12864 const dw_die_ref scale_factor
12865 = new_die (DW_TAG_constant, comp_unit_die (), type);
12866
12867 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12868 fpt_info.scale_factor.arbitrary.numerator);
12869 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12870 fpt_info.scale_factor.arbitrary.denominator);
12871
12872 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12873 }
12874 break;
12875
12876 default:
12877 gcc_unreachable ();
12878 }
12879 }
12880
12881 if (type_bias)
12882 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12883 dw_scalar_form_constant
12884 | dw_scalar_form_exprloc
12885 | dw_scalar_form_reference,
12886 NULL);
12887
12888 return base_type_result;
12889 }
12890
12891 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12892 named 'auto' in its type: return true for it, false otherwise. */
12893
12894 static inline bool
12895 is_cxx_auto (tree type)
12896 {
12897 if (is_cxx ())
12898 {
12899 tree name = TYPE_IDENTIFIER (type);
12900 if (name == get_identifier ("auto")
12901 || name == get_identifier ("decltype(auto)"))
12902 return true;
12903 }
12904 return false;
12905 }
12906
12907 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12908 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12909
12910 static inline int
12911 is_base_type (tree type)
12912 {
12913 switch (TREE_CODE (type))
12914 {
12915 case INTEGER_TYPE:
12916 case REAL_TYPE:
12917 case FIXED_POINT_TYPE:
12918 case COMPLEX_TYPE:
12919 case BOOLEAN_TYPE:
12920 return 1;
12921
12922 case VOID_TYPE:
12923 case ARRAY_TYPE:
12924 case RECORD_TYPE:
12925 case UNION_TYPE:
12926 case QUAL_UNION_TYPE:
12927 case ENUMERAL_TYPE:
12928 case FUNCTION_TYPE:
12929 case METHOD_TYPE:
12930 case POINTER_TYPE:
12931 case REFERENCE_TYPE:
12932 case NULLPTR_TYPE:
12933 case OFFSET_TYPE:
12934 case LANG_TYPE:
12935 case VECTOR_TYPE:
12936 return 0;
12937
12938 default:
12939 if (is_cxx_auto (type))
12940 return 0;
12941 gcc_unreachable ();
12942 }
12943
12944 return 0;
12945 }
12946
12947 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12948 node, return the size in bits for the type if it is a constant, or else
12949 return the alignment for the type if the type's size is not constant, or
12950 else return BITS_PER_WORD if the type actually turns out to be an
12951 ERROR_MARK node. */
12952
12953 static inline unsigned HOST_WIDE_INT
12954 simple_type_size_in_bits (const_tree type)
12955 {
12956 if (TREE_CODE (type) == ERROR_MARK)
12957 return BITS_PER_WORD;
12958 else if (TYPE_SIZE (type) == NULL_TREE)
12959 return 0;
12960 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12961 return tree_to_uhwi (TYPE_SIZE (type));
12962 else
12963 return TYPE_ALIGN (type);
12964 }
12965
12966 /* Similarly, but return an offset_int instead of UHWI. */
12967
12968 static inline offset_int
12969 offset_int_type_size_in_bits (const_tree type)
12970 {
12971 if (TREE_CODE (type) == ERROR_MARK)
12972 return BITS_PER_WORD;
12973 else if (TYPE_SIZE (type) == NULL_TREE)
12974 return 0;
12975 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12976 return wi::to_offset (TYPE_SIZE (type));
12977 else
12978 return TYPE_ALIGN (type);
12979 }
12980
12981 /* Given a pointer to a tree node for a subrange type, return a pointer
12982 to a DIE that describes the given type. */
12983
12984 static dw_die_ref
12985 subrange_type_die (tree type, tree low, tree high, tree bias,
12986 dw_die_ref context_die)
12987 {
12988 dw_die_ref subrange_die;
12989 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12990
12991 if (context_die == NULL)
12992 context_die = comp_unit_die ();
12993
12994 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12995
12996 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12997 {
12998 /* The size of the subrange type and its base type do not match,
12999 so we need to generate a size attribute for the subrange type. */
13000 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13001 }
13002
13003 add_alignment_attribute (subrange_die, type);
13004
13005 if (low)
13006 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13007 if (high)
13008 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13009 if (bias && !dwarf_strict)
13010 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13011 dw_scalar_form_constant
13012 | dw_scalar_form_exprloc
13013 | dw_scalar_form_reference,
13014 NULL);
13015
13016 return subrange_die;
13017 }
13018
13019 /* Returns the (const and/or volatile) cv_qualifiers associated with
13020 the decl node. This will normally be augmented with the
13021 cv_qualifiers of the underlying type in add_type_attribute. */
13022
13023 static int
13024 decl_quals (const_tree decl)
13025 {
13026 return ((TREE_READONLY (decl)
13027 /* The C++ front-end correctly marks reference-typed
13028 variables as readonly, but from a language (and debug
13029 info) standpoint they are not const-qualified. */
13030 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13031 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13032 | (TREE_THIS_VOLATILE (decl)
13033 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13034 }
13035
13036 /* Determine the TYPE whose qualifiers match the largest strict subset
13037 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13038 qualifiers outside QUAL_MASK. */
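/* For instance, if TYPE is "const volatile int" (TYPE_QUAL_CONST
   | TYPE_QUAL_VOLATILE) and a plain "const int" variant exists, that
   variant is the nearest strict subset and TYPE_QUAL_CONST is returned;
   if no qualified variant matches, 0 (TYPE_UNQUALIFIED) is returned and
   the caller starts from the unqualified type.  */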
13039
13040 static int
13041 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13042 {
13043 tree t;
13044 int best_rank = 0, best_qual = 0, max_rank;
13045
13046 type_quals &= qual_mask;
13047 max_rank = popcount_hwi (type_quals) - 1;
13048
13049 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13050 t = TYPE_NEXT_VARIANT (t))
13051 {
13052 int q = TYPE_QUALS (t) & qual_mask;
13053
13054 if ((q & type_quals) == q && q != type_quals
13055 && check_base_type (t, type))
13056 {
13057 int rank = popcount_hwi (q);
13058
13059 if (rank > best_rank)
13060 {
13061 best_rank = rank;
13062 best_qual = q;
13063 }
13064 }
13065 }
13066
13067 return best_qual;
13068 }
13069
13070 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13071 static const dwarf_qual_info_t dwarf_qual_info[] =
13072 {
13073 { TYPE_QUAL_CONST, DW_TAG_const_type },
13074 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13075 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13076 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13077 };
13078 static const unsigned int dwarf_qual_info_size
13079 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13080
13081 /* If DIE is a qualified DIE of some base DIE with the same parent,
13082 return the base DIE, otherwise return NULL. Set MASK to the
13083 qualifiers added compared to the returned DIE. */
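/* For example, if DIE is a DW_TAG_volatile_type whose DW_AT_type points to
   a sibling DW_TAG_const_type wrapping a base DIE, then with a nonzero
   DEPTH the walk below returns the base DIE and sets MASK to
   TYPE_QUAL_VOLATILE | TYPE_QUAL_CONST.  */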
13084
13085 static dw_die_ref
13086 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13087 {
13088 unsigned int i;
13089 for (i = 0; i < dwarf_qual_info_size; i++)
13090 if (die->die_tag == dwarf_qual_info[i].t)
13091 break;
13092 if (i == dwarf_qual_info_size)
13093 return NULL;
13094 if (vec_safe_length (die->die_attr) != 1)
13095 return NULL;
13096 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13097 if (type == NULL || type->die_parent != die->die_parent)
13098 return NULL;
13099 *mask |= dwarf_qual_info[i].q;
13100 if (depth)
13101 {
13102 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13103 if (ret)
13104 return ret;
13105 }
13106 return type;
13107 }
13108
13109 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13110 entry that chains the modifiers specified by CV_QUALS in front of the
13111 given type. REVERSE is true if the type is to be interpreted in the
13112 reverse storage order wrt the target order. */
13113
13114 static dw_die_ref
13115 modified_type_die (tree type, int cv_quals, bool reverse,
13116 dw_die_ref context_die)
13117 {
13118 enum tree_code code = TREE_CODE (type);
13119 dw_die_ref mod_type_die;
13120 dw_die_ref sub_die = NULL;
13121 tree item_type = NULL;
13122 tree qualified_type;
13123 tree name, low, high;
13124 dw_die_ref mod_scope;
13125 /* Only these cv-qualifiers are currently handled. */
13126 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13127 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13128 ENCODE_QUAL_ADDR_SPACE(~0U));
13129 const bool reverse_base_type
13130 = need_endianity_attribute_p (reverse) && is_base_type (type);
13131
13132 if (code == ERROR_MARK)
13133 return NULL;
13134
13135 if (lang_hooks.types.get_debug_type)
13136 {
13137 tree debug_type = lang_hooks.types.get_debug_type (type);
13138
13139 if (debug_type != NULL_TREE && debug_type != type)
13140 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13141 }
13142
13143 cv_quals &= cv_qual_mask;
13144
13145 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13146 tag modifier (and not an attribute) that old consumers won't be
13147 able to handle. */
13148 if (dwarf_version < 3)
13149 cv_quals &= ~TYPE_QUAL_RESTRICT;
13150
13151 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13152 if (dwarf_version < 5)
13153 cv_quals &= ~TYPE_QUAL_ATOMIC;
13154
13155 /* See if we already have the appropriately qualified variant of
13156 this type. */
13157 qualified_type = get_qualified_type (type, cv_quals);
13158
13159 if (qualified_type == sizetype)
13160 {
13161 /* Try not to expose the internal sizetype type's name. */
13162 if (TYPE_NAME (qualified_type)
13163 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13164 {
13165 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13166
13167 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13168 && (TYPE_PRECISION (t)
13169 == TYPE_PRECISION (qualified_type))
13170 && (TYPE_UNSIGNED (t)
13171 == TYPE_UNSIGNED (qualified_type)));
13172 qualified_type = t;
13173 }
13174 else if (qualified_type == sizetype
13175 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13176 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13177 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13178 qualified_type = size_type_node;
13179 }
13180
13181 /* If we do, then we can just use its DIE, if it exists. */
13182 if (qualified_type)
13183 {
13184 mod_type_die = lookup_type_die (qualified_type);
13185
13186 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13187 dealt with specially: the DIE with the attribute, if it exists, is
13188 placed immediately after the regular DIE for the same base type. */
13189 if (mod_type_die
13190 && (!reverse_base_type
13191 || ((mod_type_die = mod_type_die->die_sib) != NULL
13192 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13193 return mod_type_die;
13194 }
13195
13196 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13197
13198 /* Handle C typedef types. */
13199 if (name
13200 && TREE_CODE (name) == TYPE_DECL
13201 && DECL_ORIGINAL_TYPE (name)
13202 && !DECL_ARTIFICIAL (name))
13203 {
13204 tree dtype = TREE_TYPE (name);
13205
13206 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13207 if (qualified_type == dtype && !reverse_base_type)
13208 {
13209 tree origin = decl_ultimate_origin (name);
13210
13211 /* Typedef variants that have an abstract origin don't get their own
13212 type DIE (see gen_typedef_die), so fall back on the ultimate
13213 abstract origin instead. */
13214 if (origin != NULL && origin != name)
13215 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13216 context_die);
13217
13218 /* For a named type, use the typedef. */
13219 gen_type_die (qualified_type, context_die);
13220 return lookup_type_die (qualified_type);
13221 }
13222 else
13223 {
13224 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13225 dquals &= cv_qual_mask;
13226 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13227 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13228 /* cv-unqualified version of named type. Just use
13229 the unnamed type to which it refers. */
13230 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13231 reverse, context_die);
13232 /* Else cv-qualified version of named type; fall through. */
13233 }
13234 }
13235
13236 mod_scope = scope_die_for (type, context_die);
13237
13238 if (cv_quals)
13239 {
13240 int sub_quals = 0, first_quals = 0;
13241 unsigned i;
13242 dw_die_ref first = NULL, last = NULL;
13243
13244 /* Determine a lesser qualified type that most closely matches
13245 this one. Then generate DW_TAG_* entries for the remaining
13246 qualifiers. */
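/* E.g. for a "const volatile" qualified type whose "const" variant exists
   as a type, sub_quals would be TYPE_QUAL_CONST, so only a
   DW_TAG_volatile_type DIE wrapping the (possibly newly created) const DIE
   has to be added by the loop below.  */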
13247 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13248 cv_qual_mask);
13249 if (sub_quals && use_debug_types)
13250 {
13251 bool needed = false;
13252 /* If emitting type units, make sure the order of qualifiers
13253 is canonical. Thus, start from unqualified type if
13254 an earlier qualifier is missing in sub_quals, but some later
13255 one is present there. */
13256 for (i = 0; i < dwarf_qual_info_size; i++)
13257 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13258 needed = true;
13259 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13260 {
13261 sub_quals = 0;
13262 break;
13263 }
13264 }
13265 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13266 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13267 {
13268 /* As not all intermediate qualified DIEs have corresponding
13269 tree types, ensure that qualified DIEs in the same scope
13270 as their DW_AT_type are emitted after their DW_AT_type,
13271 only with other qualified DIEs for the same type possibly
13272 in between them. Determine the range of such qualified
13273 DIEs now (first being the base type, last being corresponding
13274 last qualified DIE for it). */
13275 unsigned int count = 0;
13276 first = qualified_die_p (mod_type_die, &first_quals,
13277 dwarf_qual_info_size);
13278 if (first == NULL)
13279 first = mod_type_die;
13280 gcc_assert ((first_quals & ~sub_quals) == 0);
13281 for (count = 0, last = first;
13282 count < (1U << dwarf_qual_info_size);
13283 count++, last = last->die_sib)
13284 {
13285 int quals = 0;
13286 if (last == mod_scope->die_child)
13287 break;
13288 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13289 != first)
13290 break;
13291 }
13292 }
13293
13294 for (i = 0; i < dwarf_qual_info_size; i++)
13295 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13296 {
13297 dw_die_ref d;
13298 if (first && first != last)
13299 {
13300 for (d = first->die_sib; ; d = d->die_sib)
13301 {
13302 int quals = 0;
13303 qualified_die_p (d, &quals, dwarf_qual_info_size);
13304 if (quals == (first_quals | dwarf_qual_info[i].q))
13305 break;
13306 if (d == last)
13307 {
13308 d = NULL;
13309 break;
13310 }
13311 }
13312 if (d)
13313 {
13314 mod_type_die = d;
13315 continue;
13316 }
13317 }
13318 if (first)
13319 {
13320 d = new_die_raw (dwarf_qual_info[i].t);
13321 add_child_die_after (mod_scope, d, last);
13322 last = d;
13323 }
13324 else
13325 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13326 if (mod_type_die)
13327 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13328 mod_type_die = d;
13329 first_quals |= dwarf_qual_info[i].q;
13330 }
13331 }
13332 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13333 {
13334 dwarf_tag tag = DW_TAG_pointer_type;
13335 if (code == REFERENCE_TYPE)
13336 {
13337 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13338 tag = DW_TAG_rvalue_reference_type;
13339 else
13340 tag = DW_TAG_reference_type;
13341 }
13342 mod_type_die = new_die (tag, mod_scope, type);
13343
13344 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13345 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13346 add_alignment_attribute (mod_type_die, type);
13347 item_type = TREE_TYPE (type);
13348
13349 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13350 if (!ADDR_SPACE_GENERIC_P (as))
13351 {
13352 int action = targetm.addr_space.debug (as);
13353 if (action >= 0)
13354 {
13355 /* Positive values indicate an address_class. */
13356 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13357 }
13358 else
13359 {
13360 /* Negative values indicate an (inverted) segment base reg. */
13361 dw_loc_descr_ref d
13362 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13363 add_AT_loc (mod_type_die, DW_AT_segment, d);
13364 }
13365 }
13366 }
13367 else if (code == INTEGER_TYPE
13368 && TREE_TYPE (type) != NULL_TREE
13369 && subrange_type_for_debug_p (type, &low, &high))
13370 {
13371 tree bias = NULL_TREE;
13372 if (lang_hooks.types.get_type_bias)
13373 bias = lang_hooks.types.get_type_bias (type);
13374 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13375 item_type = TREE_TYPE (type);
13376 }
13377 else if (is_base_type (type))
13378 {
13379 mod_type_die = base_type_die (type, reverse);
13380
13381 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13382 if (reverse_base_type)
13383 {
13384 dw_die_ref after_die
13385 = modified_type_die (type, cv_quals, false, context_die);
13386 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13387 }
13388 else
13389 add_child_die (comp_unit_die (), mod_type_die);
13390
13391 add_pubtype (type, mod_type_die);
13392 }
13393 else
13394 {
13395 gen_type_die (type, context_die);
13396
13397 /* We have to get the type_main_variant here (and pass that to the
13398 `lookup_type_die' routine) because the ..._TYPE node we have
13399 might simply be a *copy* of some original type node (where the
13400 copy was created to help us keep track of typedef names) and
13401 that copy might have a different TYPE_UID from the original
13402 ..._TYPE node. */
13403 if (TREE_CODE (type) == FUNCTION_TYPE
13404 || TREE_CODE (type) == METHOD_TYPE)
13405 {
13406 /* For function/method types, can't just use type_main_variant here,
13407 because that can have different ref-qualifiers for C++,
13408 but try to canonicalize. */
13409 tree main = TYPE_MAIN_VARIANT (type);
13410 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13411 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13412 && check_base_type (t, main)
13413 && check_lang_type (t, type))
13414 return lookup_type_die (t);
13415 return lookup_type_die (type);
13416 }
13417 else if (TREE_CODE (type) != VECTOR_TYPE
13418 && TREE_CODE (type) != ARRAY_TYPE)
13419 return lookup_type_die (type_main_variant (type));
13420 else
13421 /* Vectors have the debugging information in the type,
13422 not the main variant. */
13423 return lookup_type_die (type);
13424 }
13425
13426 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13427 don't output a DW_TAG_typedef, since there isn't one in the
13428 user's program; just attach a DW_AT_name to the type.
13429 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13430 if the base type already has the same name. */
13431 if (name
13432 && ((TREE_CODE (name) != TYPE_DECL
13433 && (qualified_type == TYPE_MAIN_VARIANT (type)
13434 || (cv_quals == TYPE_UNQUALIFIED)))
13435 || (TREE_CODE (name) == TYPE_DECL
13436 && TREE_TYPE (name) == qualified_type
13437 && DECL_NAME (name))))
13438 {
13439 if (TREE_CODE (name) == TYPE_DECL)
13440 /* Could just call add_name_and_src_coords_attributes here,
13441 but since this is a builtin type it doesn't have any
13442 useful source coordinates anyway. */
13443 name = DECL_NAME (name);
13444 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13445 }
13446 /* This probably indicates a bug. */
13447 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13448 {
13449 name = TYPE_IDENTIFIER (type);
13450 add_name_attribute (mod_type_die,
13451 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13452 }
13453
13454 if (qualified_type && !reverse_base_type)
13455 equate_type_number_to_die (qualified_type, mod_type_die);
13456
13457 if (item_type)
13458 /* We must do this after the equate_type_number_to_die call, in case
13459 this is a recursive type. This ensures that the modified_type_die
13460 recursion will terminate even if the type is recursive. Recursive
13461 types are possible in Ada. */
13462 sub_die = modified_type_die (item_type,
13463 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13464 reverse,
13465 context_die);
13466
13467 if (sub_die != NULL)
13468 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13469
13470 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13471 if (TYPE_ARTIFICIAL (type))
13472 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13473
13474 return mod_type_die;
13475 }
13476
13477 /* Generate DIEs for the generic parameters of T.
13478 T must be either a generic type or a generic function.
13479 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13480
13481 static void
13482 gen_generic_params_dies (tree t)
13483 {
13484 tree parms, args;
13485 int parms_num, i;
13486 dw_die_ref die = NULL;
13487 int non_default;
13488
13489 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13490 return;
13491
13492 if (TYPE_P (t))
13493 die = lookup_type_die (t);
13494 else if (DECL_P (t))
13495 die = lookup_decl_die (t);
13496
13497 gcc_assert (die);
13498
13499 parms = lang_hooks.get_innermost_generic_parms (t);
13500 if (!parms)
13501 /* T has no generic parameters. It means T is neither a generic type
13502 nor a generic function. End of story. */
13503 return;
13504
13505 parms_num = TREE_VEC_LENGTH (parms);
13506 args = lang_hooks.get_innermost_generic_args (t);
13507 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13508 non_default = int_cst_value (TREE_CHAIN (args));
13509 else
13510 non_default = TREE_VEC_LENGTH (args);
13511 for (i = 0; i < parms_num; i++)
13512 {
13513 tree parm, arg, arg_pack_elems;
13514 dw_die_ref parm_die;
13515
13516 parm = TREE_VEC_ELT (parms, i);
13517 arg = TREE_VEC_ELT (args, i);
13518 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13519 gcc_assert (parm && TREE_VALUE (parm) && arg);
13520
13521 if (parm && TREE_VALUE (parm) && arg)
13522 {
13523 /* If PARM represents a template parameter pack,
13524 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13525 by DW_TAG_template_*_parameter DIEs for the argument
13526 pack elements of ARG. Note that ARG would then be
13527 an argument pack. */
13528 if (arg_pack_elems)
13529 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13530 arg_pack_elems,
13531 die);
13532 else
13533 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13534 true /* emit name */, die);
13535 if (i >= non_default)
13536 add_AT_flag (parm_die, DW_AT_default_value, 1);
13537 }
13538 }
13539 }
13540
13541 /* Create and return a DIE for PARM which should be
13542 the representation of a generic type parameter.
13543 For instance, in the C++ front end, PARM would be a template parameter.
13544 ARG is the argument to PARM.
13545 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to the
13546 name of PARM.
13547 PARENT_DIE is the parent DIE which the newly created DIE should be added to,
13548 as a child node. */
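/* For instance, for the C++ template "template <typename T, int N>
   struct S" instantiated as S<int, 3>, this is called once with a TYPE_DECL
   for T and the type "int", yielding a DW_TAG_template_type_param DIE, and
   once with a PARM_DECL for N and the constant 3, yielding a
   DW_TAG_template_value_param DIE.  */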
13549
13550 static dw_die_ref
13551 generic_parameter_die (tree parm, tree arg,
13552 bool emit_name_p,
13553 dw_die_ref parent_die)
13554 {
13555 dw_die_ref tmpl_die = NULL;
13556 const char *name = NULL;
13557
13558 if (!parm || !DECL_NAME (parm) || !arg)
13559 return NULL;
13560
13561 /* We support non-type generic parameters and arguments,
13562 type generic parameters and arguments, as well as
13563 generic generic parameters (a.k.a. template template parameters in C++)
13564 and arguments. */
13565 if (TREE_CODE (parm) == PARM_DECL)
13566 /* PARM is a non-type generic parameter. */
13567 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13568 else if (TREE_CODE (parm) == TYPE_DECL)
13569 /* PARM is a type generic parameter. */
13570 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13571 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13572 /* PARM is a generic generic parameter.
13573 Its DIE is a GNU extension. It shall have a
13574 DW_AT_name attribute to represent the name of the template template
13575 parameter, and a DW_AT_GNU_template_name attribute to represent the
13576 name of the template template argument. */
13577 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13578 parent_die, parm);
13579 else
13580 gcc_unreachable ();
13581
13582 if (tmpl_die)
13583 {
13584 tree tmpl_type;
13585
13586 /* If PARM is a generic parameter pack, it means we are
13587 emitting debug info for a template argument pack element.
13588 In other words, ARG is a template argument pack element.
13589 In that case, we don't emit any DW_AT_name attribute for
13590 the DIE. */
13591 if (emit_name_p)
13592 {
13593 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13594 gcc_assert (name);
13595 add_AT_string (tmpl_die, DW_AT_name, name);
13596 }
13597
13598 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13599 {
13600 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13601 TMPL_DIE should have a child DW_AT_type attribute that is set
13602 to the type of the argument to PARM, which is ARG.
13603 If PARM is a type generic parameter, TMPL_DIE should have a
13604 child DW_AT_type that is set to ARG. */
13605 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13606 add_type_attribute (tmpl_die, tmpl_type,
13607 (TREE_THIS_VOLATILE (tmpl_type)
13608 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13609 false, parent_die);
13610 }
13611 else
13612 {
13613 /* So TMPL_DIE is a DIE representing a
13614 generic generic template parameter, a.k.a. a template template
13615 parameter in C++, and ARG is a template. */
13616
13617 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13618 to the name of the argument. */
13619 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13620 if (name)
13621 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13622 }
13623
13624 if (TREE_CODE (parm) == PARM_DECL)
13625 /* So PARM is a non-type generic parameter.
13626 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13627 attribute of TMPL_DIE whose value represents the value
13628 of ARG.
13629 We must be careful here:
13630 the value of ARG might reference some function decls.
13631 We might currently be emitting debug info for a generic
13632 type, and since types are emitted before function decls, we
13633 don't know whether the function decls referenced by ARG will
13634 actually be emitted after the cgraph computations.
13635 So we must defer the generation of the DW_AT_const_value
13636 attribute until cgraph is ready. */
13637 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13638 }
13639
13640 return tmpl_die;
13641 }
13642
13643 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13644 PARM_PACK, whose arguments are given by PARM_PACK_ARGS. PARM_PACK must be
13645 a template parameter pack. The returned DIE will be a child of PARENT_DIE. */
13646
13647 static dw_die_ref
13648 template_parameter_pack_die (tree parm_pack,
13649 tree parm_pack_args,
13650 dw_die_ref parent_die)
13651 {
13652 dw_die_ref die;
13653 int j;
13654
13655 gcc_assert (parent_die && parm_pack);
13656
13657 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13658 add_name_and_src_coords_attributes (die, parm_pack);
13659 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13660 generic_parameter_die (parm_pack,
13661 TREE_VEC_ELT (parm_pack_args, j),
13662 false /* Don't emit DW_AT_name */,
13663 die);
13664 return die;
13665 }
13666
13667 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13668 an enumerated type. */
13669
13670 static inline int
13671 type_is_enum (const_tree type)
13672 {
13673 return TREE_CODE (type) == ENUMERAL_TYPE;
13674 }
13675
13676 /* Return the DBX register number described by a given RTL node. */
13677
13678 static unsigned int
13679 dbx_reg_number (const_rtx rtl)
13680 {
13681 unsigned regno = REGNO (rtl);
13682
13683 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13684
13685 #ifdef LEAF_REG_REMAP
13686 if (crtl->uses_only_leaf_regs)
13687 {
13688 int leaf_reg = LEAF_REG_REMAP (regno);
13689 if (leaf_reg != -1)
13690 regno = (unsigned) leaf_reg;
13691 }
13692 #endif
13693
13694 regno = DBX_REGISTER_NUMBER (regno);
13695 gcc_assert (regno != INVALID_REGNUM);
13696 return regno;
13697 }
13698
13699 /* Optionally add a DW_OP_piece term to a location description expression.
13700 DW_OP_piece is only added if the location description expression does not
13701 already end with DW_OP_piece. */
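/* For example, a value living in two 4-byte registers ends up described as:
   DW_OP_reg0; DW_OP_piece 4; DW_OP_reg1; DW_OP_piece 4 -- each call to this
   function terminates the piece that the preceding descriptors describe.  */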
13702
13703 static void
13704 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13705 {
13706 dw_loc_descr_ref loc;
13707
13708 if (*list_head != NULL)
13709 {
13710 /* Find the end of the chain. */
13711 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13712 ;
13713
13714 if (loc->dw_loc_opc != DW_OP_piece)
13715 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13716 }
13717 }
13718
13719 /* Return a location descriptor that designates a machine register or
13720 zero if there is none. */
13721
13722 static dw_loc_descr_ref
13723 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13724 {
13725 rtx regs;
13726
13727 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13728 return 0;
13729
13730 /* We only use "frame base" when we're sure we're talking about the
13731 post-prologue local stack frame. We do this by *not* running
13732 register elimination until this point, and recognizing the special
13733 argument pointer and soft frame pointer rtx's.
13734 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13735 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13736 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13737 {
13738 dw_loc_descr_ref result = NULL;
13739
13740 if (dwarf_version >= 4 || !dwarf_strict)
13741 {
13742 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13743 initialized);
13744 if (result)
13745 add_loc_descr (&result,
13746 new_loc_descr (DW_OP_stack_value, 0, 0));
13747 }
13748 return result;
13749 }
13750
13751 regs = targetm.dwarf_register_span (rtl);
13752
13753 if (REG_NREGS (rtl) > 1 || regs)
13754 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13755 else
13756 {
13757 unsigned int dbx_regnum = dbx_reg_number (rtl);
13758 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13759 return 0;
13760 return one_reg_loc_descriptor (dbx_regnum, initialized);
13761 }
13762 }
13763
13764 /* Return a location descriptor that designates a machine register for
13765 a given hard register number. */
13766
13767 static dw_loc_descr_ref
13768 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13769 {
13770 dw_loc_descr_ref reg_loc_descr;
13771
13772 if (regno <= 31)
13773 reg_loc_descr
13774 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13775 else
13776 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13777
13778 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13779 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13780
13781 return reg_loc_descr;
13782 }
13783
13784 /* Given an RTL of a register, return a location descriptor that
13785 designates a value that spans more than one register. */
13786
13787 static dw_loc_descr_ref
13788 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13789 enum var_init_status initialized)
13790 {
13791 int size, i;
13792 dw_loc_descr_ref loc_result = NULL;
13793
13794 /* Simple, contiguous registers. */
13795 if (regs == NULL_RTX)
13796 {
13797 unsigned reg = REGNO (rtl);
13798 int nregs;
13799
13800 #ifdef LEAF_REG_REMAP
13801 if (crtl->uses_only_leaf_regs)
13802 {
13803 int leaf_reg = LEAF_REG_REMAP (reg);
13804 if (leaf_reg != -1)
13805 reg = (unsigned) leaf_reg;
13806 }
13807 #endif
13808
13809 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13810 nregs = REG_NREGS (rtl);
13811
13812 /* At present we only track constant-sized pieces. */
13813 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13814 return NULL;
13815 size /= nregs;
13816
13817 loc_result = NULL;
13818 while (nregs--)
13819 {
13820 dw_loc_descr_ref t;
13821
13822 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13823 VAR_INIT_STATUS_INITIALIZED);
13824 add_loc_descr (&loc_result, t);
13825 add_loc_descr_op_piece (&loc_result, size);
13826 ++reg;
13827 }
13828 return loc_result;
13829 }
13830
13831 /* Now onto stupid register sets in non contiguous locations. */
13832
13833 gcc_assert (GET_CODE (regs) == PARALLEL);
13834
13835 /* At present we only track constant-sized pieces. */
13836 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13837 return NULL;
13838 loc_result = NULL;
13839
13840 for (i = 0; i < XVECLEN (regs, 0); ++i)
13841 {
13842 dw_loc_descr_ref t;
13843
13844 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13845 VAR_INIT_STATUS_INITIALIZED);
13846 add_loc_descr (&loc_result, t);
13847 add_loc_descr_op_piece (&loc_result, size);
13848 }
13849
13850 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13851 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13852 return loc_result;
13853 }
13854
13855 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13856
13857 /* Return a location descriptor that designates a constant i,
13858 as a compound operation from constant (i >> shift), constant shift
13859 and DW_OP_shl. */
13860
13861 static dw_loc_descr_ref
13862 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13863 {
13864 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13865 add_loc_descr (&ret, int_loc_descriptor (shift));
13866 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13867 return ret;
13868 }
13869
13870 /* Return a location descriptor that designates constant POLY_I. */
13871
13872 static dw_loc_descr_ref
13873 int_loc_descriptor (poly_int64 poly_i)
13874 {
13875 enum dwarf_location_atom op;
13876
13877 HOST_WIDE_INT i;
13878 if (!poly_i.is_constant (&i))
13879 {
13880 /* Create location descriptions for the non-constant part and
13881 add any constant offset at the end. */
13882 dw_loc_descr_ref ret = NULL;
13883 HOST_WIDE_INT constant = poly_i.coeffs[0];
13884 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13885 {
13886 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13887 if (coeff != 0)
13888 {
13889 dw_loc_descr_ref start = ret;
13890 unsigned int factor;
13891 int bias;
13892 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13893 (j, &factor, &bias);
13894
13895 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13896 add COEFF * (REGNO / FACTOR) now and subtract
13897 COEFF * BIAS from the final constant part. */
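/* Roughly, the descriptors appended below are: DW_OP_bregN 0 for the
   indeterminate register, then (when FACTOR is not folded into COEFF)
   DW_OP_lit<log2 FACTOR>; DW_OP_shr, then DW_OP_lit<COEFF>; DW_OP_mul if
   COEFF is not 1, and finally DW_OP_plus to combine with any earlier
   terms.  */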
13898 constant -= coeff * bias;
13899 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13900 if (coeff % factor == 0)
13901 coeff /= factor;
13902 else
13903 {
13904 int amount = exact_log2 (factor);
13905 gcc_assert (amount >= 0);
13906 add_loc_descr (&ret, int_loc_descriptor (amount));
13907 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13908 }
13909 if (coeff != 1)
13910 {
13911 add_loc_descr (&ret, int_loc_descriptor (coeff));
13912 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13913 }
13914 if (start)
13915 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13916 }
13917 }
13918 loc_descr_plus_const (&ret, constant);
13919 return ret;
13920 }
13921
13922 /* Pick the smallest representation of a constant, rather than just
13923 defaulting to the LEB encoding. */
13924 if (i >= 0)
13925 {
13926 int clz = clz_hwi (i);
13927 int ctz = ctz_hwi (i);
13928 if (i <= 31)
13929 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13930 else if (i <= 0xff)
13931 op = DW_OP_const1u;
13932 else if (i <= 0xffff)
13933 op = DW_OP_const2u;
13934 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13935 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13936 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13937 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13938 while DW_OP_const4u is 5 bytes. */
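/* For instance, assuming a 64-bit HOST_WIDE_INT, i = 0x80000000 has
   clz = 32 and ctz = 31, so it is emitted as DW_OP_lit16; DW_OP_lit27;
   DW_OP_shl (3 bytes) instead of DW_OP_const4u (5 bytes).  */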
13939 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13940 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13941 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13942 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13943 while DW_OP_const4u is 5 bytes. */
13944 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13945
13946 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13947 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13948 <= 4)
13949 {
13950 /* As i >= 2**31, the double cast above will yield a negative number.
13951 Since wrapping is defined in DWARF expressions we can output big
13952 positive integers as small negative ones, regardless of the size
13953 of host wide ints.
13954
13955 Here, since the evaluator will handle 32-bit values and since i >=
13956 2**31, we know it's going to be interpreted as a negative literal:
13957 store it this way if we can do better than 5 bytes this way. */
13958 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13959 }
13960 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13961 op = DW_OP_const4u;
13962
13963 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13964 least 6 bytes: see if we can do better before falling back to it. */
13965 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13966 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13967 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13968 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13969 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13970 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13971 >= HOST_BITS_PER_WIDE_INT)
13972 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13973 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13974 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13975 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13976 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13977 && size_of_uleb128 (i) > 6)
13978 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13979 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13980 else
13981 op = DW_OP_constu;
13982 }
13983 else
13984 {
13985 if (i >= -0x80)
13986 op = DW_OP_const1s;
13987 else if (i >= -0x8000)
13988 op = DW_OP_const2s;
13989 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13990 {
13991 if (size_of_int_loc_descriptor (i) < 5)
13992 {
13993 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13994 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13995 return ret;
13996 }
13997 op = DW_OP_const4s;
13998 }
13999 else
14000 {
14001 if (size_of_int_loc_descriptor (i)
14002 < (unsigned long) 1 + size_of_sleb128 (i))
14003 {
14004 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14005 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14006 return ret;
14007 }
14008 op = DW_OP_consts;
14009 }
14010 }
14011
14012 return new_loc_descr (op, i, 0);
14013 }
14014
14015 /* Likewise, for unsigned constants. */
14016
14017 static dw_loc_descr_ref
14018 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14019 {
14020 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14021 const unsigned HOST_WIDE_INT max_uint
14022 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14023
14024 /* If possible, use the clever signed constants handling. */
14025 if (i <= max_int)
14026 return int_loc_descriptor ((HOST_WIDE_INT) i);
14027
14028 /* Here, we are left with positive numbers that cannot be represented as
14029 HOST_WIDE_INT, i.e.:
14030 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14031
14032 Using a DW_OP_const4/8/./u operation to encode them consumes a lot of bytes
14033 whereas it may be better to output a negative integer: thanks to integer
14034 wrapping, we know that:
14035 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14036 = x - 2 * (max (HOST_WIDE_INT) + 1)
14037 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14038 small negative integers. Let's try that in cases where it will clearly
14039 improve the encoding: there is no gain turning DW_OP_const4u into
14040 DW_OP_const4s. */
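/* For instance, with an 8-byte address size and a 64-bit HOST_WIDE_INT,
   i = 0xffffffffffffffff wraps to the signed value -1, so the branch below
   emits DW_OP_const1s -1 (2 bytes) rather than DW_OP_const8u (9 bytes).  */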
14041 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14042 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14043 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14044 {
14045 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14046
14047 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14048 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14049 const HOST_WIDE_INT second_shift
14050 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14051
14052 /* So we finally have:
14053 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14054 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14055 return int_loc_descriptor (second_shift);
14056 }
14057
14058 /* Last chance: fallback to a simple constant operation. */
14059 return new_loc_descr
14060 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14061 ? DW_OP_const4u
14062 : DW_OP_const8u,
14063 i, 0);
14064 }
14065
14066 /* Generate and return a location description that computes the unsigned
14067 comparison of the two stack top entries (a OP b where b is the top-most
14068 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14069 LE_EXPR, GT_EXPR or GE_EXPR. */
14070
14071 static dw_loc_descr_ref
14072 uint_comparison_loc_list (enum tree_code kind)
14073 {
14074 enum dwarf_location_atom op, flip_op;
14075 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14076
14077 switch (kind)
14078 {
14079 case LT_EXPR:
14080 op = DW_OP_lt;
14081 break;
14082 case LE_EXPR:
14083 op = DW_OP_le;
14084 break;
14085 case GT_EXPR:
14086 op = DW_OP_gt;
14087 break;
14088 case GE_EXPR:
14089 op = DW_OP_ge;
14090 break;
14091 default:
14092 gcc_unreachable ();
14093 }
14094
14095 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14096 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14097
14098 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14099 possible to perform unsigned comparisons: we just have to distinguish
14100 two cases:
14101
14102 1. when a and b have the same sign (as signed integers); then we should
14103 return: a OP(signed) b;
14104
14105 2. when a is a negative signed integer while b is a positive one, then a
14106 is a greater unsigned integer than b; likewise when a and b's roles
14107 are flipped.
14108
14109 So first, compare the sign of the two operands. */
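/* A small 8-bit illustration of case 2, for kind == LT_EXPR: with
   a = 0x80 (128 unsigned, -128 signed) and b = 0x01 the signs differ, so
   the flipped operator DW_OP_gt is evaluated instead: -128 > 1 is false,
   matching the unsigned result 128 < 1.  */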
14110 ret = new_loc_descr (DW_OP_over, 0, 0);
14111 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14112 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14113 /* If they have different signs (i.e. they have different sign bits), then
14114 the stack top value now has the sign bit set and is thus smaller than
14115 zero when interpreted as a signed integer. */
14116 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14117 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14118 add_loc_descr (&ret, bra_node);
14119
14120 /* We are in case 1. At this point, we know both operands have the same
14121 sign, so it's safe to use the built-in signed comparison. */
14122 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14123 add_loc_descr (&ret, jmp_node);
14124
14125 /* We are in case 2. Here, we know both operands do not have the same sign,
14126 so we have to flip the signed comparison. */
14127 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14128 tmp = new_loc_descr (flip_op, 0, 0);
14129 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14130 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14131 add_loc_descr (&ret, tmp);
14132
14133 /* This dummy operation is necessary to make the two branches join. */
14134 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14135 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14136 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14137 add_loc_descr (&ret, tmp);
14138
14139 return ret;
14140 }
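
/* Illustrative host-side sketch (not part of GCC) of the scheme above,
   assuming 64-bit stack entries; unsigned_lt_via_signed is a name made up
   for the example.  */
#include <stdint.h>

static int
unsigned_lt_via_signed (int64_t a, int64_t b)
{
  if ((a ^ b) < 0)   /* DW_OP_xor: different sign bits, so flip (case 2).  */
    return a > b;    /* The negative operand is the larger unsigned value.  */
  return a < b;      /* Same sign (case 1): the signed compare is correct.  */
}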
14141
14142 /* Likewise, but takes the location description lists (might be destructive on
14143 them). Return NULL if either is NULL or if concatenation fails. */
14144
14145 static dw_loc_list_ref
14146 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14147 enum tree_code kind)
14148 {
14149 if (left == NULL || right == NULL)
14150 return NULL;
14151
14152 add_loc_list (&left, right);
14153 if (left == NULL)
14154 return NULL;
14155
14156 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14157 return left;
14158 }
14159
14160 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14161 without actually allocating it. */
14162
14163 static unsigned long
14164 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14165 {
14166 return size_of_int_loc_descriptor (i >> shift)
14167 + size_of_int_loc_descriptor (shift)
14168 + 1;
14169 }
14170
14171 /* Return size_of_locs (int_loc_descriptor (i)) without
14172 actually allocating it. */
14173
14174 static unsigned long
14175 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14176 {
14177 unsigned long s;
14178
14179 if (i >= 0)
14180 {
14181 int clz, ctz;
14182 if (i <= 31)
14183 return 1;
14184 else if (i <= 0xff)
14185 return 2;
14186 else if (i <= 0xffff)
14187 return 3;
14188 clz = clz_hwi (i);
14189 ctz = ctz_hwi (i);
14190 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14191 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14192 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14193 - clz - 5);
14194 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14195 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14196 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14197 - clz - 8);
14198 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14199 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14200 <= 4)
14201 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14202 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14203 return 5;
14204 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14205 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14206 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14207 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14208 - clz - 8);
14209 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14210 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14211 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14212 - clz - 16);
14213 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14214 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14215 && s > 6)
14216 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14217 - clz - 32);
14218 else
14219 return 1 + s;
14220 }
14221 else
14222 {
14223 if (i >= -0x80)
14224 return 2;
14225 else if (i >= -0x8000)
14226 return 3;
14227 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14228 {
14229 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14230 {
14231 s = size_of_int_loc_descriptor (-i) + 1;
14232 if (s < 5)
14233 return s;
14234 }
14235 return 5;
14236 }
14237 else
14238 {
14239 unsigned long r = 1 + size_of_sleb128 (i);
14240 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14241 {
14242 s = size_of_int_loc_descriptor (-i) + 1;
14243 if (s < r)
14244 return s;
14245 }
14246 return r;
14247 }
14248 }
14249 }
14250
14251 /* Return a location description representing the "address" of an integer
14252 value. This can appear only as a top-level expression. */
14253
14254 static dw_loc_descr_ref
14255 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14256 {
14257 int litsize;
14258 dw_loc_descr_ref loc_result = NULL;
14259
14260 if (!(dwarf_version >= 4 || !dwarf_strict))
14261 return NULL;
14262
14263 litsize = size_of_int_loc_descriptor (i);
14264 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14265 is more compact. For DW_OP_stack_value we need:
14266 litsize + 1 (DW_OP_stack_value)
14267 and for DW_OP_implicit_value:
14268 1 (DW_OP_implicit_value) + 1 (length) + size. */
14269 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14270 {
14271 loc_result = int_loc_descriptor (i);
14272 add_loc_descr (&loc_result,
14273 new_loc_descr (DW_OP_stack_value, 0, 0));
14274 return loc_result;
14275 }
14276
14277 loc_result = new_loc_descr (DW_OP_implicit_value,
14278 size, 0);
14279 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14280 loc_result->dw_loc_oprnd2.v.val_int = i;
14281 return loc_result;
14282 }
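
/* Worked example of the size comparison above (illustrative only): for
   i == 5 and size == 4, size_of_int_loc_descriptor (5) is 1 (DW_OP_lit5),
   so the DW_OP_stack_value form costs 1 + 1 = 2 bytes versus
   1 + 1 + 4 = 6 bytes for DW_OP_implicit_value, and the stack-value form
   is chosen.  When SIZE exceeds DWARF2_ADDR_SIZE the stack-value form is
   not usable at all and DW_OP_implicit_value is emitted.  */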
14283
14284 /* Return a location descriptor that designates a base+offset location. */
14285
14286 static dw_loc_descr_ref
14287 based_loc_descr (rtx reg, poly_int64 offset,
14288 enum var_init_status initialized)
14289 {
14290 unsigned int regno;
14291 dw_loc_descr_ref result;
14292 dw_fde_ref fde = cfun->fde;
14293
14294 /* We only use "frame base" when we're sure we're talking about the
14295 post-prologue local stack frame. We do this by *not* running
14296 register elimination until this point, and recognizing the special
14297 argument pointer and soft frame pointer rtx's. */
14298 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14299 {
14300 rtx elim = (ira_use_lra_p
14301 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14302 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14303
14304 if (elim != reg)
14305 {
14306 elim = strip_offset_and_add (elim, &offset);
14307 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14308 && (elim == hard_frame_pointer_rtx
14309 || elim == stack_pointer_rtx))
14310 || elim == (frame_pointer_needed
14311 ? hard_frame_pointer_rtx
14312 : stack_pointer_rtx));
14313
14314 /* If drap register is used to align stack, use frame
14315 pointer + offset to access stack variables. If stack
14316 is aligned without drap, use stack pointer + offset to
14317 access stack variables. */
14318 if (crtl->stack_realign_tried
14319 && reg == frame_pointer_rtx)
14320 {
14321 int base_reg
14322 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14323 ? HARD_FRAME_POINTER_REGNUM
14324 : REGNO (elim));
14325 return new_reg_loc_descr (base_reg, offset);
14326 }
14327
14328 gcc_assert (frame_pointer_fb_offset_valid);
14329 offset += frame_pointer_fb_offset;
14330 HOST_WIDE_INT const_offset;
14331 if (offset.is_constant (&const_offset))
14332 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14333 else
14334 {
14335 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14336 loc_descr_plus_const (&ret, offset);
14337 return ret;
14338 }
14339 }
14340 }
14341
14342 regno = REGNO (reg);
14343 #ifdef LEAF_REG_REMAP
14344 if (crtl->uses_only_leaf_regs)
14345 {
14346 int leaf_reg = LEAF_REG_REMAP (regno);
14347 if (leaf_reg != -1)
14348 regno = (unsigned) leaf_reg;
14349 }
14350 #endif
14351 regno = DWARF_FRAME_REGNUM (regno);
14352
14353 HOST_WIDE_INT const_offset;
14354 if (!optimize && fde
14355 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14356 && offset.is_constant (&const_offset))
14357 {
14358 /* Use cfa+offset to represent the location of arguments passed
14359 on the stack when the drap register is used to align the stack.
14360 Only do this when not optimizing: for optimized code, var-tracking
14361 is supposed to track where the arguments live, and the register
14362 used as vdrap or drap in one spot might be used for something
14363 else in other parts of the routine. */
14364 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14365 }
14366
14367 result = new_reg_loc_descr (regno, offset);
14368
14369 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14370 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14371
14372 return result;
14373 }
14374
14375 /* Return true if this RTL expression describes a base+offset calculation. */
14376
14377 static inline int
14378 is_based_loc (const_rtx rtl)
14379 {
14380 return (GET_CODE (rtl) == PLUS
14381 && ((REG_P (XEXP (rtl, 0))
14382 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14383 && CONST_INT_P (XEXP (rtl, 1)))));
14384 }
14385
14386 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14387 failed. */
14388
14389 static dw_loc_descr_ref
14390 tls_mem_loc_descriptor (rtx mem)
14391 {
14392 tree base;
14393 dw_loc_descr_ref loc_result;
14394
14395 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14396 return NULL;
14397
14398 base = get_base_address (MEM_EXPR (mem));
14399 if (base == NULL
14400 || !VAR_P (base)
14401 || !DECL_THREAD_LOCAL_P (base))
14402 return NULL;
14403
14404 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14405 if (loc_result == NULL)
14406 return NULL;
14407
14408 if (maybe_ne (MEM_OFFSET (mem), 0))
14409 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14410
14411 return loc_result;
14412 }
14413
14414 /* Output debug info about the reason why we failed to expand an expression
14415 as a DWARF expression. */
14416
14417 static void
14418 expansion_failed (tree expr, rtx rtl, char const *reason)
14419 {
14420 if (dump_file && (dump_flags & TDF_DETAILS))
14421 {
14422 fprintf (dump_file, "Failed to expand as dwarf: ");
14423 if (expr)
14424 print_generic_expr (dump_file, expr, dump_flags);
14425 if (rtl)
14426 {
14427 fprintf (dump_file, "\n");
14428 print_rtl (dump_file, rtl);
14429 }
14430 fprintf (dump_file, "\nReason: %s\n", reason);
14431 }
14432 }
14433
14434 /* Helper function for const_ok_for_output. */
14435
14436 static bool
14437 const_ok_for_output_1 (rtx rtl)
14438 {
14439 if (targetm.const_not_ok_for_debug_p (rtl))
14440 {
14441 if (GET_CODE (rtl) != UNSPEC)
14442 {
14443 expansion_failed (NULL_TREE, rtl,
14444 "Expression rejected for debug by the backend.\n");
14445 return false;
14446 }
14447
14448 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14449 the target hook doesn't explicitly allow it in debug info, assume
14450 we can't express it in the debug info. */
14451 /* Don't complain about TLS UNSPECs, those are just too hard to
14452 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14453 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14454 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14455 if (flag_checking
14456 && (XVECLEN (rtl, 0) == 0
14457 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14458 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14459 inform (current_function_decl
14460 ? DECL_SOURCE_LOCATION (current_function_decl)
14461 : UNKNOWN_LOCATION,
14462 #if NUM_UNSPEC_VALUES > 0
14463 "non-delegitimized UNSPEC %s (%d) found in variable location",
14464 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14465 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14466 XINT (rtl, 1));
14467 #else
14468 "non-delegitimized UNSPEC %d found in variable location",
14469 XINT (rtl, 1));
14470 #endif
14471 expansion_failed (NULL_TREE, rtl,
14472 "UNSPEC hasn't been delegitimized.\n");
14473 return false;
14474 }
14475
14476 if (CONST_POLY_INT_P (rtl))
14477 return false;
14478
14479 if (targetm.const_not_ok_for_debug_p (rtl))
14480 {
14481 expansion_failed (NULL_TREE, rtl,
14482 "Expression rejected for debug by the backend.\n");
14483 return false;
14484 }
14485
14486 /* FIXME: Refer to PR60655. It is possible for simplification
14487 of rtl expressions in var tracking to produce such expressions.
14488 We should really identify / validate expressions
14489 enclosed in CONST that can be handled by assemblers on various
14490 targets and only handle legitimate cases here. */
14491 switch (GET_CODE (rtl))
14492 {
14493 case SYMBOL_REF:
14494 break;
14495 case NOT:
14496 case NEG:
14497 return false;
14498 default:
14499 return true;
14500 }
14501
14502 if (CONSTANT_POOL_ADDRESS_P (rtl))
14503 {
14504 bool marked;
14505 get_pool_constant_mark (rtl, &marked);
14506 /* If all references to this pool constant were optimized away,
14507 it was not output and thus we can't represent it. */
14508 if (!marked)
14509 {
14510 expansion_failed (NULL_TREE, rtl,
14511 "Constant was removed from constant pool.\n");
14512 return false;
14513 }
14514 }
14515
14516 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14517 return false;
14518
14519 /* Avoid references to external symbols in debug info: on several targets
14520 the linker might even refuse to link when linking a shared library,
14521 and in many other cases the relocations for .debug_info/.debug_loc are
14522 dropped, so the address becomes zero anyway. Hidden symbols, which are
14523 guaranteed to be defined within the same shared library or executable, are fine. */
14524 if (SYMBOL_REF_EXTERNAL_P (rtl))
14525 {
14526 tree decl = SYMBOL_REF_DECL (rtl);
14527
14528 if (decl == NULL || !targetm.binds_local_p (decl))
14529 {
14530 expansion_failed (NULL_TREE, rtl,
14531 "Symbol not defined in current TU.\n");
14532 return false;
14533 }
14534 }
14535
14536 return true;
14537 }
14538
14539 /* Return true if constant RTL can be emitted in DW_OP_addr or
14540 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14541 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14542
14543 static bool
14544 const_ok_for_output (rtx rtl)
14545 {
14546 if (GET_CODE (rtl) == SYMBOL_REF)
14547 return const_ok_for_output_1 (rtl);
14548
14549 if (GET_CODE (rtl) == CONST)
14550 {
14551 subrtx_var_iterator::array_type array;
14552 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14553 if (!const_ok_for_output_1 (*iter))
14554 return false;
14555 return true;
14556 }
14557
14558 return true;
14559 }
14560
14561 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14562 if possible, NULL otherwise. */
14563
14564 static dw_die_ref
14565 base_type_for_mode (machine_mode mode, bool unsignedp)
14566 {
14567 dw_die_ref type_die;
14568 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14569
14570 if (type == NULL)
14571 return NULL;
14572 switch (TREE_CODE (type))
14573 {
14574 case INTEGER_TYPE:
14575 case REAL_TYPE:
14576 break;
14577 default:
14578 return NULL;
14579 }
14580 type_die = lookup_type_die (type);
14581 if (!type_die)
14582 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14583 comp_unit_die ());
14584 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14585 return NULL;
14586 return type_die;
14587 }
14588
14589 /* For the descriptor OP, assumed to be in unsigned MODE, convert it to an
14590 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14591 DWARF2_ADDR_SIZE, to an untyped value. Return NULL if the conversion is
14592 not possible. */
14593
14594 static dw_loc_descr_ref
14595 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14596 {
14597 machine_mode outer_mode = mode;
14598 dw_die_ref type_die;
14599 dw_loc_descr_ref cvt;
14600
14601 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14602 {
14603 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14604 return op;
14605 }
14606 type_die = base_type_for_mode (outer_mode, 1);
14607 if (type_die == NULL)
14608 return NULL;
14609 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14610 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14611 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14612 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14613 add_loc_descr (&op, cvt);
14614 return op;
14615 }
14616
14617 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14618
14619 static dw_loc_descr_ref
14620 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14621 dw_loc_descr_ref op1)
14622 {
14623 dw_loc_descr_ref ret = op0;
14624 add_loc_descr (&ret, op1);
14625 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14626 if (STORE_FLAG_VALUE != 1)
14627 {
14628 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14629 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14630 }
14631 return ret;
14632 }
14633
14634 /* Subroutine of scompare_loc_descriptor for the case in which we're
14635 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14636 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14637
14638 static dw_loc_descr_ref
14639 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14640 scalar_int_mode op_mode,
14641 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14642 {
14643 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14644 dw_loc_descr_ref cvt;
14645
14646 if (type_die == NULL)
14647 return NULL;
14648 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14649 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14650 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14651 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14652 add_loc_descr (&op0, cvt);
14653 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14654 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14655 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14656 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14657 add_loc_descr (&op1, cvt);
14658 return compare_loc_descriptor (op, op0, op1);
14659 }
14660
14661 /* Subroutine of scompare_loc_descriptor for the case in which we're
14662 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14663 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14664
14665 static dw_loc_descr_ref
14666 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14667 scalar_int_mode op_mode,
14668 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14669 {
14670 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14671 /* For eq/ne, if the operands are known to be zero-extended,
14672 there is no need to do the fancy shifting up. */
14673 if (op == DW_OP_eq || op == DW_OP_ne)
14674 {
14675 dw_loc_descr_ref last0, last1;
14676 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14677 ;
14678 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14679 ;
14680 /* deref_size zero extends, and for constants we can check
14681 whether they are zero extended or not. */
14682 if (((last0->dw_loc_opc == DW_OP_deref_size
14683 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14684 || (CONST_INT_P (XEXP (rtl, 0))
14685 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14686 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14687 && ((last1->dw_loc_opc == DW_OP_deref_size
14688 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14689 || (CONST_INT_P (XEXP (rtl, 1))
14690 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14691 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14692 return compare_loc_descriptor (op, op0, op1);
14693
14694 /* EQ/NE comparison against constant in narrower type than
14695 DWARF2_ADDR_SIZE can be performed either as
14696 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14697 DW_OP_{eq,ne}
14698 or
14699 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14700 DW_OP_{eq,ne}. Pick whatever is shorter. */
14701 if (CONST_INT_P (XEXP (rtl, 1))
14702 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14703 && (size_of_int_loc_descriptor (shift) + 1
14704 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14705 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14706 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14707 & GET_MODE_MASK (op_mode))))
14708 {
14709 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14710 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14711 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14712 & GET_MODE_MASK (op_mode));
14713 return compare_loc_descriptor (op, op0, op1);
14714 }
14715 }
14716 add_loc_descr (&op0, int_loc_descriptor (shift));
14717 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14718 if (CONST_INT_P (XEXP (rtl, 1)))
14719 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14720 else
14721 {
14722 add_loc_descr (&op1, int_loc_descriptor (shift));
14723 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14724 }
14725 return compare_loc_descriptor (op, op0, op1);
14726 }
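
/* Illustrative sketch (not part of GCC) of the shift-up scheme above, for a
   16-bit comparison on a target with 8-byte addresses.  The name
   signed_lt_16bit and the <stdint.h> types are assumptions of the example;
   the unsigned-to-signed casts rely on GCC's documented modulo-2^N
   conversion behaviour.  */
#include <stdint.h>

static int
signed_lt_16bit (uint64_t a, uint64_t b)
{
  const int shift = (8 - 2) * 8;        /* (DWARF2_ADDR_SIZE - size) * 8.  */
  int64_t sa = (int64_t) (a << shift);  /* DW_OP_shl moves the payload's   */
  int64_t sb = (int64_t) (b << shift);  /* sign bit into the top bit.      */
  return sa < sb;                       /* DW_OP_lt is a signed compare.   */
}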
14727
14728 /* Return a location descriptor for the signed comparison OP RTL. */
14729
14730 static dw_loc_descr_ref
14731 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14732 machine_mode mem_mode)
14733 {
14734 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14735 dw_loc_descr_ref op0, op1;
14736
14737 if (op_mode == VOIDmode)
14738 op_mode = GET_MODE (XEXP (rtl, 1));
14739 if (op_mode == VOIDmode)
14740 return NULL;
14741
14742 scalar_int_mode int_op_mode;
14743 if (dwarf_strict
14744 && dwarf_version < 5
14745 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14746 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14747 return NULL;
14748
14749 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14750 VAR_INIT_STATUS_INITIALIZED);
14751 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14752 VAR_INIT_STATUS_INITIALIZED);
14753
14754 if (op0 == NULL || op1 == NULL)
14755 return NULL;
14756
14757 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14758 {
14759 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14760 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14761
14762 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14763 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14764 }
14765 return compare_loc_descriptor (op, op0, op1);
14766 }
14767
14768 /* Return location descriptor for unsigned comparison OP RTL. */
14769
14770 static dw_loc_descr_ref
14771 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14772 machine_mode mem_mode)
14773 {
14774 dw_loc_descr_ref op0, op1;
14775
14776 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14777 if (test_op_mode == VOIDmode)
14778 test_op_mode = GET_MODE (XEXP (rtl, 1));
14779
14780 scalar_int_mode op_mode;
14781 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14782 return NULL;
14783
14784 if (dwarf_strict
14785 && dwarf_version < 5
14786 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14787 return NULL;
14788
14789 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14790 VAR_INIT_STATUS_INITIALIZED);
14791 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14792 VAR_INIT_STATUS_INITIALIZED);
14793
14794 if (op0 == NULL || op1 == NULL)
14795 return NULL;
14796
14797 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14798 {
14799 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14800 dw_loc_descr_ref last0, last1;
14801 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14802 ;
14803 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14804 ;
14805 if (CONST_INT_P (XEXP (rtl, 0)))
14806 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14807 /* deref_size zero extends, so no need to mask it again. */
14808 else if (last0->dw_loc_opc != DW_OP_deref_size
14809 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14810 {
14811 add_loc_descr (&op0, int_loc_descriptor (mask));
14812 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14813 }
14814 if (CONST_INT_P (XEXP (rtl, 1)))
14815 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14816 /* deref_size zero extends, so no need to mask it again. */
14817 else if (last1->dw_loc_opc != DW_OP_deref_size
14818 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14819 {
14820 add_loc_descr (&op1, int_loc_descriptor (mask));
14821 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14822 }
14823 }
14824 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14825 {
14826 HOST_WIDE_INT bias = 1;
14827 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14828 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14829 if (CONST_INT_P (XEXP (rtl, 1)))
14830 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14831 + INTVAL (XEXP (rtl, 1)));
14832 else
14833 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14834 bias, 0));
14835 }
14836 return compare_loc_descriptor (op, op0, op1);
14837 }
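
/* Illustrative sketch (not part of GCC) of the DW_OP_plus_uconst bias used
   above when the operands are exactly address-sized (assumed 64-bit here);
   unsigned_lt_via_bias is a name made up for the example, and the casts to
   int64_t rely on GCC's documented modulo-2^N conversion behaviour.  */
#include <stdint.h>

static int
unsigned_lt_via_bias (uint64_t a, uint64_t b)
{
  /* Adding 2**63 modulo 2**64 to both operands maps unsigned order onto
     signed order, so the signed DW_OP_lt then gives the unsigned answer.  */
  const uint64_t bias = UINT64_C (1) << 63;
  return (int64_t) (a + bias) < (int64_t) (b + bias);
}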
14838
14839 /* Return location descriptor for {U,S}{MIN,MAX}. */
14840
14841 static dw_loc_descr_ref
14842 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14843 machine_mode mem_mode)
14844 {
14845 enum dwarf_location_atom op;
14846 dw_loc_descr_ref op0, op1, ret;
14847 dw_loc_descr_ref bra_node, drop_node;
14848
14849 scalar_int_mode int_mode;
14850 if (dwarf_strict
14851 && dwarf_version < 5
14852 && (!is_a <scalar_int_mode> (mode, &int_mode)
14853 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14854 return NULL;
14855
14856 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14857 VAR_INIT_STATUS_INITIALIZED);
14858 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14859 VAR_INIT_STATUS_INITIALIZED);
14860
14861 if (op0 == NULL || op1 == NULL)
14862 return NULL;
14863
14864 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14865 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14866 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14867 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14868 {
14869 /* Checked by the caller. */
14870 int_mode = as_a <scalar_int_mode> (mode);
14871 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14872 {
14873 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14874 add_loc_descr (&op0, int_loc_descriptor (mask));
14875 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14876 add_loc_descr (&op1, int_loc_descriptor (mask));
14877 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14878 }
14879 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14880 {
14881 HOST_WIDE_INT bias = 1;
14882 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14883 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14884 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14885 }
14886 }
14887 else if (is_a <scalar_int_mode> (mode, &int_mode)
14888 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14889 {
14890 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14891 add_loc_descr (&op0, int_loc_descriptor (shift));
14892 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14893 add_loc_descr (&op1, int_loc_descriptor (shift));
14894 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14895 }
14896 else if (is_a <scalar_int_mode> (mode, &int_mode)
14897 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14898 {
14899 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14900 dw_loc_descr_ref cvt;
14901 if (type_die == NULL)
14902 return NULL;
14903 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14904 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14905 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14906 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14907 add_loc_descr (&op0, cvt);
14908 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14909 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14910 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14911 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14912 add_loc_descr (&op1, cvt);
14913 }
14914
14915 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14916 op = DW_OP_lt;
14917 else
14918 op = DW_OP_gt;
14919 ret = op0;
14920 add_loc_descr (&ret, op1);
14921 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14922 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14923 add_loc_descr (&ret, bra_node);
14924 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14925 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14926 add_loc_descr (&ret, drop_node);
14927 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14928 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14929 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14930 && is_a <scalar_int_mode> (mode, &int_mode)
14931 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14932 ret = convert_descriptor_to_mode (int_mode, ret);
14933 return ret;
14934 }
14935
14936 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14937 after converting both arguments to TYPE_DIE, then convert the result back
14938 to unsigned MODE (untyped if MODE is no wider than DWARF2_ADDR_SIZE). */
14939
14940 static dw_loc_descr_ref
14941 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14942 scalar_int_mode mode, machine_mode mem_mode)
14943 {
14944 dw_loc_descr_ref cvt, op0, op1;
14945
14946 if (type_die == NULL)
14947 return NULL;
14948 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14949 VAR_INIT_STATUS_INITIALIZED);
14950 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14951 VAR_INIT_STATUS_INITIALIZED);
14952 if (op0 == NULL || op1 == NULL)
14953 return NULL;
14954 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14955 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14956 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14957 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14958 add_loc_descr (&op0, cvt);
14959 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14960 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14961 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14962 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14963 add_loc_descr (&op1, cvt);
14964 add_loc_descr (&op0, op1);
14965 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14966 return convert_descriptor_to_mode (mode, op0);
14967 }
14968
14969 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14970 const0 is DW_OP_lit0 or corresponding typed constant,
14971 const1 is DW_OP_lit1 or corresponding typed constant
14972 and constMSB is constant with just the MSB bit set
14973 for the mode):
14974 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14975 L1: const0 DW_OP_swap
14976 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14977 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14978 L3: DW_OP_drop
14979 L4: DW_OP_nop
14980
14981 CTZ is similar:
14982 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14983 L1: const0 DW_OP_swap
14984 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14985 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14986 L3: DW_OP_drop
14987 L4: DW_OP_nop
14988
14989 FFS is similar:
14990 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14991 L1: const1 DW_OP_swap
14992 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14993 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14994 L3: DW_OP_drop
14995 L4: DW_OP_nop */
14996
14997 static dw_loc_descr_ref
14998 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14999 machine_mode mem_mode)
15000 {
15001 dw_loc_descr_ref op0, ret, tmp;
15002 HOST_WIDE_INT valv;
15003 dw_loc_descr_ref l1jump, l1label;
15004 dw_loc_descr_ref l2jump, l2label;
15005 dw_loc_descr_ref l3jump, l3label;
15006 dw_loc_descr_ref l4jump, l4label;
15007 rtx msb;
15008
15009 if (GET_MODE (XEXP (rtl, 0)) != mode)
15010 return NULL;
15011
15012 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15013 VAR_INIT_STATUS_INITIALIZED);
15014 if (op0 == NULL)
15015 return NULL;
15016 ret = op0;
15017 if (GET_CODE (rtl) == CLZ)
15018 {
15019 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15020 valv = GET_MODE_BITSIZE (mode);
15021 }
15022 else if (GET_CODE (rtl) == FFS)
15023 valv = 0;
15024 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15025 valv = GET_MODE_BITSIZE (mode);
15026 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15027 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15028 add_loc_descr (&ret, l1jump);
15029 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15030 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15031 VAR_INIT_STATUS_INITIALIZED);
15032 if (tmp == NULL)
15033 return NULL;
15034 add_loc_descr (&ret, tmp);
15035 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15036 add_loc_descr (&ret, l4jump);
15037 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15038 ? const1_rtx : const0_rtx,
15039 mode, mem_mode,
15040 VAR_INIT_STATUS_INITIALIZED);
15041 if (l1label == NULL)
15042 return NULL;
15043 add_loc_descr (&ret, l1label);
15044 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15045 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15046 add_loc_descr (&ret, l2label);
15047 if (GET_CODE (rtl) != CLZ)
15048 msb = const1_rtx;
15049 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15050 msb = GEN_INT (HOST_WIDE_INT_1U
15051 << (GET_MODE_BITSIZE (mode) - 1));
15052 else
15053 msb = immed_wide_int_const
15054 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15055 GET_MODE_PRECISION (mode)), mode);
15056 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15057 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15058 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15059 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15060 else
15061 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15062 VAR_INIT_STATUS_INITIALIZED);
15063 if (tmp == NULL)
15064 return NULL;
15065 add_loc_descr (&ret, tmp);
15066 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15067 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15068 add_loc_descr (&ret, l3jump);
15069 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15070 VAR_INIT_STATUS_INITIALIZED);
15071 if (tmp == NULL)
15072 return NULL;
15073 add_loc_descr (&ret, tmp);
15074 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15075 ? DW_OP_shl : DW_OP_shr, 0, 0));
15076 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15077 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15078 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15079 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15080 add_loc_descr (&ret, l2jump);
15081 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15082 add_loc_descr (&ret, l3label);
15083 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15084 add_loc_descr (&ret, l4label);
15085 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15086 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15087 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15088 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15089 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15090 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15091 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15092 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15093 return ret;
15094 }
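
/* Illustrative sketch (not part of GCC) of the loop the CLZ expression above
   encodes, for a 64-bit mode; clz_by_shifting and value_at_zero are names
   invented for the example.  */
#include <stdint.h>

static int
clz_by_shifting (uint64_t x, int value_at_zero)
{
  if (x == 0)                               /* DW_OP_dup DW_OP_bra <L1>.  */
    return value_at_zero;                   /* constV, i.e. the
                                               CLZ_DEFINED_VALUE_AT_ZERO.  */
  int count = 0;                            /* L1: const0 DW_OP_swap.  */
  while ((x & (UINT64_C (1) << 63)) == 0)   /* L2: constMSB DW_OP_and.  */
    {
      x <<= 1;                              /* const1 DW_OP_shl.  */
      count++;                              /* DW_OP_plus_uconst <1>.  */
    }
  return count;                             /* L3: DW_OP_drop, L4: nop.  */
}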
15095
15096 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15097 const1 is DW_OP_lit1 or corresponding typed constant):
15098 const0 DW_OP_swap
15099 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15100 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15101 L2: DW_OP_drop
15102
15103 PARITY is similar:
15104 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15105 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15106 L2: DW_OP_drop */
15107
15108 static dw_loc_descr_ref
15109 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15110 machine_mode mem_mode)
15111 {
15112 dw_loc_descr_ref op0, ret, tmp;
15113 dw_loc_descr_ref l1jump, l1label;
15114 dw_loc_descr_ref l2jump, l2label;
15115
15116 if (GET_MODE (XEXP (rtl, 0)) != mode)
15117 return NULL;
15118
15119 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15120 VAR_INIT_STATUS_INITIALIZED);
15121 if (op0 == NULL)
15122 return NULL;
15123 ret = op0;
15124 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15125 VAR_INIT_STATUS_INITIALIZED);
15126 if (tmp == NULL)
15127 return NULL;
15128 add_loc_descr (&ret, tmp);
15129 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15130 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15131 add_loc_descr (&ret, l1label);
15132 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15133 add_loc_descr (&ret, l2jump);
15134 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15135 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15136 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15137 VAR_INIT_STATUS_INITIALIZED);
15138 if (tmp == NULL)
15139 return NULL;
15140 add_loc_descr (&ret, tmp);
15141 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15142 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15143 ? DW_OP_plus : DW_OP_xor, 0, 0));
15144 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15145 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15146 VAR_INIT_STATUS_INITIALIZED);
15147 add_loc_descr (&ret, tmp);
15148 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15149 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15150 add_loc_descr (&ret, l1jump);
15151 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15152 add_loc_descr (&ret, l2label);
15153 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15154 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15155 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15156 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15157 return ret;
15158 }
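
/* Illustrative sketch (not part of GCC) of the POPCOUNT/PARITY loop encoded
   above, for a 64-bit mode; popcount_or_parity and parity_only are names
   invented for the example.  */
#include <stdint.h>

static int
popcount_or_parity (uint64_t x, int parity_only)
{
  int acc = 0;                     /* const0 DW_OP_swap.  */
  while (x != 0)                   /* L1: DW_OP_dup DW_OP_bra <L2>.  */
    {
      if (parity_only)
        acc ^= (int) (x & 1);      /* const1 DW_OP_and DW_OP_xor.  */
      else
        acc += (int) (x & 1);      /* const1 DW_OP_and DW_OP_plus.  */
      x >>= 1;                     /* DW_OP_swap const1 DW_OP_shr.  */
    }
  return acc;                      /* L2: DW_OP_drop.  */
}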
15159
15160 /* BSWAP (constS is initial shift count, either 56 or 24):
15161 constS const0
15162 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15163 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15164 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15165 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15166 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15167
15168 static dw_loc_descr_ref
15169 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15170 machine_mode mem_mode)
15171 {
15172 dw_loc_descr_ref op0, ret, tmp;
15173 dw_loc_descr_ref l1jump, l1label;
15174 dw_loc_descr_ref l2jump, l2label;
15175
15176 if (BITS_PER_UNIT != 8
15177 || (GET_MODE_BITSIZE (mode) != 32
15178 && GET_MODE_BITSIZE (mode) != 64))
15179 return NULL;
15180
15181 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15182 VAR_INIT_STATUS_INITIALIZED);
15183 if (op0 == NULL)
15184 return NULL;
15185
15186 ret = op0;
15187 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15188 mode, mem_mode,
15189 VAR_INIT_STATUS_INITIALIZED);
15190 if (tmp == NULL)
15191 return NULL;
15192 add_loc_descr (&ret, tmp);
15193 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15194 VAR_INIT_STATUS_INITIALIZED);
15195 if (tmp == NULL)
15196 return NULL;
15197 add_loc_descr (&ret, tmp);
15198 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15199 add_loc_descr (&ret, l1label);
15200 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15201 mode, mem_mode,
15202 VAR_INIT_STATUS_INITIALIZED);
15203 add_loc_descr (&ret, tmp);
15204 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15205 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15206 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15207 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15208 VAR_INIT_STATUS_INITIALIZED);
15209 if (tmp == NULL)
15210 return NULL;
15211 add_loc_descr (&ret, tmp);
15212 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15213 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15214 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15216 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15217 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15218 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15219 VAR_INIT_STATUS_INITIALIZED);
15220 add_loc_descr (&ret, tmp);
15221 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15222 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15223 add_loc_descr (&ret, l2jump);
15224 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15225 VAR_INIT_STATUS_INITIALIZED);
15226 add_loc_descr (&ret, tmp);
15227 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15228 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15229 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15230 add_loc_descr (&ret, l1jump);
15231 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15232 add_loc_descr (&ret, l2label);
15233 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15234 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15235 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15236 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15237 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15238 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15239 return ret;
15240 }
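
/* Illustrative sketch (not part of GCC) of the byte-swap loop encoded above;
   BITS is 32 or 64 and only the low BITS bits of X are assumed meaningful.
   bswap_by_loop is a name invented for the example.  */
#include <stdint.h>

static uint64_t
bswap_by_loop (uint64_t x, int bits)
{
  const int s0 = bits - 8;                   /* constS: 24 or 56.  */
  uint64_t acc = 0;                          /* const0.  */
  for (int s = s0; ; s -= 8)                 /* const8 DW_OP_minus per trip.  */
    {
      acc |= ((x >> (s0 - s)) & 0xff) << s;  /* extract one byte, place it.  */
      if (s == 0)                            /* const0 DW_OP_eq DW_OP_bra.  */
        break;
    }
  return acc;                                /* L2: drop the scratch values.  */
}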
15241
15242 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15243 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15244 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15245 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15246
15247 ROTATERT is similar:
15248 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15249 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15250 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15251
15252 static dw_loc_descr_ref
15253 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15254 machine_mode mem_mode)
15255 {
15256 rtx rtlop1 = XEXP (rtl, 1);
15257 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15258 int i;
15259
15260 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15261 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15262 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15263 VAR_INIT_STATUS_INITIALIZED);
15264 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15265 VAR_INIT_STATUS_INITIALIZED);
15266 if (op0 == NULL || op1 == NULL)
15267 return NULL;
15268 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15269 for (i = 0; i < 2; i++)
15270 {
15271 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15272 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15273 mode, mem_mode,
15274 VAR_INIT_STATUS_INITIALIZED);
15275 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15276 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15277 ? DW_OP_const4u
15278 : HOST_BITS_PER_WIDE_INT == 64
15279 ? DW_OP_const8u : DW_OP_constu,
15280 GET_MODE_MASK (mode), 0);
15281 else
15282 mask[i] = NULL;
15283 if (mask[i] == NULL)
15284 return NULL;
15285 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15286 }
15287 ret = op0;
15288 add_loc_descr (&ret, op1);
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15291 if (GET_CODE (rtl) == ROTATERT)
15292 {
15293 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15294 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15295 GET_MODE_BITSIZE (mode), 0));
15296 }
15297 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15298 if (mask[0] != NULL)
15299 add_loc_descr (&ret, mask[0]);
15300 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15301 if (mask[1] != NULL)
15302 {
15303 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15304 add_loc_descr (&ret, mask[1]);
15305 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15306 }
15307 if (GET_CODE (rtl) == ROTATE)
15308 {
15309 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15310 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15311 GET_MODE_BITSIZE (mode), 0));
15312 }
15313 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15314 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15315 return ret;
15316 }
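
/* Illustrative sketch (not part of GCC) of the ROTATE expression documented
   above, for a mode of BITS bits (BITS <= 64); rotate_left is a name
   invented for the example and the count is assumed to satisfy
   0 < n < bits.  */
#include <stdint.h>

static uint64_t
rotate_left (uint64_t x, unsigned int n, int bits)
{
  const uint64_t mask = (bits == 64
                         ? ~UINT64_C (0)
                         : (UINT64_C (1) << bits) - 1);
  /* DW_OP_shl [mask] ... DW_OP_neg DW_OP_plus_uconst <BITSIZE> DW_OP_shr
     DW_OP_or: join (x << n) with (x >> (bits - n)), both masked.  */
  return ((x << n) & mask) | ((x & mask) >> (bits - n));
}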
15317
15318 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15319 for DEBUG_PARAMETER_REF RTL. */
15320
15321 static dw_loc_descr_ref
15322 parameter_ref_descriptor (rtx rtl)
15323 {
15324 dw_loc_descr_ref ret;
15325 dw_die_ref ref;
15326
15327 if (dwarf_strict)
15328 return NULL;
15329 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15330 /* With LTO during LTRANS we get the late DIE that refers to the early
15331 DIE, thus we add another indirection here. This seems to confuse
15332 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15333 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15334 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15335 if (ref)
15336 {
15337 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15338 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15339 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15340 }
15341 else
15342 {
15343 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15344 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15345 }
15346 return ret;
15347 }
15348
15349 /* The following routine converts the RTL for a variable or parameter
15350 (resident in memory) into an equivalent Dwarf representation of a
15351 mechanism for getting the address of that same variable onto the top of a
15352 hypothetical "address evaluation" stack.
15353
15354 When creating memory location descriptors, we are effectively transforming
15355 the RTL for a memory-resident object into its Dwarf postfix expression
15356 equivalent. This routine recursively descends an RTL tree, turning
15357 it into Dwarf postfix code as it goes.
15358
15359 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15360
15361 MEM_MODE is the mode of the memory reference, needed to handle some
15362 autoincrement addressing modes.
15363
15364 Return 0 if we can't represent the location. */
15365
15366 dw_loc_descr_ref
15367 mem_loc_descriptor (rtx rtl, machine_mode mode,
15368 machine_mode mem_mode,
15369 enum var_init_status initialized)
15370 {
15371 dw_loc_descr_ref mem_loc_result = NULL;
15372 enum dwarf_location_atom op;
15373 dw_loc_descr_ref op0, op1;
15374 rtx inner = NULL_RTX;
15375 poly_int64 offset;
15376
15377 if (mode == VOIDmode)
15378 mode = GET_MODE (rtl);
15379
15380 /* Note that for a dynamically sized array, the location we will generate a
15381 description of here will be the lowest numbered location which is
15382 actually within the array. That's *not* necessarily the same as the
15383 zeroth element of the array. */
15384
15385 rtl = targetm.delegitimize_address (rtl);
15386
15387 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15388 return NULL;
15389
15390 scalar_int_mode int_mode, inner_mode, op1_mode;
15391 switch (GET_CODE (rtl))
15392 {
15393 case POST_INC:
15394 case POST_DEC:
15395 case POST_MODIFY:
15396 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15397
15398 case SUBREG:
15399 /* The case of a subreg may arise when we have a local (register)
15400 variable or a formal (register) parameter which doesn't quite fill
15401 up an entire register. For now, just assume that it is
15402 legitimate to make the Dwarf info refer to the whole register which
15403 contains the given subreg. */
15404 if (!subreg_lowpart_p (rtl))
15405 break;
15406 inner = SUBREG_REG (rtl);
15407 /* FALLTHRU */
15408 case TRUNCATE:
15409 if (inner == NULL_RTX)
15410 inner = XEXP (rtl, 0);
15411 if (is_a <scalar_int_mode> (mode, &int_mode)
15412 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15413 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15414 #ifdef POINTERS_EXTEND_UNSIGNED
15415 || (int_mode == Pmode && mem_mode != VOIDmode)
15416 #endif
15417 )
15418 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15419 {
15420 mem_loc_result = mem_loc_descriptor (inner,
15421 inner_mode,
15422 mem_mode, initialized);
15423 break;
15424 }
15425 if (dwarf_strict && dwarf_version < 5)
15426 break;
15427 if (is_a <scalar_int_mode> (mode, &int_mode)
15428 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15429 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15430 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15431 {
15432 dw_die_ref type_die;
15433 dw_loc_descr_ref cvt;
15434
15435 mem_loc_result = mem_loc_descriptor (inner,
15436 GET_MODE (inner),
15437 mem_mode, initialized);
15438 if (mem_loc_result == NULL)
15439 break;
15440 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15441 if (type_die == NULL)
15442 {
15443 mem_loc_result = NULL;
15444 break;
15445 }
15446 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15447 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15448 else
15449 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15450 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15451 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15452 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15453 add_loc_descr (&mem_loc_result, cvt);
15454 if (is_a <scalar_int_mode> (mode, &int_mode)
15455 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15456 {
15457 /* Convert it to untyped afterwards. */
15458 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15459 add_loc_descr (&mem_loc_result, cvt);
15460 }
15461 }
15462 break;
15463
15464 case REG:
15465 if (!is_a <scalar_int_mode> (mode, &int_mode)
15466 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15467 && rtl != arg_pointer_rtx
15468 && rtl != frame_pointer_rtx
15469 #ifdef POINTERS_EXTEND_UNSIGNED
15470 && (int_mode != Pmode || mem_mode == VOIDmode)
15471 #endif
15472 ))
15473 {
15474 dw_die_ref type_die;
15475 unsigned int dbx_regnum;
15476
15477 if (dwarf_strict && dwarf_version < 5)
15478 break;
15479 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15480 break;
15481 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15482 if (type_die == NULL)
15483 break;
15484
15485 dbx_regnum = dbx_reg_number (rtl);
15486 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15487 break;
15488 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15489 dbx_regnum, 0);
15490 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15491 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15492 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15493 break;
15494 }
15495 /* Whenever a register number forms a part of the description of the
15496 method for calculating the (dynamic) address of a memory resident
15497 object, DWARF rules require the register number be referred to as
15498 a "base register". This distinction is not based in any way upon
15499 what category of register the hardware believes the given register
15500 belongs to. This is strictly DWARF terminology we're dealing with
15501 here. Note that in cases where the location of a memory-resident
15502 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15503 OP_CONST (0)) the actual DWARF location descriptor that we generate
15504 may just be OP_BASEREG (basereg). This may look deceptively like
15505 the object in question was allocated to a register (rather than in
15506 memory) so DWARF consumers need to be aware of the subtle
15507 distinction between OP_REG and OP_BASEREG. */
15508 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15509 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15510 else if (stack_realign_drap
15511 && crtl->drap_reg
15512 && crtl->args.internal_arg_pointer == rtl
15513 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15514 {
15515 /* If RTL is internal_arg_pointer, which has been optimized
15516 out, use DRAP instead. */
15517 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15518 VAR_INIT_STATUS_INITIALIZED);
15519 }
15520 break;
15521
15522 case SIGN_EXTEND:
15523 case ZERO_EXTEND:
15524 if (!is_a <scalar_int_mode> (mode, &int_mode)
15525 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15526 break;
15527 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15528 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15529 if (op0 == 0)
15530 break;
15531 else if (GET_CODE (rtl) == ZERO_EXTEND
15532 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15533 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15534 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15535 to expand zero extend as two shifts instead of
15536 masking. */
15537 && GET_MODE_SIZE (inner_mode) <= 4)
15538 {
15539 mem_loc_result = op0;
15540 add_loc_descr (&mem_loc_result,
15541 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15542 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15543 }
15544 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15545 {
15546 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15547 shift *= BITS_PER_UNIT;
15548 if (GET_CODE (rtl) == SIGN_EXTEND)
15549 op = DW_OP_shra;
15550 else
15551 op = DW_OP_shr;
15552 mem_loc_result = op0;
15553 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15554 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15555 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15556 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15557 }
15558 else if (!dwarf_strict || dwarf_version >= 5)
15559 {
15560 dw_die_ref type_die1, type_die2;
15561 dw_loc_descr_ref cvt;
15562
15563 type_die1 = base_type_for_mode (inner_mode,
15564 GET_CODE (rtl) == ZERO_EXTEND);
15565 if (type_die1 == NULL)
15566 break;
15567 type_die2 = base_type_for_mode (int_mode, 1);
15568 if (type_die2 == NULL)
15569 break;
15570 mem_loc_result = op0;
15571 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15572 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15573 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15574 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15575 add_loc_descr (&mem_loc_result, cvt);
15576 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15577 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15578 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15579 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15580 add_loc_descr (&mem_loc_result, cvt);
15581 }
15582 break;
15583
15584 case MEM:
15585 {
15586 rtx new_rtl = avoid_constant_pool_reference (rtl);
15587 if (new_rtl != rtl)
15588 {
15589 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15590 initialized);
15591 if (mem_loc_result != NULL)
15592 return mem_loc_result;
15593 }
15594 }
15595 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15596 get_address_mode (rtl), mode,
15597 VAR_INIT_STATUS_INITIALIZED);
15598 if (mem_loc_result == NULL)
15599 mem_loc_result = tls_mem_loc_descriptor (rtl);
15600 if (mem_loc_result != NULL)
15601 {
15602 if (!is_a <scalar_int_mode> (mode, &int_mode)
15603 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15604 {
15605 dw_die_ref type_die;
15606 dw_loc_descr_ref deref;
15607 HOST_WIDE_INT size;
15608
15609 if (dwarf_strict && dwarf_version < 5)
15610 return NULL;
15611 if (!GET_MODE_SIZE (mode).is_constant (&size))
15612 return NULL;
15613 type_die
15614 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15615 if (type_die == NULL)
15616 return NULL;
15617 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15618 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15619 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15620 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15621 add_loc_descr (&mem_loc_result, deref);
15622 }
15623 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15624 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15625 else
15626 add_loc_descr (&mem_loc_result,
15627 new_loc_descr (DW_OP_deref_size,
15628 GET_MODE_SIZE (int_mode), 0));
15629 }
15630 break;
15631
15632 case LO_SUM:
15633 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15634
15635 case LABEL_REF:
15636 /* Some ports can transform a symbol ref into a label ref, because
15637 the symbol ref is too far away and has to be dumped into a constant
15638 pool. */
15639 case CONST:
15640 case SYMBOL_REF:
15641 if (!is_a <scalar_int_mode> (mode, &int_mode)
15642 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15643 #ifdef POINTERS_EXTEND_UNSIGNED
15644 && (int_mode != Pmode || mem_mode == VOIDmode)
15645 #endif
15646 ))
15647 break;
15648 if (GET_CODE (rtl) == SYMBOL_REF
15649 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15650 {
15651 dw_loc_descr_ref temp;
15652
15653 /* If this is not defined, we have no way to emit the data. */
15654 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15655 break;
15656
15657 temp = new_addr_loc_descr (rtl, dtprel_true);
15658
15659 /* We check for DWARF 5 here because gdb did not implement
15660 DW_OP_form_tls_address until after 7.12. */
15661 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15662 ? DW_OP_form_tls_address
15663 : DW_OP_GNU_push_tls_address),
15664 0, 0);
15665 add_loc_descr (&mem_loc_result, temp);
15666
15667 break;
15668 }
15669
15670 if (!const_ok_for_output (rtl))
15671 {
15672 if (GET_CODE (rtl) == CONST)
15673 switch (GET_CODE (XEXP (rtl, 0)))
15674 {
15675 case NOT:
15676 op = DW_OP_not;
15677 goto try_const_unop;
15678 case NEG:
15679 op = DW_OP_neg;
15680 goto try_const_unop;
15681 try_const_unop:
15682 rtx arg;
15683 arg = XEXP (XEXP (rtl, 0), 0);
15684 if (!CONSTANT_P (arg))
15685 arg = gen_rtx_CONST (int_mode, arg);
15686 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15687 initialized);
15688 if (op0)
15689 {
15690 mem_loc_result = op0;
15691 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15692 }
15693 break;
15694 default:
15695 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15696 mem_mode, initialized);
15697 break;
15698 }
15699 break;
15700 }
15701
15702 symref:
15703 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15704 vec_safe_push (used_rtx_array, rtl);
15705 break;
15706
15707 case CONCAT:
15708 case CONCATN:
15709 case VAR_LOCATION:
15710 case DEBUG_IMPLICIT_PTR:
15711 expansion_failed (NULL_TREE, rtl,
15712 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15713 return 0;
15714
15715 case ENTRY_VALUE:
15716 if (dwarf_strict && dwarf_version < 5)
15717 return NULL;
15718 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15719 {
15720 if (!is_a <scalar_int_mode> (mode, &int_mode)
15721 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15722 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15723 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15724 else
15725 {
15726 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15727 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15728 return NULL;
15729 op0 = one_reg_loc_descriptor (dbx_regnum,
15730 VAR_INIT_STATUS_INITIALIZED);
15731 }
15732 }
15733 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15734 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15735 {
15736 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15737 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15738 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15739 return NULL;
15740 }
15741 else
15742 gcc_unreachable ();
15743 if (op0 == NULL)
15744 return NULL;
15745 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15746 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15747 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15748 break;
15749
15750 case DEBUG_PARAMETER_REF:
15751 mem_loc_result = parameter_ref_descriptor (rtl);
15752 break;
15753
15754 case PRE_MODIFY:
15755 /* Extract the PLUS expression nested inside and fall into
15756 PLUS code below. */
15757 rtl = XEXP (rtl, 1);
15758 goto plus;
15759
15760 case PRE_INC:
15761 case PRE_DEC:
15762 /* Turn these into a PLUS expression and fall into the PLUS code
15763 below. */
15764 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15765 gen_int_mode (GET_CODE (rtl) == PRE_INC
15766 ? GET_MODE_UNIT_SIZE (mem_mode)
15767 : -GET_MODE_UNIT_SIZE (mem_mode),
15768 mode));
15769
15770 /* fall through */
15771
15772 case PLUS:
15773 plus:
15774 if (is_based_loc (rtl)
15775 && is_a <scalar_int_mode> (mode, &int_mode)
15776 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15777 || XEXP (rtl, 0) == arg_pointer_rtx
15778 || XEXP (rtl, 0) == frame_pointer_rtx))
15779 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15780 INTVAL (XEXP (rtl, 1)),
15781 VAR_INIT_STATUS_INITIALIZED);
15782 else
15783 {
15784 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15785 VAR_INIT_STATUS_INITIALIZED);
15786 if (mem_loc_result == 0)
15787 break;
15788
15789 if (CONST_INT_P (XEXP (rtl, 1))
15790 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15791 <= DWARF2_ADDR_SIZE))
15792 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15793 else
15794 {
15795 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15796 VAR_INIT_STATUS_INITIALIZED);
15797 if (op1 == 0)
15798 return NULL;
15799 add_loc_descr (&mem_loc_result, op1);
15800 add_loc_descr (&mem_loc_result,
15801 new_loc_descr (DW_OP_plus, 0, 0));
15802 }
15803 }
15804 break;
15805
15806 /* If a pseudo-reg is optimized away, it is possible for it to
15807 be replaced with a MEM containing a multiply or shift. */
15808 case MINUS:
15809 op = DW_OP_minus;
15810 goto do_binop;
15811
15812 case MULT:
15813 op = DW_OP_mul;
15814 goto do_binop;
15815
15816 case DIV:
15817 if ((!dwarf_strict || dwarf_version >= 5)
15818 && is_a <scalar_int_mode> (mode, &int_mode)
15819 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15820 {
15821 mem_loc_result = typed_binop (DW_OP_div, rtl,
15822 base_type_for_mode (mode, 0),
15823 int_mode, mem_mode);
15824 break;
15825 }
15826 op = DW_OP_div;
15827 goto do_binop;
15828
15829 case UMOD:
15830 op = DW_OP_mod;
15831 goto do_binop;
15832
15833 case ASHIFT:
15834 op = DW_OP_shl;
15835 goto do_shift;
15836
15837 case ASHIFTRT:
15838 op = DW_OP_shra;
15839 goto do_shift;
15840
15841 case LSHIFTRT:
15842 op = DW_OP_shr;
15843 goto do_shift;
15844
15845 do_shift:
15846 if (!is_a <scalar_int_mode> (mode, &int_mode))
15847 break;
15848 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15849 VAR_INIT_STATUS_INITIALIZED);
15850 {
15851 rtx rtlop1 = XEXP (rtl, 1);
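/* A shift count narrower than the shifted value is zero-extended first,
   so that both operands are evaluated in INT_MODE.  */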
15852 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15853 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15854 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15855 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15856 VAR_INIT_STATUS_INITIALIZED);
15857 }
15858
15859 if (op0 == 0 || op1 == 0)
15860 break;
15861
15862 mem_loc_result = op0;
15863 add_loc_descr (&mem_loc_result, op1);
15864 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15865 break;
15866
15867 case AND:
15868 op = DW_OP_and;
15869 goto do_binop;
15870
15871 case IOR:
15872 op = DW_OP_or;
15873 goto do_binop;
15874
15875 case XOR:
15876 op = DW_OP_xor;
15877 goto do_binop;
15878
15879 do_binop:
15880 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15881 VAR_INIT_STATUS_INITIALIZED);
15882 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15883 VAR_INIT_STATUS_INITIALIZED);
15884
15885 if (op0 == 0 || op1 == 0)
15886 break;
15887
15888 mem_loc_result = op0;
15889 add_loc_descr (&mem_loc_result, op1);
15890 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15891 break;
15892
15893 case MOD:
15894 if ((!dwarf_strict || dwarf_version >= 5)
15895 && is_a <scalar_int_mode> (mode, &int_mode)
15896 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15897 {
15898 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15899 base_type_for_mode (mode, 0),
15900 int_mode, mem_mode);
15901 break;
15902 }
15903
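/* Open-code the signed remainder as op0 - (op0 / op1) * op1 using the
   signed DW_OP_div; the unsigned UMOD case above maps directly to
   DW_OP_mod.  With op0 and op1 on the stack this is the sequence
   over over div mul minus emitted below.  */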
15904 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15905 VAR_INIT_STATUS_INITIALIZED);
15906 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15907 VAR_INIT_STATUS_INITIALIZED);
15908
15909 if (op0 == 0 || op1 == 0)
15910 break;
15911
15912 mem_loc_result = op0;
15913 add_loc_descr (&mem_loc_result, op1);
15914 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15915 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15916 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15917 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15918 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15919 break;
15920
15921 case UDIV:
15922 if ((!dwarf_strict || dwarf_version >= 5)
15923 && is_a <scalar_int_mode> (mode, &int_mode))
15924 {
15925 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15926 {
15927 op = DW_OP_div;
15928 goto do_binop;
15929 }
15930 mem_loc_result = typed_binop (DW_OP_div, rtl,
15931 base_type_for_mode (int_mode, 1),
15932 int_mode, mem_mode);
15933 }
15934 break;
15935
15936 case NOT:
15937 op = DW_OP_not;
15938 goto do_unop;
15939
15940 case ABS:
15941 op = DW_OP_abs;
15942 goto do_unop;
15943
15944 case NEG:
15945 op = DW_OP_neg;
15946 goto do_unop;
15947
15948 do_unop:
15949 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15950 VAR_INIT_STATUS_INITIALIZED);
15951
15952 if (op0 == 0)
15953 break;
15954
15955 mem_loc_result = op0;
15956 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15957 break;
15958
15959 case CONST_INT:
15960 if (!is_a <scalar_int_mode> (mode, &int_mode)
15961 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15962 #ifdef POINTERS_EXTEND_UNSIGNED
15963 || (int_mode == Pmode
15964 && mem_mode != VOIDmode
15965 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15966 #endif
15967 )
15968 {
15969 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15970 break;
15971 }
15972 if ((!dwarf_strict || dwarf_version >= 5)
15973 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15974 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15975 {
15976 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15977 scalar_int_mode amode;
15978 if (type_die == NULL)
15979 return NULL;
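/* For a nonnegative value that fits in an address-sized mode it can be
   shorter to push the untyped constant and DW_OP_convert it to the wider
   type than to emit DW_OP_const_type with a full-width constant block;
   the encoded sizes are compared below.  */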
15980 if (INTVAL (rtl) >= 0
15981 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15982 .exists (&amode))
15983 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15984 /* const DW_OP_convert <XXX> vs.
15985 DW_OP_const_type <XXX, 1, const>. */
15986 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15987 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15988 {
15989 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15990 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15991 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15992 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15993 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15994 add_loc_descr (&mem_loc_result, op0);
15995 return mem_loc_result;
15996 }
15997 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15998 INTVAL (rtl));
15999 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16000 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16001 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16002 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16003 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16004 else
16005 {
16006 mem_loc_result->dw_loc_oprnd2.val_class
16007 = dw_val_class_const_double;
16008 mem_loc_result->dw_loc_oprnd2.v.val_double
16009 = double_int::from_shwi (INTVAL (rtl));
16010 }
16011 }
16012 break;
16013
16014 case CONST_DOUBLE:
16015 if (!dwarf_strict || dwarf_version >= 5)
16016 {
16017 dw_die_ref type_die;
16018
16019 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16020 CONST_DOUBLE rtx could represent either a large integer
16021 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16022 the value is always a floating point constant.
16023
16024 When it is an integer, a CONST_DOUBLE is used whenever
16025 the constant requires 2 HWIs to be adequately represented.
16026 We output CONST_DOUBLEs as blocks. */
16027 if (mode == VOIDmode
16028 || (GET_MODE (rtl) == VOIDmode
16029 && maybe_ne (GET_MODE_BITSIZE (mode),
16030 HOST_BITS_PER_DOUBLE_INT)))
16031 break;
16032 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16033 if (type_die == NULL)
16034 return NULL;
16035 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16036 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16037 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16038 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16039 #if TARGET_SUPPORTS_WIDE_INT == 0
16040 if (!SCALAR_FLOAT_MODE_P (mode))
16041 {
16042 mem_loc_result->dw_loc_oprnd2.val_class
16043 = dw_val_class_const_double;
16044 mem_loc_result->dw_loc_oprnd2.v.val_double
16045 = rtx_to_double_int (rtl);
16046 }
16047 else
16048 #endif
16049 {
16050 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16051 unsigned int length = GET_MODE_SIZE (float_mode);
16052 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16053
16054 insert_float (rtl, array);
16055 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16056 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16057 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16058 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16059 }
16060 }
16061 break;
16062
16063 case CONST_WIDE_INT:
16064 if (!dwarf_strict || dwarf_version >= 5)
16065 {
16066 dw_die_ref type_die;
16067
16068 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16069 if (type_die == NULL)
16070 return NULL;
16071 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16072 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16073 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16074 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16075 mem_loc_result->dw_loc_oprnd2.val_class
16076 = dw_val_class_wide_int;
16077 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16078 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16079 }
16080 break;
16081
16082 case CONST_POLY_INT:
16083 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16084 break;
16085
16086 case EQ:
16087 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16088 break;
16089
16090 case GE:
16091 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16092 break;
16093
16094 case GT:
16095 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16096 break;
16097
16098 case LE:
16099 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16100 break;
16101
16102 case LT:
16103 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16104 break;
16105
16106 case NE:
16107 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16108 break;
16109
16110 case GEU:
16111 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16112 break;
16113
16114 case GTU:
16115 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16116 break;
16117
16118 case LEU:
16119 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16120 break;
16121
16122 case LTU:
16123 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16124 break;
16125
16126 case UMIN:
16127 case UMAX:
16128 if (!SCALAR_INT_MODE_P (mode))
16129 break;
16130 /* FALLTHRU */
16131 case SMIN:
16132 case SMAX:
16133 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16134 break;
16135
16136 case ZERO_EXTRACT:
16137 case SIGN_EXTRACT:
16138 if (CONST_INT_P (XEXP (rtl, 1))
16139 && CONST_INT_P (XEXP (rtl, 2))
16140 && is_a <scalar_int_mode> (mode, &int_mode)
16141 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16142 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16143 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16144 && ((unsigned) INTVAL (XEXP (rtl, 1))
16145 + (unsigned) INTVAL (XEXP (rtl, 2))
16146 <= GET_MODE_BITSIZE (int_mode)))
16147 {
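/* Extract the field by shifting it up to the top of a DWARF2_ADDR_SIZE
   wide word and then back down: arithmetically (DW_OP_shra) for
   SIGN_EXTRACT, logically (DW_OP_shr) for ZERO_EXTRACT.  */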
16148 int shift, size;
16149 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16150 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16151 if (op0 == 0)
16152 break;
16153 if (GET_CODE (rtl) == SIGN_EXTRACT)
16154 op = DW_OP_shra;
16155 else
16156 op = DW_OP_shr;
16157 mem_loc_result = op0;
16158 size = INTVAL (XEXP (rtl, 1));
16159 shift = INTVAL (XEXP (rtl, 2));
16160 if (BITS_BIG_ENDIAN)
16161 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16162 if (shift + size != (int) DWARF2_ADDR_SIZE)
16163 {
16164 add_loc_descr (&mem_loc_result,
16165 int_loc_descriptor (DWARF2_ADDR_SIZE
16166 - shift - size));
16167 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16168 }
16169 if (size != (int) DWARF2_ADDR_SIZE)
16170 {
16171 add_loc_descr (&mem_loc_result,
16172 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16173 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16174 }
16175 }
16176 break;
16177
16178 case IF_THEN_ELSE:
16179 {
16180 dw_loc_descr_ref op2, bra_node, drop_node;
16181 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16182 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16183 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16184 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16185 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16186 VAR_INIT_STATUS_INITIALIZED);
16187 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16188 VAR_INIT_STATUS_INITIALIZED);
16189 if (op0 == NULL || op1 == NULL || op2 == NULL)
16190 break;
16191
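/* Lay the expression out as: <then> <else> <cond> DW_OP_bra DW_OP_swap
   DW_OP_drop.  A nonzero condition branches past the DW_OP_swap, so the
   final DW_OP_drop discards the ELSE value; otherwise the swap makes it
   discard the THEN value instead.  */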
16192 mem_loc_result = op1;
16193 add_loc_descr (&mem_loc_result, op2);
16194 add_loc_descr (&mem_loc_result, op0);
16195 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16196 add_loc_descr (&mem_loc_result, bra_node);
16197 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16198 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16199 add_loc_descr (&mem_loc_result, drop_node);
16200 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16201 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16202 }
16203 break;
16204
16205 case FLOAT_EXTEND:
16206 case FLOAT_TRUNCATE:
16207 case FLOAT:
16208 case UNSIGNED_FLOAT:
16209 case FIX:
16210 case UNSIGNED_FIX:
16211 if (!dwarf_strict || dwarf_version >= 5)
16212 {
16213 dw_die_ref type_die;
16214 dw_loc_descr_ref cvt;
16215
16216 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16217 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16218 if (op0 == NULL)
16219 break;
16220 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16221 && (GET_CODE (rtl) == FLOAT
16222 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16223 {
16224 type_die = base_type_for_mode (int_mode,
16225 GET_CODE (rtl) == UNSIGNED_FLOAT);
16226 if (type_die == NULL)
16227 break;
16228 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16229 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16230 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16231 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16232 add_loc_descr (&op0, cvt);
16233 }
16234 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16235 if (type_die == NULL)
16236 break;
16237 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16238 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16239 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16240 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16241 add_loc_descr (&op0, cvt);
16242 if (is_a <scalar_int_mode> (mode, &int_mode)
16243 && (GET_CODE (rtl) == FIX
16244 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16245 {
16246 op0 = convert_descriptor_to_mode (int_mode, op0);
16247 if (op0 == NULL)
16248 break;
16249 }
16250 mem_loc_result = op0;
16251 }
16252 break;
16253
16254 case CLZ:
16255 case CTZ:
16256 case FFS:
16257 if (is_a <scalar_int_mode> (mode, &int_mode))
16258 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16259 break;
16260
16261 case POPCOUNT:
16262 case PARITY:
16263 if (is_a <scalar_int_mode> (mode, &int_mode))
16264 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16265 break;
16266
16267 case BSWAP:
16268 if (is_a <scalar_int_mode> (mode, &int_mode))
16269 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16270 break;
16271
16272 case ROTATE:
16273 case ROTATERT:
16274 if (is_a <scalar_int_mode> (mode, &int_mode))
16275 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16276 break;
16277
16278 case COMPARE:
16279 /* In theory, we could implement the above. */
16280 /* DWARF cannot represent the unsigned compare operations
16281 natively. */
16282 case SS_MULT:
16283 case US_MULT:
16284 case SS_DIV:
16285 case US_DIV:
16286 case SS_PLUS:
16287 case US_PLUS:
16288 case SS_MINUS:
16289 case US_MINUS:
16290 case SS_NEG:
16291 case US_NEG:
16292 case SS_ABS:
16293 case SS_ASHIFT:
16294 case US_ASHIFT:
16295 case SS_TRUNCATE:
16296 case US_TRUNCATE:
16297 case UNORDERED:
16298 case ORDERED:
16299 case UNEQ:
16300 case UNGE:
16301 case UNGT:
16302 case UNLE:
16303 case UNLT:
16304 case LTGT:
16305 case FRACT_CONVERT:
16306 case UNSIGNED_FRACT_CONVERT:
16307 case SAT_FRACT:
16308 case UNSIGNED_SAT_FRACT:
16309 case SQRT:
16310 case ASM_OPERANDS:
16311 case VEC_MERGE:
16312 case VEC_SELECT:
16313 case VEC_CONCAT:
16314 case VEC_DUPLICATE:
16315 case VEC_SERIES:
16316 case UNSPEC:
16317 case HIGH:
16318 case FMA:
16319 case STRICT_LOW_PART:
16320 case CONST_VECTOR:
16321 case CONST_FIXED:
16322 case CLRSB:
16323 case CLOBBER:
16324 case CLOBBER_HIGH:
16325 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16326 can't express it in the debug info. This can happen e.g. with some
16327 TLS UNSPECs. */
16328 break;
16329
16330 case CONST_STRING:
16331 resolve_one_addr (&rtl);
16332 goto symref;
16333
16334 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16335 the expression. An UNSPEC rtx represents a raw DWARF operation;
16336 new_loc_descr is called for it to build the operation directly.
16337 Otherwise mem_loc_descriptor is called recursively. */
16338 case PARALLEL:
16339 {
16340 int index = 0;
16341 dw_loc_descr_ref exp_result = NULL;
16342
16343 for (; index < XVECLEN (rtl, 0); index++)
16344 {
16345 rtx elem = XVECEXP (rtl, 0, index);
16346 if (GET_CODE (elem) == UNSPEC)
16347 {
16348 /* Each DWARF operation UNSPEC contains two operands; if
16349 an operand is not used for the operation, const0_rtx is
16350 passed. */
16351 gcc_assert (XVECLEN (elem, 0) == 2);
16352
16353 HOST_WIDE_INT dw_op = XINT (elem, 1);
16354 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16355 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16356 exp_result
16357 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16358 oprnd2);
16359 }
16360 else
16361 exp_result
16362 = mem_loc_descriptor (elem, mode, mem_mode,
16363 VAR_INIT_STATUS_INITIALIZED);
16364
16365 if (!mem_loc_result)
16366 mem_loc_result = exp_result;
16367 else
16368 add_loc_descr (&mem_loc_result, exp_result);
16369 }
16370
16371 break;
16372 }
16373
16374 default:
16375 if (flag_checking)
16376 {
16377 print_rtl (stderr, rtl);
16378 gcc_unreachable ();
16379 }
16380 break;
16381 }
16382
16383 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16384 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16385
16386 return mem_loc_result;
16387 }
16388
16389 /* Return a descriptor that describes the concatenation of two locations.
16390 This is typically a complex variable. */
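/* E.g. a complex value split across two registers comes out roughly as
   DW_OP_reg<n> DW_OP_piece <size> DW_OP_reg<m> DW_OP_piece <size>.  */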
16391
16392 static dw_loc_descr_ref
16393 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16394 {
16395 /* At present we only track constant-sized pieces. */
16396 unsigned int size0, size1;
16397 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16398 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16399 return 0;
16400
16401 dw_loc_descr_ref cc_loc_result = NULL;
16402 dw_loc_descr_ref x0_ref
16403 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16404 dw_loc_descr_ref x1_ref
16405 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16406
16407 if (x0_ref == 0 || x1_ref == 0)
16408 return 0;
16409
16410 cc_loc_result = x0_ref;
16411 add_loc_descr_op_piece (&cc_loc_result, size0);
16412
16413 add_loc_descr (&cc_loc_result, x1_ref);
16414 add_loc_descr_op_piece (&cc_loc_result, size1);
16415
16416 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16417 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16418
16419 return cc_loc_result;
16420 }
16421
16422 /* Return a descriptor that describes the concatenation of N
16423 locations. */
16424
16425 static dw_loc_descr_ref
16426 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16427 {
16428 unsigned int i;
16429 dw_loc_descr_ref cc_loc_result = NULL;
16430 unsigned int n = XVECLEN (concatn, 0);
16431 unsigned int size;
16432
16433 for (i = 0; i < n; ++i)
16434 {
16435 dw_loc_descr_ref ref;
16436 rtx x = XVECEXP (concatn, 0, i);
16437
16438 /* At present we only track constant-sized pieces. */
16439 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16440 return NULL;
16441
16442 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16443 if (ref == NULL)
16444 return NULL;
16445
16446 add_loc_descr (&cc_loc_result, ref);
16447 add_loc_descr_op_piece (&cc_loc_result, size);
16448 }
16449
16450 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16451 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16452
16453 return cc_loc_result;
16454 }
16455
16456 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16457 for DEBUG_IMPLICIT_PTR RTL. */
16458
16459 static dw_loc_descr_ref
16460 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16461 {
16462 dw_loc_descr_ref ret;
16463 dw_die_ref ref;
16464
16465 if (dwarf_strict && dwarf_version < 5)
16466 return NULL;
16467 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16468 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16469 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16470 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16471 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16472 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16473 if (ref)
16474 {
16475 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16476 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16477 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16478 }
16479 else
16480 {
16481 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16482 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16483 }
16484 return ret;
16485 }
16486
16487 /* Output a proper Dwarf location descriptor for a variable or parameter
16488 which is either allocated in a register or in a memory location. For a
16489 register, we just generate an OP_REG and the register number. For a
16490 memory location we provide a Dwarf postfix expression describing how to
16491 generate the (dynamic) address of the object onto the address stack.
16492
16493 MODE is mode of the decl if this loc_descriptor is going to be used in
16494 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16495 allowed, VOIDmode otherwise.
16496
16497 If we don't know how to describe it, return 0. */
16498
16499 static dw_loc_descr_ref
16500 loc_descriptor (rtx rtl, machine_mode mode,
16501 enum var_init_status initialized)
16502 {
16503 dw_loc_descr_ref loc_result = NULL;
16504 scalar_int_mode int_mode;
16505
16506 switch (GET_CODE (rtl))
16507 {
16508 case SUBREG:
16509 /* The case of a subreg may arise when we have a local (register)
16510 variable or a formal (register) parameter which doesn't quite fill
16511 up an entire register. For now, just assume that it is
16512 legitimate to make the Dwarf info refer to the whole register which
16513 contains the given subreg. */
16514 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16515 loc_result = loc_descriptor (SUBREG_REG (rtl),
16516 GET_MODE (SUBREG_REG (rtl)), initialized);
16517 else
16518 goto do_default;
16519 break;
16520
16521 case REG:
16522 loc_result = reg_loc_descriptor (rtl, initialized);
16523 break;
16524
16525 case MEM:
16526 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16527 GET_MODE (rtl), initialized);
16528 if (loc_result == NULL)
16529 loc_result = tls_mem_loc_descriptor (rtl);
16530 if (loc_result == NULL)
16531 {
16532 rtx new_rtl = avoid_constant_pool_reference (rtl);
16533 if (new_rtl != rtl)
16534 loc_result = loc_descriptor (new_rtl, mode, initialized);
16535 }
16536 break;
16537
16538 case CONCAT:
16539 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16540 initialized);
16541 break;
16542
16543 case CONCATN:
16544 loc_result = concatn_loc_descriptor (rtl, initialized);
16545 break;
16546
16547 case VAR_LOCATION:
16548 /* Single part. */
16549 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16550 {
16551 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16552 if (GET_CODE (loc) == EXPR_LIST)
16553 loc = XEXP (loc, 0);
16554 loc_result = loc_descriptor (loc, mode, initialized);
16555 break;
16556 }
16557
16558 rtl = XEXP (rtl, 1);
16559 /* FALLTHRU */
16560
16561 case PARALLEL:
16562 {
16563 rtvec par_elems = XVEC (rtl, 0);
16564 int num_elem = GET_NUM_ELEM (par_elems);
16565 machine_mode mode;
16566 int i, size;
16567
16568 /* Create the first one, so we have something to add to. */
16569 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16570 VOIDmode, initialized);
16571 if (loc_result == NULL)
16572 return NULL;
16573 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16574 /* At present we only track constant-sized pieces. */
16575 if (!GET_MODE_SIZE (mode).is_constant (&size))
16576 return NULL;
16577 add_loc_descr_op_piece (&loc_result, size);
16578 for (i = 1; i < num_elem; i++)
16579 {
16580 dw_loc_descr_ref temp;
16581
16582 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16583 VOIDmode, initialized);
16584 if (temp == NULL)
16585 return NULL;
16586 add_loc_descr (&loc_result, temp);
16587 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16588 /* At present we only track constant-sized pieces. */
16589 if (!GET_MODE_SIZE (mode).is_constant (&size))
16590 return NULL;
16591 add_loc_descr_op_piece (&loc_result, size);
16592 }
16593 }
16594 break;
16595
16596 case CONST_INT:
16597 if (mode != VOIDmode && mode != BLKmode)
16598 {
16599 int_mode = as_a <scalar_int_mode> (mode);
16600 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16601 INTVAL (rtl));
16602 }
16603 break;
16604
16605 case CONST_DOUBLE:
16606 if (mode == VOIDmode)
16607 mode = GET_MODE (rtl);
16608
16609 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16610 {
16611 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16612
16613 /* Note that a CONST_DOUBLE rtx could represent either an integer
16614 or a floating-point constant. A CONST_DOUBLE is used whenever
16615 the constant requires more than one word in order to be
16616 adequately represented. We output CONST_DOUBLEs as blocks. */
16617 scalar_mode smode = as_a <scalar_mode> (mode);
16618 loc_result = new_loc_descr (DW_OP_implicit_value,
16619 GET_MODE_SIZE (smode), 0);
16620 #if TARGET_SUPPORTS_WIDE_INT == 0
16621 if (!SCALAR_FLOAT_MODE_P (smode))
16622 {
16623 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16624 loc_result->dw_loc_oprnd2.v.val_double
16625 = rtx_to_double_int (rtl);
16626 }
16627 else
16628 #endif
16629 {
16630 unsigned int length = GET_MODE_SIZE (smode);
16631 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16632
16633 insert_float (rtl, array);
16634 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16635 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16636 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16637 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16638 }
16639 }
16640 break;
16641
16642 case CONST_WIDE_INT:
16643 if (mode == VOIDmode)
16644 mode = GET_MODE (rtl);
16645
16646 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16647 {
16648 int_mode = as_a <scalar_int_mode> (mode);
16649 loc_result = new_loc_descr (DW_OP_implicit_value,
16650 GET_MODE_SIZE (int_mode), 0);
16651 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16652 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16653 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16654 }
16655 break;
16656
16657 case CONST_VECTOR:
16658 if (mode == VOIDmode)
16659 mode = GET_MODE (rtl);
16660
16661 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16662 {
16663 unsigned int length;
16664 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16665 return NULL;
16666
16667 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16668 unsigned char *array
16669 = ggc_vec_alloc<unsigned char> (length * elt_size);
16670 unsigned int i;
16671 unsigned char *p;
16672 machine_mode imode = GET_MODE_INNER (mode);
16673
16674 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16675 switch (GET_MODE_CLASS (mode))
16676 {
16677 case MODE_VECTOR_INT:
16678 for (i = 0, p = array; i < length; i++, p += elt_size)
16679 {
16680 rtx elt = CONST_VECTOR_ELT (rtl, i);
16681 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16682 }
16683 break;
16684
16685 case MODE_VECTOR_FLOAT:
16686 for (i = 0, p = array; i < length; i++, p += elt_size)
16687 {
16688 rtx elt = CONST_VECTOR_ELT (rtl, i);
16689 insert_float (elt, p);
16690 }
16691 break;
16692
16693 default:
16694 gcc_unreachable ();
16695 }
16696
16697 loc_result = new_loc_descr (DW_OP_implicit_value,
16698 length * elt_size, 0);
16699 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16700 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16701 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16702 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16703 }
16704 break;
16705
16706 case CONST:
16707 if (mode == VOIDmode
16708 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16709 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16710 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16711 {
16712 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16713 break;
16714 }
16715 /* FALLTHROUGH */
16716 case SYMBOL_REF:
16717 if (!const_ok_for_output (rtl))
16718 break;
16719 /* FALLTHROUGH */
16720 case LABEL_REF:
16721 if (is_a <scalar_int_mode> (mode, &int_mode)
16722 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16723 && (dwarf_version >= 4 || !dwarf_strict))
16724 {
16725 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16726 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16727 vec_safe_push (used_rtx_array, rtl);
16728 }
16729 break;
16730
16731 case DEBUG_IMPLICIT_PTR:
16732 loc_result = implicit_ptr_descriptor (rtl, 0);
16733 break;
16734
16735 case PLUS:
16736 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16737 && CONST_INT_P (XEXP (rtl, 1)))
16738 {
16739 loc_result
16740 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16741 break;
16742 }
16743 /* FALLTHRU */
16744 do_default:
16745 default:
16746 if ((is_a <scalar_int_mode> (mode, &int_mode)
16747 && GET_MODE (rtl) == int_mode
16748 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16749 && dwarf_version >= 4)
16750 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16751 {
16752 /* Value expression. */
16753 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16754 if (loc_result)
16755 add_loc_descr (&loc_result,
16756 new_loc_descr (DW_OP_stack_value, 0, 0));
16757 }
16758 break;
16759 }
16760
16761 return loc_result;
16762 }
16763
16764 /* We need to figure out what section we should use as the base for the
16765 address ranges where a given location is valid.
16766 1. If this particular DECL has a section associated with it, use that.
16767 2. If this function has a section associated with it, use that.
16768 3. Otherwise, use the text section.
16769 XXX: If you split a variable across multiple sections, we won't notice. */
16770
16771 static const char *
16772 secname_for_decl (const_tree decl)
16773 {
16774 const char *secname;
16775
16776 if (VAR_OR_FUNCTION_DECL_P (decl)
16777 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16778 && DECL_SECTION_NAME (decl))
16779 secname = DECL_SECTION_NAME (decl);
16780 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16781 secname = DECL_SECTION_NAME (current_function_decl);
16782 else if (cfun && in_cold_section_p)
16783 secname = crtl->subsections.cold_section_label;
16784 else
16785 secname = text_section_label;
16786
16787 return secname;
16788 }
16789
16790 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16791
16792 static bool
16793 decl_by_reference_p (tree decl)
16794 {
16795 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16796 || VAR_P (decl))
16797 && DECL_BY_REFERENCE (decl));
16798 }
16799
16800 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16801 for VARLOC. */
16802
16803 static dw_loc_descr_ref
16804 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16805 enum var_init_status initialized)
16806 {
16807 int have_address = 0;
16808 dw_loc_descr_ref descr;
16809 machine_mode mode;
16810
16811 if (want_address != 2)
16812 {
16813 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16814 /* Single part. */
16815 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16816 {
16817 varloc = PAT_VAR_LOCATION_LOC (varloc);
16818 if (GET_CODE (varloc) == EXPR_LIST)
16819 varloc = XEXP (varloc, 0);
16820 mode = GET_MODE (varloc);
16821 if (MEM_P (varloc))
16822 {
16823 rtx addr = XEXP (varloc, 0);
16824 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16825 mode, initialized);
16826 if (descr)
16827 have_address = 1;
16828 else
16829 {
16830 rtx x = avoid_constant_pool_reference (varloc);
16831 if (x != varloc)
16832 descr = mem_loc_descriptor (x, mode, VOIDmode,
16833 initialized);
16834 }
16835 }
16836 else
16837 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16838 }
16839 else
16840 return 0;
16841 }
16842 else
16843 {
16844 if (GET_CODE (varloc) == VAR_LOCATION)
16845 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16846 else
16847 mode = DECL_MODE (loc);
16848 descr = loc_descriptor (varloc, mode, initialized);
16849 have_address = 1;
16850 }
16851
16852 if (!descr)
16853 return 0;
16854
16855 if (want_address == 2 && !have_address
16856 && (dwarf_version >= 4 || !dwarf_strict))
16857 {
16858 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16859 {
16860 expansion_failed (loc, NULL_RTX,
16861 "DWARF address size mismatch");
16862 return 0;
16863 }
16864 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16865 have_address = 1;
16866 }
16867 /* Show if we can't fill the request for an address. */
16868 if (want_address && !have_address)
16869 {
16870 expansion_failed (loc, NULL_RTX,
16871 "Want address and only have value");
16872 return 0;
16873 }
16874
16875 /* If we've got an address and don't want one, dereference. */
16876 if (!want_address && have_address)
16877 {
16878 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16879 enum dwarf_location_atom op;
16880
16881 if (size > DWARF2_ADDR_SIZE || size == -1)
16882 {
16883 expansion_failed (loc, NULL_RTX,
16884 "DWARF address size mismatch");
16885 return 0;
16886 }
16887 else if (size == DWARF2_ADDR_SIZE)
16888 op = DW_OP_deref;
16889 else
16890 op = DW_OP_deref_size;
16891
16892 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16893 }
16894
16895 return descr;
16896 }
16897
16898 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16899 if it is not possible. */
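/* E.g. a byte-aligned 32-bit piece at offset zero becomes DW_OP_piece 4,
   while a 3-bit piece at bit offset 1 needs DW_OP_bit_piece 3, 1, which is
   only available for DWARF 3 and later or outside strict mode.  */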
16900
16901 static dw_loc_descr_ref
16902 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16903 {
16904 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16905 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16906 else if (dwarf_version >= 3 || !dwarf_strict)
16907 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16908 else
16909 return NULL;
16910 }
16911
16912 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16913 for the location note LOC of variable DECL that has been optimized by SRA. */
16914
16915 static dw_loc_descr_ref
16916 dw_sra_loc_expr (tree decl, rtx loc)
16917 {
16918 rtx p;
16919 unsigned HOST_WIDE_INT padsize = 0;
16920 dw_loc_descr_ref descr, *descr_tail;
16921 unsigned HOST_WIDE_INT decl_size;
16922 rtx varloc;
16923 enum var_init_status initialized;
16924
16925 if (DECL_SIZE (decl) == NULL
16926 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16927 return NULL;
16928
16929 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16930 descr = NULL;
16931 descr_tail = &descr;
16932
16933 for (p = loc; p; p = XEXP (p, 1))
16934 {
16935 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16936 rtx loc_note = *decl_piece_varloc_ptr (p);
16937 dw_loc_descr_ref cur_descr;
16938 dw_loc_descr_ref *tail, last = NULL;
16939 unsigned HOST_WIDE_INT opsize = 0;
16940
16941 if (loc_note == NULL_RTX
16942 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16943 {
16944 padsize += bitsize;
16945 continue;
16946 }
16947 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16948 varloc = NOTE_VAR_LOCATION (loc_note);
16949 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16950 if (cur_descr == NULL)
16951 {
16952 padsize += bitsize;
16953 continue;
16954 }
16955
16956 /* Check that cur_descr either doesn't use
16957 DW_OP_*piece operations, or their sum is equal
16958 to bitsize. Otherwise we can't embed it. */
16959 for (tail = &cur_descr; *tail != NULL;
16960 tail = &(*tail)->dw_loc_next)
16961 if ((*tail)->dw_loc_opc == DW_OP_piece)
16962 {
16963 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16964 * BITS_PER_UNIT;
16965 last = *tail;
16966 }
16967 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16968 {
16969 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16970 last = *tail;
16971 }
16972
16973 if (last != NULL && opsize != bitsize)
16974 {
16975 padsize += bitsize;
16976 /* Discard the current piece of the descriptor and release any
16977 addr_table entries it uses. */
16978 remove_loc_list_addr_table_entries (cur_descr);
16979 continue;
16980 }
16981
16982 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16983 expression, which means that those bits are optimized out. */
16984 if (padsize)
16985 {
16986 if (padsize > decl_size)
16987 {
16988 remove_loc_list_addr_table_entries (cur_descr);
16989 goto discard_descr;
16990 }
16991 decl_size -= padsize;
16992 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16993 if (*descr_tail == NULL)
16994 {
16995 remove_loc_list_addr_table_entries (cur_descr);
16996 goto discard_descr;
16997 }
16998 descr_tail = &(*descr_tail)->dw_loc_next;
16999 padsize = 0;
17000 }
17001 *descr_tail = cur_descr;
17002 descr_tail = tail;
17003 if (bitsize > decl_size)
17004 goto discard_descr;
17005 decl_size -= bitsize;
17006 if (last == NULL)
17007 {
17008 HOST_WIDE_INT offset = 0;
17009 if (GET_CODE (varloc) == VAR_LOCATION
17010 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17011 {
17012 varloc = PAT_VAR_LOCATION_LOC (varloc);
17013 if (GET_CODE (varloc) == EXPR_LIST)
17014 varloc = XEXP (varloc, 0);
17015 }
17016 do
17017 {
17018 if (GET_CODE (varloc) == CONST
17019 || GET_CODE (varloc) == SIGN_EXTEND
17020 || GET_CODE (varloc) == ZERO_EXTEND)
17021 varloc = XEXP (varloc, 0);
17022 else if (GET_CODE (varloc) == SUBREG)
17023 varloc = SUBREG_REG (varloc);
17024 else
17025 break;
17026 }
17027 while (1);
17028 /* The DW_OP_bit_piece offset should be zero for register
17029 or implicit location descriptions and empty location
17030 descriptions, but for memory addresses it needs a big-endian
17031 adjustment. */
17032 if (MEM_P (varloc))
17033 {
17034 unsigned HOST_WIDE_INT memsize;
17035 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17036 goto discard_descr;
17037 memsize *= BITS_PER_UNIT;
17038 if (memsize != bitsize)
17039 {
17040 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17041 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17042 goto discard_descr;
17043 if (memsize < bitsize)
17044 goto discard_descr;
17045 if (BITS_BIG_ENDIAN)
17046 offset = memsize - bitsize;
17047 }
17048 }
17049
17050 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17051 if (*descr_tail == NULL)
17052 goto discard_descr;
17053 descr_tail = &(*descr_tail)->dw_loc_next;
17054 }
17055 }
17056
17057 /* If there were any non-empty expressions, add padding till the end of
17058 the decl. */
17059 if (descr != NULL && decl_size != 0)
17060 {
17061 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17062 if (*descr_tail == NULL)
17063 goto discard_descr;
17064 }
17065 return descr;
17066
17067 discard_descr:
17068 /* Discard the descriptor and release any addr_table entries it uses. */
17069 remove_loc_list_addr_table_entries (descr);
17070 return NULL;
17071 }
17072
17073 /* Return the dwarf representation of the location list LOC_LIST of
17074 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17075 function. */
17076
17077 static dw_loc_list_ref
17078 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17079 {
17080 const char *endname, *secname;
17081 var_loc_view endview;
17082 rtx varloc;
17083 enum var_init_status initialized;
17084 struct var_loc_node *node;
17085 dw_loc_descr_ref descr;
17086 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17087 dw_loc_list_ref list = NULL;
17088 dw_loc_list_ref *listp = &list;
17089
17090 /* Now that we know what section we are using for a base,
17091 actually construct the list of locations.
17092 The first location information is what is passed to the
17093 function that creates the location list, and the remaining
17094 locations just get added on to that list.
17095 Note that we only know the start address for a location
17096 (i.e. location changes), so to build the range, we use
17097 the range [current location start, next location start].
17098 This means we have to special case the last node, and generate
17099 a range of [last location start, end of function label]. */
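/* For example, with location notes at L1 and L2 in a function ending at
   LFE, the list comes out as [L1, L2) with the first location and
   [L2, LFE) with the second one.  */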
17100
17101 if (cfun && crtl->has_bb_partition)
17102 {
17103 bool save_in_cold_section_p = in_cold_section_p;
17104 in_cold_section_p = first_function_block_is_cold;
17105 if (loc_list->last_before_switch == NULL)
17106 in_cold_section_p = !in_cold_section_p;
17107 secname = secname_for_decl (decl);
17108 in_cold_section_p = save_in_cold_section_p;
17109 }
17110 else
17111 secname = secname_for_decl (decl);
17112
17113 for (node = loc_list->first; node; node = node->next)
17114 {
17115 bool range_across_switch = false;
17116 if (GET_CODE (node->loc) == EXPR_LIST
17117 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17118 {
17119 if (GET_CODE (node->loc) == EXPR_LIST)
17120 {
17121 descr = NULL;
17122 /* This requires DW_OP_{,bit_}piece, which is not usable
17123 inside DWARF expressions. */
17124 if (want_address == 2)
17125 descr = dw_sra_loc_expr (decl, node->loc);
17126 }
17127 else
17128 {
17129 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17130 varloc = NOTE_VAR_LOCATION (node->loc);
17131 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17132 }
17133 if (descr)
17134 {
17135 /* If a section switch happens between node->label
17136 and node->next->label (or the end of the function) and
17137 we can't emit it as a single entry list,
17138 emit two ranges, the first one ending at the end
17139 of the first partition and the second one starting at the
17140 beginning of the second partition. */
17141 if (node == loc_list->last_before_switch
17142 && (node != loc_list->first || loc_list->first->next
17143 /* If we are to emit a view number, we will emit
17144 a loclist rather than a single location
17145 expression for the entire function (see
17146 loc_list_has_views), so we have to split the
17147 range that straddles across partitions. */
17148 || !ZERO_VIEW_P (node->view))
17149 && current_function_decl)
17150 {
17151 endname = cfun->fde->dw_fde_end;
17152 endview = 0;
17153 range_across_switch = true;
17154 }
17155 /* The variable has a location between NODE->LABEL and
17156 NODE->NEXT->LABEL. */
17157 else if (node->next)
17158 endname = node->next->label, endview = node->next->view;
17159 /* If the variable has a location at the last label
17160 it keeps its location until the end of function. */
17161 else if (!current_function_decl)
17162 endname = text_end_label, endview = 0;
17163 else
17164 {
17165 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17166 current_function_funcdef_no);
17167 endname = ggc_strdup (label_id);
17168 endview = 0;
17169 }
17170
17171 *listp = new_loc_list (descr, node->label, node->view,
17172 endname, endview, secname);
17173 if (TREE_CODE (decl) == PARM_DECL
17174 && node == loc_list->first
17175 && NOTE_P (node->loc)
17176 && strcmp (node->label, endname) == 0)
17177 (*listp)->force = true;
17178 listp = &(*listp)->dw_loc_next;
17179 }
17180 }
17181
17182 if (cfun
17183 && crtl->has_bb_partition
17184 && node == loc_list->last_before_switch)
17185 {
17186 bool save_in_cold_section_p = in_cold_section_p;
17187 in_cold_section_p = !first_function_block_is_cold;
17188 secname = secname_for_decl (decl);
17189 in_cold_section_p = save_in_cold_section_p;
17190 }
17191
17192 if (range_across_switch)
17193 {
17194 if (GET_CODE (node->loc) == EXPR_LIST)
17195 descr = dw_sra_loc_expr (decl, node->loc);
17196 else
17197 {
17198 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17199 varloc = NOTE_VAR_LOCATION (node->loc);
17200 descr = dw_loc_list_1 (decl, varloc, want_address,
17201 initialized);
17202 }
17203 gcc_assert (descr);
17204 /* The variable has a location between NODE->LABEL and
17205 NODE->NEXT->LABEL. */
17206 if (node->next)
17207 endname = node->next->label, endview = node->next->view;
17208 else
17209 endname = cfun->fde->dw_fde_second_end, endview = 0;
17210 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17211 endname, endview, secname);
17212 listp = &(*listp)->dw_loc_next;
17213 }
17214 }
17215
17216 /* Try to avoid the overhead of a location list by emitting a location
17217 expression instead, but only if we didn't have more than one
17218 location entry in the first place. If some entries were not
17219 representable, we don't want to pretend that a single representable
17220 entry applies to the entire scope in which the variable is
17221 available. */
17222 if (list && loc_list->first->next)
17223 gen_llsym (list);
17224 else
17225 maybe_gen_llsym (list);
17226
17227 return list;
17228 }
17229
17230 /* Return true if the loc_list has only a single element and thus can be
17231 represented as a location description. */
17232
17233 static bool
17234 single_element_loc_list_p (dw_loc_list_ref list)
17235 {
17236 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17237 return !list->ll_symbol;
17238 }
17239
17240 /* Duplicate a single element of a location list. */
17241
17242 static inline dw_loc_descr_ref
17243 copy_loc_descr (dw_loc_descr_ref ref)
17244 {
17245 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17246 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17247 return copy;
17248 }
17249
17250 /* To each location in list LIST append loc descr REF. */
17251
17252 static void
17253 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17254 {
17255 dw_loc_descr_ref copy;
17256 add_loc_descr (&list->expr, ref);
17257 list = list->dw_loc_next;
17258 while (list)
17259 {
17260 copy = copy_loc_descr (ref);
17261 add_loc_descr (&list->expr, copy);
17262 while (copy->dw_loc_next)
17263 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17264 list = list->dw_loc_next;
17265 }
17266 }
17267
17268 /* To each location in list LIST prepend loc descr REF. */
17269
17270 static void
17271 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17272 {
17273 dw_loc_descr_ref copy;
17274 dw_loc_descr_ref ref_end = list->expr;
17275 add_loc_descr (&ref, list->expr);
17276 list->expr = ref;
17277 list = list->dw_loc_next;
17278 while (list)
17279 {
17280 dw_loc_descr_ref end = list->expr;
17281 list->expr = copy = copy_loc_descr (ref);
17282 while (copy->dw_loc_next != ref_end)
17283 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17284 copy->dw_loc_next = end;
17285 list = list->dw_loc_next;
17286 }
17287 }
17288
17289 /* Given two lists RET and LIST,
17290 produce a location list that is the result of adding the expression in LIST
17291 to the expression in RET at each position in the program.
17292 This might be destructive on both RET and LIST.
17293
17294 TODO: We handle only the simple cases of RET or LIST having at most one
17295 element. The general case would involve sorting the lists in program order
17296 and merging them, which will need some additional work.
17297 Adding that will improve the quality of debug info, especially for SRA-ed
17298 structures. */
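/* E.g. when LIST has a single entry, its expression is simply appended to
   every entry of *RET; symmetrically, a single-entry *RET is prepended to
   every entry of LIST.  */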
17299
17300 static void
17301 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17302 {
17303 if (!list)
17304 return;
17305 if (!*ret)
17306 {
17307 *ret = list;
17308 return;
17309 }
17310 if (!list->dw_loc_next)
17311 {
17312 add_loc_descr_to_each (*ret, list->expr);
17313 return;
17314 }
17315 if (!(*ret)->dw_loc_next)
17316 {
17317 prepend_loc_descr_to_each (list, (*ret)->expr);
17318 *ret = list;
17319 return;
17320 }
17321 expansion_failed (NULL_TREE, NULL_RTX,
17322 "Don't know how to merge two non-trivial"
17323 " location lists.\n");
17324 *ret = NULL;
17325 return;
17326 }
17327
17328 /* LOC is a constant expression. Try our luck: look it up in the constant
17329 pool and return the loc_descr of its address. */
17330
17331 static dw_loc_descr_ref
17332 cst_pool_loc_descr (tree loc)
17333 {
17334 /* Get an RTL for this, if something has been emitted. */
17335 rtx rtl = lookup_constant_def (loc);
17336
17337 if (!rtl || !MEM_P (rtl))
17338 {
17339 gcc_assert (!rtl);
17340 return 0;
17341 }
17342 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17343
17344 /* TODO: We might get more coverage if we were actually delaying expansion
17345 of all expressions till the end of compilation, when constant pools are
17346 fully populated. */
17347 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17348 {
17349 expansion_failed (loc, NULL_RTX,
17350 "CST value in contant pool but not marked.");
17351 return 0;
17352 }
17353 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17354 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17355 }
17356
17357 /* Return a dw_loc_list representing the address of ADDR_EXPR LOC
17358 by looking for an inner INDIRECT_REF expression and turning
17359 it into simple arithmetic.
17360
17361 See loc_list_from_tree for the meaning of CONTEXT. */
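/* E.g. for &ptr->field this roughly computes the location of PTR, adds the
   byte offset of FIELD with DW_OP_plus_uconst (or a variable offset with
   DW_OP_plus), and finishes with DW_OP_stack_value.  */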
17362
17363 static dw_loc_list_ref
17364 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17365 loc_descr_context *context)
17366 {
17367 tree obj, offset;
17368 poly_int64 bitsize, bitpos, bytepos;
17369 machine_mode mode;
17370 int unsignedp, reversep, volatilep = 0;
17371 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17372
17373 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17374 &bitsize, &bitpos, &offset, &mode,
17375 &unsignedp, &reversep, &volatilep);
17376 STRIP_NOPS (obj);
17377 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17378 {
17379 expansion_failed (loc, NULL_RTX, "bitfield access");
17380 return 0;
17381 }
17382 if (!INDIRECT_REF_P (obj))
17383 {
17384 expansion_failed (obj,
17385 NULL_RTX, "no indirect ref in inner reference");
17386 return 0;
17387 }
17388 if (!offset && known_eq (bitpos, 0))
17389 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17390 context);
17391 else if (toplev
17392 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17393 && (dwarf_version >= 4 || !dwarf_strict))
17394 {
17395 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17396 if (!list_ret)
17397 return 0;
17398 if (offset)
17399 {
17400 /* Variable offset. */
17401 list_ret1 = loc_list_from_tree (offset, 0, context);
17402 if (list_ret1 == 0)
17403 return 0;
17404 add_loc_list (&list_ret, list_ret1);
17405 if (!list_ret)
17406 return 0;
17407 add_loc_descr_to_each (list_ret,
17408 new_loc_descr (DW_OP_plus, 0, 0));
17409 }
17410 HOST_WIDE_INT value;
17411 if (bytepos.is_constant (&value) && value > 0)
17412 add_loc_descr_to_each (list_ret,
17413 new_loc_descr (DW_OP_plus_uconst, value, 0));
17414 else if (maybe_ne (bytepos, 0))
17415 loc_list_plus_const (list_ret, bytepos);
17416 add_loc_descr_to_each (list_ret,
17417 new_loc_descr (DW_OP_stack_value, 0, 0));
17418 }
17419 return list_ret;
17420 }
17421
17422 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17423 operations from LOC are nops, move to the last one. Insert into NOPS all
17424 operations that are skipped. */
17425
17426 static void
17427 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17428 hash_set<dw_loc_descr_ref> &nops)
17429 {
17430 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17431 {
17432 nops.add (loc);
17433 loc = loc->dw_loc_next;
17434 }
17435 }
17436
17437 /* Helper for loc_descr_without_nops: free the location description operation
17438 LOC. */
17439
17440 bool
17441 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17442 {
17443 ggc_free (loc);
17444 return true;
17445 }
17446
17447 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17448 finishes LOC. */
17449
17450 static void
17451 loc_descr_without_nops (dw_loc_descr_ref &loc)
17452 {
17453 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17454 return;
17455
17456 /* Set of all DW_OP_nop operations we remove. */
17457 hash_set<dw_loc_descr_ref> nops;
17458
17459 /* First, strip all prefix NOP operations in order to keep the head of the
17460 operations list. */
17461 loc_descr_to_next_no_nop (loc, nops);
17462
17463 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17464 {
17465 /* For control flow operations: strip "prefix" nops in destination
17466 labels. */
17467 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17468 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17469 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17470 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17471
17472 /* Do the same for the operations that follow, then move to the next
17473 iteration. */
17474 if (cur->dw_loc_next != NULL)
17475 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17476 cur = cur->dw_loc_next;
17477 }
17478
17479 nops.traverse<void *, free_loc_descr> (NULL);
17480 }
17481
17482
17483 struct dwarf_procedure_info;
17484
17485 /* Helper structure for location descriptions generation. */
17486 struct loc_descr_context
17487 {
17488 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17489    NULL_TREE if DW_OP_push_object_address is invalid for this location
17490 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17491 tree context_type;
17492 /* The ..._DECL node that should be translated as a
17493 DW_OP_push_object_address operation. */
17494 tree base_decl;
17495 /* Information about the DWARF procedure we are currently generating. NULL if
17496 we are not generating a DWARF procedure. */
17497 struct dwarf_procedure_info *dpi;
17498 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17499 by consumer. Used for DW_TAG_generic_subrange attributes. */
17500 bool placeholder_arg;
17501 /* True if PLACEHOLDER_EXPR has been seen. */
17502 bool placeholder_seen;
17503 };
17504
17505 /* DWARF procedures generation
17506
17507    DWARF expressions (a.k.a. location descriptions) are used to encode variable
17508    quantities such as sizes or offsets. Such computations can have redundant parts
17509 that can be factorized in order to reduce the size of the output debug
17510 information. This is the whole point of DWARF procedures.
17511
17512 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17513 already factorized into functions ("size functions") in order to handle very
17514 big and complex types. Such functions are quite simple: they have integral
17515 arguments, they return an integral result and their body contains only a
17516 return statement with arithmetic expressions. This is the only kind of
17517 function we are interested in translating into DWARF procedures, here.
17518
17519    DWARF expressions and DWARF procedures are executed using a stack, so we have
17520 to define some calling convention for them to interact. Let's say that:
17521
17522 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17523 all arguments in reverse order (right-to-left) so that when the DWARF
17524 procedure execution starts, the first argument is the top of the stack.
17525
17526 - Then, when returning, the DWARF procedure must have consumed all arguments
17527 on the stack, must have pushed the result and touched nothing else.
17528
17529    - Each integral argument and the result have integral types that can be held
17530      in a single stack slot.
17531 
17532    - We call "frame offset" the number of stack slots that are "under DWARF
17533      procedure control": it includes the argument slots, the temporaries and
17534 the result slot. Thus, it is equal to the number of arguments when the
17535 procedure execution starts and must be equal to one (the result) when it
17536 returns. */
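
/* As an illustration of this convention (a hypothetical example, not emitted
   verbatim by the code below), a DWARF procedure computing ARG0 + ARG1 could
   consist of:

       DW_OP_dup        ; re-push ARG0 (the frame offset is 2 on entry)
       DW_OP_pick 2     ; re-push ARG1
       DW_OP_plus       ; compute ARG0 + ARG1
       DW_OP_swap       ; epilogue: get rid of ARG0...
       DW_OP_drop
       DW_OP_swap       ; ...and of ARG1
       DW_OP_drop       ; only the result is left: the frame offset is 1

   A caller would then emit "<push ARG1>; <push ARG0>; DW_OP_call4 <proc DIE>"
   so that only the result remains on its own stack afterwards. See
   function_to_dwarf_procedure and resolve_args_picking below for what is
   actually generated. */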
17537
17538 /* Helper structure used when generating operations for a DWARF procedure. */
17539 struct dwarf_procedure_info
17540 {
17541 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17542 currently translated. */
17543 tree fndecl;
17544 /* The number of arguments FNDECL takes. */
17545 unsigned args_count;
17546 };
17547
17548 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17549 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17550 equate it to this DIE. */
17551
17552 static dw_die_ref
17553 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17554 dw_die_ref parent_die)
17555 {
17556 dw_die_ref dwarf_proc_die;
17557
17558 if ((dwarf_version < 3 && dwarf_strict)
17559 || location == NULL)
17560 return NULL;
17561
17562 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17563 if (fndecl)
17564 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17565 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17566 return dwarf_proc_die;
17567 }
17568
17569 /* Return whether TYPE is a supported type as a DWARF procedure argument
17570 type or return type (we handle only scalar types and pointer types that
17571    aren't wider than the DWARF expression evaluation stack). */
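/* For instance (illustrative, assuming a 64-bit DWARF address size), "int" and
   "char *" are handled, whereas "__int128" (too wide) or a structure type is
   not. */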
17572
17573 static bool
17574 is_handled_procedure_type (tree type)
17575 {
17576 return ((INTEGRAL_TYPE_P (type)
17577 || TREE_CODE (type) == OFFSET_TYPE
17578 || TREE_CODE (type) == POINTER_TYPE)
17579 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17580 }
17581
17582 /* Helper for resolve_args_picking: do the same but stop when coming across
17583 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17584 offset *before* evaluating the corresponding operation. */
17585
17586 static bool
17587 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17588 struct dwarf_procedure_info *dpi,
17589 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17590 {
17591 /* The "frame_offset" identifier is already used to name a macro... */
17592 unsigned frame_offset_ = initial_frame_offset;
17593 dw_loc_descr_ref l;
17594
17595 for (l = loc; l != NULL;)
17596 {
17597 bool existed;
17598 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17599
17600 /* If we already met this node, there is nothing to compute anymore. */
17601 if (existed)
17602 {
17603 /* Make sure that the stack size is consistent wherever the execution
17604 flow comes from. */
17605 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17606 break;
17607 }
17608 l_frame_offset = frame_offset_;
17609
17610 /* If needed, relocate the picking offset with respect to the frame
17611 offset. */
17612 if (l->frame_offset_rel)
17613 {
17614 unsigned HOST_WIDE_INT off;
17615 switch (l->dw_loc_opc)
17616 {
17617 case DW_OP_pick:
17618 off = l->dw_loc_oprnd1.v.val_unsigned;
17619 break;
17620 case DW_OP_dup:
17621 off = 0;
17622 break;
17623 case DW_OP_over:
17624 off = 1;
17625 break;
17626 default:
17627 gcc_unreachable ();
17628 }
17629 /* frame_offset_ is the size of the current stack frame, including
17630 incoming arguments. Besides, the arguments are pushed
17631 right-to-left. Thus, in order to access the Nth argument from
17632 this operation node, the picking has to skip temporaries *plus*
17633 one stack slot per argument (0 for the first one, 1 for the second
17634 one, etc.).
17635
17636 	     The targeted argument number (N) is already set as the operand,
17637 	     and the number of temporaries can be computed with:
17638 	       frame_offset_ - dpi->args_count */
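	  /* Illustrative example (hypothetical numbers): for a procedure with
	     two arguments (dpi->args_count == 2), a reference to argument 1
	     evaluated while one temporary is already on the stack
	     (frame_offset_ == 3) yields off = 1 + 3 - 2 = 2, that is
	     DW_OP_pick 2 after the rewriting below. */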
17639 off += frame_offset_ - dpi->args_count;
17640
17641 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17642 if (off > 255)
17643 return false;
17644
17645 if (off == 0)
17646 {
17647 l->dw_loc_opc = DW_OP_dup;
17648 l->dw_loc_oprnd1.v.val_unsigned = 0;
17649 }
17650 else if (off == 1)
17651 {
17652 l->dw_loc_opc = DW_OP_over;
17653 l->dw_loc_oprnd1.v.val_unsigned = 0;
17654 }
17655 else
17656 {
17657 l->dw_loc_opc = DW_OP_pick;
17658 l->dw_loc_oprnd1.v.val_unsigned = off;
17659 }
17660 }
17661
17662 /* Update frame_offset according to the effect the current operation has
17663 on the stack. */
17664 switch (l->dw_loc_opc)
17665 {
17666 case DW_OP_deref:
17667 case DW_OP_swap:
17668 case DW_OP_rot:
17669 case DW_OP_abs:
17670 case DW_OP_neg:
17671 case DW_OP_not:
17672 case DW_OP_plus_uconst:
17673 case DW_OP_skip:
17674 case DW_OP_reg0:
17675 case DW_OP_reg1:
17676 case DW_OP_reg2:
17677 case DW_OP_reg3:
17678 case DW_OP_reg4:
17679 case DW_OP_reg5:
17680 case DW_OP_reg6:
17681 case DW_OP_reg7:
17682 case DW_OP_reg8:
17683 case DW_OP_reg9:
17684 case DW_OP_reg10:
17685 case DW_OP_reg11:
17686 case DW_OP_reg12:
17687 case DW_OP_reg13:
17688 case DW_OP_reg14:
17689 case DW_OP_reg15:
17690 case DW_OP_reg16:
17691 case DW_OP_reg17:
17692 case DW_OP_reg18:
17693 case DW_OP_reg19:
17694 case DW_OP_reg20:
17695 case DW_OP_reg21:
17696 case DW_OP_reg22:
17697 case DW_OP_reg23:
17698 case DW_OP_reg24:
17699 case DW_OP_reg25:
17700 case DW_OP_reg26:
17701 case DW_OP_reg27:
17702 case DW_OP_reg28:
17703 case DW_OP_reg29:
17704 case DW_OP_reg30:
17705 case DW_OP_reg31:
17706 case DW_OP_bregx:
17707 case DW_OP_piece:
17708 case DW_OP_deref_size:
17709 case DW_OP_nop:
17710 case DW_OP_bit_piece:
17711 case DW_OP_implicit_value:
17712 case DW_OP_stack_value:
17713 break;
17714
17715 case DW_OP_addr:
17716 case DW_OP_const1u:
17717 case DW_OP_const1s:
17718 case DW_OP_const2u:
17719 case DW_OP_const2s:
17720 case DW_OP_const4u:
17721 case DW_OP_const4s:
17722 case DW_OP_const8u:
17723 case DW_OP_const8s:
17724 case DW_OP_constu:
17725 case DW_OP_consts:
17726 case DW_OP_dup:
17727 case DW_OP_over:
17728 case DW_OP_pick:
17729 case DW_OP_lit0:
17730 case DW_OP_lit1:
17731 case DW_OP_lit2:
17732 case DW_OP_lit3:
17733 case DW_OP_lit4:
17734 case DW_OP_lit5:
17735 case DW_OP_lit6:
17736 case DW_OP_lit7:
17737 case DW_OP_lit8:
17738 case DW_OP_lit9:
17739 case DW_OP_lit10:
17740 case DW_OP_lit11:
17741 case DW_OP_lit12:
17742 case DW_OP_lit13:
17743 case DW_OP_lit14:
17744 case DW_OP_lit15:
17745 case DW_OP_lit16:
17746 case DW_OP_lit17:
17747 case DW_OP_lit18:
17748 case DW_OP_lit19:
17749 case DW_OP_lit20:
17750 case DW_OP_lit21:
17751 case DW_OP_lit22:
17752 case DW_OP_lit23:
17753 case DW_OP_lit24:
17754 case DW_OP_lit25:
17755 case DW_OP_lit26:
17756 case DW_OP_lit27:
17757 case DW_OP_lit28:
17758 case DW_OP_lit29:
17759 case DW_OP_lit30:
17760 case DW_OP_lit31:
17761 case DW_OP_breg0:
17762 case DW_OP_breg1:
17763 case DW_OP_breg2:
17764 case DW_OP_breg3:
17765 case DW_OP_breg4:
17766 case DW_OP_breg5:
17767 case DW_OP_breg6:
17768 case DW_OP_breg7:
17769 case DW_OP_breg8:
17770 case DW_OP_breg9:
17771 case DW_OP_breg10:
17772 case DW_OP_breg11:
17773 case DW_OP_breg12:
17774 case DW_OP_breg13:
17775 case DW_OP_breg14:
17776 case DW_OP_breg15:
17777 case DW_OP_breg16:
17778 case DW_OP_breg17:
17779 case DW_OP_breg18:
17780 case DW_OP_breg19:
17781 case DW_OP_breg20:
17782 case DW_OP_breg21:
17783 case DW_OP_breg22:
17784 case DW_OP_breg23:
17785 case DW_OP_breg24:
17786 case DW_OP_breg25:
17787 case DW_OP_breg26:
17788 case DW_OP_breg27:
17789 case DW_OP_breg28:
17790 case DW_OP_breg29:
17791 case DW_OP_breg30:
17792 case DW_OP_breg31:
17793 case DW_OP_fbreg:
17794 case DW_OP_push_object_address:
17795 case DW_OP_call_frame_cfa:
17796 case DW_OP_GNU_variable_value:
17797 ++frame_offset_;
17798 break;
17799
17800 case DW_OP_drop:
17801 case DW_OP_xderef:
17802 case DW_OP_and:
17803 case DW_OP_div:
17804 case DW_OP_minus:
17805 case DW_OP_mod:
17806 case DW_OP_mul:
17807 case DW_OP_or:
17808 case DW_OP_plus:
17809 case DW_OP_shl:
17810 case DW_OP_shr:
17811 case DW_OP_shra:
17812 case DW_OP_xor:
17813 case DW_OP_bra:
17814 case DW_OP_eq:
17815 case DW_OP_ge:
17816 case DW_OP_gt:
17817 case DW_OP_le:
17818 case DW_OP_lt:
17819 case DW_OP_ne:
17820 case DW_OP_regx:
17821 case DW_OP_xderef_size:
17822 --frame_offset_;
17823 break;
17824
17825 case DW_OP_call2:
17826 case DW_OP_call4:
17827 case DW_OP_call_ref:
17828 {
17829 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17830 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17831
17832 if (stack_usage == NULL)
17833 return false;
17834 frame_offset_ += *stack_usage;
17835 break;
17836 }
17837
17838 case DW_OP_implicit_pointer:
17839 case DW_OP_entry_value:
17840 case DW_OP_const_type:
17841 case DW_OP_regval_type:
17842 case DW_OP_deref_type:
17843 case DW_OP_convert:
17844 case DW_OP_reinterpret:
17845 case DW_OP_form_tls_address:
17846 case DW_OP_GNU_push_tls_address:
17847 case DW_OP_GNU_uninit:
17848 case DW_OP_GNU_encoded_addr:
17849 case DW_OP_GNU_implicit_pointer:
17850 case DW_OP_GNU_entry_value:
17851 case DW_OP_GNU_const_type:
17852 case DW_OP_GNU_regval_type:
17853 case DW_OP_GNU_deref_type:
17854 case DW_OP_GNU_convert:
17855 case DW_OP_GNU_reinterpret:
17856 case DW_OP_GNU_parameter_ref:
17857 /* loc_list_from_tree will probably not output these operations for
17858 size functions, so assume they will not appear here. */
17859 /* Fall through... */
17860
17861 default:
17862 gcc_unreachable ();
17863 }
17864
17865 /* Now, follow the control flow (except subroutine calls). */
17866 switch (l->dw_loc_opc)
17867 {
17868 case DW_OP_bra:
17869 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17870 frame_offsets))
17871 return false;
17872 /* Fall through. */
17873
17874 case DW_OP_skip:
17875 l = l->dw_loc_oprnd1.v.val_loc;
17876 break;
17877
17878 case DW_OP_stack_value:
17879 return true;
17880
17881 default:
17882 l = l->dw_loc_next;
17883 break;
17884 }
17885 }
17886
17887 return true;
17888 }
17889
17890 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17891 operations) in order to resolve the operand of DW_OP_pick operations that
17892 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17893    offset *before* LOC is executed. Return whether all relocations were
17894 successful. */
17895
17896 static bool
17897 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17898 struct dwarf_procedure_info *dpi)
17899 {
17900 /* Associate to all visited operations the frame offset *before* evaluating
17901 this operation. */
17902 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17903
17904 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17905 frame_offsets);
17906 }
17907
17908 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17909 Return NULL if it is not possible. */
17910
17911 static dw_die_ref
17912 function_to_dwarf_procedure (tree fndecl)
17913 {
17914 struct loc_descr_context ctx;
17915 struct dwarf_procedure_info dpi;
17916 dw_die_ref dwarf_proc_die;
17917 tree tree_body = DECL_SAVED_TREE (fndecl);
17918 dw_loc_descr_ref loc_body, epilogue;
17919
17920 tree cursor;
17921 unsigned i;
17922
17923 /* Do not generate multiple DWARF procedures for the same function
17924 declaration. */
17925 dwarf_proc_die = lookup_decl_die (fndecl);
17926 if (dwarf_proc_die != NULL)
17927 return dwarf_proc_die;
17928
17929 /* DWARF procedures are available starting with the DWARFv3 standard. */
17930 if (dwarf_version < 3 && dwarf_strict)
17931 return NULL;
17932
17933 /* We handle only functions for which we still have a body, that return a
17934    supported type and that take arguments with supported types. Note that
17935 there is no point translating functions that return nothing. */
17936 if (tree_body == NULL_TREE
17937 || DECL_RESULT (fndecl) == NULL_TREE
17938 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17939 return NULL;
17940
17941 for (cursor = DECL_ARGUMENTS (fndecl);
17942 cursor != NULL_TREE;
17943 cursor = TREE_CHAIN (cursor))
17944 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17945 return NULL;
17946
17947 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17948 if (TREE_CODE (tree_body) != RETURN_EXPR)
17949 return NULL;
17950 tree_body = TREE_OPERAND (tree_body, 0);
17951 if (TREE_CODE (tree_body) != MODIFY_EXPR
17952 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17953 return NULL;
17954 tree_body = TREE_OPERAND (tree_body, 1);
17955
17956 /* Try to translate the body expression itself. Note that this will probably
17957 cause an infinite recursion if its call graph has a cycle. This is very
17958 unlikely for size functions, however, so don't bother with such things at
17959 the moment. */
17960 ctx.context_type = NULL_TREE;
17961 ctx.base_decl = NULL_TREE;
17962 ctx.dpi = &dpi;
17963 ctx.placeholder_arg = false;
17964 ctx.placeholder_seen = false;
17965 dpi.fndecl = fndecl;
17966 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17967 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17968 if (!loc_body)
17969 return NULL;
17970
17971 /* After evaluating all operands in "loc_body", we should still have on the
17972 stack all arguments plus the desired function result (top of the stack).
17973 Generate code in order to keep only the result in our stack frame. */
17974 epilogue = NULL;
17975 for (i = 0; i < dpi.args_count; ++i)
17976 {
17977 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17978 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17979 op_couple->dw_loc_next->dw_loc_next = epilogue;
17980 epilogue = op_couple;
17981 }
17982 add_loc_descr (&loc_body, epilogue);
17983 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17984 return NULL;
17985
17986 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17987      because they are considered useful. Now that there is an epilogue, they
17988      are not useful anymore, so give it another try. */
17989 loc_descr_without_nops (loc_body);
17990
17991 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17992    a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17993 though, given that size functions do not come from source, so they should
17994 not have a dedicated DW_TAG_subprogram DIE. */
17995 dwarf_proc_die
17996 = new_dwarf_proc_die (loc_body, fndecl,
17997 get_context_die (DECL_CONTEXT (fndecl)));
17998
17999 /* The called DWARF procedure consumes one stack slot per argument and
18000 returns one stack slot. */
18001 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18002
18003 return dwarf_proc_die;
18004 }
18005
18006
18007 /* Generate Dwarf location list representing LOC.
18008    If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
18009    If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18010    If WANT_ADDRESS is 2, an expression computing an address usable in a location
18011      description is returned (i.e. DW_OP_reg can be used
18012      to refer to register values).
18013
18014 CONTEXT provides information to customize the location descriptions
18015 generation. Its context_type field specifies what type is implicitly
18016 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18017 will not be generated.
18018
18019 Its DPI field determines whether we are generating a DWARF expression for a
18020 DWARF procedure, so PARM_DECL references are processed specifically.
18021
18022 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18023 and dpi fields were null. */
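/* As an illustration (a hypothetical location), for a variable stored in
   memory at frame offset -16, WANT_ADDRESS == 1 would yield something like
   "DW_OP_fbreg -16" (the address of the variable), whereas WANT_ADDRESS == 0
   would append a DW_OP_deref to that (for a pointer-sized variable) in order
   to fetch the value itself. */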
18024
18025 static dw_loc_list_ref
18026 loc_list_from_tree_1 (tree loc, int want_address,
18027 struct loc_descr_context *context)
18028 {
18029 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18030 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18031 int have_address = 0;
18032 enum dwarf_location_atom op;
18033
18034   /* ??? Most of the time we do not take proper care of sign/zero
18035      extending the values. Hopefully this won't be a real
18036 problem... */
18037
18038 if (context != NULL
18039 && context->base_decl == loc
18040 && want_address == 0)
18041 {
18042 if (dwarf_version >= 3 || !dwarf_strict)
18043 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18044 NULL, 0, NULL, 0, NULL);
18045 else
18046 return NULL;
18047 }
18048
18049 switch (TREE_CODE (loc))
18050 {
18051 case ERROR_MARK:
18052 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18053 return 0;
18054
18055 case PLACEHOLDER_EXPR:
18056 /* This case involves extracting fields from an object to determine the
18057 position of other fields. It is supposed to appear only as the first
18058 operand of COMPONENT_REF nodes and to reference precisely the type
18059 that the context allows. */
18060 if (context != NULL
18061 && TREE_TYPE (loc) == context->context_type
18062 && want_address >= 1)
18063 {
18064 if (dwarf_version >= 3 || !dwarf_strict)
18065 {
18066 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18067 have_address = 1;
18068 break;
18069 }
18070 else
18071 return NULL;
18072 }
18073 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18074 the single argument passed by consumer. */
18075 else if (context != NULL
18076 && context->placeholder_arg
18077 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18078 && want_address == 0)
18079 {
18080 ret = new_loc_descr (DW_OP_pick, 0, 0);
18081 ret->frame_offset_rel = 1;
18082 context->placeholder_seen = true;
18083 break;
18084 }
18085 else
18086 expansion_failed (loc, NULL_RTX,
18087 "PLACEHOLDER_EXPR for an unexpected type");
18088 break;
18089
18090 case CALL_EXPR:
18091 {
18092 const int nargs = call_expr_nargs (loc);
18093 tree callee = get_callee_fndecl (loc);
18094 int i;
18095 dw_die_ref dwarf_proc;
18096
18097 if (callee == NULL_TREE)
18098 goto call_expansion_failed;
18099
18100 /* We handle only functions that return an integer. */
18101 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18102 goto call_expansion_failed;
18103
18104 dwarf_proc = function_to_dwarf_procedure (callee);
18105 if (dwarf_proc == NULL)
18106 goto call_expansion_failed;
18107
18108 /* Evaluate arguments right-to-left so that the first argument will
18109 be the top-most one on the stack. */
18110 for (i = nargs - 1; i >= 0; --i)
18111 {
18112 dw_loc_descr_ref loc_descr
18113 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18114 context);
18115
18116 if (loc_descr == NULL)
18117 goto call_expansion_failed;
18118
18119 add_loc_descr (&ret, loc_descr);
18120 }
18121
18122 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18123 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18124 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18125 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18126 add_loc_descr (&ret, ret1);
18127 break;
18128
18129 call_expansion_failed:
18130 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18131 /* There are no opcodes for these operations. */
18132 return 0;
18133 }
18134
18135 case PREINCREMENT_EXPR:
18136 case PREDECREMENT_EXPR:
18137 case POSTINCREMENT_EXPR:
18138 case POSTDECREMENT_EXPR:
18139 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18140 /* There are no opcodes for these operations. */
18141 return 0;
18142
18143 case ADDR_EXPR:
18144 /* If we already want an address, see if there is INDIRECT_REF inside
18145 e.g. for &this->field. */
18146 if (want_address)
18147 {
18148 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18149 (loc, want_address == 2, context);
18150 if (list_ret)
18151 have_address = 1;
18152 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18153 && (ret = cst_pool_loc_descr (loc)))
18154 have_address = 1;
18155 }
18156 /* Otherwise, process the argument and look for the address. */
18157 if (!list_ret && !ret)
18158 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18159 else
18160 {
18161 if (want_address)
18162 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18163 return NULL;
18164 }
18165 break;
18166
18167 case VAR_DECL:
18168 if (DECL_THREAD_LOCAL_P (loc))
18169 {
18170 rtx rtl;
18171 enum dwarf_location_atom tls_op;
18172 enum dtprel_bool dtprel = dtprel_false;
18173
18174 if (targetm.have_tls)
18175 {
18176 /* If this is not defined, we have no way to emit the
18177 data. */
18178 if (!targetm.asm_out.output_dwarf_dtprel)
18179 return 0;
18180
18181 /* The way DW_OP_GNU_push_tls_address is specified, we
18182 can only look up addresses of objects in the current
18183 module. We used DW_OP_addr as first op, but that's
18184 wrong, because DW_OP_addr is relocated by the debug
18185 info consumer, while DW_OP_GNU_push_tls_address
18186 operand shouldn't be. */
18187 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18188 return 0;
18189 dtprel = dtprel_true;
18190 /* We check for DWARF 5 here because gdb did not implement
18191 DW_OP_form_tls_address until after 7.12. */
18192 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18193 : DW_OP_GNU_push_tls_address);
18194 }
18195 else
18196 {
18197 if (!targetm.emutls.debug_form_tls_address
18198 || !(dwarf_version >= 3 || !dwarf_strict))
18199 return 0;
18200 /* We stuffed the control variable into the DECL_VALUE_EXPR
18201 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18202 no longer appear in gimple code. We used the control
18203 		 variable specifically so that we could pick it up here. */
18204 loc = DECL_VALUE_EXPR (loc);
18205 tls_op = DW_OP_form_tls_address;
18206 }
18207
18208 rtl = rtl_for_decl_location (loc);
18209 if (rtl == NULL_RTX)
18210 return 0;
18211
18212 if (!MEM_P (rtl))
18213 return 0;
18214 rtl = XEXP (rtl, 0);
18215 if (! CONSTANT_P (rtl))
18216 return 0;
18217
18218 ret = new_addr_loc_descr (rtl, dtprel);
18219 ret1 = new_loc_descr (tls_op, 0, 0);
18220 add_loc_descr (&ret, ret1);
18221
18222 have_address = 1;
18223 break;
18224 }
18225 /* FALLTHRU */
18226
18227 case PARM_DECL:
18228 if (context != NULL && context->dpi != NULL
18229 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18230 {
18231 /* We are generating code for a DWARF procedure and we want to access
18232 one of its arguments: find the appropriate argument offset and let
18233 the resolve_args_picking pass compute the offset that complies
18234 with the stack frame size. */
18235 unsigned i = 0;
18236 tree cursor;
18237
18238 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18239 cursor != NULL_TREE && cursor != loc;
18240 cursor = TREE_CHAIN (cursor), ++i)
18241 ;
18242 /* If we are translating a DWARF procedure, all referenced parameters
18243 must belong to the current function. */
18244 gcc_assert (cursor != NULL_TREE);
18245
18246 ret = new_loc_descr (DW_OP_pick, i, 0);
18247 ret->frame_offset_rel = 1;
18248 break;
18249 }
18250 /* FALLTHRU */
18251
18252 case RESULT_DECL:
18253 if (DECL_HAS_VALUE_EXPR_P (loc))
18254 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18255 want_address, context);
18256 /* FALLTHRU */
18257
18258 case FUNCTION_DECL:
18259 {
18260 rtx rtl;
18261 var_loc_list *loc_list = lookup_decl_loc (loc);
18262
18263 if (loc_list && loc_list->first)
18264 {
18265 list_ret = dw_loc_list (loc_list, loc, want_address);
18266 have_address = want_address != 0;
18267 break;
18268 }
18269 rtl = rtl_for_decl_location (loc);
18270 if (rtl == NULL_RTX)
18271 {
18272 if (TREE_CODE (loc) != FUNCTION_DECL
18273 && early_dwarf
18274 && current_function_decl
18275 && want_address != 1
18276 && ! DECL_IGNORED_P (loc)
18277 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18278 || POINTER_TYPE_P (TREE_TYPE (loc)))
18279 && DECL_CONTEXT (loc) == current_function_decl
18280 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18281 <= DWARF2_ADDR_SIZE))
18282 {
18283 dw_die_ref ref = lookup_decl_die (loc);
18284 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18285 if (ref)
18286 {
18287 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18288 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18289 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18290 }
18291 else
18292 {
18293 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18294 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18295 }
18296 break;
18297 }
18298 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18299 return 0;
18300 }
18301 else if (CONST_INT_P (rtl))
18302 {
18303 HOST_WIDE_INT val = INTVAL (rtl);
18304 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18305 val &= GET_MODE_MASK (DECL_MODE (loc));
18306 ret = int_loc_descriptor (val);
18307 }
18308 else if (GET_CODE (rtl) == CONST_STRING)
18309 {
18310 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18311 return 0;
18312 }
18313 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18314 ret = new_addr_loc_descr (rtl, dtprel_false);
18315 else
18316 {
18317 machine_mode mode, mem_mode;
18318
18319 /* Certain constructs can only be represented at top-level. */
18320 if (want_address == 2)
18321 {
18322 ret = loc_descriptor (rtl, VOIDmode,
18323 VAR_INIT_STATUS_INITIALIZED);
18324 have_address = 1;
18325 }
18326 else
18327 {
18328 mode = GET_MODE (rtl);
18329 mem_mode = VOIDmode;
18330 if (MEM_P (rtl))
18331 {
18332 mem_mode = mode;
18333 mode = get_address_mode (rtl);
18334 rtl = XEXP (rtl, 0);
18335 have_address = 1;
18336 }
18337 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18338 VAR_INIT_STATUS_INITIALIZED);
18339 }
18340 if (!ret)
18341 expansion_failed (loc, rtl,
18342 "failed to produce loc descriptor for rtl");
18343 }
18344 }
18345 break;
18346
18347 case MEM_REF:
18348 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18349 {
18350 have_address = 1;
18351 goto do_plus;
18352 }
18353 /* Fallthru. */
18354 case INDIRECT_REF:
18355 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18356 have_address = 1;
18357 break;
18358
18359 case TARGET_MEM_REF:
18360 case SSA_NAME:
18361 case DEBUG_EXPR_DECL:
18362 return NULL;
18363
18364 case COMPOUND_EXPR:
18365 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18366 context);
18367
18368 CASE_CONVERT:
18369 case VIEW_CONVERT_EXPR:
18370 case SAVE_EXPR:
18371 case MODIFY_EXPR:
18372 case NON_LVALUE_EXPR:
18373 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18374 context);
18375
18376 case COMPONENT_REF:
18377 case BIT_FIELD_REF:
18378 case ARRAY_REF:
18379 case ARRAY_RANGE_REF:
18380 case REALPART_EXPR:
18381 case IMAGPART_EXPR:
18382 {
18383 tree obj, offset;
18384 poly_int64 bitsize, bitpos, bytepos;
18385 machine_mode mode;
18386 int unsignedp, reversep, volatilep = 0;
18387
18388 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18389 &unsignedp, &reversep, &volatilep);
18390
18391 gcc_assert (obj != loc);
18392
18393 list_ret = loc_list_from_tree_1 (obj,
18394 want_address == 2
18395 && known_eq (bitpos, 0)
18396 && !offset ? 2 : 1,
18397 context);
18398 	/* TODO: We can extract the value of a small expression via shifting even
18399 	   for a nonzero bitpos. */
18400 if (list_ret == 0)
18401 return 0;
18402 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18403 || !multiple_p (bitsize, BITS_PER_UNIT))
18404 {
18405 expansion_failed (loc, NULL_RTX,
18406 "bitfield access");
18407 return 0;
18408 }
18409
18410 if (offset != NULL_TREE)
18411 {
18412 /* Variable offset. */
18413 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18414 if (list_ret1 == 0)
18415 return 0;
18416 add_loc_list (&list_ret, list_ret1);
18417 if (!list_ret)
18418 return 0;
18419 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18420 }
18421
18422 HOST_WIDE_INT value;
18423 if (bytepos.is_constant (&value) && value > 0)
18424 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18425 value, 0));
18426 else if (maybe_ne (bytepos, 0))
18427 loc_list_plus_const (list_ret, bytepos);
18428
18429 have_address = 1;
18430 break;
18431 }
18432
18433 case INTEGER_CST:
18434 if ((want_address || !tree_fits_shwi_p (loc))
18435 && (ret = cst_pool_loc_descr (loc)))
18436 have_address = 1;
18437 else if (want_address == 2
18438 && tree_fits_shwi_p (loc)
18439 && (ret = address_of_int_loc_descriptor
18440 (int_size_in_bytes (TREE_TYPE (loc)),
18441 tree_to_shwi (loc))))
18442 have_address = 1;
18443 else if (tree_fits_shwi_p (loc))
18444 ret = int_loc_descriptor (tree_to_shwi (loc));
18445 else if (tree_fits_uhwi_p (loc))
18446 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18447 else
18448 {
18449 expansion_failed (loc, NULL_RTX,
18450 "Integer operand is not host integer");
18451 return 0;
18452 }
18453 break;
18454
18455 case CONSTRUCTOR:
18456 case REAL_CST:
18457 case STRING_CST:
18458 case COMPLEX_CST:
18459 if ((ret = cst_pool_loc_descr (loc)))
18460 have_address = 1;
18461 else if (TREE_CODE (loc) == CONSTRUCTOR)
18462 {
18463 tree type = TREE_TYPE (loc);
18464 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18465 unsigned HOST_WIDE_INT offset = 0;
18466 unsigned HOST_WIDE_INT cnt;
18467 constructor_elt *ce;
18468
18469 if (TREE_CODE (type) == RECORD_TYPE)
18470 {
18471 /* This is very limited, but it's enough to output
18472 pointers to member functions, as long as the
18473 referenced function is defined in the current
18474 translation unit. */
18475 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18476 {
18477 tree val = ce->value;
18478
18479 tree field = ce->index;
18480
18481 if (val)
18482 STRIP_NOPS (val);
18483
18484 if (!field || DECL_BIT_FIELD (field))
18485 {
18486 expansion_failed (loc, NULL_RTX,
18487 "bitfield in record type constructor");
18488 size = offset = (unsigned HOST_WIDE_INT)-1;
18489 ret = NULL;
18490 break;
18491 }
18492
18493 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18494 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18495 gcc_assert (pos + fieldsize <= size);
18496 if (pos < offset)
18497 {
18498 expansion_failed (loc, NULL_RTX,
18499 "out-of-order fields in record constructor");
18500 size = offset = (unsigned HOST_WIDE_INT)-1;
18501 ret = NULL;
18502 break;
18503 }
18504 if (pos > offset)
18505 {
18506 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18507 add_loc_descr (&ret, ret1);
18508 offset = pos;
18509 }
18510 if (val && fieldsize != 0)
18511 {
18512 ret1 = loc_descriptor_from_tree (val, want_address, context);
18513 if (!ret1)
18514 {
18515 expansion_failed (loc, NULL_RTX,
18516 "unsupported expression in field");
18517 size = offset = (unsigned HOST_WIDE_INT)-1;
18518 ret = NULL;
18519 break;
18520 }
18521 add_loc_descr (&ret, ret1);
18522 }
18523 if (fieldsize)
18524 {
18525 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18526 add_loc_descr (&ret, ret1);
18527 offset = pos + fieldsize;
18528 }
18529 }
18530
18531 if (offset != size)
18532 {
18533 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18534 add_loc_descr (&ret, ret1);
18535 offset = size;
18536 }
18537
18538 have_address = !!want_address;
18539 }
18540 else
18541 expansion_failed (loc, NULL_RTX,
18542 "constructor of non-record type");
18543 }
18544 else
18545 /* We can construct small constants here using int_loc_descriptor. */
18546 expansion_failed (loc, NULL_RTX,
18547 "constructor or constant not in constant pool");
18548 break;
18549
18550 case TRUTH_AND_EXPR:
18551 case TRUTH_ANDIF_EXPR:
18552 case BIT_AND_EXPR:
18553 op = DW_OP_and;
18554 goto do_binop;
18555
18556 case TRUTH_XOR_EXPR:
18557 case BIT_XOR_EXPR:
18558 op = DW_OP_xor;
18559 goto do_binop;
18560
18561 case TRUTH_OR_EXPR:
18562 case TRUTH_ORIF_EXPR:
18563 case BIT_IOR_EXPR:
18564 op = DW_OP_or;
18565 goto do_binop;
18566
18567 case FLOOR_DIV_EXPR:
18568 case CEIL_DIV_EXPR:
18569 case ROUND_DIV_EXPR:
18570 case TRUNC_DIV_EXPR:
18571 case EXACT_DIV_EXPR:
18572 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18573 return 0;
18574 op = DW_OP_div;
18575 goto do_binop;
18576
18577 case MINUS_EXPR:
18578 op = DW_OP_minus;
18579 goto do_binop;
18580
18581 case FLOOR_MOD_EXPR:
18582 case CEIL_MOD_EXPR:
18583 case ROUND_MOD_EXPR:
18584 case TRUNC_MOD_EXPR:
18585 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18586 {
18587 op = DW_OP_mod;
18588 goto do_binop;
18589 }
18590 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18591 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18592 if (list_ret == 0 || list_ret1 == 0)
18593 return 0;
18594
18595 add_loc_list (&list_ret, list_ret1);
18596 if (list_ret == 0)
18597 return 0;
18598 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18599 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18600 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18601 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18602 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18603 break;
18604
18605 case MULT_EXPR:
18606 op = DW_OP_mul;
18607 goto do_binop;
18608
18609 case LSHIFT_EXPR:
18610 op = DW_OP_shl;
18611 goto do_binop;
18612
18613 case RSHIFT_EXPR:
18614 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18615 goto do_binop;
18616
18617 case POINTER_PLUS_EXPR:
18618 case PLUS_EXPR:
18619 do_plus:
18620 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18621 {
18622 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18623 smarter to encode their opposite. The DW_OP_plus_uconst operation
18624 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18625 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18626 bytes, Y being the size of the operation that pushes the opposite
18627 of the addend. So let's choose the smallest representation. */
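	  /* Worked example (assuming a 64-bit address size): for an addend of
	     -1, DW_OP_plus_uconst needs a 10-byte ULEB128 encoding, 11 bytes
	     in total, whereas pushing the opposite with DW_OP_lit1 followed
	     by DW_OP_minus takes only 2 bytes, so the latter is chosen. */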
18628 const tree tree_addend = TREE_OPERAND (loc, 1);
18629 offset_int wi_addend;
18630 HOST_WIDE_INT shwi_addend;
18631 dw_loc_descr_ref loc_naddend;
18632
18633 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18634 if (list_ret == 0)
18635 return 0;
18636
18637 /* Try to get the literal to push. It is the opposite of the addend,
18638 so as we rely on wrapping during DWARF evaluation, first decode
18639 the literal as a "DWARF-sized" signed number. */
18640 wi_addend = wi::to_offset (tree_addend);
18641 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18642 shwi_addend = wi_addend.to_shwi ();
18643 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18644 ? int_loc_descriptor (-shwi_addend)
18645 : NULL;
18646
18647 if (loc_naddend != NULL
18648 && ((unsigned) size_of_uleb128 (shwi_addend)
18649 > size_of_loc_descr (loc_naddend)))
18650 {
18651 add_loc_descr_to_each (list_ret, loc_naddend);
18652 add_loc_descr_to_each (list_ret,
18653 new_loc_descr (DW_OP_minus, 0, 0));
18654 }
18655 else
18656 {
18657 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18658 {
18659 loc_naddend = loc_cur;
18660 loc_cur = loc_cur->dw_loc_next;
18661 ggc_free (loc_naddend);
18662 }
18663 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18664 }
18665 break;
18666 }
18667
18668 op = DW_OP_plus;
18669 goto do_binop;
18670
18671 case LE_EXPR:
18672 op = DW_OP_le;
18673 goto do_comp_binop;
18674
18675 case GE_EXPR:
18676 op = DW_OP_ge;
18677 goto do_comp_binop;
18678
18679 case LT_EXPR:
18680 op = DW_OP_lt;
18681 goto do_comp_binop;
18682
18683 case GT_EXPR:
18684 op = DW_OP_gt;
18685 goto do_comp_binop;
18686
18687 do_comp_binop:
18688 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18689 {
18690 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18691 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18692 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18693 TREE_CODE (loc));
18694 break;
18695 }
18696 else
18697 goto do_binop;
18698
18699 case EQ_EXPR:
18700 op = DW_OP_eq;
18701 goto do_binop;
18702
18703 case NE_EXPR:
18704 op = DW_OP_ne;
18705 goto do_binop;
18706
18707 do_binop:
18708 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18709 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18710 if (list_ret == 0 || list_ret1 == 0)
18711 return 0;
18712
18713 add_loc_list (&list_ret, list_ret1);
18714 if (list_ret == 0)
18715 return 0;
18716 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18717 break;
18718
18719 case TRUTH_NOT_EXPR:
18720 case BIT_NOT_EXPR:
18721 op = DW_OP_not;
18722 goto do_unop;
18723
18724 case ABS_EXPR:
18725 op = DW_OP_abs;
18726 goto do_unop;
18727
18728 case NEGATE_EXPR:
18729 op = DW_OP_neg;
18730 goto do_unop;
18731
18732 do_unop:
18733 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18734 if (list_ret == 0)
18735 return 0;
18736
18737 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18738 break;
18739
18740 case MIN_EXPR:
18741 case MAX_EXPR:
18742 {
18743 const enum tree_code code =
18744 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18745
18746 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18747 build2 (code, integer_type_node,
18748 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18749 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18750 }
18751
18752 /* fall through */
18753
18754 case COND_EXPR:
18755 {
18756 dw_loc_descr_ref lhs
18757 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18758 dw_loc_list_ref rhs
18759 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18760 dw_loc_descr_ref bra_node, jump_node, tmp;
18761
18762 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18763 if (list_ret == 0 || lhs == 0 || rhs == 0)
18764 return 0;
18765
18766 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18767 add_loc_descr_to_each (list_ret, bra_node);
18768
18769 add_loc_list (&list_ret, rhs);
18770 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18771 add_loc_descr_to_each (list_ret, jump_node);
18772
18773 add_loc_descr_to_each (list_ret, lhs);
18774 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18775 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18776
18777 /* ??? Need a node to point the skip at. Use a nop. */
18778 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18779 add_loc_descr_to_each (list_ret, tmp);
18780 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18781 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18782 }
18783 break;
18784
18785 case FIX_TRUNC_EXPR:
18786 return 0;
18787
18788 default:
18789 /* Leave front-end specific codes as simply unknown. This comes
18790 up, for instance, with the C STMT_EXPR. */
18791 if ((unsigned int) TREE_CODE (loc)
18792 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18793 {
18794 expansion_failed (loc, NULL_RTX,
18795 "language specific tree node");
18796 return 0;
18797 }
18798
18799       /* Otherwise this is a generic code; we should just list all of
18800 these explicitly. We forgot one. */
18801 if (flag_checking)
18802 gcc_unreachable ();
18803
18804 /* In a release build, we want to degrade gracefully: better to
18805 generate incomplete debugging information than to crash. */
18806 return NULL;
18807 }
18808
18809 if (!ret && !list_ret)
18810 return 0;
18811
18812 if (want_address == 2 && !have_address
18813 && (dwarf_version >= 4 || !dwarf_strict))
18814 {
18815 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18816 {
18817 expansion_failed (loc, NULL_RTX,
18818 "DWARF address size mismatch");
18819 return 0;
18820 }
18821 if (ret)
18822 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18823 else
18824 add_loc_descr_to_each (list_ret,
18825 new_loc_descr (DW_OP_stack_value, 0, 0));
18826 have_address = 1;
18827 }
18828 /* Show if we can't fill the request for an address. */
18829 if (want_address && !have_address)
18830 {
18831 expansion_failed (loc, NULL_RTX,
18832 "Want address and only have value");
18833 return 0;
18834 }
18835
18836 gcc_assert (!ret || !list_ret);
18837
18838 /* If we've got an address and don't want one, dereference. */
18839 if (!want_address && have_address)
18840 {
18841 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18842
18843 if (size > DWARF2_ADDR_SIZE || size == -1)
18844 {
18845 expansion_failed (loc, NULL_RTX,
18846 "DWARF address size mismatch");
18847 return 0;
18848 }
18849 else if (size == DWARF2_ADDR_SIZE)
18850 op = DW_OP_deref;
18851 else
18852 op = DW_OP_deref_size;
18853
18854 if (ret)
18855 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18856 else
18857 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18858 }
18859 if (ret)
18860 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18861
18862 return list_ret;
18863 }
18864
18865 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18866 expressions. */
18867
18868 static dw_loc_list_ref
18869 loc_list_from_tree (tree loc, int want_address,
18870 struct loc_descr_context *context)
18871 {
18872 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18873
18874 for (dw_loc_list_ref loc_cur = result;
18875 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18876 loc_descr_without_nops (loc_cur->expr);
18877 return result;
18878 }
18879
18880 /* Same as above but return only a single location expression. */
18881 static dw_loc_descr_ref
18882 loc_descriptor_from_tree (tree loc, int want_address,
18883 struct loc_descr_context *context)
18884 {
18885 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18886 if (!ret)
18887 return NULL;
18888 if (ret->dw_loc_next)
18889 {
18890 expansion_failed (loc, NULL_RTX,
18891 "Location list where only loc descriptor needed");
18892 return NULL;
18893 }
18894 return ret->expr;
18895 }
18896
18897 /* Given a value, round it up to the lowest multiple of `boundary'
18898 which is not less than the value itself. */
18899
18900 static inline HOST_WIDE_INT
18901 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18902 {
18903 return (((value + boundary - 1) / boundary) * boundary);
18904 }
18905
18906 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18907 pointer to the declared type for the relevant field variable, or return
18908 `integer_type_node' if the given node turns out to be an
18909 ERROR_MARK node. */
18910
18911 static inline tree
18912 field_type (const_tree decl)
18913 {
18914 tree type;
18915
18916 if (TREE_CODE (decl) == ERROR_MARK)
18917 return integer_type_node;
18918
18919 type = DECL_BIT_FIELD_TYPE (decl);
18920 if (type == NULL_TREE)
18921 type = TREE_TYPE (decl);
18922
18923 return type;
18924 }
18925
18926 /* Given a pointer to a tree node, return the alignment in bits for
18927 it, or else return BITS_PER_WORD if the node actually turns out to
18928 be an ERROR_MARK node. */
18929
18930 static inline unsigned
18931 simple_type_align_in_bits (const_tree type)
18932 {
18933 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18934 }
18935
18936 static inline unsigned
18937 simple_decl_align_in_bits (const_tree decl)
18938 {
18939 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18940 }
18941
18942 /* Return the result of rounding T up to ALIGN. */
18943
18944 static inline offset_int
18945 round_up_to_align (const offset_int &t, unsigned int align)
18946 {
18947 return wi::udiv_trunc (t + align - 1, align) * align;
18948 }
18949
18950 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18951 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18952 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18953 if we fail to return the size in one of these two forms. */
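/* A typical use is sketched below; the attribute chosen here is only an
   illustration, see the actual callers for real usage:

     HOST_WIDE_INT cst_size;
     dw_loc_descr_ref size_expr = type_byte_size (type, &cst_size);
     if (size_expr != NULL)
       add_AT_loc (die, DW_AT_byte_size, size_expr);
     else if (cst_size != -1)
       add_AT_unsigned (die, DW_AT_byte_size, cst_size);  */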
18954
18955 static dw_loc_descr_ref
18956 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18957 {
18958 tree tree_size;
18959 struct loc_descr_context ctx;
18960
18961   /* Prefer to return a constant integer when possible. */
18962 *cst_size = int_size_in_bytes (type);
18963 if (*cst_size != -1)
18964 return NULL;
18965
18966 ctx.context_type = const_cast<tree> (type);
18967 ctx.base_decl = NULL_TREE;
18968 ctx.dpi = NULL;
18969 ctx.placeholder_arg = false;
18970 ctx.placeholder_seen = false;
18971
18972 type = TYPE_MAIN_VARIANT (type);
18973 tree_size = TYPE_SIZE_UNIT (type);
18974 return ((tree_size != NULL_TREE)
18975 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18976 : NULL);
18977 }
18978
18979 /* Helper structure for RECORD_TYPE processing. */
18980 struct vlr_context
18981 {
18982 /* Root RECORD_TYPE. It is needed to generate data member location
18983 descriptions in variable-length records (VLR), but also to cope with
18984 variants, which are composed of nested structures multiplexed with
18985 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18986      function processing a FIELD_DECL, it is required to be non-null. */
18987 tree struct_type;
18988 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18989 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18990 this variant part as part of the root record (in storage units). For
18991 regular records, it must be NULL_TREE. */
18992 tree variant_part_offset;
18993 };
18994
18995 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18996 addressed byte of the "containing object" for the given FIELD_DECL. If
18997 possible, return a native constant through CST_OFFSET (in which case NULL is
18998 returned); otherwise return a DWARF expression that computes the offset.
18999
19000 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19001 that offset is, either because the argument turns out to be a pointer to an
19002 ERROR_MARK node, or because the offset expression is too complex for us.
19003
19004 CTX is required: see the comment for VLR_CONTEXT. */
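/* For instance (illustrative), for a field "int x;" placed at a constant byte
   offset of 4 in a plain structure, *CST_OFFSET is set to 4 and NULL is
   returned; a DWARF expression is returned only when the byte offset is not a
   compile-time constant (e.g. a field placed after a variable-length
   member). */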
19005
19006 static dw_loc_descr_ref
19007 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19008 HOST_WIDE_INT *cst_offset)
19009 {
19010 tree tree_result;
19011 dw_loc_list_ref loc_result;
19012
19013 *cst_offset = 0;
19014
19015 if (TREE_CODE (decl) == ERROR_MARK)
19016 return NULL;
19017 else
19018 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19019
19020 /* We cannot handle variable bit offsets at the moment, so abort if it's the
19021 case. */
19022 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19023 return NULL;
19024
19025 #ifdef PCC_BITFIELD_TYPE_MATTERS
19026 /* We used to handle only constant offsets in all cases. Now, we properly
19027    handle dynamic byte offsets only when PCC bitfield type doesn't
19028 matter. */
19029 if (PCC_BITFIELD_TYPE_MATTERS
19030 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19031 {
19032 offset_int object_offset_in_bits;
19033 offset_int object_offset_in_bytes;
19034 offset_int bitpos_int;
19035 tree type;
19036 tree field_size_tree;
19037 offset_int deepest_bitpos;
19038 offset_int field_size_in_bits;
19039 unsigned int type_align_in_bits;
19040 unsigned int decl_align_in_bits;
19041 offset_int type_size_in_bits;
19042
19043 bitpos_int = wi::to_offset (bit_position (decl));
19044 type = field_type (decl);
19045 type_size_in_bits = offset_int_type_size_in_bits (type);
19046 type_align_in_bits = simple_type_align_in_bits (type);
19047
19048 field_size_tree = DECL_SIZE (decl);
19049
19050 /* The size could be unspecified if there was an error, or for
19051 a flexible array member. */
19052 if (!field_size_tree)
19053 field_size_tree = bitsize_zero_node;
19054
19055 /* If the size of the field is not constant, use the type size. */
19056 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19057 field_size_in_bits = wi::to_offset (field_size_tree);
19058 else
19059 field_size_in_bits = type_size_in_bits;
19060
19061 decl_align_in_bits = simple_decl_align_in_bits (decl);
19062
19063 /* The GCC front-end doesn't make any attempt to keep track of the
19064 starting bit offset (relative to the start of the containing
19065 structure type) of the hypothetical "containing object" for a
19066 bit-field. Thus, when computing the byte offset value for the
19067 start of the "containing object" of a bit-field, we must deduce
19068 this information on our own. This can be rather tricky to do in
19069 some cases. For example, handling the following structure type
19070 definition when compiling for an i386/i486 target (which only
19071 aligns long long's to 32-bit boundaries) can be very tricky:
19072
19073 struct S { int field1; long long field2:31; };
19074
19075 Fortunately, there is a simple rule-of-thumb which can be used
19076 in such cases. When compiling for an i386/i486, GCC will
19077 allocate 8 bytes for the structure shown above. It decides to
19078 do this based upon one simple rule for bit-field allocation.
19079 GCC allocates each "containing object" for each bit-field at
19080 the first (i.e. lowest addressed) legitimate alignment boundary
19081 (based upon the required minimum alignment for the declared
19082 type of the field) which it can possibly use, subject to the
19083 condition that there is still enough available space remaining
19084 in the containing object (when allocated at the selected point)
19085 to fully accommodate all of the bits of the bit-field itself.
19086
19087 This simple rule makes it obvious why GCC allocates 8 bytes for
19088 each object of the structure type shown above. When looking
19089 for a place to allocate the "containing object" for `field2',
19090 the compiler simply tries to allocate a 64-bit "containing
19091 object" at each successive 32-bit boundary (starting at zero)
19092       until it finds a place to allocate that 64-bit field such that
19093 at least 31 contiguous (and previously unallocated) bits remain
19094 within that selected 64 bit field. (As it turns out, for the
19095 example above, the compiler finds it is OK to allocate the
19096 "containing object" 64-bit field at bit-offset zero within the
19097 structure type.)
19098
19099 Here we attempt to work backwards from the limited set of facts
19100 we're given, and we try to deduce from those facts, where GCC
19101 must have believed that the containing object started (within
19102 the structure type). The value we deduce is then used (by the
19103 callers of this routine) to generate DW_AT_location and
19104 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19105 the case of DW_AT_location, regular fields as well). */
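      /* Worked example for the struct S above (i386 numbers, given only as an
	 illustration): for field2, bitpos_int is 32, field_size_in_bits is 31
	 and type_size_in_bits is 64, so deepest_bitpos is 63 and the initial
	 object_offset_in_bits is 63 - 64 = -1; rounding that up to the 32-bit
	 type alignment yields 0, i.e. the containing 64-bit object starts at
	 byte 0 of the structure, as described above. */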
19106
19107 /* Figure out the bit-distance from the start of the structure to
19108 the "deepest" bit of the bit-field. */
19109 deepest_bitpos = bitpos_int + field_size_in_bits;
19110
19111 /* This is the tricky part. Use some fancy footwork to deduce
19112 where the lowest addressed bit of the containing object must
19113 be. */
19114 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19115
19116 /* Round up to type_align by default. This works best for
19117 bitfields. */
19118 object_offset_in_bits
19119 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19120
19121 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19122 {
19123 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19124
19125 /* Round up to decl_align instead. */
19126 object_offset_in_bits
19127 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19128 }
19129
19130 object_offset_in_bytes
19131 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19132 if (ctx->variant_part_offset == NULL_TREE)
19133 {
19134 *cst_offset = object_offset_in_bytes.to_shwi ();
19135 return NULL;
19136 }
19137 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19138 }
19139 else
19140 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19141 tree_result = byte_position (decl);
19142
19143 if (ctx->variant_part_offset != NULL_TREE)
19144 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19145 ctx->variant_part_offset, tree_result);
19146
19147   /* If the byte offset is a constant, it's simpler to handle a native
19148 constant rather than a DWARF expression. */
19149 if (TREE_CODE (tree_result) == INTEGER_CST)
19150 {
19151 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19152 return NULL;
19153 }
19154 struct loc_descr_context loc_ctx = {
19155 ctx->struct_type, /* context_type */
19156 NULL_TREE, /* base_decl */
19157 NULL, /* dpi */
19158 false, /* placeholder_arg */
19159 false /* placeholder_seen */
19160 };
19161 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19162
19163 /* We want a DWARF expression: abort if we only have a location list with
19164 multiple elements. */
19165 if (!loc_result || !single_element_loc_list_p (loc_result))
19166 return NULL;
19167 else
19168 return loc_result->expr;
19169 }
19170 \f
19171 /* The following routines define various Dwarf attributes and any data
19172 associated with them. */
19173
19174 /* Add a location description attribute value to a DIE.
19175
19176 This emits location attributes suitable for whole variables and
19177 whole parameters. Note that the location attributes for struct fields are
19178 generated by the routine `data_member_location_attribute' below. */
19179
19180 static inline void
19181 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19182 dw_loc_list_ref descr)
19183 {
19184 bool check_no_locviews = true;
19185 if (descr == 0)
19186 return;
19187 if (single_element_loc_list_p (descr))
19188 add_AT_loc (die, attr_kind, descr->expr);
19189 else
19190 {
19191 add_AT_loc_list (die, attr_kind, descr);
19192 gcc_assert (descr->ll_symbol);
19193 if (attr_kind == DW_AT_location && descr->vl_symbol
19194 && dwarf2out_locviews_in_attribute ())
19195 {
19196 add_AT_view_list (die, DW_AT_GNU_locviews);
19197 check_no_locviews = false;
19198 }
19199 }
19200
19201 if (check_no_locviews)
19202 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19203 }
19204
19205 /* Add DW_AT_accessibility attribute to DIE if needed. */
19206
19207 static void
19208 add_accessibility_attribute (dw_die_ref die, tree decl)
19209 {
19210 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19211 children, otherwise the default is DW_ACCESS_public. In DWARF2
19212 the default has always been DW_ACCESS_public. */
19213 if (TREE_PROTECTED (decl))
19214 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19215 else if (TREE_PRIVATE (decl))
19216 {
19217 if (dwarf_version == 2
19218 || die->die_parent == NULL
19219 || die->die_parent->die_tag != DW_TAG_class_type)
19220 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19221 }
19222 else if (dwarf_version > 2
19223 && die->die_parent
19224 && die->die_parent->die_tag == DW_TAG_class_type)
19225 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19226 }
19227
19228 /* Attach the specialized form of location attribute used for data members of
19229 struct and union types. In the special case of a FIELD_DECL node which
19230 represents a bit-field, the "offset" part of this special location
19231 descriptor must indicate the distance in bytes from the lowest-addressed
19232 byte of the containing struct or union type to the lowest-addressed byte of
19233 the "containing object" for the bit-field. (See the `field_byte_offset'
19234 function above).
19235
19236 For any given bit-field, the "containing object" is a hypothetical object
19237 (of some integral or enum type) within which the given bit-field lives. The
19238 type of this hypothetical "containing object" is always the same as the
19239 declared type of the individual bit-field itself (for GCC anyway... the
19240 DWARF spec doesn't actually mandate this). Note that it is the size (in
19241 bytes) of the hypothetical "containing object" which will be given in the
19242 DW_AT_byte_size attribute for this bit-field. (See the
19243 `byte_size_attribute' function below.) It is also used when calculating the
19244 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19245 function below.)
19246
19247 CTX is required: see the comment for VLR_CONTEXT. */
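/* A hypothetical illustration (the exact layout is target dependent and
   assumes PCC_BITFIELD_TYPE_MATTERS): given

     struct s { int a : 3; int b : 30; };

   B cannot share the `int' that holds A, so its "containing object" is a
   second `int' starting at byte 4; the member DIE for B would then carry
   DW_AT_byte_size 4 and a byte offset of 4 in its location
   description.  */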
19248
19249 static void
19250 add_data_member_location_attribute (dw_die_ref die,
19251 tree decl,
19252 struct vlr_context *ctx)
19253 {
19254 HOST_WIDE_INT offset;
19255 dw_loc_descr_ref loc_descr = 0;
19256
19257 if (TREE_CODE (decl) == TREE_BINFO)
19258 {
19259 /* We're working on the TAG_inheritance for a base class. */
19260 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19261 {
19262 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19263 aren't at a fixed offset from all (sub)objects of the same
19264 type. We need to extract the appropriate offset from our
19265 vtable. The following dwarf expression means
19266
19267 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19268
19269 This is specific to the V3 ABI, of course. */
19270
19271 dw_loc_descr_ref tmp;
19272
19273 /* Make a copy of the object address. */
19274 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19275 add_loc_descr (&loc_descr, tmp);
19276
19277 /* Extract the vtable address. */
19278 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19279 add_loc_descr (&loc_descr, tmp);
19280
19281 /* Calculate the address of the offset. */
19282 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19283 gcc_assert (offset < 0);
19284
19285 tmp = int_loc_descriptor (-offset);
19286 add_loc_descr (&loc_descr, tmp);
19287 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19288 add_loc_descr (&loc_descr, tmp);
19289
19290 /* Extract the offset. */
19291 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19292 add_loc_descr (&loc_descr, tmp);
19293
19294 /* Add it to the object address. */
19295 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19296 add_loc_descr (&loc_descr, tmp);
19297 }
19298 else
19299 offset = tree_to_shwi (BINFO_OFFSET (decl));
19300 }
19301 else
19302 {
19303 loc_descr = field_byte_offset (decl, ctx, &offset);
19304
19305 /* If loc_descr is available then we know the field offset is dynamic.
19306 However, GDB does not handle dynamic field offsets very well at the
19307 moment. */
19308 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19309 {
19310 loc_descr = NULL;
19311 offset = 0;
19312 }
19313
19314 /* Data member location evaluation starts with the base address on the
19315 stack. Compute the field offset and add it to this base address. */
19316 else if (loc_descr != NULL)
19317 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19318 }
19319
19320 if (! loc_descr)
19321 {
19322 /* While DW_AT_data_bit_offset was already added in DWARF4, GDB, for
19323 example, only added support for it in November 2016. For DWARF5 we
19324 need newer debug info consumers anyway. We might change this to
19325 dwarf_version >= 4 once most consumers have caught up. */
19326 if (dwarf_version >= 5
19327 && TREE_CODE (decl) == FIELD_DECL
19328 && DECL_BIT_FIELD_TYPE (decl))
19329 {
19330 tree off = bit_position (decl);
19331 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19332 {
19333 remove_AT (die, DW_AT_byte_size);
19334 remove_AT (die, DW_AT_bit_offset);
19335 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19336 return;
19337 }
19338 }
19339 if (dwarf_version > 2)
19340 {
19341 /* Don't need to output a location expression, just the constant. */
19342 if (offset < 0)
19343 add_AT_int (die, DW_AT_data_member_location, offset);
19344 else
19345 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19346 return;
19347 }
19348 else
19349 {
19350 enum dwarf_location_atom op;
19351
19352 /* The DWARF2 standard says that we should assume that the structure
19353 address is already on the stack, so we can specify a structure
19354 field address by using DW_OP_plus_uconst. */
19355 op = DW_OP_plus_uconst;
19356 loc_descr = new_loc_descr (op, offset, 0);
19357 }
19358 }
19359
19360 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19361 }
19362
19363 /* Writes integer values to dw_vec_const array. */
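/* For example, since the loop below stores the least significant byte
   first, insert_int (0x01020304, 4, dest) fills DEST with the bytes
   { 0x04, 0x03, 0x02, 0x01 }.  */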
19364
19365 static void
19366 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19367 {
19368 while (size != 0)
19369 {
19370 *dest++ = val & 0xff;
19371 val >>= 8;
19372 --size;
19373 }
19374 }
19375
19376 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19377
19378 static HOST_WIDE_INT
19379 extract_int (const unsigned char *src, unsigned int size)
19380 {
19381 HOST_WIDE_INT val = 0;
19382
19383 src += size;
19384 while (size != 0)
19385 {
19386 val <<= 8;
19387 val |= *--src & 0xff;
19388 --size;
19389 }
19390 return val;
19391 }
19392
19393 /* Writes wide_int values to dw_vec_const array. */
19394
19395 static void
19396 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19397 {
19398 int i;
19399
19400 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19401 {
19402 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19403 return;
19404 }
19405
19406 /* We'd have to extend this code to support odd sizes. */
19407 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19408
19409 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19410
19411 if (WORDS_BIG_ENDIAN)
19412 for (i = n - 1; i >= 0; i--)
19413 {
19414 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19415 dest += sizeof (HOST_WIDE_INT);
19416 }
19417 else
19418 for (i = 0; i < n; i++)
19419 {
19420 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19421 dest += sizeof (HOST_WIDE_INT);
19422 }
19423 }
19424
19425 /* Writes floating point values to dw_vec_const array. */
19426
19427 static void
19428 insert_float (const_rtx rtl, unsigned char *array)
19429 {
19430 long val[4];
19431 int i;
19432 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19433
19434 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19435
19436 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19437 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19438 {
19439 insert_int (val[i], 4, array);
19440 array += 4;
19441 }
19442 }
19443
19444 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19445 does not have a "location" either in memory or in a register. These
19446 things can arise in GNU C when a constant is passed as an actual parameter
19447 to an inlined function. They can also arise in C++ where declared
19448 constants do not necessarily get memory "homes". */
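/* A hypothetical example (whether it actually occurs depends on the
   optimization level):

     static inline int f (int n) { return n * 2; }
     int g (void) { return f (21); }

   After inlining, the formal parameter N of the inlined instance of F may
   have no run-time location at all; its DIE can then carry
   DW_AT_const_value 21 instead of a DW_AT_location.  */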
19449
19450 static bool
19451 add_const_value_attribute (dw_die_ref die, rtx rtl)
19452 {
19453 switch (GET_CODE (rtl))
19454 {
19455 case CONST_INT:
19456 {
19457 HOST_WIDE_INT val = INTVAL (rtl);
19458
19459 if (val < 0)
19460 add_AT_int (die, DW_AT_const_value, val);
19461 else
19462 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19463 }
19464 return true;
19465
19466 case CONST_WIDE_INT:
19467 {
19468 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19469 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19470 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19471 wide_int w = wi::zext (w1, prec);
19472 add_AT_wide (die, DW_AT_const_value, w);
19473 }
19474 return true;
19475
19476 case CONST_DOUBLE:
19477 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19478 floating-point constant. A CONST_DOUBLE is used whenever the
19479 constant requires more than one word in order to be adequately
19480 represented. */
19481 if (TARGET_SUPPORTS_WIDE_INT == 0
19482 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19483 add_AT_double (die, DW_AT_const_value,
19484 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19485 else
19486 {
19487 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19488 unsigned int length = GET_MODE_SIZE (mode);
19489 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19490
19491 insert_float (rtl, array);
19492 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19493 }
19494 return true;
19495
19496 case CONST_VECTOR:
19497 {
19498 unsigned int length;
19499 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19500 return false;
19501
19502 machine_mode mode = GET_MODE (rtl);
19503 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19504 unsigned char *array
19505 = ggc_vec_alloc<unsigned char> (length * elt_size);
19506 unsigned int i;
19507 unsigned char *p;
19508 machine_mode imode = GET_MODE_INNER (mode);
19509
19510 switch (GET_MODE_CLASS (mode))
19511 {
19512 case MODE_VECTOR_INT:
19513 for (i = 0, p = array; i < length; i++, p += elt_size)
19514 {
19515 rtx elt = CONST_VECTOR_ELT (rtl, i);
19516 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19517 }
19518 break;
19519
19520 case MODE_VECTOR_FLOAT:
19521 for (i = 0, p = array; i < length; i++, p += elt_size)
19522 {
19523 rtx elt = CONST_VECTOR_ELT (rtl, i);
19524 insert_float (elt, p);
19525 }
19526 break;
19527
19528 default:
19529 gcc_unreachable ();
19530 }
19531
19532 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19533 }
19534 return true;
19535
19536 case CONST_STRING:
19537 if (dwarf_version >= 4 || !dwarf_strict)
19538 {
19539 dw_loc_descr_ref loc_result;
19540 resolve_one_addr (&rtl);
19541 rtl_addr:
19542 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19543 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19544 add_AT_loc (die, DW_AT_location, loc_result);
19545 vec_safe_push (used_rtx_array, rtl);
19546 return true;
19547 }
19548 return false;
19549
19550 case CONST:
19551 if (CONSTANT_P (XEXP (rtl, 0)))
19552 return add_const_value_attribute (die, XEXP (rtl, 0));
19553 /* FALLTHROUGH */
19554 case SYMBOL_REF:
19555 if (!const_ok_for_output (rtl))
19556 return false;
19557 /* FALLTHROUGH */
19558 case LABEL_REF:
19559 if (dwarf_version >= 4 || !dwarf_strict)
19560 goto rtl_addr;
19561 return false;
19562
19563 case PLUS:
19564 /* In cases where an inlined instance of an inline function is passed
19565 the address of an `auto' variable (which is local to the caller) we
19566 can get a situation where the DECL_RTL of the artificial local
19567 variable (for the inlining) which acts as a stand-in for the
19568 corresponding formal parameter (of the inline function) will look
19569 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19570 exactly a compile-time constant expression, but it isn't the address
19571 of the (artificial) local variable either. Rather, it represents the
19572 *value* which the artificial local variable always has during its
19573 lifetime. We currently have no way to represent such quasi-constant
19574 values in Dwarf, so for now we just punt and generate nothing. */
19575 return false;
19576
19577 case HIGH:
19578 case CONST_FIXED:
19579 return false;
19580
19581 case MEM:
19582 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19583 && MEM_READONLY_P (rtl)
19584 && GET_MODE (rtl) == BLKmode)
19585 {
19586 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19587 return true;
19588 }
19589 return false;
19590
19591 default:
19592 /* No other kinds of rtx should be possible here. */
19593 gcc_unreachable ();
19594 }
19595 return false;
19596 }
19597
19598 /* Determine whether the evaluation of EXPR references any variables
19599 or functions which aren't otherwise used (and therefore may not be
19600 output). */
19601 static tree
19602 reference_to_unused (tree * tp, int * walk_subtrees,
19603 void * data ATTRIBUTE_UNUSED)
19604 {
19605 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19606 *walk_subtrees = 0;
19607
19608 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19609 && ! TREE_ASM_WRITTEN (*tp))
19610 return *tp;
19611 /* ??? The C++ FE emits debug information for using decls, so
19612 putting gcc_unreachable here falls over. See PR31899. For now
19613 be conservative. */
19614 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19615 return *tp;
19616 else if (VAR_P (*tp))
19617 {
19618 varpool_node *node = varpool_node::get (*tp);
19619 if (!node || !node->definition)
19620 return *tp;
19621 }
19622 else if (TREE_CODE (*tp) == FUNCTION_DECL
19623 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19624 {
19625 /* The call graph machinery must have finished analyzing,
19626 optimizing and gimplifying the CU by now.
19627 So if *TP has no call graph node associated
19628 to it, it means *TP will not be emitted. */
19629 if (!cgraph_node::get (*tp))
19630 return *tp;
19631 }
19632 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19633 return *tp;
19634
19635 return NULL_TREE;
19636 }
19637
19638 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19639 for use in a later add_const_value_attribute call. */
19640
19641 static rtx
19642 rtl_for_decl_init (tree init, tree type)
19643 {
19644 rtx rtl = NULL_RTX;
19645
19646 STRIP_NOPS (init);
19647
19648 /* If a variable is initialized with a string constant without embedded
19649 zeros, build CONST_STRING. */
19650 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19651 {
19652 tree enttype = TREE_TYPE (type);
19653 tree domain = TYPE_DOMAIN (type);
19654 scalar_int_mode mode;
19655
19656 if (is_int_mode (TYPE_MODE (enttype), &mode)
19657 && GET_MODE_SIZE (mode) == 1
19658 && domain
19659 && TYPE_MAX_VALUE (domain)
19660 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19661 && integer_zerop (TYPE_MIN_VALUE (domain))
19662 && compare_tree_int (TYPE_MAX_VALUE (domain),
19663 TREE_STRING_LENGTH (init) - 1) == 0
19664 && ((size_t) TREE_STRING_LENGTH (init)
19665 == strlen (TREE_STRING_POINTER (init)) + 1))
19666 {
19667 rtl = gen_rtx_CONST_STRING (VOIDmode,
19668 ggc_strdup (TREE_STRING_POINTER (init)));
19669 rtl = gen_rtx_MEM (BLKmode, rtl);
19670 MEM_READONLY_P (rtl) = 1;
19671 }
19672 }
19673 /* Other aggregates, and complex values, could be represented using
19674 CONCAT: FIXME! */
19675 else if (AGGREGATE_TYPE_P (type)
19676 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19677 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19678 || TREE_CODE (type) == COMPLEX_TYPE)
19679 ;
19680 /* Vectors only work if their mode is supported by the target.
19681 FIXME: generic vectors ought to work too. */
19682 else if (TREE_CODE (type) == VECTOR_TYPE
19683 && !VECTOR_MODE_P (TYPE_MODE (type)))
19684 ;
19685 /* If the initializer is something that we know will expand into an
19686 immediate RTL constant, expand it now. We must be careful not to
19687 reference variables which won't be output. */
19688 else if (initializer_constant_valid_p (init, type)
19689 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19690 {
19691 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19692 possible. */
19693 if (TREE_CODE (type) == VECTOR_TYPE)
19694 switch (TREE_CODE (init))
19695 {
19696 case VECTOR_CST:
19697 break;
19698 case CONSTRUCTOR:
19699 if (TREE_CONSTANT (init))
19700 {
19701 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19702 bool constant_p = true;
19703 tree value;
19704 unsigned HOST_WIDE_INT ix;
19705
19706 /* Even when ctor is constant, it might contain non-*_CST
19707 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19708 belong in VECTOR_CST nodes. */
19709 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19710 if (!CONSTANT_CLASS_P (value))
19711 {
19712 constant_p = false;
19713 break;
19714 }
19715
19716 if (constant_p)
19717 {
19718 init = build_vector_from_ctor (type, elts);
19719 break;
19720 }
19721 }
19722 /* FALLTHRU */
19723
19724 default:
19725 return NULL;
19726 }
19727
19728 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19729
19730 /* If expand_expr returns a MEM, it wasn't immediate. */
19731 gcc_assert (!rtl || !MEM_P (rtl));
19732 }
19733
19734 return rtl;
19735 }
19736
19737 /* Generate RTL for the variable DECL to represent its location. */
19738
19739 static rtx
19740 rtl_for_decl_location (tree decl)
19741 {
19742 rtx rtl;
19743
19744 /* Here we have to decide where we are going to say the parameter "lives"
19745 (as far as the debugger is concerned). We only have a couple of
19746 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19747
19748 DECL_RTL normally indicates where the parameter lives during most of the
19749 activation of the function. If optimization is enabled however, this
19750 could be either NULL or else a pseudo-reg. Both of those cases indicate
19751 that the parameter doesn't really live anywhere (as far as the code
19752 generation parts of GCC are concerned) during most of the function's
19753 activation. That will happen (for example) if the parameter is never
19754 referenced within the function.
19755
19756 We could just generate a location descriptor here for all non-NULL
19757 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19758 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19759 where DECL_RTL is NULL or is a pseudo-reg.
19760
19761 Note however that we can only get away with using DECL_INCOMING_RTL as
19762 a backup substitute for DECL_RTL in certain limited cases. In cases
19763 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19764 we can be sure that the parameter was passed using the same type as it is
19765 declared to have within the function, and that its DECL_INCOMING_RTL
19766 points us to a place where a value of that type is passed.
19767
19768 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19769 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19770 because in these cases DECL_INCOMING_RTL points us to a value of some
19771 type which is *different* from the type of the parameter itself. Thus,
19772 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19773 such cases, the debugger would end up (for example) trying to fetch a
19774 `float' from a place which actually contains the first part of a
19775 `double'. That would lead to really incorrect and confusing
19776 output at debug-time.
19777
19778 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19779 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19780 are a couple of exceptions however. On little-endian machines we can
19781 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19782 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19783 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19784 when (on a little-endian machine) a non-prototyped function has a
19785 parameter declared to be of type `short' or `char'. In such cases,
19786 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19787 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19788 passed `int' value. If the debugger then uses that address to fetch
19789 a `short' or a `char' (on a little-endian machine) the result will be
19790 the correct data, so we allow for such exceptional cases below.
19791
19792 Note that our goal here is to describe the place where the given formal
19793 parameter lives during most of the function's activation (i.e. between the
19794 end of the prologue and the start of the epilogue). We'll do that as best
19795 as we can. Note however that if the given formal parameter is modified
19796 sometime during the execution of the function, then a stack backtrace (at
19797 debug-time) will show the function as having been called with the *new*
19798 value rather than the value which was originally passed in. This happens
19799 rarely enough that it is not a major problem, but it *is* a problem, and
19800 I'd like to fix it.
19801
19802 A future version of dwarf2out.c may generate two additional attributes for
19803 any given DW_TAG_formal_parameter DIE which will describe the "passed
19804 type" and the "passed location" for the given formal parameter in addition
19805 to the attributes we now generate to indicate the "declared type" and the
19806 "active location" for each parameter. This additional set of attributes
19807 could be used by debuggers for stack backtraces. Separately, note that
19808 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19809 This happens (for example) for inlined instances of inline function formal
19810 parameters which are never referenced. This really shouldn't be
19811 happening. All PARM_DECL nodes should get valid non-NULL
19812 DECL_INCOMING_RTL values. FIXME. */
19813
19814 /* Use DECL_RTL as the "location" unless we find something better. */
19815 rtl = DECL_RTL_IF_SET (decl);
19816
19817 /* When generating abstract instances, ignore everything except
19818 constants, symbols living in memory, and symbols living in
19819 fixed registers. */
19820 if (! reload_completed)
19821 {
19822 if (rtl
19823 && (CONSTANT_P (rtl)
19824 || (MEM_P (rtl)
19825 && CONSTANT_P (XEXP (rtl, 0)))
19826 || (REG_P (rtl)
19827 && VAR_P (decl)
19828 && TREE_STATIC (decl))))
19829 {
19830 rtl = targetm.delegitimize_address (rtl);
19831 return rtl;
19832 }
19833 rtl = NULL_RTX;
19834 }
19835 else if (TREE_CODE (decl) == PARM_DECL)
19836 {
19837 if (rtl == NULL_RTX
19838 || is_pseudo_reg (rtl)
19839 || (MEM_P (rtl)
19840 && is_pseudo_reg (XEXP (rtl, 0))
19841 && DECL_INCOMING_RTL (decl)
19842 && MEM_P (DECL_INCOMING_RTL (decl))
19843 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19844 {
19845 tree declared_type = TREE_TYPE (decl);
19846 tree passed_type = DECL_ARG_TYPE (decl);
19847 machine_mode dmode = TYPE_MODE (declared_type);
19848 machine_mode pmode = TYPE_MODE (passed_type);
19849
19850 /* This decl represents a formal parameter which was optimized out.
19851 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19852 all cases where (rtl == NULL_RTX) just below. */
19853 if (dmode == pmode)
19854 rtl = DECL_INCOMING_RTL (decl);
19855 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19856 && SCALAR_INT_MODE_P (dmode)
19857 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19858 && DECL_INCOMING_RTL (decl))
19859 {
19860 rtx inc = DECL_INCOMING_RTL (decl);
19861 if (REG_P (inc))
19862 rtl = inc;
19863 else if (MEM_P (inc))
19864 {
19865 if (BYTES_BIG_ENDIAN)
19866 rtl = adjust_address_nv (inc, dmode,
19867 GET_MODE_SIZE (pmode)
19868 - GET_MODE_SIZE (dmode));
19869 else
19870 rtl = inc;
19871 }
19872 }
19873 }
19874
19875 /* If the parm was passed in registers, but lives on the stack, then
19876 make a big endian correction if the mode of the type of the
19877 parameter is not the same as the mode of the rtl. */
19878 /* ??? This is the same series of checks that are made in dbxout.c before
19879 we reach the big endian correction code there. It isn't clear if all
19880 of these checks are necessary here, but keeping them all is the safe
19881 thing to do. */
19882 else if (MEM_P (rtl)
19883 && XEXP (rtl, 0) != const0_rtx
19884 && ! CONSTANT_P (XEXP (rtl, 0))
19885 /* Not passed in memory. */
19886 && !MEM_P (DECL_INCOMING_RTL (decl))
19887 /* Not passed by invisible reference. */
19888 && (!REG_P (XEXP (rtl, 0))
19889 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19890 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19891 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19892 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19893 #endif
19894 )
19895 /* Big endian correction check. */
19896 && BYTES_BIG_ENDIAN
19897 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19898 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19899 UNITS_PER_WORD))
19900 {
19901 machine_mode addr_mode = get_address_mode (rtl);
19902 poly_int64 offset = (UNITS_PER_WORD
19903 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19904
19905 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19906 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19907 }
19908 }
19909 else if (VAR_P (decl)
19910 && rtl
19911 && MEM_P (rtl)
19912 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19913 {
19914 machine_mode addr_mode = get_address_mode (rtl);
19915 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19916 GET_MODE (rtl));
19917
19918 /* If a variable is declared "register" yet is smaller than
19919 a register, then if we store the variable to memory, it
19920 looks like we're storing a register-sized value, when in
19921 fact we are not. We need to adjust the offset of the
19922 storage location to reflect the actual value's bytes,
19923 else gdb will not be able to display it. */
19924 if (maybe_ne (offset, 0))
19925 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19926 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19927 }
19928
19929 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19930 and will have been substituted directly into all expressions that use it.
19931 C does not have such a concept, but C++ and other languages do. */
19932 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19933 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19934
19935 if (rtl)
19936 rtl = targetm.delegitimize_address (rtl);
19937
19938 /* If we don't look past the constant pool, we risk emitting a
19939 reference to a constant pool entry that isn't referenced from
19940 code, and thus is not emitted. */
19941 if (rtl)
19942 rtl = avoid_constant_pool_reference (rtl);
19943
19944 /* Try harder to get a rtl. If this symbol ends up not being emitted
19945 in the current CU, resolve_addr will remove the expression referencing
19946 it. */
19947 if (rtl == NULL_RTX
19948 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19949 && VAR_P (decl)
19950 && !DECL_EXTERNAL (decl)
19951 && TREE_STATIC (decl)
19952 && DECL_NAME (decl)
19953 && !DECL_HARD_REGISTER (decl)
19954 && DECL_MODE (decl) != VOIDmode)
19955 {
19956 rtl = make_decl_rtl_for_debug (decl);
19957 if (!MEM_P (rtl)
19958 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19959 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19960 rtl = NULL_RTX;
19961 }
19962
19963 return rtl;
19964 }
19965
19966 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19967 returned. If so, the decl for the COMMON block is returned, and the
19968 value is the offset into the common block for the symbol. */
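/* A hypothetical Fortran example:

     COMMON /BLK/ X, Y
     REAL X, Y

   The decl for Y has a DECL_VALUE_EXPR that is a COMPONENT_REF into the
   variable representing BLK; this function returns that variable and sets
   *VALUE to Y's byte offset within the block (4, assuming 4-byte
   REALs).  */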
19969
19970 static tree
19971 fortran_common (tree decl, HOST_WIDE_INT *value)
19972 {
19973 tree val_expr, cvar;
19974 machine_mode mode;
19975 poly_int64 bitsize, bitpos;
19976 tree offset;
19977 HOST_WIDE_INT cbitpos;
19978 int unsignedp, reversep, volatilep = 0;
19979
19980 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19981 it does not have a value (the offset into the common area), or if it
19982 is thread local (as opposed to global) then it isn't common, and shouldn't
19983 be handled as such. */
19984 if (!VAR_P (decl)
19985 || !TREE_STATIC (decl)
19986 || !DECL_HAS_VALUE_EXPR_P (decl)
19987 || !is_fortran ())
19988 return NULL_TREE;
19989
19990 val_expr = DECL_VALUE_EXPR (decl);
19991 if (TREE_CODE (val_expr) != COMPONENT_REF)
19992 return NULL_TREE;
19993
19994 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19995 &unsignedp, &reversep, &volatilep);
19996
19997 if (cvar == NULL_TREE
19998 || !VAR_P (cvar)
19999 || DECL_ARTIFICIAL (cvar)
20000 || !TREE_PUBLIC (cvar)
20001 /* We don't expect to have to cope with variable offsets,
20002 since at present all static data must have a constant size. */
20003 || !bitpos.is_constant (&cbitpos))
20004 return NULL_TREE;
20005
20006 *value = 0;
20007 if (offset != NULL)
20008 {
20009 if (!tree_fits_shwi_p (offset))
20010 return NULL_TREE;
20011 *value = tree_to_shwi (offset);
20012 }
20013 if (cbitpos != 0)
20014 *value += cbitpos / BITS_PER_UNIT;
20015
20016 return cvar;
20017 }
20018
20019 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20020 data attribute for a variable or a parameter. We generate the
20021 DW_AT_const_value attribute only in those cases where the given variable
20022 or parameter does not have a true "location" either in memory or in a
20023 register. This can happen (for example) when a constant is passed as an
20024 actual argument in a call to an inline function. (It's possible that
20025 these things can crop up in other ways also.) Note that one type of
20026 constant value which can be passed into an inlined function is a constant
20027 pointer. This can happen for example if an actual argument in an inlined
20028 function call evaluates to a compile-time constant address.
20029
20030 CACHE_P is true if it is worth caching the location list for DECL,
20031 so that future calls can reuse it rather than regenerate it from scratch.
20032 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20033 since we will need to refer to them each time the function is inlined. */
20034
20035 static bool
20036 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20037 {
20038 rtx rtl;
20039 dw_loc_list_ref list;
20040 var_loc_list *loc_list;
20041 cached_dw_loc_list *cache;
20042
20043 if (early_dwarf)
20044 return false;
20045
20046 if (TREE_CODE (decl) == ERROR_MARK)
20047 return false;
20048
20049 if (get_AT (die, DW_AT_location)
20050 || get_AT (die, DW_AT_const_value))
20051 return true;
20052
20053 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20054 || TREE_CODE (decl) == RESULT_DECL);
20055
20056 /* Try to get some constant RTL for this decl, and use that as the value of
20057 the location. */
20058
20059 rtl = rtl_for_decl_location (decl);
20060 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20061 && add_const_value_attribute (die, rtl))
20062 return true;
20063
20064 /* See if we have a single element location list that is equivalent to
20065 a constant value. In that case it is better to use
20066 add_const_value_attribute than to expand the constant value equivalent. */
20067 loc_list = lookup_decl_loc (decl);
20068 if (loc_list
20069 && loc_list->first
20070 && loc_list->first->next == NULL
20071 && NOTE_P (loc_list->first->loc)
20072 && NOTE_VAR_LOCATION (loc_list->first->loc)
20073 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20074 {
20075 struct var_loc_node *node;
20076
20077 node = loc_list->first;
20078 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20079 if (GET_CODE (rtl) == EXPR_LIST)
20080 rtl = XEXP (rtl, 0);
20081 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20082 && add_const_value_attribute (die, rtl))
20083 return true;
20084 }
20085 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20086 list several times. See if we've already cached the contents. */
20087 list = NULL;
20088 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20089 cache_p = false;
20090 if (cache_p)
20091 {
20092 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20093 if (cache)
20094 list = cache->loc_list;
20095 }
20096 if (list == NULL)
20097 {
20098 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20099 NULL);
20100 /* It is usually worth caching this result if the decl is from
20101 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20102 if (cache_p && list && list->dw_loc_next)
20103 {
20104 cached_dw_loc_list **slot
20105 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20106 DECL_UID (decl),
20107 INSERT);
20108 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20109 cache->decl_id = DECL_UID (decl);
20110 cache->loc_list = list;
20111 *slot = cache;
20112 }
20113 }
20114 if (list)
20115 {
20116 add_AT_location_description (die, DW_AT_location, list);
20117 return true;
20118 }
20119 /* None of that worked, so it must not really have a location;
20120 try adding a constant value attribute from the DECL_INITIAL. */
20121 return tree_add_const_value_attribute_for_decl (die, decl);
20122 }
20123
20124 /* Helper function for tree_add_const_value_attribute. Natively encode
20125 initializer INIT into an array. Return true if successful. */
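/* A small illustration (assuming a 4-byte int target): encoding

     int a[4] = { 1, 2 };

   with SIZE == 16 zero-fills ARRAY and then writes the target
   representation of 1 and 2 at byte offsets 0 and 4, leaving the trailing
   eight bytes zero.  */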
20126
20127 static bool
20128 native_encode_initializer (tree init, unsigned char *array, int size)
20129 {
20130 tree type;
20131
20132 if (init == NULL_TREE)
20133 return false;
20134
20135 STRIP_NOPS (init);
20136 switch (TREE_CODE (init))
20137 {
20138 case STRING_CST:
20139 type = TREE_TYPE (init);
20140 if (TREE_CODE (type) == ARRAY_TYPE)
20141 {
20142 tree enttype = TREE_TYPE (type);
20143 scalar_int_mode mode;
20144
20145 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20146 || GET_MODE_SIZE (mode) != 1)
20147 return false;
20148 if (int_size_in_bytes (type) != size)
20149 return false;
20150 if (size > TREE_STRING_LENGTH (init))
20151 {
20152 memcpy (array, TREE_STRING_POINTER (init),
20153 TREE_STRING_LENGTH (init));
20154 memset (array + TREE_STRING_LENGTH (init),
20155 '\0', size - TREE_STRING_LENGTH (init));
20156 }
20157 else
20158 memcpy (array, TREE_STRING_POINTER (init), size);
20159 return true;
20160 }
20161 return false;
20162 case CONSTRUCTOR:
20163 type = TREE_TYPE (init);
20164 if (int_size_in_bytes (type) != size)
20165 return false;
20166 if (TREE_CODE (type) == ARRAY_TYPE)
20167 {
20168 HOST_WIDE_INT min_index;
20169 unsigned HOST_WIDE_INT cnt;
20170 int curpos = 0, fieldsize;
20171 constructor_elt *ce;
20172
20173 if (TYPE_DOMAIN (type) == NULL_TREE
20174 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20175 return false;
20176
20177 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20178 if (fieldsize <= 0)
20179 return false;
20180
20181 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20182 memset (array, '\0', size);
20183 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20184 {
20185 tree val = ce->value;
20186 tree index = ce->index;
20187 int pos = curpos;
20188 if (index && TREE_CODE (index) == RANGE_EXPR)
20189 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20190 * fieldsize;
20191 else if (index)
20192 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20193
20194 if (val)
20195 {
20196 STRIP_NOPS (val);
20197 if (!native_encode_initializer (val, array + pos, fieldsize))
20198 return false;
20199 }
20200 curpos = pos + fieldsize;
20201 if (index && TREE_CODE (index) == RANGE_EXPR)
20202 {
20203 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20204 - tree_to_shwi (TREE_OPERAND (index, 0));
20205 while (count-- > 0)
20206 {
20207 if (val)
20208 memcpy (array + curpos, array + pos, fieldsize);
20209 curpos += fieldsize;
20210 }
20211 }
20212 gcc_assert (curpos <= size);
20213 }
20214 return true;
20215 }
20216 else if (TREE_CODE (type) == RECORD_TYPE
20217 || TREE_CODE (type) == UNION_TYPE)
20218 {
20219 tree field = NULL_TREE;
20220 unsigned HOST_WIDE_INT cnt;
20221 constructor_elt *ce;
20222
20223 if (int_size_in_bytes (type) != size)
20224 return false;
20225
20226 if (TREE_CODE (type) == RECORD_TYPE)
20227 field = TYPE_FIELDS (type);
20228
20229 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20230 {
20231 tree val = ce->value;
20232 int pos, fieldsize;
20233
20234 if (ce->index != 0)
20235 field = ce->index;
20236
20237 if (val)
20238 STRIP_NOPS (val);
20239
20240 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20241 return false;
20242
20243 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20244 && TYPE_DOMAIN (TREE_TYPE (field))
20245 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20246 return false;
20247 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20248 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20249 return false;
20250 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20251 pos = int_byte_position (field);
20252 gcc_assert (pos + fieldsize <= size);
20253 if (val && fieldsize != 0
20254 && !native_encode_initializer (val, array + pos, fieldsize))
20255 return false;
20256 }
20257 return true;
20258 }
20259 return false;
20260 case VIEW_CONVERT_EXPR:
20261 case NON_LVALUE_EXPR:
20262 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20263 default:
20264 return native_encode_expr (init, array, size) == size;
20265 }
20266 }
20267
20268 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20269 attribute is the const value T. */
20270
20271 static bool
20272 tree_add_const_value_attribute (dw_die_ref die, tree t)
20273 {
20274 tree init;
20275 tree type = TREE_TYPE (t);
20276 rtx rtl;
20277
20278 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20279 return false;
20280
20281 init = t;
20282 gcc_assert (!DECL_P (init));
20283
20284 if (TREE_CODE (init) == INTEGER_CST)
20285 {
20286 if (tree_fits_uhwi_p (init))
20287 {
20288 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20289 return true;
20290 }
20291 if (tree_fits_shwi_p (init))
20292 {
20293 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20294 return true;
20295 }
20296 }
20297 if (! early_dwarf)
20298 {
20299 rtl = rtl_for_decl_init (init, type);
20300 if (rtl)
20301 return add_const_value_attribute (die, rtl);
20302 }
20303 /* If the host and target are sane, try harder. */
20304 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20305 && initializer_constant_valid_p (init, type))
20306 {
20307 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20308 if (size > 0 && (int) size == size)
20309 {
20310 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20311
20312 if (native_encode_initializer (init, array, size))
20313 {
20314 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20315 return true;
20316 }
20317 ggc_free (array);
20318 }
20319 }
20320 return false;
20321 }
20322
20323 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20324 attribute is the const value of T, where T is an integral constant
20325 variable with static storage duration
20326 (so it can't be a PARM_DECL or a RESULT_DECL). */
20327
20328 static bool
20329 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20330 {
20331
20332 if (!decl
20333 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20334 || (VAR_P (decl) && !TREE_STATIC (decl)))
20335 return false;
20336
20337 if (TREE_READONLY (decl)
20338 && ! TREE_THIS_VOLATILE (decl)
20339 && DECL_INITIAL (decl))
20340 /* OK */;
20341 else
20342 return false;
20343
20344 /* Don't add DW_AT_const_value if abstract origin already has one. */
20345 if (get_AT (var_die, DW_AT_const_value))
20346 return false;
20347
20348 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20349 }
20350
20351 /* Convert the CFI instructions for the current function into a
20352 location list. This is used for DW_AT_frame_base when we are targeting
20353 a dwarf2 consumer that does not support the dwarf3
20354 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20355 expressions. */
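/* A purely hypothetical x86-64 illustration, with OFFSET == 0, for the
   usual "push %rbp; mov %rsp, %rbp" prologue; the resulting list might
   look like

     [function entry, after the push)  DW_OP_breg7 (rsp) + 8
     [after the push, after the mov )  DW_OP_breg7 (rsp) + 16
     [after the mov,  function end  )  DW_OP_breg6 (rbp) + 16

   i.e. one list element per range over which the CFA computation stays
   the same.  */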
20356
20357 static dw_loc_list_ref
20358 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20359 {
20360 int ix;
20361 dw_fde_ref fde;
20362 dw_loc_list_ref list, *list_tail;
20363 dw_cfi_ref cfi;
20364 dw_cfa_location last_cfa, next_cfa;
20365 const char *start_label, *last_label, *section;
20366 dw_cfa_location remember;
20367
20368 fde = cfun->fde;
20369 gcc_assert (fde != NULL);
20370
20371 section = secname_for_decl (current_function_decl);
20372 list_tail = &list;
20373 list = NULL;
20374
20375 memset (&next_cfa, 0, sizeof (next_cfa));
20376 next_cfa.reg = INVALID_REGNUM;
20377 remember = next_cfa;
20378
20379 start_label = fde->dw_fde_begin;
20380
20381 /* ??? Bald assumption that the CIE opcode list does not contain
20382 advance opcodes. */
20383 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20384 lookup_cfa_1 (cfi, &next_cfa, &remember);
20385
20386 last_cfa = next_cfa;
20387 last_label = start_label;
20388
20389 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20390 {
20391 /* If the first partition contained no CFI adjustments, the
20392 CIE opcodes apply to the whole first partition. */
20393 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20394 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20395 list_tail = &(*list_tail)->dw_loc_next;
20396 start_label = last_label = fde->dw_fde_second_begin;
20397 }
20398
20399 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20400 {
20401 switch (cfi->dw_cfi_opc)
20402 {
20403 case DW_CFA_set_loc:
20404 case DW_CFA_advance_loc1:
20405 case DW_CFA_advance_loc2:
20406 case DW_CFA_advance_loc4:
20407 if (!cfa_equal_p (&last_cfa, &next_cfa))
20408 {
20409 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20410 start_label, 0, last_label, 0, section);
20411
20412 list_tail = &(*list_tail)->dw_loc_next;
20413 last_cfa = next_cfa;
20414 start_label = last_label;
20415 }
20416 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20417 break;
20418
20419 case DW_CFA_advance_loc:
20420 /* The encoding is complex enough that we should never emit this. */
20421 gcc_unreachable ();
20422
20423 default:
20424 lookup_cfa_1 (cfi, &next_cfa, &remember);
20425 break;
20426 }
20427 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20428 {
20429 if (!cfa_equal_p (&last_cfa, &next_cfa))
20430 {
20431 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20432 start_label, 0, last_label, 0, section);
20433
20434 list_tail = &(*list_tail)->dw_loc_next;
20435 last_cfa = next_cfa;
20436 start_label = last_label;
20437 }
20438 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20439 start_label, 0, fde->dw_fde_end, 0, section);
20440 list_tail = &(*list_tail)->dw_loc_next;
20441 start_label = last_label = fde->dw_fde_second_begin;
20442 }
20443 }
20444
20445 if (!cfa_equal_p (&last_cfa, &next_cfa))
20446 {
20447 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20448 start_label, 0, last_label, 0, section);
20449 list_tail = &(*list_tail)->dw_loc_next;
20450 start_label = last_label;
20451 }
20452
20453 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20454 start_label, 0,
20455 fde->dw_fde_second_begin
20456 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20457 section);
20458
20459 maybe_gen_llsym (list);
20460
20461 return list;
20462 }
20463
20464 /* Compute a displacement from the "steady-state frame pointer" to the
20465 frame base (often the same as the CFA), and store it in
20466 frame_pointer_fb_offset. OFFSET is added to the displacement
20467 before the latter is negated. */
20468
20469 static void
20470 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20471 {
20472 rtx reg, elim;
20473
20474 #ifdef FRAME_POINTER_CFA_OFFSET
20475 reg = frame_pointer_rtx;
20476 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20477 #else
20478 reg = arg_pointer_rtx;
20479 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20480 #endif
20481
20482 elim = (ira_use_lra_p
20483 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20484 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20485 elim = strip_offset_and_add (elim, &offset);
20486
20487 frame_pointer_fb_offset = -offset;
20488
20489 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20490 in which to eliminate. This is because its stack pointer isn't
20491 directly accessible as a register within the ISA. To work around
20492 this, assume that while we cannot provide a proper value for
20493 frame_pointer_fb_offset, we won't need one either. */
20494 frame_pointer_fb_offset_valid
20495 = ((SUPPORTS_STACK_ALIGNMENT
20496 && (elim == hard_frame_pointer_rtx
20497 || elim == stack_pointer_rtx))
20498 || elim == (frame_pointer_needed
20499 ? hard_frame_pointer_rtx
20500 : stack_pointer_rtx));
20501 }
20502
20503 /* Generate a DW_AT_name attribute given some string value to be included as
20504 the value of the attribute. */
20505
20506 static void
20507 add_name_attribute (dw_die_ref die, const char *name_string)
20508 {
20509 if (name_string != NULL && *name_string != 0)
20510 {
20511 if (demangle_name_func)
20512 name_string = (*demangle_name_func) (name_string);
20513
20514 add_AT_string (die, DW_AT_name, name_string);
20515 }
20516 }
20517
20518 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20519 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20520 of TYPE accordingly.
20521
20522 ??? This is a temporary measure until after we're able to generate
20523 regular DWARF for the complex Ada type system. */
20524
20525 static void
20526 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20527 dw_die_ref context_die)
20528 {
20529 tree dtype;
20530 dw_die_ref dtype_die;
20531
20532 if (!lang_hooks.types.descriptive_type)
20533 return;
20534
20535 dtype = lang_hooks.types.descriptive_type (type);
20536 if (!dtype)
20537 return;
20538
20539 dtype_die = lookup_type_die (dtype);
20540 if (!dtype_die)
20541 {
20542 gen_type_die (dtype, context_die);
20543 dtype_die = lookup_type_die (dtype);
20544 gcc_assert (dtype_die);
20545 }
20546
20547 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20548 }
20549
20550 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20551
20552 static const char *
20553 comp_dir_string (void)
20554 {
20555 const char *wd;
20556 char *wd1;
20557 static const char *cached_wd = NULL;
20558
20559 if (cached_wd != NULL)
20560 return cached_wd;
20561
20562 wd = get_src_pwd ();
20563 if (wd == NULL)
20564 return NULL;
20565
20566 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20567 {
20568 int wdlen;
20569
20570 wdlen = strlen (wd);
20571 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20572 strcpy (wd1, wd);
20573 wd1 [wdlen] = DIR_SEPARATOR;
20574 wd1 [wdlen + 1] = 0;
20575 wd = wd1;
20576 }
20577
20578 cached_wd = remap_debug_filename (wd);
20579 return cached_wd;
20580 }
20581
20582 /* Generate a DW_AT_comp_dir attribute for DIE. */
20583
20584 static void
20585 add_comp_dir_attribute (dw_die_ref die)
20586 {
20587 const char * wd = comp_dir_string ();
20588 if (wd != NULL)
20589 add_AT_string (die, DW_AT_comp_dir, wd);
20590 }
20591
20592 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20593 pointer computation, ...), output a representation for that bound according
20594 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20595 loc_list_from_tree for the meaning of CONTEXT. */
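/* For instance, a literal bound such as 10 can be output directly in a
   constant form, whereas an Ada array bound stored in a record
   discriminant may instead be output as a reference to that member's DIE
   or as a DWARF expression, depending on which FORMS the caller
   permits.  */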
20596
20597 static void
20598 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20599 int forms, struct loc_descr_context *context)
20600 {
20601 dw_die_ref context_die, decl_die;
20602 dw_loc_list_ref list;
20603 bool strip_conversions = true;
20604 bool placeholder_seen = false;
20605
20606 while (strip_conversions)
20607 switch (TREE_CODE (value))
20608 {
20609 case ERROR_MARK:
20610 case SAVE_EXPR:
20611 return;
20612
20613 CASE_CONVERT:
20614 case VIEW_CONVERT_EXPR:
20615 value = TREE_OPERAND (value, 0);
20616 break;
20617
20618 default:
20619 strip_conversions = false;
20620 break;
20621 }
20622
20623 /* If possible and permitted, output the attribute as a constant. */
20624 if ((forms & dw_scalar_form_constant) != 0
20625 && TREE_CODE (value) == INTEGER_CST)
20626 {
20627 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20628
20629 /* If HOST_WIDE_INT is big enough then represent the bound as
20630 a constant value. We need to choose a form based on
20631 whether the type is signed or unsigned. We cannot just
20632 call add_AT_unsigned if the value itself is positive
20633 (add_AT_unsigned might add the unsigned value encoded as
20634 DW_FORM_data[1248]). Some DWARF consumers will look up the
20635 bounds type and then sign-extend any unsigned values found
20636 for signed types. This is needed only for
20637 DW_AT_{lower,upper}_bound, since for most other attributes,
20638 consumers will treat DW_FORM_data[1248] as unsigned values,
20639 regardless of the underlying type. */
20640 if (prec <= HOST_BITS_PER_WIDE_INT
20641 || tree_fits_uhwi_p (value))
20642 {
20643 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20644 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20645 else
20646 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20647 }
20648 else
20649 /* Otherwise represent the bound as an unsigned value with
20650 the precision of its type. The precision and signedness
20651 of the type will be necessary to re-interpret it
20652 unambiguously. */
20653 add_AT_wide (die, attr, wi::to_wide (value));
20654 return;
20655 }
20656
20657 /* Otherwise, if it's possible and permitted too, output a reference to
20658 another DIE. */
20659 if ((forms & dw_scalar_form_reference) != 0)
20660 {
20661 tree decl = NULL_TREE;
20662
20663 /* Some type attributes reference an outer type. For instance, the upper
20664 bound of an array may reference an embedding record (this happens in
20665 Ada). */
20666 if (TREE_CODE (value) == COMPONENT_REF
20667 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20668 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20669 decl = TREE_OPERAND (value, 1);
20670
20671 else if (VAR_P (value)
20672 || TREE_CODE (value) == PARM_DECL
20673 || TREE_CODE (value) == RESULT_DECL)
20674 decl = value;
20675
20676 if (decl != NULL_TREE)
20677 {
20678 dw_die_ref decl_die = lookup_decl_die (decl);
20679
20680 /* ??? Can this happen, or should the variable have been bound
20681 first? Probably it can, since I imagine that we try to create
20682 the types of parameters in the order in which they exist in
20683 the list, and won't have created a forward reference to a
20684 later parameter. */
20685 if (decl_die != NULL)
20686 {
20687 add_AT_die_ref (die, attr, decl_die);
20688 return;
20689 }
20690 }
20691 }
20692
20693 /* Last chance: try to create a stack operation procedure to evaluate the
20694 value. Do nothing if even that is not possible or permitted. */
20695 if ((forms & dw_scalar_form_exprloc) == 0)
20696 return;
20697
20698 list = loc_list_from_tree (value, 2, context);
20699 if (context && context->placeholder_arg)
20700 {
20701 placeholder_seen = context->placeholder_seen;
20702 context->placeholder_seen = false;
20703 }
20704 if (list == NULL || single_element_loc_list_p (list))
20705 {
20706 /* If this attribute is neither a reference nor a constant, it is
20707 a DWARF expression rather than a location description. For that
20708 loc_list_from_tree (value, 0, &context) is needed. */
20709 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20710 if (list2 && single_element_loc_list_p (list2))
20711 {
20712 if (placeholder_seen)
20713 {
20714 struct dwarf_procedure_info dpi;
20715 dpi.fndecl = NULL_TREE;
20716 dpi.args_count = 1;
20717 if (!resolve_args_picking (list2->expr, 1, &dpi))
20718 return;
20719 }
20720 add_AT_loc (die, attr, list2->expr);
20721 return;
20722 }
20723 }
20724
20725 /* If that failed to give a single element location list, fall back to
20726 outputting this as a reference... still if permitted. */
20727 if (list == NULL
20728 || (forms & dw_scalar_form_reference) == 0
20729 || placeholder_seen)
20730 return;
20731
20732 if (current_function_decl == 0)
20733 context_die = comp_unit_die ();
20734 else
20735 context_die = lookup_decl_die (current_function_decl);
20736
20737 decl_die = new_die (DW_TAG_variable, context_die, value);
20738 add_AT_flag (decl_die, DW_AT_artificial, 1);
20739 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20740 context_die);
20741 add_AT_location_description (decl_die, DW_AT_location, list);
20742 add_AT_die_ref (die, attr, decl_die);
20743 }
20744
20745 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20746 default. */
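/* For example, C and C++ arrays implicitly index from 0 while Fortran
   arrays index from 1, so a DW_AT_lower_bound equal to the language
   default can simply be omitted (see add_bound_info below).  */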
20747
20748 static int
20749 lower_bound_default (void)
20750 {
20751 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20752 {
20753 case DW_LANG_C:
20754 case DW_LANG_C89:
20755 case DW_LANG_C99:
20756 case DW_LANG_C11:
20757 case DW_LANG_C_plus_plus:
20758 case DW_LANG_C_plus_plus_11:
20759 case DW_LANG_C_plus_plus_14:
20760 case DW_LANG_ObjC:
20761 case DW_LANG_ObjC_plus_plus:
20762 return 0;
20763 case DW_LANG_Fortran77:
20764 case DW_LANG_Fortran90:
20765 case DW_LANG_Fortran95:
20766 case DW_LANG_Fortran03:
20767 case DW_LANG_Fortran08:
20768 return 1;
20769 case DW_LANG_UPC:
20770 case DW_LANG_D:
20771 case DW_LANG_Python:
20772 return dwarf_version >= 4 ? 0 : -1;
20773 case DW_LANG_Ada95:
20774 case DW_LANG_Ada83:
20775 case DW_LANG_Cobol74:
20776 case DW_LANG_Cobol85:
20777 case DW_LANG_Modula2:
20778 case DW_LANG_PLI:
20779 return dwarf_version >= 4 ? 1 : -1;
20780 default:
20781 return -1;
20782 }
20783 }
20784
20785 /* Given a tree node describing an array bound (either lower or upper) output
20786 a representation for that bound. */
20787
20788 static void
20789 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20790 tree bound, struct loc_descr_context *context)
20791 {
20792 int dflt;
20793
20794 while (1)
20795 switch (TREE_CODE (bound))
20796 {
20797 /* Strip all conversions. */
20798 CASE_CONVERT:
20799 case VIEW_CONVERT_EXPR:
20800 bound = TREE_OPERAND (bound, 0);
20801 break;
20802
20803 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20804 are even omitted when they are the default. */
20805 case INTEGER_CST:
20806 /* If the value for this bound is the default one, we can even omit the
20807 attribute. */
20808 if (bound_attr == DW_AT_lower_bound
20809 && tree_fits_shwi_p (bound)
20810 && (dflt = lower_bound_default ()) != -1
20811 && tree_to_shwi (bound) == dflt)
20812 return;
20813
20814 /* FALLTHRU */
20815
20816 default:
20817 /* Because of the complex interactions there can be with other GNAT
20818 encodings, GDB isn't ready yet to handle a proper DWARF description
20819 for self-referential subrange bounds: let GNAT encodings do the
20820 magic in such a case. */
20821 if (is_ada ()
20822 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20823 && contains_placeholder_p (bound))
20824 return;
20825
20826 add_scalar_info (subrange_die, bound_attr, bound,
20827 dw_scalar_form_constant
20828 | dw_scalar_form_exprloc
20829 | dw_scalar_form_reference,
20830 context);
20831 return;
20832 }
20833 }
20834
20835 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20836 possibly nested array subscripts into a flat sequence if COLLAPSE_P is true.
20837 Note that the block of subscript information for an array type also
20838 includes information about the element type of the given array type.
20839
20840 This function reuses previously set type and bound information if
20841 available. */
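/* A small illustration: for the C declaration

     int a[2][3];

   with COLLAPSE_P true, TYPE_DIE ends up with two DW_TAG_subrange_type
   children describing the index ranges 0..1 and 0..2 (a lower bound that
   matches the language default of 0 may be omitted entirely, see
   lower_bound_default above).  */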
20842
20843 static void
20844 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20845 {
20846 unsigned dimension_number;
20847 tree lower, upper;
20848 dw_die_ref child = type_die->die_child;
20849
20850 for (dimension_number = 0;
20851 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20852 type = TREE_TYPE (type), dimension_number++)
20853 {
20854 tree domain = TYPE_DOMAIN (type);
20855
20856 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20857 break;
20858
20859 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20860 and (in GNU C only) variable bounds. Handle all three forms
20861 here. */
20862
20863 /* Find and reuse a previously generated DW_TAG_subrange_type if
20864 available.
20865
20866 For multi-dimensional arrays, as we iterate through the
20867 various dimensions in the enclosing for loop above, we also
20868 iterate through the DIE children and pick at each
20869 DW_TAG_subrange_type previously generated (if available).
20870 Each child DW_TAG_subrange_type DIE describes the range of
20871 the current dimension. At this point we should have as many
20872 DW_TAG_subrange_type's as we have dimensions in the
20873 array. */
20874 dw_die_ref subrange_die = NULL;
20875 if (child)
20876 while (1)
20877 {
20878 child = child->die_sib;
20879 if (child->die_tag == DW_TAG_subrange_type)
20880 subrange_die = child;
20881 if (child == type_die->die_child)
20882 {
20883 /* If we wrapped around, stop looking next time. */
20884 child = NULL;
20885 break;
20886 }
20887 if (child->die_tag == DW_TAG_subrange_type)
20888 break;
20889 }
20890 if (!subrange_die)
20891 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20892
20893 if (domain)
20894 {
20895 /* We have an array type with specified bounds. */
20896 lower = TYPE_MIN_VALUE (domain);
20897 upper = TYPE_MAX_VALUE (domain);
20898
20899 /* Define the index type. */
20900 if (TREE_TYPE (domain)
20901 && !get_AT (subrange_die, DW_AT_type))
20902 {
20903 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20904 TREE_TYPE field. We can't emit debug info for this
20905 because it is an unnamed integral type. */
20906 if (TREE_CODE (domain) == INTEGER_TYPE
20907 && TYPE_NAME (domain) == NULL_TREE
20908 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20909 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20910 ;
20911 else
20912 add_type_attribute (subrange_die, TREE_TYPE (domain),
20913 TYPE_UNQUALIFIED, false, type_die);
20914 }
20915
20916 	  /* ??? If upper is NULL, the array has unspecified length,
20917 	     but it does have a lower bound.  This happens with the Fortran
20918 	     dimension arr(N:*).
20919 	     Since the debugger is definitely going to need to know N
20920 	     to produce useful results, go ahead and output the lower
20921 	     bound solo, and hope the debugger can cope.  */
20922
20923 if (!get_AT (subrange_die, DW_AT_lower_bound))
20924 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20925 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20926 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20927 }
20928
20929 /* Otherwise we have an array type with an unspecified length. The
20930 DWARF-2 spec does not say how to handle this; let's just leave out the
20931 bounds. */
20932 }
20933 }
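
/* Illustrative sketch only, not used by the compiler: what the collapsing
   done by add_subscript_info means for a C multidimensional array.  The
   declaration below is hypothetical user code.  */
#if 0
/* With COLLAPSE_P true (as callers pass for languages other than Ada), the
   nested ARRAY_TYPEs of the declaration below are described by a single
   DW_TAG_array_type carrying one DW_TAG_subrange_type child per dimension,
   roughly

     DW_TAG_array_type
       DW_TAG_subrange_type    DW_AT_upper_bound 2
       DW_TAG_subrange_type    DW_AT_upper_bound 4

   (the default C lower bound of 0 is omitted by add_bound_info).  With
   COLLAPSE_P false, only the outermost dimension is described here and the
   element type remains an array type of its own.  */
int m[3][5];
#endif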
20934
20935 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20936
20937 static void
20938 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20939 {
20940 dw_die_ref decl_die;
20941 HOST_WIDE_INT size;
20942 dw_loc_descr_ref size_expr = NULL;
20943
20944 switch (TREE_CODE (tree_node))
20945 {
20946 case ERROR_MARK:
20947 size = 0;
20948 break;
20949 case ENUMERAL_TYPE:
20950 case RECORD_TYPE:
20951 case UNION_TYPE:
20952 case QUAL_UNION_TYPE:
20953 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20954 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20955 {
20956 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20957 return;
20958 }
20959 size_expr = type_byte_size (tree_node, &size);
20960 break;
20961 case FIELD_DECL:
20962 /* For a data member of a struct or union, the DW_AT_byte_size is
20963 generally given as the number of bytes normally allocated for an
20964 object of the *declared* type of the member itself. This is true
20965 even for bit-fields. */
20966 size = int_size_in_bytes (field_type (tree_node));
20967 break;
20968 default:
20969 gcc_unreachable ();
20970 }
20971
20972 /* Support for dynamically-sized objects was introduced by DWARFv3.
20973 At the moment, GDB does not handle variable byte sizes very well,
20974 though. */
20975 if ((dwarf_version >= 3 || !dwarf_strict)
20976 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20977 && size_expr != NULL)
20978 add_AT_loc (die, DW_AT_byte_size, size_expr);
20979
20980 /* Note that `size' might be -1 when we get to this point. If it is, that
20981 indicates that the byte size of the entity in question is variable and
20982 that we could not generate a DWARF expression that computes it. */
20983 if (size >= 0)
20984 add_AT_unsigned (die, DW_AT_byte_size, size);
20985 }
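
/* Illustrative sketch only, not used by the compiler: the common fixed-size
   case handled by add_byte_size_attribute.  The struct below is hypothetical
   user code and the padding assumes a typical ABI with 4-byte int
   alignment.  */
#if 0
struct packet
{
  char tag;   /* 1 byte.                                   */
  int  len;   /* 4 bytes, preceded by 3 bytes of padding.  */
};
/* Under that assumption type_byte_size yields 8, so the DW_TAG_structure_type
   DIE gets DW_AT_byte_size 8.  A record whose size depends on a discriminant
   (Ada) may instead get a DWARF expression via add_AT_loc, or no byte size
   attribute at all when the size cannot be computed.  */
#endif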
20986
20987 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20988 alignment. */
20989
20990 static void
20991 add_alignment_attribute (dw_die_ref die, tree tree_node)
20992 {
20993 if (dwarf_version < 5 && dwarf_strict)
20994 return;
20995
20996 unsigned align;
20997
20998 if (DECL_P (tree_node))
20999 {
21000 if (!DECL_USER_ALIGN (tree_node))
21001 return;
21002
21003 align = DECL_ALIGN_UNIT (tree_node);
21004 }
21005 else if (TYPE_P (tree_node))
21006 {
21007 if (!TYPE_USER_ALIGN (tree_node))
21008 return;
21009
21010 align = TYPE_ALIGN_UNIT (tree_node);
21011 }
21012 else
21013 gcc_unreachable ();
21014
21015 add_AT_unsigned (die, DW_AT_alignment, align);
21016 }
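
/* Illustrative sketch only, not used by the compiler: DW_AT_alignment is
   emitted solely for alignment the user asked for.  The declarations below
   are hypothetical user code.  */
#if 0
int plain;                   /* Default alignment: no DW_AT_alignment.    */
_Alignas (16) int aligned;   /* DECL_USER_ALIGN is set: DW_AT_alignment 16
                                (the value is in bytes, DECL_ALIGN_UNIT).  */
#endif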
21017
21018 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21019 which specifies the distance in bits from the highest order bit of the
21020 "containing object" for the bit-field to the highest order bit of the
21021 bit-field itself.
21022
21023 For any given bit-field, the "containing object" is a hypothetical object
21024 (of some integral or enum type) within which the given bit-field lives. The
21025 type of this hypothetical "containing object" is always the same as the
21026 declared type of the individual bit-field itself. The determination of the
21027 exact location of the "containing object" for a bit-field is rather
21028 complicated. It's handled by the `field_byte_offset' function (above).
21029
21030 CTX is required: see the comment for VLR_CONTEXT.
21031
21032 Note that it is the size (in bytes) of the hypothetical "containing object"
21033 which will be given in the DW_AT_byte_size attribute for this bit-field.
21034    (See `add_byte_size_attribute' above).  */
21035
21036 static inline void
21037 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21038 {
21039 HOST_WIDE_INT object_offset_in_bytes;
21040 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21041 HOST_WIDE_INT bitpos_int;
21042 HOST_WIDE_INT highest_order_object_bit_offset;
21043 HOST_WIDE_INT highest_order_field_bit_offset;
21044 HOST_WIDE_INT bit_offset;
21045
21046 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21047
21048 /* Must be a field and a bit field. */
21049 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21050
21051 /* We can't yet handle bit-fields whose offsets are variable, so if we
21052 encounter such things, just return without generating any attribute
21053 whatsoever. Likewise for variable or too large size. */
21054 if (! tree_fits_shwi_p (bit_position (decl))
21055 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21056 return;
21057
21058 bitpos_int = int_bit_position (decl);
21059
21060 /* Note that the bit offset is always the distance (in bits) from the
21061 highest-order bit of the "containing object" to the highest-order bit of
21062 the bit-field itself. Since the "high-order end" of any object or field
21063 is different on big-endian and little-endian machines, the computation
21064 below must take account of these differences. */
21065 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21066 highest_order_field_bit_offset = bitpos_int;
21067
21068 if (! BYTES_BIG_ENDIAN)
21069 {
21070 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21071 highest_order_object_bit_offset +=
21072 simple_type_size_in_bits (original_type);
21073 }
21074
21075 bit_offset
21076 = (! BYTES_BIG_ENDIAN
21077 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21078 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21079
21080 if (bit_offset < 0)
21081 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21082 else
21083 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21084 }
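
/* Illustrative sketch only, not used by the compiler: the endianness
   arithmetic above worked through for one hypothetical layout -- a 32-bit
   containing object at byte offset 0, holding a 3-bit field at bit
   position 5.  */
#if 0
static int
example_dw_at_bit_offset (int big_endian)
{
  int object_bits = 32;  /* simple_type_size_in_bits (original_type)  */
  int field_pos = 5;     /* int_bit_position (decl)                   */
  int field_size = 3;    /* tree_to_shwi (DECL_SIZE (decl))           */

  if (big_endian)
    /* The high-order end is at the low bit numbers, so the distance is
       just the field position.  */
    return field_pos;                                 /* == 5  */
  else
    /* Move both "high-order ends" past the object/field and subtract:
       (0 + 32) - (5 + 3).  */
    return object_bits - (field_pos + field_size);    /* == 24 */
}
#endif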
21085
21086 /* For a FIELD_DECL node which represents a bit field, output an attribute
21087 which specifies the length in bits of the given field. */
21088
21089 static inline void
21090 add_bit_size_attribute (dw_die_ref die, tree decl)
21091 {
21092 /* Must be a field and a bit field. */
21093 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21094 && DECL_BIT_FIELD_TYPE (decl));
21095
21096 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21097 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21098 }
21099
21100 /* If the compiled language is ANSI C, then add a 'prototyped'
21101    attribute if argument types are given for the parameters of a function.  */
21102
21103 static inline void
21104 add_prototyped_attribute (dw_die_ref die, tree func_type)
21105 {
21106 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21107 {
21108 case DW_LANG_C:
21109 case DW_LANG_C89:
21110 case DW_LANG_C99:
21111 case DW_LANG_C11:
21112 case DW_LANG_ObjC:
21113 if (prototype_p (func_type))
21114 add_AT_flag (die, DW_AT_prototyped, 1);
21115 break;
21116 default:
21117 break;
21118 }
21119 }
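
/* Illustrative sketch only, not used by the compiler: DW_AT_prototyped for a
   C translation unit.  The declarations below are hypothetical user code.  */
#if 0
int f (void);   /* prototype_p is true: DW_AT_prototyped 1 is added.      */
int g ();       /* Old-style declaration: no DW_AT_prototyped attribute.  */
#endif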
21120
21121 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21122 by looking in the type declaration, the object declaration equate table or
21123 the block mapping. */
21124
21125 static inline dw_die_ref
21126 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21127 {
21128 dw_die_ref origin_die = NULL;
21129
21130 if (DECL_P (origin))
21131 {
21132 dw_die_ref c;
21133 origin_die = lookup_decl_die (origin);
21134       /* "Unwrap" the decl's DIE which we put in the imported unit context.
21135 	 We are looking for the abstract copy here.  */
21136 if (in_lto_p
21137 && origin_die
21138 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21139 /* ??? Identify this better. */
21140 && c->with_offset)
21141 origin_die = c;
21142 }
21143 else if (TYPE_P (origin))
21144 origin_die = lookup_type_die (origin);
21145 else if (TREE_CODE (origin) == BLOCK)
21146 origin_die = BLOCK_DIE (origin);
21147
21148   /* XXX: Functions that are never lowered don't always have correct block
21149      trees (in the case of Java, and in some other languages, they simply
21150      have no block tree).  For these functions, there is nothing we can really do to
21151 output correct debug info for inlined functions in all cases. Rather
21152 than die, we'll just produce deficient debug info now, in that we will
21153 have variables without a proper abstract origin. In the future, when all
21154 functions are lowered, we should re-add a gcc_assert (origin_die)
21155 here. */
21156
21157 if (origin_die)
21158 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21159 return origin_die;
21160 }
21161
21162 /* We do not currently support the pure_virtual attribute. */
21163
21164 static inline void
21165 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21166 {
21167 if (DECL_VINDEX (func_decl))
21168 {
21169 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21170
21171 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21172 add_AT_loc (die, DW_AT_vtable_elem_location,
21173 new_loc_descr (DW_OP_constu,
21174 tree_to_shwi (DECL_VINDEX (func_decl)),
21175 0));
21176
21177 /* GNU extension: Record what type this method came from originally. */
21178 if (debug_info_level > DINFO_LEVEL_TERSE
21179 && DECL_CONTEXT (func_decl))
21180 add_AT_die_ref (die, DW_AT_containing_type,
21181 lookup_type_die (DECL_CONTEXT (func_decl)));
21182 }
21183 }
21184 \f
21185 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21186 given decl. This used to be a vendor extension until after DWARF 4
21187 standardized it. */
21188
21189 static void
21190 add_linkage_attr (dw_die_ref die, tree decl)
21191 {
21192 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21193
21194 /* Mimic what assemble_name_raw does with a leading '*'. */
21195 if (name[0] == '*')
21196 name = &name[1];
21197
21198 if (dwarf_version >= 4)
21199 add_AT_string (die, DW_AT_linkage_name, name);
21200 else
21201 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21202 }
21203
21204 /* Add source coordinate attributes for the given decl. */
21205
21206 static void
21207 add_src_coords_attributes (dw_die_ref die, tree decl)
21208 {
21209 expanded_location s;
21210
21211 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21212 return;
21213 s = expand_location (DECL_SOURCE_LOCATION (decl));
21214 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21215 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21216 if (debug_column_info && s.column)
21217 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21218 }
21219
21220 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21221
21222 static void
21223 add_linkage_name_raw (dw_die_ref die, tree decl)
21224 {
21225 /* Defer until we have an assembler name set. */
21226 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21227 {
21228 limbo_die_node *asm_name;
21229
21230 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21231 asm_name->die = die;
21232 asm_name->created_for = decl;
21233 asm_name->next = deferred_asm_name;
21234 deferred_asm_name = asm_name;
21235 }
21236 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21237 add_linkage_attr (die, decl);
21238 }
21239
21240 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21241
21242 static void
21243 add_linkage_name (dw_die_ref die, tree decl)
21244 {
21245 if (debug_info_level > DINFO_LEVEL_NONE
21246 && VAR_OR_FUNCTION_DECL_P (decl)
21247 && TREE_PUBLIC (decl)
21248 && !(VAR_P (decl) && DECL_REGISTER (decl))
21249 && die->die_tag != DW_TAG_member)
21250 add_linkage_name_raw (die, decl);
21251 }
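
/* Illustrative sketch only, not used by the compiler: what the linkage-name
   routines above produce for a mangled C++ entity.  The declaration below is
   hypothetical user code.  */
#if 0
/* For the namespace-scope variable below, DECL_ASSEMBLER_NAME is the mangled
   "_ZN1n7counterE", which differs from DECL_NAME "counter", so (for public
   decls at normal debug levels) the DIE gets

     DW_AT_linkage_name "_ZN1n7counterE"

   or DW_AT_MIPS_linkage_name when targeting DWARF versions before 4.  */
namespace n { int counter; }
#endif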
21252
21253 /* Add a DW_AT_name attribute and source coordinate attribute for the
21254 given decl, but only if it actually has a name. */
21255
21256 static void
21257 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21258 bool no_linkage_name)
21259 {
21260 tree decl_name;
21261
21262 decl_name = DECL_NAME (decl);
21263 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21264 {
21265 const char *name = dwarf2_name (decl, 0);
21266 if (name)
21267 add_name_attribute (die, name);
21268 if (! DECL_ARTIFICIAL (decl))
21269 add_src_coords_attributes (die, decl);
21270
21271 if (!no_linkage_name)
21272 add_linkage_name (die, decl);
21273 }
21274
21275 #ifdef VMS_DEBUGGING_INFO
21276 /* Get the function's name, as described by its RTL. This may be different
21277 from the DECL_NAME name used in the source file. */
21278 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21279 {
21280 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21281 XEXP (DECL_RTL (decl), 0), false);
21282 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21283 }
21284 #endif /* VMS_DEBUGGING_INFO */
21285 }
21286
21287 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21288
21289 static void
21290 add_discr_value (dw_die_ref die, dw_discr_value *value)
21291 {
21292 dw_attr_node attr;
21293
21294 attr.dw_attr = DW_AT_discr_value;
21295 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21296 attr.dw_attr_val.val_entry = NULL;
21297 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21298 if (value->pos)
21299 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21300 else
21301 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21302 add_dwarf_attr (die, &attr);
21303 }
21304
21305 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21306
21307 static void
21308 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21309 {
21310 dw_attr_node attr;
21311
21312 attr.dw_attr = DW_AT_discr_list;
21313 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21314 attr.dw_attr_val.val_entry = NULL;
21315 attr.dw_attr_val.v.val_discr_list = discr_list;
21316 add_dwarf_attr (die, &attr);
21317 }
21318
21319 static inline dw_discr_list_ref
21320 AT_discr_list (dw_attr_node *attr)
21321 {
21322 return attr->dw_attr_val.v.val_discr_list;
21323 }
21324
21325 #ifdef VMS_DEBUGGING_INFO
21326 /* Output the debug main pointer die for VMS */
21327
21328 void
21329 dwarf2out_vms_debug_main_pointer (void)
21330 {
21331 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21332 dw_die_ref die;
21333
21334 /* Allocate the VMS debug main subprogram die. */
21335 die = new_die_raw (DW_TAG_subprogram);
21336 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21337 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21338 current_function_funcdef_no);
21339 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21340
21341 /* Make it the first child of comp_unit_die (). */
21342 die->die_parent = comp_unit_die ();
21343 if (comp_unit_die ()->die_child)
21344 {
21345 die->die_sib = comp_unit_die ()->die_child->die_sib;
21346 comp_unit_die ()->die_child->die_sib = die;
21347 }
21348 else
21349 {
21350 die->die_sib = die;
21351 comp_unit_die ()->die_child = die;
21352 }
21353 }
21354 #endif /* VMS_DEBUGGING_INFO */
21355
21356 /* walk_tree helper function for uses_local_type, below. */
21357
21358 static tree
21359 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21360 {
21361 if (!TYPE_P (*tp))
21362 *walk_subtrees = 0;
21363 else
21364 {
21365 tree name = TYPE_NAME (*tp);
21366 if (name && DECL_P (name) && decl_function_context (name))
21367 return *tp;
21368 }
21369 return NULL_TREE;
21370 }
21371
21372 /* If TYPE involves a function-local type (including a local typedef to a
21373 non-local type), returns that type; otherwise returns NULL_TREE. */
21374
21375 static tree
21376 uses_local_type (tree type)
21377 {
21378 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21379 return used;
21380 }
21381
21382 /* Return the DIE for the scope that immediately contains this type.
21383 Non-named types that do not involve a function-local type get global
21384 scope. Named types nested in namespaces or other types get their
21385 containing scope. All other types (i.e. function-local named types) get
21386 the current active scope. */
21387
21388 static dw_die_ref
21389 scope_die_for (tree t, dw_die_ref context_die)
21390 {
21391 dw_die_ref scope_die = NULL;
21392 tree containing_scope;
21393
21394 /* Non-types always go in the current scope. */
21395 gcc_assert (TYPE_P (t));
21396
21397 /* Use the scope of the typedef, rather than the scope of the type
21398 it refers to. */
21399 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21400 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21401 else
21402 containing_scope = TYPE_CONTEXT (t);
21403
21404 /* Use the containing namespace if there is one. */
21405 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21406 {
21407 if (context_die == lookup_decl_die (containing_scope))
21408 /* OK */;
21409 else if (debug_info_level > DINFO_LEVEL_TERSE)
21410 context_die = get_context_die (containing_scope);
21411 else
21412 containing_scope = NULL_TREE;
21413 }
21414
21415 /* Ignore function type "scopes" from the C frontend. They mean that
21416 a tagged type is local to a parmlist of a function declarator, but
21417 that isn't useful to DWARF. */
21418 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21419 containing_scope = NULL_TREE;
21420
21421 if (SCOPE_FILE_SCOPE_P (containing_scope))
21422 {
21423 /* If T uses a local type keep it local as well, to avoid references
21424 to function-local DIEs from outside the function. */
21425 if (current_function_decl && uses_local_type (t))
21426 scope_die = context_die;
21427 else
21428 scope_die = comp_unit_die ();
21429 }
21430 else if (TYPE_P (containing_scope))
21431 {
21432 /* For types, we can just look up the appropriate DIE. */
21433 if (debug_info_level > DINFO_LEVEL_TERSE)
21434 scope_die = get_context_die (containing_scope);
21435 else
21436 {
21437 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21438 if (scope_die == NULL)
21439 scope_die = comp_unit_die ();
21440 }
21441 }
21442 else
21443 scope_die = context_die;
21444
21445 return scope_die;
21446 }
21447
21448 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21449
21450 static inline int
21451 local_scope_p (dw_die_ref context_die)
21452 {
21453 for (; context_die; context_die = context_die->die_parent)
21454 if (context_die->die_tag == DW_TAG_inlined_subroutine
21455 || context_die->die_tag == DW_TAG_subprogram)
21456 return 1;
21457
21458 return 0;
21459 }
21460
21461 /* Returns nonzero if CONTEXT_DIE is a class. */
21462
21463 static inline int
21464 class_scope_p (dw_die_ref context_die)
21465 {
21466 return (context_die
21467 && (context_die->die_tag == DW_TAG_structure_type
21468 || context_die->die_tag == DW_TAG_class_type
21469 || context_die->die_tag == DW_TAG_interface_type
21470 || context_die->die_tag == DW_TAG_union_type));
21471 }
21472
21473 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21474 whether or not to treat a DIE in this context as a declaration. */
21475
21476 static inline int
21477 class_or_namespace_scope_p (dw_die_ref context_die)
21478 {
21479 return (class_scope_p (context_die)
21480 || (context_die && context_die->die_tag == DW_TAG_namespace));
21481 }
21482
21483 /* Many forms of DIEs require a "type description" attribute. This
21484 routine locates the proper "type descriptor" die for the type given
21485 by 'type' plus any additional qualifiers given by 'cv_quals', and
21486 adds a DW_AT_type attribute below the given die. */
21487
21488 static void
21489 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21490 bool reverse, dw_die_ref context_die)
21491 {
21492 enum tree_code code = TREE_CODE (type);
21493 dw_die_ref type_die = NULL;
21494
21495   /* ??? If this type is an unnamed subrange type of an integral, floating-point
21496      or fixed-point type, use the inner type.  This is because we have no
21497      support for unnamed types in base_type_die.  This can happen if this is
21498      an Ada subrange type.  The correct solution is to emit a subrange type DIE.  */
21499 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21500 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21501 type = TREE_TYPE (type), code = TREE_CODE (type);
21502
21503 if (code == ERROR_MARK
21504 /* Handle a special case. For functions whose return type is void, we
21505 generate *no* type attribute. (Note that no object may have type
21506 `void', so this only applies to function return types). */
21507 || code == VOID_TYPE)
21508 return;
21509
21510 type_die = modified_type_die (type,
21511 cv_quals | TYPE_QUALS (type),
21512 reverse,
21513 context_die);
21514
21515 if (type_die != NULL)
21516 add_AT_die_ref (object_die, DW_AT_type, type_die);
21517 }
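
/* Illustrative sketch only, not used by the compiler: how qualifiers flow
   through add_type_attribute.  The declaration below is hypothetical user
   code.  */
#if 0
/* For the variable below, the const qualifier is merged into CV_QUALS, so
   modified_type_die returns (creating if necessary) a chain roughly like

     DW_TAG_const_type  ->  DW_TAG_base_type "int"

   and the variable's DIE gets a DW_AT_type reference to the
   DW_TAG_const_type DIE.  A void or erroneous type produces no DW_AT_type
   at all, as the early return above shows.  */
const int limit = 10;
#endif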
21518
21519 /* Given an object die, add the calling convention attribute for the
21520 function call type. */
21521 static void
21522 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21523 {
21524 enum dwarf_calling_convention value = DW_CC_normal;
21525
21526 value = ((enum dwarf_calling_convention)
21527 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21528
21529 if (is_fortran ()
21530 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21531 {
21532 /* DWARF 2 doesn't provide a way to identify a program's source-level
21533 entry point. DW_AT_calling_convention attributes are only meant
21534 to describe functions' calling conventions. However, lacking a
21535 better way to signal the Fortran main program, we used this for
21536 a long time, following existing custom. Now, DWARF 4 has
21537 DW_AT_main_subprogram, which we add below, but some tools still
21538 rely on the old way, which we thus keep. */
21539 value = DW_CC_program;
21540
21541 if (dwarf_version >= 4 || !dwarf_strict)
21542 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21543 }
21544
21545 /* Only add the attribute if the backend requests it, and
21546 is not DW_CC_normal. */
21547 if (value && (value != DW_CC_normal))
21548 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21549 }
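
/* Illustrative example only, not used by the compiler: gfortran names the
   main program's entry point MAIN__, so under the rule above its
   DW_TAG_subprogram gets DW_AT_calling_convention DW_CC_program and, for
   DWARF 4 or non-strict output, DW_AT_main_subprogram 1.  For ordinary
   functions the target hook usually returns DW_CC_normal and no calling
   convention attribute is emitted.  */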
21550
21551 /* Given a tree pointer to a struct, class, union, or enum type node, return
21552 a pointer to the (string) tag name for the given type, or zero if the type
21553 was declared without a tag. */
21554
21555 static const char *
21556 type_tag (const_tree type)
21557 {
21558 const char *name = 0;
21559
21560 if (TYPE_NAME (type) != 0)
21561 {
21562 tree t = 0;
21563
21564 /* Find the IDENTIFIER_NODE for the type name. */
21565 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21566 && !TYPE_NAMELESS (type))
21567 t = TYPE_NAME (type);
21568
21569 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21570 a TYPE_DECL node, regardless of whether or not a `typedef' was
21571 involved. */
21572 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21573 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21574 {
21575 /* We want to be extra verbose. Don't call dwarf_name if
21576 DECL_NAME isn't set. The default hook for decl_printable_name
21577 doesn't like that, and in this context it's correct to return
21578 0, instead of "<anonymous>" or the like. */
21579 if (DECL_NAME (TYPE_NAME (type))
21580 && !DECL_NAMELESS (TYPE_NAME (type)))
21581 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21582 }
21583
21584 /* Now get the name as a string, or invent one. */
21585 if (!name && t != 0)
21586 name = IDENTIFIER_POINTER (t);
21587 }
21588
21589 return (name == 0 || *name == '\0') ? 0 : name;
21590 }
21591
21592 /* Return the type associated with a data member, make a special check
21593 for bit field types. */
21594
21595 static inline tree
21596 member_declared_type (const_tree member)
21597 {
21598 return (DECL_BIT_FIELD_TYPE (member)
21599 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21600 }
21601
21602 /* Get the decl's label, as described by its RTL. This may be different
21603 from the DECL_NAME name used in the source file. */
21604
21605 #if 0
21606 static const char *
21607 decl_start_label (tree decl)
21608 {
21609 rtx x;
21610 const char *fnname;
21611
21612 x = DECL_RTL (decl);
21613 gcc_assert (MEM_P (x));
21614
21615 x = XEXP (x, 0);
21616 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21617
21618 fnname = XSTR (x, 0);
21619 return fnname;
21620 }
21621 #endif
21622 \f
21623 /* For variable-length arrays that have been previously generated, but
21624 may be incomplete due to missing subscript info, fill the subscript
21625 info. Return TRUE if this is one of those cases. */
21626 static bool
21627 fill_variable_array_bounds (tree type)
21628 {
21629 if (TREE_ASM_WRITTEN (type)
21630 && TREE_CODE (type) == ARRAY_TYPE
21631 && variably_modified_type_p (type, NULL))
21632 {
21633 dw_die_ref array_die = lookup_type_die (type);
21634 if (!array_die)
21635 return false;
21636 add_subscript_info (array_die, type, !is_ada ());
21637 return true;
21638 }
21639 return false;
21640 }
21641
21642 /* These routines generate the internal representation of the DIE's for
21643 the compilation unit. Debugging information is collected by walking
21644 the declaration trees passed in from dwarf2out_decl(). */
21645
21646 static void
21647 gen_array_type_die (tree type, dw_die_ref context_die)
21648 {
21649 dw_die_ref array_die;
21650
21651   /* GNU compilers represent multidimensional array types as sequences of one-
21652      dimensional array types whose element types are themselves array types.
21653      We sometimes squish that down to a single array_type DIE with multiple
21654      subscripts in the Dwarf debugging info.  The draft Dwarf specification
21655      says that we are allowed to do this kind of compression in C, because
21656      there is no difference between an array of arrays and a multidimensional
21657      array.  We don't do this for Ada, in order to remain as close as possible
21658      to the actual representation, which is especially important given the
21659      language's flexibility with respect to arrays of variable size.  */
21660
21661 bool collapse_nested_arrays = !is_ada ();
21662
21663 if (fill_variable_array_bounds (type))
21664 return;
21665
21666 dw_die_ref scope_die = scope_die_for (type, context_die);
21667 tree element_type;
21668
21669 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21670 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21671 if (TYPE_STRING_FLAG (type)
21672 && TREE_CODE (type) == ARRAY_TYPE
21673 && is_fortran ()
21674 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21675 {
21676 HOST_WIDE_INT size;
21677
21678 array_die = new_die (DW_TAG_string_type, scope_die, type);
21679 add_name_attribute (array_die, type_tag (type));
21680 equate_type_number_to_die (type, array_die);
21681 size = int_size_in_bytes (type);
21682 if (size >= 0)
21683 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21684 /* ??? We can't annotate types late, but for LTO we may not
21685 generate a location early either (gfortran.dg/save_6.f90). */
21686 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21687 && TYPE_DOMAIN (type) != NULL_TREE
21688 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21689 {
21690 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21691 tree rszdecl = szdecl;
21692
21693 size = int_size_in_bytes (TREE_TYPE (szdecl));
21694 if (!DECL_P (szdecl))
21695 {
21696 if (TREE_CODE (szdecl) == INDIRECT_REF
21697 && DECL_P (TREE_OPERAND (szdecl, 0)))
21698 {
21699 rszdecl = TREE_OPERAND (szdecl, 0);
21700 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21701 != DWARF2_ADDR_SIZE)
21702 size = 0;
21703 }
21704 else
21705 size = 0;
21706 }
21707 if (size > 0)
21708 {
21709 dw_loc_list_ref loc
21710 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21711 NULL);
21712 if (loc)
21713 {
21714 add_AT_location_description (array_die, DW_AT_string_length,
21715 loc);
21716 if (size != DWARF2_ADDR_SIZE)
21717 add_AT_unsigned (array_die, dwarf_version >= 5
21718 ? DW_AT_string_length_byte_size
21719 : DW_AT_byte_size, size);
21720 }
21721 }
21722 }
21723 return;
21724 }
21725
21726 array_die = new_die (DW_TAG_array_type, scope_die, type);
21727 add_name_attribute (array_die, type_tag (type));
21728 equate_type_number_to_die (type, array_die);
21729
21730 if (TREE_CODE (type) == VECTOR_TYPE)
21731 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21732
21733 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21734 if (is_fortran ()
21735 && TREE_CODE (type) == ARRAY_TYPE
21736 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21737 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21738 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21739
21740 #if 0
21741   /* We default the array ordering.  Debuggers will probably do the right
21742      things even if DW_AT_ordering is not present.  It's not even an issue
21743      until we start to get into multidimensional arrays anyway.  If a debugger
21744      is ever caught doing the Wrong Thing for multidimensional arrays,
21745      then we'll have to put the DW_AT_ordering attribute back in.  (But if
21746      and when we find out that we need to put these in, we will only do so
21747      for multidimensional arrays.)  */
21748 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21749 #endif
21750
21751 if (TREE_CODE (type) == VECTOR_TYPE)
21752 {
21753 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21754 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21755 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21756 add_bound_info (subrange_die, DW_AT_upper_bound,
21757 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21758 }
21759 else
21760 add_subscript_info (array_die, type, collapse_nested_arrays);
21761
21762 /* Add representation of the type of the elements of this array type and
21763 emit the corresponding DIE if we haven't done it already. */
21764 element_type = TREE_TYPE (type);
21765 if (collapse_nested_arrays)
21766 while (TREE_CODE (element_type) == ARRAY_TYPE)
21767 {
21768 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21769 break;
21770 element_type = TREE_TYPE (element_type);
21771 }
21772
21773 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21774 TREE_CODE (type) == ARRAY_TYPE
21775 && TYPE_REVERSE_STORAGE_ORDER (type),
21776 context_die);
21777
21778 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21779 if (TYPE_ARTIFICIAL (type))
21780 add_AT_flag (array_die, DW_AT_artificial, 1);
21781
21782 if (get_AT (array_die, DW_AT_name))
21783 add_pubtype (type, array_die);
21784
21785 add_alignment_attribute (array_die, type);
21786 }
21787
21788 /* This routine generates a DIE for an array with a hidden descriptor;
21789    the details are filled into *info by a langhook.  */
21790
21791 static void
21792 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21793 dw_die_ref context_die)
21794 {
21795 const dw_die_ref scope_die = scope_die_for (type, context_die);
21796 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21797 struct loc_descr_context context = { type, info->base_decl, NULL,
21798 false, false };
21799 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21800 int dim;
21801
21802 add_name_attribute (array_die, type_tag (type));
21803 equate_type_number_to_die (type, array_die);
21804
21805 if (info->ndimensions > 1)
21806 switch (info->ordering)
21807 {
21808 case array_descr_ordering_row_major:
21809 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21810 break;
21811 case array_descr_ordering_column_major:
21812 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21813 break;
21814 default:
21815 break;
21816 }
21817
21818 if (dwarf_version >= 3 || !dwarf_strict)
21819 {
21820 if (info->data_location)
21821 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21822 dw_scalar_form_exprloc, &context);
21823 if (info->associated)
21824 add_scalar_info (array_die, DW_AT_associated, info->associated,
21825 dw_scalar_form_constant
21826 | dw_scalar_form_exprloc
21827 | dw_scalar_form_reference, &context);
21828 if (info->allocated)
21829 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21830 dw_scalar_form_constant
21831 | dw_scalar_form_exprloc
21832 | dw_scalar_form_reference, &context);
21833 if (info->stride)
21834 {
21835 const enum dwarf_attribute attr
21836 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21837 const int forms
21838 = (info->stride_in_bits)
21839 ? dw_scalar_form_constant
21840 : (dw_scalar_form_constant
21841 | dw_scalar_form_exprloc
21842 | dw_scalar_form_reference);
21843
21844 add_scalar_info (array_die, attr, info->stride, forms, &context);
21845 }
21846 }
21847 if (dwarf_version >= 5)
21848 {
21849 if (info->rank)
21850 {
21851 add_scalar_info (array_die, DW_AT_rank, info->rank,
21852 dw_scalar_form_constant
21853 | dw_scalar_form_exprloc, &context);
21854 subrange_tag = DW_TAG_generic_subrange;
21855 context.placeholder_arg = true;
21856 }
21857 }
21858
21859 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21860
21861 for (dim = 0; dim < info->ndimensions; dim++)
21862 {
21863 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21864
21865 if (info->dimen[dim].bounds_type)
21866 add_type_attribute (subrange_die,
21867 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21868 false, context_die);
21869 if (info->dimen[dim].lower_bound)
21870 add_bound_info (subrange_die, DW_AT_lower_bound,
21871 info->dimen[dim].lower_bound, &context);
21872 if (info->dimen[dim].upper_bound)
21873 add_bound_info (subrange_die, DW_AT_upper_bound,
21874 info->dimen[dim].upper_bound, &context);
21875 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21876 add_scalar_info (subrange_die, DW_AT_byte_stride,
21877 info->dimen[dim].stride,
21878 dw_scalar_form_constant
21879 | dw_scalar_form_exprloc
21880 | dw_scalar_form_reference,
21881 &context);
21882 }
21883
21884 gen_type_die (info->element_type, context_die);
21885 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21886 TREE_CODE (type) == ARRAY_TYPE
21887 && TYPE_REVERSE_STORAGE_ORDER (type),
21888 context_die);
21889
21890 if (get_AT (array_die, DW_AT_name))
21891 add_pubtype (type, array_die);
21892
21893 add_alignment_attribute (array_die, type);
21894 }
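
/* Illustrative example only, not used by the compiler: for a Fortran
   allocatable array such as "real, allocatable :: a(:)", the langhook fills
   *info so that the DIE built above reads the array descriptor at run time,
   roughly

     DW_TAG_array_type
       DW_AT_data_location   (exprloc: load the data pointer)
       DW_AT_allocated       (exprloc: test the data pointer)
       DW_TAG_subrange_type
	 DW_AT_lower_bound   (exprloc: read from the descriptor)
	 DW_AT_upper_bound   (exprloc: read from the descriptor)  */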
21895
21896 #if 0
21897 static void
21898 gen_entry_point_die (tree decl, dw_die_ref context_die)
21899 {
21900 tree origin = decl_ultimate_origin (decl);
21901 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21902
21903 if (origin != NULL)
21904 add_abstract_origin_attribute (decl_die, origin);
21905 else
21906 {
21907       add_name_and_src_coords_attributes (decl_die, decl, false);
21908 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21909 TYPE_UNQUALIFIED, false, context_die);
21910 }
21911
21912 if (DECL_ABSTRACT_P (decl))
21913 equate_decl_number_to_die (decl, decl_die);
21914 else
21915 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21916 }
21917 #endif
21918
21919 /* Walk through the list of incomplete types again, trying once more to
21920 emit full debugging info for them. */
21921
21922 static void
21923 retry_incomplete_types (void)
21924 {
21925 set_early_dwarf s;
21926 int i;
21927
21928 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21929 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21930 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21931 vec_safe_truncate (incomplete_types, 0);
21932 }
21933
21934 /* Determine what tag to use for a record type. */
21935
21936 static enum dwarf_tag
21937 record_type_tag (tree type)
21938 {
21939 if (! lang_hooks.types.classify_record)
21940 return DW_TAG_structure_type;
21941
21942 switch (lang_hooks.types.classify_record (type))
21943 {
21944 case RECORD_IS_STRUCT:
21945 return DW_TAG_structure_type;
21946
21947 case RECORD_IS_CLASS:
21948 return DW_TAG_class_type;
21949
21950 case RECORD_IS_INTERFACE:
21951 if (dwarf_version >= 3 || !dwarf_strict)
21952 return DW_TAG_interface_type;
21953 return DW_TAG_structure_type;
21954
21955 default:
21956 gcc_unreachable ();
21957 }
21958 }
21959
21960 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21961 include all of the information about the enumeration values also. Each
21962 enumerated type name/value is listed as a child of the enumerated type
21963 DIE. */
21964
21965 static dw_die_ref
21966 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21967 {
21968 dw_die_ref type_die = lookup_type_die (type);
21969 dw_die_ref orig_type_die = type_die;
21970
21971 if (type_die == NULL)
21972 {
21973 type_die = new_die (DW_TAG_enumeration_type,
21974 scope_die_for (type, context_die), type);
21975 equate_type_number_to_die (type, type_die);
21976 add_name_attribute (type_die, type_tag (type));
21977 if ((dwarf_version >= 4 || !dwarf_strict)
21978 && ENUM_IS_SCOPED (type))
21979 add_AT_flag (type_die, DW_AT_enum_class, 1);
21980 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21981 add_AT_flag (type_die, DW_AT_declaration, 1);
21982 if (!dwarf_strict)
21983 add_AT_unsigned (type_die, DW_AT_encoding,
21984 TYPE_UNSIGNED (type)
21985 ? DW_ATE_unsigned
21986 : DW_ATE_signed);
21987 }
21988 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21989 return type_die;
21990 else
21991 remove_AT (type_die, DW_AT_declaration);
21992
21993 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21994 given enum type is incomplete, do not generate the DW_AT_byte_size
21995 attribute or the DW_AT_element_list attribute. */
21996 if (TYPE_SIZE (type))
21997 {
21998 tree link;
21999
22000 if (!ENUM_IS_OPAQUE (type))
22001 TREE_ASM_WRITTEN (type) = 1;
22002 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22003 add_byte_size_attribute (type_die, type);
22004 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22005 add_alignment_attribute (type_die, type);
22006 if ((dwarf_version >= 3 || !dwarf_strict)
22007 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22008 {
22009 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22010 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22011 context_die);
22012 }
22013 if (TYPE_STUB_DECL (type) != NULL_TREE)
22014 {
22015 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22016 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22017 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22018 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22019 }
22020
22021 /* If the first reference to this type was as the return type of an
22022 inline function, then it may not have a parent. Fix this now. */
22023 if (type_die->die_parent == NULL)
22024 add_child_die (scope_die_for (type, context_die), type_die);
22025
22026 for (link = TYPE_VALUES (type);
22027 link != NULL; link = TREE_CHAIN (link))
22028 {
22029 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22030 tree value = TREE_VALUE (link);
22031
22032 gcc_assert (!ENUM_IS_OPAQUE (type));
22033 add_name_attribute (enum_die,
22034 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22035
22036 if (TREE_CODE (value) == CONST_DECL)
22037 value = DECL_INITIAL (value);
22038
22039 if (simple_type_size_in_bits (TREE_TYPE (value))
22040 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22041 {
22042 	      /* For constant forms created by add_AT_unsigned, DWARF
22043 		 consumers (GDB, elfutils, etc.) always zero-extend
22044 		 the value.  Only when the actual value is negative
22045 do we need to use add_AT_int to generate a constant
22046 form that can represent negative values. */
22047 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22048 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22049 add_AT_unsigned (enum_die, DW_AT_const_value,
22050 (unsigned HOST_WIDE_INT) val);
22051 else
22052 add_AT_int (enum_die, DW_AT_const_value, val);
22053 }
22054 else
22055 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22056 that here. TODO: This should be re-worked to use correct
22057 signed/unsigned double tags for all cases. */
22058 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22059 }
22060
22061 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22062 if (TYPE_ARTIFICIAL (type)
22063 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22064 add_AT_flag (type_die, DW_AT_artificial, 1);
22065 }
22066 else
22067 add_AT_flag (type_die, DW_AT_declaration, 1);
22068
22069 add_pubtype (type, type_die);
22070
22071 return type_die;
22072 }
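
/* Illustrative sketch only, not used by the compiler: the shape of the DIE
   built by gen_enumeration_type_die for a scoped C++ enumeration.  The
   declaration below is hypothetical user code.  */
#if 0
/* The declaration below yields roughly

     DW_TAG_enumeration_type "color"
       DW_AT_enum_class  1                  (DWARF 4+ or non-strict)
       DW_AT_byte_size   1
       DW_AT_type        -> unsigned char   (DWARF 3+ or non-strict)
       DW_TAG_enumerator "red"    DW_AT_const_value 1
       DW_TAG_enumerator "green"  DW_AT_const_value 2  */
enum class color : unsigned char { red = 1, green = 2 };
#endif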
22073
22074 /* Generate a DIE to represent either a real live formal parameter decl or to
22075 represent just the type of some formal parameter position in some function
22076 type.
22077
22078 Note that this routine is a bit unusual because its argument may be a
22079 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22080 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22081 node. If it's the former then this function is being called to output a
22082 DIE to represent a formal parameter object (or some inlining thereof). If
22083 it's the latter, then this function is only being called to output a
22084 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22085 argument type of some subprogram type.
22086 If EMIT_NAME_P is true, name and source coordinate attributes
22087 are emitted. */
22088
22089 static dw_die_ref
22090 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22091 dw_die_ref context_die)
22092 {
22093 tree node_or_origin = node ? node : origin;
22094 tree ultimate_origin;
22095 dw_die_ref parm_die = NULL;
22096
22097 if (DECL_P (node_or_origin))
22098 {
22099 parm_die = lookup_decl_die (node);
22100
22101 /* If the contexts differ, we may not be talking about the same
22102 thing.
22103 ??? When in LTO the DIE parent is the "abstract" copy and the
22104 	 context_die is the specification "copy".  But this whole block
22105 	 should eventually no longer be needed.  */
22106 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22107 {
22108 if (!DECL_ABSTRACT_P (node))
22109 {
22110 /* This can happen when creating an inlined instance, in
22111 which case we need to create a new DIE that will get
22112 annotated with DW_AT_abstract_origin. */
22113 parm_die = NULL;
22114 }
22115 else
22116 gcc_unreachable ();
22117 }
22118
22119 if (parm_die && parm_die->die_parent == NULL)
22120 {
22121 /* Check that parm_die already has the right attributes that
22122 we would have added below. If any attributes are
22123 missing, fall through to add them. */
22124 if (! DECL_ABSTRACT_P (node_or_origin)
22125 && !get_AT (parm_die, DW_AT_location)
22126 && !get_AT (parm_die, DW_AT_const_value))
22127 /* We are missing location info, and are about to add it. */
22128 ;
22129 else
22130 {
22131 add_child_die (context_die, parm_die);
22132 return parm_die;
22133 }
22134 }
22135 }
22136
22137   /* If we have a previously generated DIE, use it, unless this is a
22138      concrete instance (origin != NULL), in which case we need a new
22139      DIE with a corresponding DW_AT_abstract_origin.  */
22140 bool reusing_die;
22141 if (parm_die && origin == NULL)
22142 reusing_die = true;
22143 else
22144 {
22145 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22146 reusing_die = false;
22147 }
22148
22149 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22150 {
22151 case tcc_declaration:
22152 ultimate_origin = decl_ultimate_origin (node_or_origin);
22153 if (node || ultimate_origin)
22154 origin = ultimate_origin;
22155
22156 if (reusing_die)
22157 goto add_location;
22158
22159 if (origin != NULL)
22160 add_abstract_origin_attribute (parm_die, origin);
22161 else if (emit_name_p)
22162 add_name_and_src_coords_attributes (parm_die, node);
22163 if (origin == NULL
22164 || (! DECL_ABSTRACT_P (node_or_origin)
22165 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22166 decl_function_context
22167 (node_or_origin))))
22168 {
22169 tree type = TREE_TYPE (node_or_origin);
22170 if (decl_by_reference_p (node_or_origin))
22171 add_type_attribute (parm_die, TREE_TYPE (type),
22172 TYPE_UNQUALIFIED,
22173 false, context_die);
22174 else
22175 add_type_attribute (parm_die, type,
22176 decl_quals (node_or_origin),
22177 false, context_die);
22178 }
22179 if (origin == NULL && DECL_ARTIFICIAL (node))
22180 add_AT_flag (parm_die, DW_AT_artificial, 1);
22181 add_location:
22182 if (node && node != origin)
22183 equate_decl_number_to_die (node, parm_die);
22184 if (! DECL_ABSTRACT_P (node_or_origin))
22185 add_location_or_const_value_attribute (parm_die, node_or_origin,
22186 node == NULL);
22187
22188 break;
22189
22190 case tcc_type:
22191 /* We were called with some kind of a ..._TYPE node. */
22192 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22193 context_die);
22194 break;
22195
22196 default:
22197 gcc_unreachable ();
22198 }
22199
22200 return parm_die;
22201 }
22202
22203 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22204 children DW_TAG_formal_parameter DIEs representing the arguments of the
22205 parameter pack.
22206
22207 PARM_PACK must be a function parameter pack.
22208 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22209 must point to the subsequent arguments of the function PACK_ARG belongs to.
22210 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22211    If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22212 following the last one for which a DIE was generated. */
22213
22214 static dw_die_ref
22215 gen_formal_parameter_pack_die (tree parm_pack,
22216 tree pack_arg,
22217 dw_die_ref subr_die,
22218 tree *next_arg)
22219 {
22220 tree arg;
22221 dw_die_ref parm_pack_die;
22222
22223 gcc_assert (parm_pack
22224 && lang_hooks.function_parameter_pack_p (parm_pack)
22225 && subr_die);
22226
22227 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22228 add_src_coords_attributes (parm_pack_die, parm_pack);
22229
22230 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22231 {
22232 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22233 parm_pack))
22234 break;
22235 gen_formal_parameter_die (arg, NULL,
22236 false /* Don't emit name attribute. */,
22237 parm_pack_die);
22238 }
22239 if (next_arg)
22240 *next_arg = arg;
22241 return parm_pack_die;
22242 }
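
/* Illustrative sketch only, not used by the compiler: when a variadic C++
   template is instantiated, the arguments expanded from the pack are grouped
   under one DW_TAG_GNU_formal_parameter_pack.  The template below is
   hypothetical user code.  */
#if 0
/* For an instantiation such as log_all<int, double>, the subprogram DIE
   contains roughly

     DW_TAG_GNU_formal_parameter_pack
       DW_TAG_formal_parameter   (the int argument)
       DW_TAG_formal_parameter   (the double argument)

   with the children left nameless because gen_formal_parameter_die is
   called with EMIT_NAME_P false for each expanded argument.  */
template <typename... T> void log_all (T... args);
#endif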
22243
22244 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22245 at the end of an (ANSI prototyped) formal parameters list. */
22246
22247 static void
22248 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22249 {
22250 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22251 }
22252
22253 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22254 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22255 parameters as specified in some function type specification (except for
22256 those which appear as part of a function *definition*). */
22257
22258 static void
22259 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22260 {
22261 tree link;
22262 tree formal_type = NULL;
22263 tree first_parm_type;
22264 tree arg;
22265
22266 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22267 {
22268 arg = DECL_ARGUMENTS (function_or_method_type);
22269 function_or_method_type = TREE_TYPE (function_or_method_type);
22270 }
22271 else
22272 arg = NULL_TREE;
22273
22274 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22275
22276 /* Make our first pass over the list of formal parameter types and output a
22277 DW_TAG_formal_parameter DIE for each one. */
22278 for (link = first_parm_type; link; )
22279 {
22280 dw_die_ref parm_die;
22281
22282 formal_type = TREE_VALUE (link);
22283 if (formal_type == void_type_node)
22284 break;
22285
22286 /* Output a (nameless) DIE to represent the formal parameter itself. */
22287 parm_die = gen_formal_parameter_die (formal_type, NULL,
22288 true /* Emit name attribute. */,
22289 context_die);
22290 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22291 && link == first_parm_type)
22292 {
22293 add_AT_flag (parm_die, DW_AT_artificial, 1);
22294 if (dwarf_version >= 3 || !dwarf_strict)
22295 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22296 }
22297 else if (arg && DECL_ARTIFICIAL (arg))
22298 add_AT_flag (parm_die, DW_AT_artificial, 1);
22299
22300 link = TREE_CHAIN (link);
22301 if (arg)
22302 arg = DECL_CHAIN (arg);
22303 }
22304
22305 /* If this function type has an ellipsis, add a
22306 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22307 if (formal_type != void_type_node)
22308 gen_unspecified_parameters_die (function_or_method_type, context_die);
22309
22310 /* Make our second (and final) pass over the list of formal parameter types
22311 and output DIEs to represent those types (as necessary). */
22312 for (link = TYPE_ARG_TYPES (function_or_method_type);
22313 link && TREE_VALUE (link);
22314 link = TREE_CHAIN (link))
22315 gen_type_die (TREE_VALUE (link), context_die);
22316 }
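
/* Illustrative sketch only, not used by the compiler: the parameter DIEs
   produced by gen_formal_types_die for a prototyped function type with an
   ellipsis.  The typedef below is hypothetical user code.  */
#if 0
/* For the function type below, the enclosing DW_TAG_subroutine_type gets one
   nameless DW_TAG_formal_parameter per listed type, followed by a
   DW_TAG_unspecified_parameters DIE because the type list does not end in
   void:

     DW_TAG_subroutine_type
       DW_TAG_formal_parameter      DW_AT_type -> int
       DW_TAG_formal_parameter      DW_AT_type -> char *
       DW_TAG_unspecified_parameters

   For a METHOD_TYPE the first parameter additionally gets DW_AT_artificial 1
   and, for DWARF 3+ or non-strict output, the parent gets
   DW_AT_object_pointer.  */
typedef int handler_fn (int, char *, ...);
#endif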
22317
22318 /* We want to generate the DIE for TYPE so that we can generate the
22319 die for MEMBER, which has been defined; we will need to refer back
22320 to the member declaration nested within TYPE. If we're trying to
22321 generate minimal debug info for TYPE, processing TYPE won't do the
22322 trick; we need to attach the member declaration by hand. */
22323
22324 static void
22325 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22326 {
22327 gen_type_die (type, context_die);
22328
22329 /* If we're trying to avoid duplicate debug info, we may not have
22330 emitted the member decl for this function. Emit it now. */
22331 if (TYPE_STUB_DECL (type)
22332 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22333 && ! lookup_decl_die (member))
22334 {
22335 dw_die_ref type_die;
22336 gcc_assert (!decl_ultimate_origin (member));
22337
22338 type_die = lookup_type_die_strip_naming_typedef (type);
22339 if (TREE_CODE (member) == FUNCTION_DECL)
22340 gen_subprogram_die (member, type_die);
22341 else if (TREE_CODE (member) == FIELD_DECL)
22342 {
22343 /* Ignore the nameless fields that are used to skip bits but handle
22344 C++ anonymous unions and structs. */
22345 if (DECL_NAME (member) != NULL_TREE
22346 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22347 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22348 {
22349 struct vlr_context vlr_ctx = {
22350 DECL_CONTEXT (member), /* struct_type */
22351 NULL_TREE /* variant_part_offset */
22352 };
22353 gen_type_die (member_declared_type (member), type_die);
22354 gen_field_die (member, &vlr_ctx, type_die);
22355 }
22356 }
22357 else
22358 gen_variable_die (member, NULL_TREE, type_die);
22359 }
22360 }
22361 \f
22362 /* Forward declare this function, because it is mutually recursive
22363    with its set_block_* pairing function.  */
22364 static void set_decl_origin_self (tree);
22365
22366 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22367 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22368 that it points to the node itself, thus indicating that the node is its
22369 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22370 the given node is NULL, recursively descend the decl/block tree which
22371 it is the root of, and for each other ..._DECL or BLOCK node contained
22372 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22373 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22374 values to point to themselves. */
22375
22376 static void
22377 set_block_origin_self (tree stmt)
22378 {
22379 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22380 {
22381 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22382
22383 {
22384 tree local_decl;
22385
22386 for (local_decl = BLOCK_VARS (stmt);
22387 local_decl != NULL_TREE;
22388 local_decl = DECL_CHAIN (local_decl))
22389 /* Do not recurse on nested functions since the inlining status
22390 of parent and child can be different as per the DWARF spec. */
22391 if (TREE_CODE (local_decl) != FUNCTION_DECL
22392 && !DECL_EXTERNAL (local_decl))
22393 set_decl_origin_self (local_decl);
22394 }
22395
22396 {
22397 tree subblock;
22398
22399 for (subblock = BLOCK_SUBBLOCKS (stmt);
22400 subblock != NULL_TREE;
22401 subblock = BLOCK_CHAIN (subblock))
22402 set_block_origin_self (subblock); /* Recurse. */
22403 }
22404 }
22405 }
22406
22407 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22408 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22409    node so that it points to the node itself, thus indicating that the
22410    node represents its own (abstract) origin.  Additionally, if the
22411    DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22412    the decl/block tree of which the given node is the root, and for
22413 each other ..._DECL or BLOCK node contained therein whose
22414 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22415 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22416 point to themselves. */
22417
22418 static void
22419 set_decl_origin_self (tree decl)
22420 {
22421 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22422 {
22423 DECL_ABSTRACT_ORIGIN (decl) = decl;
22424 if (TREE_CODE (decl) == FUNCTION_DECL)
22425 {
22426 tree arg;
22427
22428 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22429 DECL_ABSTRACT_ORIGIN (arg) = arg;
22430 if (DECL_INITIAL (decl) != NULL_TREE
22431 && DECL_INITIAL (decl) != error_mark_node)
22432 set_block_origin_self (DECL_INITIAL (decl));
22433 }
22434 }
22435 }
22436 \f
22437 /* Mark the early DIE for DECL as the abstract instance. */
22438
22439 static void
22440 dwarf2out_abstract_function (tree decl)
22441 {
22442 dw_die_ref old_die;
22443
22444 /* Make sure we have the actual abstract inline, not a clone. */
22445 decl = DECL_ORIGIN (decl);
22446
22447 if (DECL_IGNORED_P (decl))
22448 return;
22449
22450 old_die = lookup_decl_die (decl);
22451   /* With early debug we always have an old DIE unless we are in LTO
22452      and the user did not compile with debug info but only linked with it. */
22453 if (in_lto_p && ! old_die)
22454 return;
22455 gcc_assert (old_die != NULL);
22456 if (get_AT (old_die, DW_AT_inline)
22457 || get_AT (old_die, DW_AT_abstract_origin))
22458 /* We've already generated the abstract instance. */
22459 return;
22460
22461 /* Go ahead and put DW_AT_inline on the DIE. */
22462 if (DECL_DECLARED_INLINE_P (decl))
22463 {
22464 if (cgraph_function_possibly_inlined_p (decl))
22465 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22466 else
22467 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22468 }
22469 else
22470 {
22471 if (cgraph_function_possibly_inlined_p (decl))
22472 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22473 else
22474 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22475 }
22476
22477 if (DECL_DECLARED_INLINE_P (decl)
22478 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22479 add_AT_flag (old_die, DW_AT_artificial, 1);
22480
22481 set_decl_origin_self (decl);
22482 }
22483
22484 /* Helper function of premark_used_types() which gets called through
22485 htab_traverse.
22486
22487    Marks the DIE of the given TYPE as perennial, so it never gets
22488    marked as unused by prune_unused_types. */
22489
22490 bool
22491 premark_used_types_helper (tree const &type, void *)
22492 {
22493 dw_die_ref die;
22494
22495 die = lookup_type_die (type);
22496 if (die != NULL)
22497 die->die_perennial_p = 1;
22498 return true;
22499 }
22500
22501 /* Helper function of premark_types_used_by_global_vars which gets called
22502 through htab_traverse.
22503
22504 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22505 marked as unused by prune_unused_types. The DIE of the type is marked
22506 only if the global variable using the type will actually be emitted. */
22507
22508 int
22509 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22510 void *)
22511 {
22512 struct types_used_by_vars_entry *entry;
22513 dw_die_ref die;
22514
22515 entry = (struct types_used_by_vars_entry *) *slot;
22516 gcc_assert (entry->type != NULL
22517 && entry->var_decl != NULL);
22518 die = lookup_type_die (entry->type);
22519 if (die)
22520 {
22521 /* Ask cgraph if the global variable really is to be emitted.
22522 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22523 varpool_node *node = varpool_node::get (entry->var_decl);
22524 if (node && node->definition)
22525 {
22526 die->die_perennial_p = 1;
22527 /* Keep the parent DIEs as well. */
22528 while ((die = die->die_parent) && die->die_perennial_p == 0)
22529 die->die_perennial_p = 1;
22530 }
22531 }
22532 return 1;
22533 }
22534
22535 /* Mark all members of used_types_hash as perennial. */
22536
22537 static void
22538 premark_used_types (struct function *fun)
22539 {
22540 if (fun && fun->used_types_hash)
22541 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22542 }
22543
22544 /* Mark all members of types_used_by_vars_entry as perennial. */
22545
22546 static void
22547 premark_types_used_by_global_vars (void)
22548 {
22549 if (types_used_by_vars_hash)
22550 types_used_by_vars_hash
22551 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22552 }
22553
22554 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22555 for CA_LOC call arg loc node. */
22556
22557 static dw_die_ref
22558 gen_call_site_die (tree decl, dw_die_ref subr_die,
22559 struct call_arg_loc_node *ca_loc)
22560 {
22561 dw_die_ref stmt_die = NULL, die;
22562 tree block = ca_loc->block;
22563
22564 while (block
22565 && block != DECL_INITIAL (decl)
22566 && TREE_CODE (block) == BLOCK)
22567 {
22568 stmt_die = BLOCK_DIE (block);
22569 if (stmt_die)
22570 break;
22571 block = BLOCK_SUPERCONTEXT (block);
22572 }
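  /* If no enclosing BLOCK has had a DIE generated yet, attach the
     call-site DIE directly to the subprogram's own DIE.  */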
22573 if (stmt_die == NULL)
22574 stmt_die = subr_die;
22575 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22576 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22577 if (ca_loc->tail_call_p)
22578 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22579 if (ca_loc->symbol_ref)
22580 {
22581 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22582 if (tdie)
22583 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22584 else
22585 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22586 false);
22587 }
22588 return die;
22589 }
22590
22591 /* Generate a DIE to represent a declared function (either file-scope or
22592 block-local). */
22593
22594 static void
22595 gen_subprogram_die (tree decl, dw_die_ref context_die)
22596 {
22597 tree origin = decl_ultimate_origin (decl);
22598 dw_die_ref subr_die;
22599 dw_die_ref old_die = lookup_decl_die (decl);
22600
22601 /* This function gets called multiple times for different stages of
22602 the debug process. For example, for func() in this code:
22603
22604 namespace S
22605 {
22606 void func() { ... }
22607 }
22608
22609 ...we get called 4 times. Twice in early debug and twice in
22610 late debug:
22611
22612 Early debug
22613 -----------
22614
22615 1. Once while generating func() within the namespace. This is
22616 the declaration. The declaration bit below is set, as the
22617 context is the namespace.
22618
22619 A new DIE will be generated with DW_AT_declaration set.
22620
22621 2. Once for func() itself. This is the specification. The
22622 declaration bit below is clear as the context is the CU.
22623
22624 We will use the cached DIE from (1) to create a new DIE with
22625 DW_AT_specification pointing to the declaration in (1).
22626
22627 Late debug via rest_of_handle_final()
22628 -------------------------------------
22629
22630 3. Once generating func() within the namespace. This is also the
22631 declaration, as in (1), but this time we will early exit below
22632 as we have a cached DIE and a declaration needs no additional
22633 annotations (no locations), as the source declaration line
22634 info is enough.
22635
22636 4. Once for func() itself. As in (2), this is the specification,
22637 but this time we will re-use the cached DIE, and just annotate
22638 it with the location information that should now be available.
22639
22640 For something without namespaces, but with abstract instances, we
22641      are also called multiple times:
22642
22643 class Base
22644 {
22645 public:
22646 Base (); // constructor declaration (1)
22647 };
22648
22649 Base::Base () { } // constructor specification (2)
22650
22651 Early debug
22652 -----------
22653
22654 1. Once for the Base() constructor by virtue of it being a
22655 member of the Base class. This is done via
22656 rest_of_type_compilation.
22657
22658 This is a declaration, so a new DIE will be created with
22659 DW_AT_declaration.
22660
22661 2. Once for the Base() constructor definition, but this time
22662 while generating the abstract instance of the base
22663 constructor (__base_ctor) which is being generated via early
22664 debug of reachable functions.
22665
22666 Even though we have a cached version of the declaration (1),
22667 we will create a DW_AT_specification of the declaration DIE
22668 in (1).
22669
22670 3. Once for the __base_ctor itself, but this time, we generate
22671         a DW_AT_abstract_origin version of the DW_AT_specification in
22672 (2).
22673
22674 Late debug via rest_of_handle_final
22675 -----------------------------------
22676
22677 4. One final time for the __base_ctor (which will have a cached
22678         DIE with DW_AT_abstract_origin created in (3)). This time,
22679 we will just annotate the location information now
22680 available.
22681 */
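  /* This DIE is a mere declaration if DECL is not the function we are
     currently compiling, or if the context is a class or namespace
     scope.  */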
22682 int declaration = (current_function_decl != decl
22683 || class_or_namespace_scope_p (context_die));
22684
22685 /* A declaration that has been previously dumped needs no
22686 additional information. */
22687 if (old_die && declaration)
22688 return;
22689
22690 /* Now that the C++ front end lazily declares artificial member fns, we
22691 might need to retrofit the declaration into its class. */
22692 if (!declaration && !origin && !old_die
22693 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22694 && !class_or_namespace_scope_p (context_die)
22695 && debug_info_level > DINFO_LEVEL_TERSE)
22696 old_die = force_decl_die (decl);
22697
22698 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22699 if (origin != NULL)
22700 {
22701 gcc_assert (!declaration || local_scope_p (context_die));
22702
22703       /* Fix up die_parent for the abstract instance of a nested
22704 inline function. */
22705 if (old_die && old_die->die_parent == NULL)
22706 add_child_die (context_die, old_die);
22707
22708 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22709 {
22710 /* If we have a DW_AT_abstract_origin we have a working
22711 cached version. */
22712 subr_die = old_die;
22713 }
22714 else
22715 {
22716 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22717 add_abstract_origin_attribute (subr_die, origin);
22718 /* This is where the actual code for a cloned function is.
22719 Let's emit linkage name attribute for it. This helps
22720          debuggers to, e.g., set breakpoints into
22721 constructors/destructors when the user asks "break
22722 K::K". */
22723 add_linkage_name (subr_die, decl);
22724 }
22725 }
22726 /* A cached copy, possibly from early dwarf generation. Reuse as
22727 much as possible. */
22728 else if (old_die)
22729 {
22730 if (!get_AT_flag (old_die, DW_AT_declaration)
22731 /* We can have a normal definition following an inline one in the
22732 case of redefinition of GNU C extern inlines.
22733 It seems reasonable to use AT_specification in this case. */
22734 && !get_AT (old_die, DW_AT_inline))
22735 {
22736 /* Detect and ignore this case, where we are trying to output
22737 something we have already output. */
22738 if (get_AT (old_die, DW_AT_low_pc)
22739 || get_AT (old_die, DW_AT_ranges))
22740 return;
22741
22742 /* If we have no location information, this must be a
22743 partially generated DIE from early dwarf generation.
22744 Fall through and generate it. */
22745 }
22746
22747 /* If the definition comes from the same place as the declaration,
22748 maybe use the old DIE. We always want the DIE for this function
22749 that has the *_pc attributes to be under comp_unit_die so the
22750 debugger can find it. We also need to do this for abstract
22751 instances of inlines, since the spec requires the out-of-line copy
22752 to have the same parent. For local class methods, this doesn't
22753 apply; we just use the old DIE. */
22754 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22755 struct dwarf_file_data * file_index = lookup_filename (s.file);
22756 if (((is_unit_die (old_die->die_parent)
22757 /* This condition fixes the inconsistency/ICE with the
22758 following Fortran test (or some derivative thereof) while
22759 building libgfortran:
22760
22761 module some_m
22762 contains
22763 logical function funky (FLAG)
22764 funky = .true.
22765 end function
22766 end module
22767 */
22768 || (old_die->die_parent
22769 && old_die->die_parent->die_tag == DW_TAG_module)
22770 || local_scope_p (old_die->die_parent)
22771 || context_die == NULL)
22772 && (DECL_ARTIFICIAL (decl)
22773 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22774 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22775 == (unsigned) s.line)
22776 && (!debug_column_info
22777 || s.column == 0
22778 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22779 == (unsigned) s.column)))))
22780 /* With LTO if there's an abstract instance for
22781 the old DIE, this is a concrete instance and
22782 thus re-use the DIE. */
22783 || get_AT (old_die, DW_AT_abstract_origin))
22784 {
22785 subr_die = old_die;
22786
22787 /* Clear out the declaration attribute, but leave the
22788 parameters so they can be augmented with location
22789 information later. Unless this was a declaration, in
22790 which case, wipe out the nameless parameters and recreate
22791 them further down. */
22792 if (remove_AT (subr_die, DW_AT_declaration))
22793 {
22795 remove_AT (subr_die, DW_AT_object_pointer);
22796 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22797 }
22798 }
22799 /* Make a specification pointing to the previously built
22800 declaration. */
22801 else
22802 {
22803 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22804 add_AT_specification (subr_die, old_die);
22805 add_pubname (decl, subr_die);
22806 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22807 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22808 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22809 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22810 if (debug_column_info
22811 && s.column
22812 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22813 != (unsigned) s.column))
22814 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22815
22816 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22817 emit the real type on the definition die. */
22818 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22819 {
22820 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22821 if (die == auto_die || die == decltype_auto_die)
22822 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22823 TYPE_UNQUALIFIED, false, context_die);
22824 }
22825
22826 /* When we process the method declaration, we haven't seen
22827 the out-of-class defaulted definition yet, so we have to
22828 recheck now. */
22829 if ((dwarf_version >= 5 || ! dwarf_strict)
22830 && !get_AT (subr_die, DW_AT_defaulted))
22831 {
22832 int defaulted
22833 = lang_hooks.decls.decl_dwarf_attribute (decl,
22834 DW_AT_defaulted);
22835 if (defaulted != -1)
22836 {
22837 /* Other values must have been handled before. */
22838 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22839 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22840 }
22841 }
22842 }
22843 }
22844 /* Create a fresh DIE for anything else. */
22845 else
22846 {
22847 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22848
22849 if (TREE_PUBLIC (decl))
22850 add_AT_flag (subr_die, DW_AT_external, 1);
22851
22852 add_name_and_src_coords_attributes (subr_die, decl);
22853 add_pubname (decl, subr_die);
22854 if (debug_info_level > DINFO_LEVEL_TERSE)
22855 {
22856 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22857 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22858 TYPE_UNQUALIFIED, false, context_die);
22859 }
22860
22861 add_pure_or_virtual_attribute (subr_die, decl);
22862 if (DECL_ARTIFICIAL (decl))
22863 add_AT_flag (subr_die, DW_AT_artificial, 1);
22864
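  /* On a FUNCTION_DECL, TREE_THIS_VOLATILE means the function does not
     return (e.g. it was declared noreturn); DWARF 5 expresses this with
     DW_AT_noreturn.  */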
22865 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22866 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22867
22868 add_alignment_attribute (subr_die, decl);
22869
22870 add_accessibility_attribute (subr_die, decl);
22871 }
22872
22873 /* Unless we have an existing non-declaration DIE, equate the new
22874 DIE. */
22875 if (!old_die || is_declaration_die (old_die))
22876 equate_decl_number_to_die (decl, subr_die);
22877
22878 if (declaration)
22879 {
22880 if (!old_die || !get_AT (old_die, DW_AT_inline))
22881 {
22882 add_AT_flag (subr_die, DW_AT_declaration, 1);
22883
22884 /* If this is an explicit function declaration then generate
22885 a DW_AT_explicit attribute. */
22886 if ((dwarf_version >= 3 || !dwarf_strict)
22887 && lang_hooks.decls.decl_dwarf_attribute (decl,
22888 DW_AT_explicit) == 1)
22889 add_AT_flag (subr_die, DW_AT_explicit, 1);
22890
22891 /* If this is a C++11 deleted special function member then generate
22892 a DW_AT_deleted attribute. */
22893 if ((dwarf_version >= 5 || !dwarf_strict)
22894 && lang_hooks.decls.decl_dwarf_attribute (decl,
22895 DW_AT_deleted) == 1)
22896 add_AT_flag (subr_die, DW_AT_deleted, 1);
22897
22898 /* If this is a C++11 defaulted special function member then
22899 generate a DW_AT_defaulted attribute. */
22900 if (dwarf_version >= 5 || !dwarf_strict)
22901 {
22902 int defaulted
22903 = lang_hooks.decls.decl_dwarf_attribute (decl,
22904 DW_AT_defaulted);
22905 if (defaulted != -1)
22906 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22907 }
22908
22909 /* If this is a C++11 non-static member function with & ref-qualifier
22910 then generate a DW_AT_reference attribute. */
22911 if ((dwarf_version >= 5 || !dwarf_strict)
22912 && lang_hooks.decls.decl_dwarf_attribute (decl,
22913 DW_AT_reference) == 1)
22914 add_AT_flag (subr_die, DW_AT_reference, 1);
22915
22916 /* If this is a C++11 non-static member function with &&
22917 ref-qualifier then generate a DW_AT_reference attribute. */
22918 if ((dwarf_version >= 5 || !dwarf_strict)
22919 && lang_hooks.decls.decl_dwarf_attribute (decl,
22920 DW_AT_rvalue_reference)
22921 == 1)
22922 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22923 }
22924 }
22925   /* For non-DECL_EXTERNALs, if range information is available, fill
22926 the DIE with it. */
22927 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22928 {
22929 HOST_WIDE_INT cfa_fb_offset;
22930
22931 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22932
22933 if (!crtl->has_bb_partition)
22934 {
22935 dw_fde_ref fde = fun->fde;
22936 if (fde->dw_fde_begin)
22937 {
22938 /* We have already generated the labels. */
22939 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22940 fde->dw_fde_end, false);
22941 }
22942 else
22943 {
22944 /* Create start/end labels and add the range. */
22945 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22946 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22947 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22948 current_function_funcdef_no);
22949 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22950 current_function_funcdef_no);
22951 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22952 false);
22953 }
22954
22955 #if VMS_DEBUGGING_INFO
22956 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22957 Section 2.3 Prologue and Epilogue Attributes:
22958 When a breakpoint is set on entry to a function, it is generally
22959 desirable for execution to be suspended, not on the very first
22960 instruction of the function, but rather at a point after the
22961 function's frame has been set up, after any language defined local
22962 declaration processing has been completed, and before execution of
22963 the first statement of the function begins. Debuggers generally
22964 cannot properly determine where this point is. Similarly for a
22965 breakpoint set on exit from a function. The prologue and epilogue
22966 attributes allow a compiler to communicate the location(s) to use. */
22967
22968 {
22969 if (fde->dw_fde_vms_end_prologue)
22970 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22971 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22972
22973 if (fde->dw_fde_vms_begin_epilogue)
22974 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22975 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22976 }
22977 #endif
22978
22979 }
22980 else
22981 {
22982 /* Generate pubnames entries for the split function code ranges. */
22983 dw_fde_ref fde = fun->fde;
22984
22985 if (fde->dw_fde_second_begin)
22986 {
22987 if (dwarf_version >= 3 || !dwarf_strict)
22988 {
22989 /* We should use ranges for non-contiguous code section
22990 addresses. Use the actual code range for the initial
22991 section, since the HOT/COLD labels might precede an
22992 alignment offset. */
22993 bool range_list_added = false;
22994 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22995 fde->dw_fde_end, &range_list_added,
22996 false);
22997 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22998 fde->dw_fde_second_end,
22999 &range_list_added, false);
23000 if (range_list_added)
23001 add_ranges (NULL);
23002 }
23003 else
23004 {
23005           /* There is no real support in DW2 for this, so we make
23006              a work-around. First, emit the pub name for the segment
23007              containing the function label. Then make and emit a
23008              simplified subprogram DIE for the second segment with the
23009              name prefixed by __second_sect_of_. We use the same
23010              linkage name for the second DIE so that gdb will find both
23011              sections when given "b foo". */
23012 const char *name = NULL;
23013 tree decl_name = DECL_NAME (decl);
23014 dw_die_ref seg_die;
23015
23016 /* Do the 'primary' section. */
23017 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23018 fde->dw_fde_end, false);
23019
23020 /* Build a minimal DIE for the secondary section. */
23021 seg_die = new_die (DW_TAG_subprogram,
23022 subr_die->die_parent, decl);
23023
23024 if (TREE_PUBLIC (decl))
23025 add_AT_flag (seg_die, DW_AT_external, 1);
23026
23027 if (decl_name != NULL
23028 && IDENTIFIER_POINTER (decl_name) != NULL)
23029 {
23030 name = dwarf2_name (decl, 1);
23031 if (! DECL_ARTIFICIAL (decl))
23032 add_src_coords_attributes (seg_die, decl);
23033
23034 add_linkage_name (seg_die, decl);
23035 }
23036 gcc_assert (name != NULL);
23037 add_pure_or_virtual_attribute (seg_die, decl);
23038 if (DECL_ARTIFICIAL (decl))
23039 add_AT_flag (seg_die, DW_AT_artificial, 1);
23040
23041 name = concat ("__second_sect_of_", name, NULL);
23042 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23043 fde->dw_fde_second_end, false);
23044 add_name_attribute (seg_die, name);
23045 if (want_pubnames ())
23046 add_pubname_string (name, seg_die);
23047 }
23048 }
23049 else
23050 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23051 false);
23052 }
23053
23054 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23055
23056 /* We define the "frame base" as the function's CFA. This is more
23057 convenient for several reasons: (1) It's stable across the prologue
23058 and epilogue, which makes it better than just a frame pointer,
23059 (2) With dwarf3, there exists a one-byte encoding that allows us
23060 to reference the .debug_frame data by proxy, but failing that,
23061 (3) We can at least reuse the code inspection and interpretation
23062 code that determines the CFA position at various points in the
23063 function. */
23064 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23065 {
23066 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23067 add_AT_loc (subr_die, DW_AT_frame_base, op);
23068 }
23069 else
23070 {
23071 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23072 if (list->dw_loc_next)
23073 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23074 else
23075 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23076 }
23077
23078 /* Compute a displacement from the "steady-state frame pointer" to
23079 the CFA. The former is what all stack slots and argument slots
23080 will reference in the rtl; the latter is what we've told the
23081 debugger about. We'll need to adjust all frame_base references
23082 by this displacement. */
23083 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23084
23085 if (fun->static_chain_decl)
23086 {
23087 /* DWARF requires here a location expression that computes the
23088 address of the enclosing subprogram's frame base. The machinery
23089 in tree-nested.c is supposed to store this specific address in the
23090 last field of the FRAME record. */
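      /* Illustrative sketch (not taken from the sources):

           int outer (int x)
           {
             int inner (void) { return x; }
             return inner ();
           }

         Here INNER receives a static chain, and the DW_AT_static_link
         expression on its DIE lets the debugger compute OUTER's frame
         base, so that X can be found.  */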
23091 const tree frame_type
23092 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23093 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23094
23095 tree fb_expr
23096 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23097 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23098 fb_expr, fb_decl, NULL_TREE);
23099
23100 add_AT_location_description (subr_die, DW_AT_static_link,
23101 loc_list_from_tree (fb_expr, 0, NULL));
23102 }
23103
23104 resolve_variable_values ();
23105 }
23106
23107   /* Generate child dies for template parameters. */
23108 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23109 gen_generic_params_dies (decl);
23110
23111 /* Now output descriptions of the arguments for this function. This gets
23112 (unnecessarily?) complex because of the fact that the DECL_ARGUMENT list
23113 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23114 `...' at the end of the formal parameter list. In order to find out if
23115 there was a trailing ellipsis or not, we must instead look at the type
23116 associated with the FUNCTION_DECL. This will be a node of type
23117 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23118 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23119 an ellipsis at the end. */
23120
23121 /* In the case where we are describing a mere function declaration, all we
23122 need to do here (and all we *can* do here) is to describe the *types* of
23123 its formal parameters. */
23124 if (debug_info_level <= DINFO_LEVEL_TERSE)
23125 ;
23126 else if (declaration)
23127 gen_formal_types_die (decl, subr_die);
23128 else
23129 {
23130 /* Generate DIEs to represent all known formal parameters. */
23131 tree parm = DECL_ARGUMENTS (decl);
23132 tree generic_decl = early_dwarf
23133 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23134 tree generic_decl_parm = generic_decl
23135 ? DECL_ARGUMENTS (generic_decl)
23136 : NULL;
23137
23138 /* Now we want to walk the list of parameters of the function and
23139 emit their relevant DIEs.
23140
23141 We consider the case of DECL being an instance of a generic function
23142 as well as it being a normal function.
23143
23144 If DECL is an instance of a generic function we walk the
23145 parameters of the generic function declaration _and_ the parameters of
23146 DECL itself. This is useful because we want to emit specific DIEs for
23147 function parameter packs and those are declared as part of the
23148 generic function declaration. In that particular case,
23149 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23150          That DIE has child DIEs representing the set of arguments
23151          of the pack. Note that the set of pack arguments can be empty.
23152          In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23153          child DIEs.
23154
23155 Otherwise, we just consider the parameters of DECL. */
23156 while (generic_decl_parm || parm)
23157 {
23158 if (generic_decl_parm
23159 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23160 gen_formal_parameter_pack_die (generic_decl_parm,
23161 parm, subr_die,
23162 &parm);
23163 else if (parm)
23164 {
23165 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23166
23167 if (early_dwarf
23168 && parm == DECL_ARGUMENTS (decl)
23169 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23170 && parm_die
23171 && (dwarf_version >= 3 || !dwarf_strict))
23172 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23173
23174 parm = DECL_CHAIN (parm);
23175 }
23176 else if (parm)
23177 parm = DECL_CHAIN (parm);
23178
23179 if (generic_decl_parm)
23180 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23181 }
23182
23183       /* Decide whether we need an unspecified_parameters DIE at the end.
23184          There are 2 more cases to do this for: 1) the ANSI ... declaration -
23185          this is detectable when the end of the arg list is not a
23186          void_type_node; 2) an unprototyped function declaration (not a
23187          definition). This just means that we have no info about the
23188          parameters at all. */
23189 if (early_dwarf)
23190 {
23191 if (prototype_p (TREE_TYPE (decl)))
23192 {
23193             /* This is the prototyped case; check for a trailing ellipsis. */
23194 if (stdarg_p (TREE_TYPE (decl)))
23195 gen_unspecified_parameters_die (decl, subr_die);
23196 }
23197 else if (DECL_INITIAL (decl) == NULL_TREE)
23198 gen_unspecified_parameters_die (decl, subr_die);
23199 }
23200 }
23201
23202 if (subr_die != old_die)
23203 /* Add the calling convention attribute if requested. */
23204 add_calling_convention_attribute (subr_die, decl);
23205
23206 /* Output Dwarf info for all of the stuff within the body of the function
23207 (if it has one - it may be just a declaration).
23208
23209 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23210 a function. This BLOCK actually represents the outermost binding contour
23211 for the function, i.e. the contour in which the function's formal
23212      parameters and labels get declared. Curiously, it appears that the front
23213      end doesn't actually put the PARM_DECL nodes for the current function onto
23214      the BLOCK_VARS list for this outer scope; they are strung off of the
23215      DECL_ARGUMENTS list for the function instead.
23216
23217 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23218 the LABEL_DECL nodes for the function however, and we output DWARF info
23219 for those in decls_for_scope. Just within the `outer_scope' there will be
23220 a BLOCK node representing the function's outermost pair of curly braces,
23221 and any blocks used for the base and member initializers of a C++
23222 constructor function. */
23223 tree outer_scope = DECL_INITIAL (decl);
23224 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23225 {
23226 int call_site_note_count = 0;
23227 int tail_call_site_note_count = 0;
23228
23229 /* Emit a DW_TAG_variable DIE for a named return value. */
23230 if (DECL_NAME (DECL_RESULT (decl)))
23231 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23232
23233 /* The first time through decls_for_scope we will generate the
23234 DIEs for the locals. The second time, we fill in the
23235 location info. */
23236 decls_for_scope (outer_scope, subr_die);
23237
23238 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23239 {
23240 struct call_arg_loc_node *ca_loc;
23241 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23242 {
23243 dw_die_ref die = NULL;
23244 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23245 rtx arg, next_arg;
23246
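          /* Walk the location/value pairs recorded for this call site:
             each element pairs a location (a register, a memory reference,
             or a DEBUG_PARAMETER_REF) with the value it holds when the
             call is made.  */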
23247 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23248 ? XEXP (ca_loc->call_arg_loc_note, 0)
23249 : NULL_RTX);
23250 arg; arg = next_arg)
23251 {
23252 dw_loc_descr_ref reg, val;
23253 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23254 dw_die_ref cdie, tdie = NULL;
23255
23256 next_arg = XEXP (arg, 1);
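              /* A register entry immediately followed by a memory entry
                 whose address is based on that same register describes a
                 single parameter; the skipped entry is emitted as
                 DW_AT_call_data_value further below.  */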
23257 if (REG_P (XEXP (XEXP (arg, 0), 0))
23258 && next_arg
23259 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23260 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23261 && REGNO (XEXP (XEXP (arg, 0), 0))
23262 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23263 next_arg = XEXP (next_arg, 1);
23264 if (mode == VOIDmode)
23265 {
23266 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23267 if (mode == VOIDmode)
23268 mode = GET_MODE (XEXP (arg, 0));
23269 }
23270 if (mode == VOIDmode || mode == BLKmode)
23271 continue;
23272 /* Get dynamic information about call target only if we
23273 have no static information: we cannot generate both
23274 DW_AT_call_origin and DW_AT_call_target
23275 attributes. */
23276 if (ca_loc->symbol_ref == NULL_RTX)
23277 {
23278 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23279 {
23280 tloc = XEXP (XEXP (arg, 0), 1);
23281 continue;
23282 }
23283 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23284 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23285 {
23286 tlocc = XEXP (XEXP (arg, 0), 1);
23287 continue;
23288 }
23289 }
23290 reg = NULL;
23291 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23292 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23293 VAR_INIT_STATUS_INITIALIZED);
23294 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23295 {
23296 rtx mem = XEXP (XEXP (arg, 0), 0);
23297 reg = mem_loc_descriptor (XEXP (mem, 0),
23298 get_address_mode (mem),
23299 GET_MODE (mem),
23300 VAR_INIT_STATUS_INITIALIZED);
23301 }
23302 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23303 == DEBUG_PARAMETER_REF)
23304 {
23305 tree tdecl
23306 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23307 tdie = lookup_decl_die (tdecl);
23308 if (tdie == NULL)
23309 continue;
23310 }
23311 else
23312 continue;
23313 if (reg == NULL
23314 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23315 != DEBUG_PARAMETER_REF)
23316 continue;
23317 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23318 VOIDmode,
23319 VAR_INIT_STATUS_INITIALIZED);
23320 if (val == NULL)
23321 continue;
23322 if (die == NULL)
23323 die = gen_call_site_die (decl, subr_die, ca_loc);
23324 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23325 NULL_TREE);
23326 if (reg != NULL)
23327 add_AT_loc (cdie, DW_AT_location, reg);
23328 else if (tdie != NULL)
23329 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23330 tdie);
23331 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23332 if (next_arg != XEXP (arg, 1))
23333 {
23334 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23335 if (mode == VOIDmode)
23336 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23337 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23338 0), 1),
23339 mode, VOIDmode,
23340 VAR_INIT_STATUS_INITIALIZED);
23341 if (val != NULL)
23342 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23343 val);
23344 }
23345 }
23346 if (die == NULL
23347 && (ca_loc->symbol_ref || tloc))
23348 die = gen_call_site_die (decl, subr_die, ca_loc);
23349 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23350 {
23351 dw_loc_descr_ref tval = NULL;
23352
23353 if (tloc != NULL_RTX)
23354 tval = mem_loc_descriptor (tloc,
23355 GET_MODE (tloc) == VOIDmode
23356 ? Pmode : GET_MODE (tloc),
23357 VOIDmode,
23358 VAR_INIT_STATUS_INITIALIZED);
23359 if (tval)
23360 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23361 else if (tlocc != NULL_RTX)
23362 {
23363 tval = mem_loc_descriptor (tlocc,
23364 GET_MODE (tlocc) == VOIDmode
23365 ? Pmode : GET_MODE (tlocc),
23366 VOIDmode,
23367 VAR_INIT_STATUS_INITIALIZED);
23368 if (tval)
23369 add_AT_loc (die,
23370 dwarf_AT (DW_AT_call_target_clobbered),
23371 tval);
23372 }
23373 }
23374 if (die != NULL)
23375 {
23376 call_site_note_count++;
23377 if (ca_loc->tail_call_p)
23378 tail_call_site_note_count++;
23379 }
23380 }
23381 }
23382 call_arg_locations = NULL;
23383 call_arg_loc_last = NULL;
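      /* If a call-site DIE was emitted for every tail call (and possibly
         for every call) seen in this function, advertise that, so
         consumers know the set of DW_TAG_call_site children is
         exhaustive.  */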
23384 if (tail_call_site_count >= 0
23385 && tail_call_site_count == tail_call_site_note_count
23386 && (!dwarf_strict || dwarf_version >= 5))
23387 {
23388 if (call_site_count >= 0
23389 && call_site_count == call_site_note_count)
23390 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23391 else
23392 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23393 }
23394 call_site_count = -1;
23395 tail_call_site_count = -1;
23396 }
23397
23398 /* Mark used types after we have created DIEs for the functions scopes. */
23399 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23400 }
23401
23402 /* Returns a hash value for X (which really is a die_struct). */
23403
23404 hashval_t
23405 block_die_hasher::hash (die_struct *d)
23406 {
23407 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23408 }
23409
23410 /* Return nonzero if decl_id and die_parent of die_struct X are the same
23411 as decl_id and die_parent of die_struct Y. */
23412
23413 bool
23414 block_die_hasher::equal (die_struct *x, die_struct *y)
23415 {
23416 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23417 }
23418
23419 /* Hold information about markers for inlined entry points. */
23420 struct GTY ((for_user)) inline_entry_data
23421 {
23422 /* The block that's the inlined_function_outer_scope for an inlined
23423 function. */
23424 tree block;
23425
23426 /* The label at the inlined entry point. */
23427 const char *label_pfx;
23428 unsigned int label_num;
23429
23430 /* The view number to be used as the inlined entry point. */
23431 var_loc_view view;
23432 };
23433
23434 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23435 {
23436 typedef tree compare_type;
23437 static inline hashval_t hash (const inline_entry_data *);
23438 static inline bool equal (const inline_entry_data *, const_tree);
23439 };
23440
23441 /* Hash table routines for inline_entry_data. */
23442
23443 inline hashval_t
23444 inline_entry_data_hasher::hash (const inline_entry_data *data)
23445 {
23446 return htab_hash_pointer (data->block);
23447 }
23448
23449 inline bool
23450 inline_entry_data_hasher::equal (const inline_entry_data *data,
23451 const_tree block)
23452 {
23453 return data->block == block;
23454 }
23455
23456 /* Inlined entry points pending DIE creation in this compilation unit. */
23457
23458 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23459
23460
23461 /* Return TRUE if DECL, which may have been previously generated as
23462 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23463 true if decl (or its origin) is either an extern declaration or a
23464 class/namespace scoped declaration.
23465
23466 The declare_in_namespace support causes us to get two DIEs for one
23467 variable, both of which are declarations. We want to avoid
23468 considering one to be a specification, so we must test for
23469 DECLARATION and DW_AT_declaration. */
23470 static inline bool
23471 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23472 {
23473 return (old_die && TREE_STATIC (decl) && !declaration
23474 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23475 }
23476
23477 /* Return true if DECL is a local static. */
23478
23479 static inline bool
23480 local_function_static (tree decl)
23481 {
23482 gcc_assert (VAR_P (decl));
23483 return TREE_STATIC (decl)
23484 && DECL_CONTEXT (decl)
23485 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23486 }
23487
23488 /* Generate a DIE to represent a declared data object.
23489 Either DECL or ORIGIN must be non-null. */
23490
23491 static void
23492 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23493 {
23494 HOST_WIDE_INT off = 0;
23495 tree com_decl;
23496 tree decl_or_origin = decl ? decl : origin;
23497 tree ultimate_origin;
23498 dw_die_ref var_die;
23499 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23500 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23501 || class_or_namespace_scope_p (context_die));
23502 bool specialization_p = false;
23503 bool no_linkage_name = false;
23504
23505 /* While C++ inline static data members have definitions inside of the
23506 class, force the first DIE to be a declaration, then let gen_member_die
23507 reparent it to the class context and call gen_variable_die again
23508 to create the outside of the class DIE for the definition. */
23509 if (!declaration
23510 && old_die == NULL
23511 && decl
23512 && DECL_CONTEXT (decl)
23513 && TYPE_P (DECL_CONTEXT (decl))
23514 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23515 {
23516 declaration = true;
23517 if (dwarf_version < 5)
23518 no_linkage_name = true;
23519 }
23520
23521 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23522 if (decl || ultimate_origin)
23523 origin = ultimate_origin;
23524 com_decl = fortran_common (decl_or_origin, &off);
23525
23526 /* Symbol in common gets emitted as a child of the common block, in the form
23527 of a data member. */
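  /* E.g. for Fortran (purely illustrative):

       common /work/ a, b

     A and B are emitted as children of the DW_TAG_common_block DIE for
     WORK, each located at the common block's address plus the member's
     own offset.  */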
23528 if (com_decl)
23529 {
23530 dw_die_ref com_die;
23531 dw_loc_list_ref loc = NULL;
23532 die_node com_die_arg;
23533
23534 var_die = lookup_decl_die (decl_or_origin);
23535 if (var_die)
23536 {
23537 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23538 {
23539 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23540 if (loc)
23541 {
23542 if (off)
23543 {
23544 /* Optimize the common case. */
23545 if (single_element_loc_list_p (loc)
23546 && loc->expr->dw_loc_opc == DW_OP_addr
23547 && loc->expr->dw_loc_next == NULL
23548 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23549 == SYMBOL_REF)
23550 {
23551 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23552 loc->expr->dw_loc_oprnd1.v.val_addr
23553 = plus_constant (GET_MODE (x), x , off);
23554 }
23555 else
23556 loc_list_plus_const (loc, off);
23557 }
23558 add_AT_location_description (var_die, DW_AT_location, loc);
23559 remove_AT (var_die, DW_AT_declaration);
23560 }
23561 }
23562 return;
23563 }
23564
23565 if (common_block_die_table == NULL)
23566 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23567
23568 com_die_arg.decl_id = DECL_UID (com_decl);
23569 com_die_arg.die_parent = context_die;
23570 com_die = common_block_die_table->find (&com_die_arg);
23571 if (! early_dwarf)
23572 loc = loc_list_from_tree (com_decl, 2, NULL);
23573 if (com_die == NULL)
23574 {
23575 const char *cnam
23576 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23577 die_node **slot;
23578
23579 com_die = new_die (DW_TAG_common_block, context_die, decl);
23580 add_name_and_src_coords_attributes (com_die, com_decl);
23581 if (loc)
23582 {
23583 add_AT_location_description (com_die, DW_AT_location, loc);
23584 /* Avoid sharing the same loc descriptor between
23585 DW_TAG_common_block and DW_TAG_variable. */
23586 loc = loc_list_from_tree (com_decl, 2, NULL);
23587 }
23588 else if (DECL_EXTERNAL (decl_or_origin))
23589 add_AT_flag (com_die, DW_AT_declaration, 1);
23590 if (want_pubnames ())
23591 add_pubname_string (cnam, com_die); /* ??? needed? */
23592 com_die->decl_id = DECL_UID (com_decl);
23593 slot = common_block_die_table->find_slot (com_die, INSERT);
23594 *slot = com_die;
23595 }
23596 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23597 {
23598 add_AT_location_description (com_die, DW_AT_location, loc);
23599 loc = loc_list_from_tree (com_decl, 2, NULL);
23600 remove_AT (com_die, DW_AT_declaration);
23601 }
23602 var_die = new_die (DW_TAG_variable, com_die, decl);
23603 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23604 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23605 decl_quals (decl_or_origin), false,
23606 context_die);
23607 add_alignment_attribute (var_die, decl);
23608 add_AT_flag (var_die, DW_AT_external, 1);
23609 if (loc)
23610 {
23611 if (off)
23612 {
23613 /* Optimize the common case. */
23614 if (single_element_loc_list_p (loc)
23615 && loc->expr->dw_loc_opc == DW_OP_addr
23616 && loc->expr->dw_loc_next == NULL
23617 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23618 {
23619 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23620 loc->expr->dw_loc_oprnd1.v.val_addr
23621 = plus_constant (GET_MODE (x), x, off);
23622 }
23623 else
23624 loc_list_plus_const (loc, off);
23625 }
23626 add_AT_location_description (var_die, DW_AT_location, loc);
23627 }
23628 else if (DECL_EXTERNAL (decl_or_origin))
23629 add_AT_flag (var_die, DW_AT_declaration, 1);
23630 if (decl)
23631 equate_decl_number_to_die (decl, var_die);
23632 return;
23633 }
23634
23635 if (old_die)
23636 {
23637 if (declaration)
23638 {
23639           /* A declaration that has been previously dumped needs no
23640              further annotations, since it doesn't need location info on
23641              the second pass. */
23642 return;
23643 }
23644 else if (decl_will_get_specification_p (old_die, decl, declaration)
23645 && !get_AT (old_die, DW_AT_specification))
23646 {
23647 /* Fall-thru so we can make a new variable die along with a
23648 DW_AT_specification. */
23649 }
23650 else if (origin && old_die->die_parent != context_die)
23651 {
23652 /* If we will be creating an inlined instance, we need a
23653 new DIE that will get annotated with
23654 DW_AT_abstract_origin. */
23655 gcc_assert (!DECL_ABSTRACT_P (decl));
23656 }
23657 else
23658 {
23659 /* If a DIE was dumped early, it still needs location info.
23660 Skip to where we fill the location bits. */
23661 var_die = old_die;
23662
23663           /* ??? In LTRANS we cannot annotate early created variably
23664              modified type DIEs without copying them and adjusting all
23665              references to them. Thus we dumped them again. Also add a
23666              reference to them, but beware of a -g0 compile and -g link,
23667              in which case the reference will already be present. */
23668 tree type = TREE_TYPE (decl_or_origin);
23669 if (in_lto_p
23670 && ! get_AT (var_die, DW_AT_type)
23671 && variably_modified_type_p
23672 (type, decl_function_context (decl_or_origin)))
23673 {
23674 if (decl_by_reference_p (decl_or_origin))
23675 add_type_attribute (var_die, TREE_TYPE (type),
23676 TYPE_UNQUALIFIED, false, context_die);
23677 else
23678 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23679 false, context_die);
23680 }
23681
23682 goto gen_variable_die_location;
23683 }
23684 }
23685
23686 /* For static data members, the declaration in the class is supposed
23687 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23688 also in DWARF2; the specification should still be DW_TAG_variable
23689 referencing the DW_TAG_member DIE. */
23690 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23691 var_die = new_die (DW_TAG_member, context_die, decl);
23692 else
23693 var_die = new_die (DW_TAG_variable, context_die, decl);
23694
23695 if (origin != NULL)
23696 add_abstract_origin_attribute (var_die, origin);
23697
23698 /* Loop unrolling can create multiple blocks that refer to the same
23699 static variable, so we must test for the DW_AT_declaration flag.
23700
23701 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23702 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23703 sharing them.
23704
23705 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23706 else if (decl_will_get_specification_p (old_die, decl, declaration))
23707 {
23708 /* This is a definition of a C++ class level static. */
23709 add_AT_specification (var_die, old_die);
23710 specialization_p = true;
23711 if (DECL_NAME (decl))
23712 {
23713 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23714 struct dwarf_file_data * file_index = lookup_filename (s.file);
23715
23716 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23717 add_AT_file (var_die, DW_AT_decl_file, file_index);
23718
23719 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23720 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23721
23722 if (debug_column_info
23723 && s.column
23724 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23725 != (unsigned) s.column))
23726 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23727
23728 if (old_die->die_tag == DW_TAG_member)
23729 add_linkage_name (var_die, decl);
23730 }
23731 }
23732 else
23733 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23734
23735 if ((origin == NULL && !specialization_p)
23736 || (origin != NULL
23737 && !DECL_ABSTRACT_P (decl_or_origin)
23738 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23739 decl_function_context
23740 (decl_or_origin))))
23741 {
23742 tree type = TREE_TYPE (decl_or_origin);
23743
23744 if (decl_by_reference_p (decl_or_origin))
23745 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23746 context_die);
23747 else
23748 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23749 context_die);
23750 }
23751
23752 if (origin == NULL && !specialization_p)
23753 {
23754 if (TREE_PUBLIC (decl))
23755 add_AT_flag (var_die, DW_AT_external, 1);
23756
23757 if (DECL_ARTIFICIAL (decl))
23758 add_AT_flag (var_die, DW_AT_artificial, 1);
23759
23760 add_alignment_attribute (var_die, decl);
23761
23762 add_accessibility_attribute (var_die, decl);
23763 }
23764
23765 if (declaration)
23766 add_AT_flag (var_die, DW_AT_declaration, 1);
23767
23768 if (decl && (DECL_ABSTRACT_P (decl)
23769 || !old_die || is_declaration_die (old_die)))
23770 equate_decl_number_to_die (decl, var_die);
23771
23772 gen_variable_die_location:
23773 if (! declaration
23774 && (! DECL_ABSTRACT_P (decl_or_origin)
23775 /* Local static vars are shared between all clones/inlines,
23776 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23777 already set. */
23778 || (VAR_P (decl_or_origin)
23779 && TREE_STATIC (decl_or_origin)
23780 && DECL_RTL_SET_P (decl_or_origin))))
23781 {
23782 if (early_dwarf)
23783 add_pubname (decl_or_origin, var_die);
23784 else
23785 add_location_or_const_value_attribute (var_die, decl_or_origin,
23786 decl == NULL);
23787 }
23788 else
23789 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23790
23791 if ((dwarf_version >= 4 || !dwarf_strict)
23792 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23793 DW_AT_const_expr) == 1
23794 && !get_AT (var_die, DW_AT_const_expr)
23795 && !specialization_p)
23796 add_AT_flag (var_die, DW_AT_const_expr, 1);
23797
23798 if (!dwarf_strict)
23799 {
23800 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23801 DW_AT_inline);
23802 if (inl != -1
23803 && !get_AT (var_die, DW_AT_inline)
23804 && !specialization_p)
23805 add_AT_unsigned (var_die, DW_AT_inline, inl);
23806 }
23807 }
23808
23809 /* Generate a DIE to represent a named constant. */
23810
23811 static void
23812 gen_const_die (tree decl, dw_die_ref context_die)
23813 {
23814 dw_die_ref const_die;
23815 tree type = TREE_TYPE (decl);
23816
23817 const_die = lookup_decl_die (decl);
23818 if (const_die)
23819 return;
23820
23821 const_die = new_die (DW_TAG_constant, context_die, decl);
23822 equate_decl_number_to_die (decl, const_die);
23823 add_name_and_src_coords_attributes (const_die, decl);
23824 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23825 if (TREE_PUBLIC (decl))
23826 add_AT_flag (const_die, DW_AT_external, 1);
23827 if (DECL_ARTIFICIAL (decl))
23828 add_AT_flag (const_die, DW_AT_artificial, 1);
23829 tree_add_const_value_attribute_for_decl (const_die, decl);
23830 }
23831
23832 /* Generate a DIE to represent a label identifier. */
23833
23834 static void
23835 gen_label_die (tree decl, dw_die_ref context_die)
23836 {
23837 tree origin = decl_ultimate_origin (decl);
23838 dw_die_ref lbl_die = lookup_decl_die (decl);
23839 rtx insn;
23840 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23841
23842 if (!lbl_die)
23843 {
23844 lbl_die = new_die (DW_TAG_label, context_die, decl);
23845 equate_decl_number_to_die (decl, lbl_die);
23846
23847 if (origin != NULL)
23848 add_abstract_origin_attribute (lbl_die, origin);
23849 else
23850 add_name_and_src_coords_attributes (lbl_die, decl);
23851 }
23852
23853 if (DECL_ABSTRACT_P (decl))
23854 equate_decl_number_to_die (decl, lbl_die);
23855 else if (! early_dwarf)
23856 {
23857 insn = DECL_RTL_IF_SET (decl);
23858
23859 /* Deleted labels are programmer specified labels which have been
23860 eliminated because of various optimizations. We still emit them
23861 here so that it is possible to put breakpoints on them. */
23862 if (insn
23863 && (LABEL_P (insn)
23864 || ((NOTE_P (insn)
23865 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23866 {
23867 /* When optimization is enabled (via -O) some parts of the compiler
23868 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23869 represent source-level labels which were explicitly declared by
23870 the user. This really shouldn't be happening though, so catch
23871 it if it ever does happen. */
23872 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23873
23874 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23875 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23876 }
23877 else if (insn
23878 && NOTE_P (insn)
23879 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23880 && CODE_LABEL_NUMBER (insn) != -1)
23881 {
23882 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23883 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23884 }
23885 }
23886 }
23887
23888 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23889 attributes to the DIE for a block STMT, to describe where the inlined
23890 function was called from. This is similar to add_src_coords_attributes. */
23891
23892 static inline void
23893 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23894 {
23895 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23896
23897 if (dwarf_version >= 3 || !dwarf_strict)
23898 {
23899 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23900 add_AT_unsigned (die, DW_AT_call_line, s.line);
23901 if (debug_column_info && s.column)
23902 add_AT_unsigned (die, DW_AT_call_column, s.column);
23903 }
23904 }
23905
23906
23907 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23908 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23909
23910 static inline void
23911 add_high_low_attributes (tree stmt, dw_die_ref die)
23912 {
23913 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23914
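  /* If this block is the outermost scope of an inlined function and an
     entry-point marker was recorded for it, emit DW_AT_entry_pc (and,
     with location views, an entry view) from that marker, then drop the
     table entry.  */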
23915 if (inline_entry_data **iedp
23916 = !inline_entry_data_table ? NULL
23917 : inline_entry_data_table->find_slot_with_hash (stmt,
23918 htab_hash_pointer (stmt),
23919 NO_INSERT))
23920 {
23921 inline_entry_data *ied = *iedp;
23922 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23923 gcc_assert (debug_inline_points);
23924 gcc_assert (inlined_function_outer_scope_p (stmt));
23925
23926 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23927 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23928
23929 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23930 && !dwarf_strict)
23931 {
23932 if (!output_asm_line_debug_info ())
23933 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23934 else
23935 {
23936 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23937 /* FIXME: this will resolve to a small number. Could we
23938 possibly emit smaller data? Ideally we'd emit a
23939 uleb128, but that would make the size of DIEs
23940 impossible for the compiler to compute, since it's
23941 the assembler that computes the value of the view
23942 label in this case. Ideally, we'd have a single form
23943 encompassing both the address and the view, and
23944 indirecting them through a table might make things
23945 easier, but even that would be more wasteful,
23946 space-wise, than what we have now. */
23947 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23948 }
23949 }
23950
23951 inline_entry_data_table->clear_slot (iedp);
23952 }
23953
23954 if (BLOCK_FRAGMENT_CHAIN (stmt)
23955 && (dwarf_version >= 3 || !dwarf_strict))
23956 {
23957 tree chain, superblock = NULL_TREE;
23958 dw_die_ref pdie;
23959 dw_attr_node *attr = NULL;
23960
23961 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23962 {
23963 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23964 BLOCK_NUMBER (stmt));
23965 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23966 }
23967
23968       /* Optimize duplicate .debug_ranges lists or even tails of
23969          lists. If this BLOCK has the same ranges as its supercontext,
23970          look up the DW_AT_ranges attribute in the supercontext (and
23971          recursively so), verify that the ranges_table contains the
23972          right values and use it instead of adding a new .debug_ranges list. */
23973 for (chain = stmt, pdie = die;
23974 BLOCK_SAME_RANGE (chain);
23975 chain = BLOCK_SUPERCONTEXT (chain))
23976 {
23977 dw_attr_node *new_attr;
23978
23979 pdie = pdie->die_parent;
23980 if (pdie == NULL)
23981 break;
23982 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23983 break;
23984 new_attr = get_AT (pdie, DW_AT_ranges);
23985 if (new_attr == NULL
23986 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23987 break;
23988 attr = new_attr;
23989 superblock = BLOCK_SUPERCONTEXT (chain);
23990 }
23991 if (attr != NULL
23992 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23993 == BLOCK_NUMBER (superblock))
23994 && BLOCK_FRAGMENT_CHAIN (superblock))
23995 {
23996 unsigned long off = attr->dw_attr_val.v.val_offset;
23997 unsigned long supercnt = 0, thiscnt = 0;
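          /* Count the supercontext's fragments and our own; our fragments
             form a tail of the supercontext's list, so point DW_AT_ranges
             at the appropriate tail of the range list that was already
             emitted instead of adding a new one.  */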
23998 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23999 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24000 {
24001 ++supercnt;
24002 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24003 == BLOCK_NUMBER (chain));
24004 }
24005 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24006 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24007 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24008 ++thiscnt;
24009 gcc_assert (supercnt >= thiscnt);
24010 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24011 false);
24012 note_rnglist_head (off + supercnt - thiscnt);
24013 return;
24014 }
24015
24016 unsigned int offset = add_ranges (stmt, true);
24017 add_AT_range_list (die, DW_AT_ranges, offset, false);
24018 note_rnglist_head (offset);
24019
24020 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24021 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24022 do
24023 {
24024 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24025 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24026 chain = BLOCK_FRAGMENT_CHAIN (chain);
24027 }
24028 while (chain);
24029 add_ranges (NULL);
24030 }
24031 else
24032 {
24033 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24034 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24035 BLOCK_NUMBER (stmt));
24036 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24037 BLOCK_NUMBER (stmt));
24038 add_AT_low_high_pc (die, label, label_high, false);
24039 }
24040 }
24041
24042 /* Generate a DIE for a lexical block. */
24043
24044 static void
24045 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24046 {
24047 dw_die_ref old_die = BLOCK_DIE (stmt);
24048 dw_die_ref stmt_die = NULL;
24049 if (!old_die)
24050 {
24051 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24052 BLOCK_DIE (stmt) = stmt_die;
24053 }
24054
24055 if (BLOCK_ABSTRACT (stmt))
24056 {
24057 if (old_die)
24058 {
24059 /* This must have been generated early and it won't even
24060 need location information since it's a DW_AT_inline
24061 function. */
24062 if (flag_checking)
24063 for (dw_die_ref c = context_die; c; c = c->die_parent)
24064 if (c->die_tag == DW_TAG_inlined_subroutine
24065 || c->die_tag == DW_TAG_subprogram)
24066 {
24067 gcc_assert (get_AT (c, DW_AT_inline));
24068 break;
24069 }
24070 return;
24071 }
24072 }
24073 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24074 {
24075 /* If this is an inlined instance, create a new lexical block DIE for
24076 anything below to attach DW_AT_abstract_origin to. */
24077 if (old_die)
24078 {
24079 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24080 BLOCK_DIE (stmt) = stmt_die;
24081 old_die = NULL;
24082 }
24083
24084 tree origin = block_ultimate_origin (stmt);
24085 if (origin != NULL_TREE && origin != stmt)
24086 add_abstract_origin_attribute (stmt_die, origin);
24087 }
24088
24089 if (old_die)
24090 stmt_die = old_die;
24091
24092 /* A non-abstract block whose blocks have already been reordered
24093 should have the instruction range for this block. If so, set the
24094 high/low attributes. */
24095 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24096 {
24097 gcc_assert (stmt_die);
24098 add_high_low_attributes (stmt, stmt_die);
24099 }
24100
24101 decls_for_scope (stmt, stmt_die);
24102 }
24103
24104 /* Generate a DIE for an inlined subprogram. */
24105
24106 static void
24107 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24108 {
24109 tree decl;
24110
24111 /* The instance of the function that is effectively being inlined shall not
24112 be abstract. */
24113 gcc_assert (! BLOCK_ABSTRACT (stmt));
24114
24115 decl = block_ultimate_origin (stmt);
24116
24117 /* Make sure any inlined functions are known to be inlineable. */
24118 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24119 || cgraph_function_possibly_inlined_p (decl));
24120
24121 if (! BLOCK_ABSTRACT (stmt))
24122 {
24123 dw_die_ref subr_die
24124 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24125
24126 if (call_arg_locations || debug_inline_points)
24127 BLOCK_DIE (stmt) = subr_die;
24128 add_abstract_origin_attribute (subr_die, decl);
24129 if (TREE_ASM_WRITTEN (stmt))
24130 add_high_low_attributes (stmt, subr_die);
24131 add_call_src_coords_attributes (stmt, subr_die);
24132
24133 decls_for_scope (stmt, subr_die);
24134 }
24135 }
24136
24137 /* Generate a DIE for a field in a record or structure. CTX is required: see
24138 the comment for VLR_CONTEXT. */
24139
24140 static void
24141 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24142 {
24143 dw_die_ref decl_die;
24144
24145 if (TREE_TYPE (decl) == error_mark_node)
24146 return;
24147
24148 decl_die = new_die (DW_TAG_member, context_die, decl);
24149 add_name_and_src_coords_attributes (decl_die, decl);
24150 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24151 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24152 context_die);
24153
24154 if (DECL_BIT_FIELD_TYPE (decl))
24155 {
24156 add_byte_size_attribute (decl_die, decl);
24157 add_bit_size_attribute (decl_die, decl);
24158 add_bit_offset_attribute (decl_die, decl, ctx);
24159 }
24160
24161 add_alignment_attribute (decl_die, decl);
24162
24163 /* If we have a variant part offset, then we are supposed to process a member
24164 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24165 trees. */
24166 gcc_assert (ctx->variant_part_offset == NULL_TREE
24167 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24168 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24169 add_data_member_location_attribute (decl_die, decl, ctx);
24170
24171 if (DECL_ARTIFICIAL (decl))
24172 add_AT_flag (decl_die, DW_AT_artificial, 1);
24173
24174 add_accessibility_attribute (decl_die, decl);
24175
24176 /* Equate decl number to die, so that we can look up this decl later on. */
24177 equate_decl_number_to_die (decl, decl_die);
24178 }
24179
24180 /* Generate a DIE for a pointer to a member type. TYPE can be an
24181 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24182 pointer to member function. */
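/* A hypothetical C++ fragment, for illustration only (not part of the
   implementation):

	struct S { int i; };
	int S::*pm = &S::i;

   Here the type of 'pm' is an OFFSET_TYPE whose TYPE_OFFSET_BASETYPE is S
   and whose TREE_TYPE is int, so the DIE built below carries a
   DW_AT_containing_type reference to the DIE for S and a DW_AT_type
   reference to the DIE for int.  */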
24183
24184 static void
24185 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24186 {
24187 if (lookup_type_die (type))
24188 return;
24189
24190 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24191 scope_die_for (type, context_die), type);
24192
24193 equate_type_number_to_die (type, ptr_die);
24194 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24195 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24196 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24197 context_die);
24198 add_alignment_attribute (ptr_die, type);
24199
24200 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24201 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24202 {
24203 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24204 add_AT_loc (ptr_die, DW_AT_use_location, op);
24205 }
24206 }
24207
24208 static char *producer_string;
24209
24210 /* Return a heap-allocated producer string, including the command-line
24211 options when -grecord-gcc-switches is enabled. */
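/* Purely illustrative sketch of the result (hypothetical values): when
   compiling C code with -O2 -g -grecord-gcc-switches, the returned string
   could look like

	"GNU C11 8.0.1 -O2 -g"

   i.e. the language name and version string followed by the recorded
   switches, with options such as -o, -I, -D or -W* filtered out below.  */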
24212
24213 static char *
24214 gen_producer_string (void)
24215 {
24216 size_t j;
24217 auto_vec<const char *> switches;
24218 const char *language_string = lang_hooks.name;
24219 char *producer, *tail;
24220 const char *p;
24221 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24222 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24223
24224 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24225 switch (save_decoded_options[j].opt_index)
24226 {
24227 case OPT_o:
24228 case OPT_d:
24229 case OPT_dumpbase:
24230 case OPT_dumpdir:
24231 case OPT_auxbase:
24232 case OPT_auxbase_strip:
24233 case OPT_quiet:
24234 case OPT_version:
24235 case OPT_v:
24236 case OPT_w:
24237 case OPT_L:
24238 case OPT_D:
24239 case OPT_I:
24240 case OPT_U:
24241 case OPT_SPECIAL_unknown:
24242 case OPT_SPECIAL_ignore:
24243 case OPT_SPECIAL_deprecated:
24244 case OPT_SPECIAL_program_name:
24245 case OPT_SPECIAL_input_file:
24246 case OPT_grecord_gcc_switches:
24247 case OPT__output_pch_:
24248 case OPT_fdiagnostics_show_location_:
24249 case OPT_fdiagnostics_show_option:
24250 case OPT_fdiagnostics_show_caret:
24251 case OPT_fdiagnostics_show_labels:
24252 case OPT_fdiagnostics_show_line_numbers:
24253 case OPT_fdiagnostics_color_:
24254 case OPT_fverbose_asm:
24255 case OPT____:
24256 case OPT__sysroot_:
24257 case OPT_nostdinc:
24258 case OPT_nostdinc__:
24259 case OPT_fpreprocessed:
24260 case OPT_fltrans_output_list_:
24261 case OPT_fresolution_:
24262 case OPT_fdebug_prefix_map_:
24263 case OPT_fmacro_prefix_map_:
24264 case OPT_ffile_prefix_map_:
24265 case OPT_fcompare_debug:
24266 case OPT_fchecking:
24267 case OPT_fchecking_:
24268 /* Ignore these. */
24269 continue;
24270 default:
24271 if (cl_options[save_decoded_options[j].opt_index].flags
24272 & CL_NO_DWARF_RECORD)
24273 continue;
24274 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24275 == '-');
24276 switch (save_decoded_options[j].canonical_option[0][1])
24277 {
24278 case 'M':
24279 case 'i':
24280 case 'W':
24281 continue;
24282 case 'f':
24283 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24284 "dump", 4) == 0)
24285 continue;
24286 break;
24287 default:
24288 break;
24289 }
24290 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24291 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24292 break;
24293 }
24294
24295 producer = XNEWVEC (char, plen + 1 + len + 1);
24296 tail = producer;
24297 sprintf (tail, "%s %s", language_string, version_string);
24298 tail += plen;
24299
24300 FOR_EACH_VEC_ELT (switches, j, p)
24301 {
24302 len = strlen (p);
24303 *tail = ' ';
24304 memcpy (tail + 1, p, len);
24305 tail += len + 1;
24306 }
24307
24308 *tail = '\0';
24309 return producer;
24310 }
24311
24312 /* Given a C and/or C++ language/version string return the "highest".
24313 C++ is assumed to be "higher" than C in this case. Used for merging
24314 LTO translation unit languages. */
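/* Illustrative only, assuming the language strings handled below:

	highest_c_language ("GNU C11", "GNU C++14") == "GNU C++14"
	highest_c_language ("GNU C89", "GNU C99")   == "GNU C99"  */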
24315 static const char *
24316 highest_c_language (const char *lang1, const char *lang2)
24317 {
24318 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24319 return "GNU C++17";
24320 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24321 return "GNU C++14";
24322 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24323 return "GNU C++11";
24324 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24325 return "GNU C++98";
24326
24327 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24328 return "GNU C17";
24329 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24330 return "GNU C11";
24331 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24332 return "GNU C99";
24333 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24334 return "GNU C89";
24335
24336 gcc_unreachable ();
24337 }
24338
24339
24340 /* Generate the DIE for the compilation unit. */
24341
24342 static dw_die_ref
24343 gen_compile_unit_die (const char *filename)
24344 {
24345 dw_die_ref die;
24346 const char *language_string = lang_hooks.name;
24347 int language;
24348
24349 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24350
24351 if (filename)
24352 {
24353 add_name_attribute (die, filename);
24354 /* Don't add cwd for <built-in>. */
24355 if (filename[0] != '<')
24356 add_comp_dir_attribute (die);
24357 }
24358
24359 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24360
24361 /* If our producer is LTO try to figure out a common language to use
24362 from the global list of translation units. */
24363 if (strcmp (language_string, "GNU GIMPLE") == 0)
24364 {
24365 unsigned i;
24366 tree t;
24367 const char *common_lang = NULL;
24368
24369 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24370 {
24371 if (!TRANSLATION_UNIT_LANGUAGE (t))
24372 continue;
24373 if (!common_lang)
24374 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24375 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24376 ;
24377 else if (strncmp (common_lang, "GNU C", 5) == 0
24378 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24379 /* Mixing C and C++ is ok, use C++ in that case. */
24380 common_lang = highest_c_language (common_lang,
24381 TRANSLATION_UNIT_LANGUAGE (t));
24382 else
24383 {
24384 /* Fall back to C. */
24385 common_lang = NULL;
24386 break;
24387 }
24388 }
24389
24390 if (common_lang)
24391 language_string = common_lang;
24392 }
24393
24394 language = DW_LANG_C;
24395 if (strncmp (language_string, "GNU C", 5) == 0
24396 && ISDIGIT (language_string[5]))
24397 {
24398 language = DW_LANG_C89;
24399 if (dwarf_version >= 3 || !dwarf_strict)
24400 {
24401 if (strcmp (language_string, "GNU C89") != 0)
24402 language = DW_LANG_C99;
24403
24404 if (dwarf_version >= 5 /* || !dwarf_strict */)
24405 if (strcmp (language_string, "GNU C11") == 0
24406 || strcmp (language_string, "GNU C17") == 0)
24407 language = DW_LANG_C11;
24408 }
24409 }
24410 else if (strncmp (language_string, "GNU C++", 7) == 0)
24411 {
24412 language = DW_LANG_C_plus_plus;
24413 if (dwarf_version >= 5 /* || !dwarf_strict */)
24414 {
24415 if (strcmp (language_string, "GNU C++11") == 0)
24416 language = DW_LANG_C_plus_plus_11;
24417 else if (strcmp (language_string, "GNU C++14") == 0)
24418 language = DW_LANG_C_plus_plus_14;
24419 else if (strcmp (language_string, "GNU C++17") == 0)
24420 /* For now. */
24421 language = DW_LANG_C_plus_plus_14;
24422 }
24423 }
24424 else if (strcmp (language_string, "GNU F77") == 0)
24425 language = DW_LANG_Fortran77;
24426 else if (dwarf_version >= 3 || !dwarf_strict)
24427 {
24428 if (strcmp (language_string, "GNU Ada") == 0)
24429 language = DW_LANG_Ada95;
24430 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24431 {
24432 language = DW_LANG_Fortran95;
24433 if (dwarf_version >= 5 /* || !dwarf_strict */)
24434 {
24435 if (strcmp (language_string, "GNU Fortran2003") == 0)
24436 language = DW_LANG_Fortran03;
24437 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24438 language = DW_LANG_Fortran08;
24439 }
24440 }
24441 else if (strcmp (language_string, "GNU Objective-C") == 0)
24442 language = DW_LANG_ObjC;
24443 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24444 language = DW_LANG_ObjC_plus_plus;
24445 else if (dwarf_version >= 5 || !dwarf_strict)
24446 {
24447 if (strcmp (language_string, "GNU Go") == 0)
24448 language = DW_LANG_Go;
24449 }
24450 }
24451 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24452 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24453 language = DW_LANG_Fortran90;
24454 /* Likewise for Ada. */
24455 else if (strcmp (language_string, "GNU Ada") == 0)
24456 language = DW_LANG_Ada83;
24457
24458 add_AT_unsigned (die, DW_AT_language, language);
24459
24460 switch (language)
24461 {
24462 case DW_LANG_Fortran77:
24463 case DW_LANG_Fortran90:
24464 case DW_LANG_Fortran95:
24465 case DW_LANG_Fortran03:
24466 case DW_LANG_Fortran08:
24467 /* Fortran has case-insensitive identifiers and the front-end
24468 lowercases everything. */
24469 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24470 break;
24471 default:
24472 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24473 break;
24474 }
24475 return die;
24476 }
24477
24478 /* Generate the DIE for a base class. */
24479
24480 static void
24481 gen_inheritance_die (tree binfo, tree access, tree type,
24482 dw_die_ref context_die)
24483 {
24484 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24485 struct vlr_context ctx = { type, NULL };
24486
24487 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24488 context_die);
24489 add_data_member_location_attribute (die, binfo, &ctx);
24490
24491 if (BINFO_VIRTUAL_P (binfo))
24492 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24493
24494 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24495 children, otherwise the default is DW_ACCESS_public. In DWARF2
24496 the default has always been DW_ACCESS_private. */
24497 if (access == access_public_node)
24498 {
24499 if (dwarf_version == 2
24500 || context_die->die_tag == DW_TAG_class_type)
24501 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24502 }
24503 else if (access == access_protected_node)
24504 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24505 else if (dwarf_version > 2
24506 && context_die->die_tag != DW_TAG_class_type)
24507 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24508 }
24509
24510 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24511 structure. */
24512 static bool
24513 is_variant_part (tree decl)
24514 {
24515 return (TREE_CODE (decl) == FIELD_DECL
24516 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24517 }
24518
24519 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24520 return the FIELD_DECL. Return NULL_TREE otherwise. */
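/* For illustration (hypothetical tree): given an operand such as
   (integer) ((PLACEHOLDER_EXPR struct_type).disc), the conversions are
   stripped and the FIELD_DECL for "disc" is returned, provided the
   PLACEHOLDER_EXPR's type is STRUCT_TYPE itself.  */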
24521
24522 static tree
24523 analyze_discr_in_predicate (tree operand, tree struct_type)
24524 {
24525 bool continue_stripping = true;
24526 while (continue_stripping)
24527 switch (TREE_CODE (operand))
24528 {
24529 CASE_CONVERT:
24530 operand = TREE_OPERAND (operand, 0);
24531 break;
24532 default:
24533 continue_stripping = false;
24534 break;
24535 }
24536
24537 /* Match field access to members of struct_type only. */
24538 if (TREE_CODE (operand) == COMPONENT_REF
24539 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24540 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24541 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24542 return TREE_OPERAND (operand, 1);
24543 else
24544 return NULL_TREE;
24545 }
24546
24547 /* Check that SRC is a constant integer that can be represented as a native
24548 integer constant (either signed or unsigned). If so, store it into DEST and
24549 return true. Return false otherwise. */
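/* For illustration (hypothetical): given an 8-bit signed constant -56 for
   a type whose debug type is unsigned, the constant is first converted to
   the unsigned debug type, so DEST ends up with pos set to 1 (unsigned)
   and v.uval == 200.  */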
24550
24551 static bool
24552 get_discr_value (tree src, dw_discr_value *dest)
24553 {
24554 tree discr_type = TREE_TYPE (src);
24555
24556 if (lang_hooks.types.get_debug_type)
24557 {
24558 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24559 if (debug_type != NULL)
24560 discr_type = debug_type;
24561 }
24562
24563 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24564 return false;
24565
24566 /* Signedness can vary between the original type and the debug type. This
24567 can happen for character types in Ada for instance: the character type
24568 used for code generation can be signed, to be compatible with the C one,
24569 but from a debugger point of view, it must be unsigned. */
24570 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24571 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24572
24573 if (is_orig_unsigned != is_debug_unsigned)
24574 src = fold_convert (discr_type, src);
24575
24576 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24577 return false;
24578
24579 dest->pos = is_debug_unsigned;
24580 if (is_debug_unsigned)
24581 dest->v.uval = tree_to_uhwi (src);
24582 else
24583 dest->v.sval = tree_to_shwi (src);
24584
24585 return true;
24586 }
24587
24588 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24589 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24590 store NULL_TREE in *DISCR_DECL. Otherwise:
24591
24592 - store in *DISCR_DECL the discriminant field in STRUCT_TYPE that
24593 controls the variant part
24594
24595 - put in *DISCR_LISTS_P an array in which, for each variant, the item
24596 is the corresponding list of matching discriminant values.
24597
24598 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24599 the above array.
24600
24601 Note that when the array is allocated (i.e. when the analysis is
24602 successful), it is up to the caller to free the array. */
24603
24604 static void
24605 analyze_variants_discr (tree variant_part_decl,
24606 tree struct_type,
24607 tree *discr_decl,
24608 dw_discr_list_ref **discr_lists_p,
24609 unsigned *discr_lists_length)
24610 {
24611 tree variant_part_type = TREE_TYPE (variant_part_decl);
24612 tree variant;
24613 dw_discr_list_ref *discr_lists;
24614 unsigned i;
24615
24616 /* Compute how many variants there are in this variant part. */
24617 *discr_lists_length = 0;
24618 for (variant = TYPE_FIELDS (variant_part_type);
24619 variant != NULL_TREE;
24620 variant = DECL_CHAIN (variant))
24621 ++*discr_lists_length;
24622
24623 *discr_decl = NULL_TREE;
24624 *discr_lists_p
24625 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24626 sizeof (**discr_lists_p));
24627 discr_lists = *discr_lists_p;
24628
24629 /* And then analyze all variants to extract discriminant information for all
24630 of them. This analysis is conservative: as soon as we detect something we
24631 do not support, abort everything and pretend we found nothing. */
24632 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24633 variant != NULL_TREE;
24634 variant = DECL_CHAIN (variant), ++i)
24635 {
24636 tree match_expr = DECL_QUALIFIER (variant);
24637
24638 /* Now, try to analyze the predicate and deduce a discriminant for
24639 it. */
24640 if (match_expr == boolean_true_node)
24641 /* Typically happens for the default variant: it matches all cases that
24642 previous variants rejected. Don't output any matching value for
24643 this one. */
24644 continue;
24645
24646 /* The following loop tries to iterate over each discriminant
24647 possibility: single values or ranges. */
24648 while (match_expr != NULL_TREE)
24649 {
24650 tree next_round_match_expr;
24651 tree candidate_discr = NULL_TREE;
24652 dw_discr_list_ref new_node = NULL;
24653
24654 /* Possibilities are matched one after the other by nested
24655 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24656 continue with the rest at the next iteration. */
24657 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24658 {
24659 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24660 match_expr = TREE_OPERAND (match_expr, 1);
24661 }
24662 else
24663 next_round_match_expr = NULL_TREE;
24664
24665 if (match_expr == boolean_false_node)
24666 /* This sub-expression matches nothing: just wait for the next
24667 one. */
24668 ;
24669
24670 else if (TREE_CODE (match_expr) == EQ_EXPR)
24671 {
24672 /* We are matching: <discr_field> == <integer_cst>
24673 This sub-expression matches a single value. */
24674 tree integer_cst = TREE_OPERAND (match_expr, 1);
24675
24676 candidate_discr
24677 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24678 struct_type);
24679
24680 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24681 if (!get_discr_value (integer_cst,
24682 &new_node->dw_discr_lower_bound))
24683 goto abort;
24684 new_node->dw_discr_range = false;
24685 }
24686
24687 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24688 {
24689 /* We are matching:
24690 <discr_field> > <integer_cst>
24691 && <discr_field> < <integer_cst>.
24692 This sub-expression matches the range of values between the
24693 two matched integer constants. Note that comparisons can be
24694 inclusive or exclusive. */
24695 tree candidate_discr_1, candidate_discr_2;
24696 tree lower_cst, upper_cst;
24697 bool lower_cst_included, upper_cst_included;
24698 tree lower_op = TREE_OPERAND (match_expr, 0);
24699 tree upper_op = TREE_OPERAND (match_expr, 1);
24700
24701 /* When the comparison is exclusive, the integer constant is not
24702 the discriminant range bound we are looking for: we will have
24703 to increment or decrement it. */
24704 if (TREE_CODE (lower_op) == GE_EXPR)
24705 lower_cst_included = true;
24706 else if (TREE_CODE (lower_op) == GT_EXPR)
24707 lower_cst_included = false;
24708 else
24709 goto abort;
24710
24711 if (TREE_CODE (upper_op) == LE_EXPR)
24712 upper_cst_included = true;
24713 else if (TREE_CODE (upper_op) == LT_EXPR)
24714 upper_cst_included = false;
24715 else
24716 goto abort;
24717
24718 /* Extract the discriminant from the first operand and check it
24719 is consistent with the same analysis in the second
24720 operand. */
24721 candidate_discr_1
24722 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24723 struct_type);
24724 candidate_discr_2
24725 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24726 struct_type);
24727 if (candidate_discr_1 == candidate_discr_2)
24728 candidate_discr = candidate_discr_1;
24729 else
24730 goto abort;
24731
24732 /* Extract bounds from both. */
24733 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24734 lower_cst = TREE_OPERAND (lower_op, 1);
24735 upper_cst = TREE_OPERAND (upper_op, 1);
24736
24737 if (!lower_cst_included)
24738 lower_cst
24739 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24740 build_int_cst (TREE_TYPE (lower_cst), 1));
24741 if (!upper_cst_included)
24742 upper_cst
24743 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24744 build_int_cst (TREE_TYPE (upper_cst), 1));
24745
24746 if (!get_discr_value (lower_cst,
24747 &new_node->dw_discr_lower_bound)
24748 || !get_discr_value (upper_cst,
24749 &new_node->dw_discr_upper_bound))
24750 goto abort;
24751
24752 new_node->dw_discr_range = true;
24753 }
24754
24755 else
24756 /* Unsupported sub-expression: we cannot determine the set of
24757 matching discriminant values. Abort everything. */
24758 goto abort;
24759
24760 /* If the discriminant info is not consistent with what we saw so
24761 far, consider the analysis failed and abort everything. */
24762 if (candidate_discr == NULL_TREE
24763 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24764 goto abort;
24765 else
24766 *discr_decl = candidate_discr;
24767
24768 if (new_node != NULL)
24769 {
24770 new_node->dw_discr_next = discr_lists[i];
24771 discr_lists[i] = new_node;
24772 }
24773 match_expr = next_round_match_expr;
24774 }
24775 }
24776
24777 /* If we reach this point, we were able to match everything we were
24778 interested in. */
24779 return;
24780
24781 abort:
24782 /* Clean up all data structures and return no result. */
24783 free (*discr_lists_p);
24784 *discr_lists_p = NULL;
24785 *discr_decl = NULL_TREE;
24786 }
24787
24788 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24789 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24790 under CONTEXT_DIE.
24791
24792 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24793 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24794 this type, which are record types, represent the available variants and each
24795 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24796 values are inferred from these attributes.
24797
24798 In trees, the offsets for the fields inside these sub-records are relative
24799 to the variant part itself, whereas the corresponding DIEs should have
24800 offset attributes that are relative to the embedding record base address.
24801 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24802 must be an expression that computes the offset of the variant part to
24803 describe in DWARF. */
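/* Purely illustrative sketch (Ada-like pseudo-source, not part of the
   implementation):

	type Rec (Disc : Integer) is record
	   case Disc is
	      when 1 .. 4  => A : Integer;
	      when 5       => B : Boolean;
	      when others  => null;
	   end case;
	end record;

   The case part is a FIELD_DECL of QUAL_UNION_TYPE; each alternative is a
   RECORD_TYPE member whose DECL_QUALIFIER encodes its matching predicate
   (for instance Disc >= 1 && Disc <= 4), from which analyze_variants_discr
   derives the DW_AT_discr_value / DW_AT_discr_list attributes emitted
   below.  */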
24804
24805 static void
24806 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24807 dw_die_ref context_die)
24808 {
24809 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24810 tree variant_part_offset = vlr_ctx->variant_part_offset;
24811 struct loc_descr_context ctx = {
24812 vlr_ctx->struct_type, /* context_type */
24813 NULL_TREE, /* base_decl */
24814 NULL, /* dpi */
24815 false, /* placeholder_arg */
24816 false /* placeholder_seen */
24817 };
24818
24819 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24820 NULL_TREE if there is no such field. */
24821 tree discr_decl = NULL_TREE;
24822 dw_discr_list_ref *discr_lists;
24823 unsigned discr_lists_length = 0;
24824 unsigned i;
24825
24826 dw_die_ref dwarf_proc_die = NULL;
24827 dw_die_ref variant_part_die
24828 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24829
24830 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24831
24832 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24833 &discr_decl, &discr_lists, &discr_lists_length);
24834
24835 if (discr_decl != NULL_TREE)
24836 {
24837 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24838
24839 if (discr_die)
24840 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24841 else
24842 /* We have no DIE for the discriminant, so just discard all
24843 discriminant information in the output. */
24844 discr_decl = NULL_TREE;
24845 }
24846
24847 /* If the offset for this variant part is more complex than a constant,
24848 create a DWARF procedure for it so that we will not have to generate DWARF
24849 expressions for it for each member. */
24850 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24851 && (dwarf_version >= 3 || !dwarf_strict))
24852 {
24853 const tree dwarf_proc_fndecl
24854 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24855 build_function_type (TREE_TYPE (variant_part_offset),
24856 NULL_TREE));
24857 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24858 const dw_loc_descr_ref dwarf_proc_body
24859 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24860
24861 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24862 dwarf_proc_fndecl, context_die);
24863 if (dwarf_proc_die != NULL)
24864 variant_part_offset = dwarf_proc_call;
24865 }
24866
24867 /* Output DIEs for all variants. */
24868 i = 0;
24869 for (tree variant = TYPE_FIELDS (variant_part_type);
24870 variant != NULL_TREE;
24871 variant = DECL_CHAIN (variant), ++i)
24872 {
24873 tree variant_type = TREE_TYPE (variant);
24874 dw_die_ref variant_die;
24875
24876 /* All variants (i.e. members of a variant part) are supposed to be
24877 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24878 under these records. */
24879 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24880
24881 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24882 equate_decl_number_to_die (variant, variant_die);
24883
24884 /* Output discriminant values this variant matches, if any. */
24885 if (discr_decl == NULL || discr_lists[i] == NULL)
24886 /* Either we have no discriminant information at all, or this is
24887 probably the default variant: as the standard says, don't
24888 output any discriminant value/list attribute. */
24889 ;
24890 else if (discr_lists[i]->dw_discr_next == NULL
24891 && !discr_lists[i]->dw_discr_range)
24892 /* If there is only one accepted value, don't bother outputting a
24893 list. */
24894 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24895 else
24896 add_discr_list (variant_die, discr_lists[i]);
24897
24898 for (tree member = TYPE_FIELDS (variant_type);
24899 member != NULL_TREE;
24900 member = DECL_CHAIN (member))
24901 {
24902 struct vlr_context vlr_sub_ctx = {
24903 vlr_ctx->struct_type, /* struct_type */
24904 NULL /* variant_part_offset */
24905 };
24906 if (is_variant_part (member))
24907 {
24908 /* All offsets for fields inside variant parts are relative to
24909 the top-level embedding RECORD_TYPE's base address. On the
24910 other hand, offsets in GCC's types are relative to the
24911 nested-most variant part. So we have to sum offsets each time
24912 we recurse. */
24913
24914 vlr_sub_ctx.variant_part_offset
24915 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24916 variant_part_offset, byte_position (member));
24917 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24918 }
24919 else
24920 {
24921 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24922 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24923 }
24924 }
24925 }
24926
24927 free (discr_lists);
24928 }
24929
24930 /* Generate a DIE for a class member. */
24931
24932 static void
24933 gen_member_die (tree type, dw_die_ref context_die)
24934 {
24935 tree member;
24936 tree binfo = TYPE_BINFO (type);
24937
24938 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24939
24940 /* If this is not an incomplete type, output descriptions of each of its
24941 members. Note that as we output the DIEs necessary to represent the
24942 members of this record or union type, we will also be trying to output
24943 DIEs to represent the *types* of those members. However the `type'
24944 function (above) will specifically avoid generating type DIEs for member
24945 types *within* the list of member DIEs for this (containing) type except
24946 for those types (of members) which are explicitly marked as also being
24947 members of this (containing) type themselves. The g++ front end can
24948 force any given type to be treated as a member of some other (containing)
24949 type by setting the TYPE_CONTEXT of the given (member) type to point to
24950 the TREE node representing the appropriate (containing) type. */
24951
24952 /* First output info about the base classes. */
24953 if (binfo)
24954 {
24955 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24956 int i;
24957 tree base;
24958
24959 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24960 gen_inheritance_die (base,
24961 (accesses ? (*accesses)[i] : access_public_node),
24962 type,
24963 context_die);
24964 }
24965
24966 /* Now output info about the data members and type members. */
24967 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24968 {
24969 struct vlr_context vlr_ctx = { type, NULL_TREE };
24970 bool static_inline_p
24971 = (TREE_STATIC (member)
24972 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24973 != -1));
24974
24975 /* Ignore clones. */
24976 if (DECL_ABSTRACT_ORIGIN (member))
24977 continue;
24978
24979 /* If we thought we were generating minimal debug info for TYPE
24980 and then changed our minds, some of the member declarations
24981 may have already been defined. Don't define them again, but
24982 do put them in the right order. */
24983
24984 if (dw_die_ref child = lookup_decl_die (member))
24985 {
24986 /* Handle inline static data members, which only have in-class
24987 declarations. */
24988 dw_die_ref ref = NULL;
24989 if (child->die_tag == DW_TAG_variable
24990 && child->die_parent == comp_unit_die ())
24991 {
24992 ref = get_AT_ref (child, DW_AT_specification);
24993 /* For C++17 inline static data members followed by a redundant
24994 out-of-class redeclaration, we might get here with
24995 child being the DIE created for the out-of-class
24996 redeclaration and with its DW_AT_specification being
24997 the DIE created for the in-class definition. We want to
24998 reparent the latter, and don't want to create another
24999 DIE with DW_AT_specification in that case, because
25000 we already have one. */
25001 if (ref
25002 && static_inline_p
25003 && ref->die_tag == DW_TAG_variable
25004 && ref->die_parent == comp_unit_die ()
25005 && get_AT (ref, DW_AT_specification) == NULL)
25006 {
25007 child = ref;
25008 ref = NULL;
25009 static_inline_p = false;
25010 }
25011 }
25012
25013 if (child->die_tag == DW_TAG_variable
25014 && child->die_parent == comp_unit_die ()
25015 && ref == NULL)
25016 {
25017 reparent_child (child, context_die);
25018 if (dwarf_version < 5)
25019 child->die_tag = DW_TAG_member;
25020 }
25021 else
25022 splice_child_die (context_die, child);
25023 }
25024
25025 /* Do not generate standard DWARF for variant parts if we are generating
25026 the corresponding GNAT encodings: DIEs generated for both would
25027 conflict in our mappings. */
25028 else if (is_variant_part (member)
25029 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25030 {
25031 vlr_ctx.variant_part_offset = byte_position (member);
25032 gen_variant_part (member, &vlr_ctx, context_die);
25033 }
25034 else
25035 {
25036 vlr_ctx.variant_part_offset = NULL_TREE;
25037 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25038 }
25039
25040 /* For C++ inline static data members emit immediately a DW_TAG_variable
25041 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25042 DW_AT_specification. */
25043 if (static_inline_p)
25044 {
25045 int old_extern = DECL_EXTERNAL (member);
25046 DECL_EXTERNAL (member) = 0;
25047 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25048 DECL_EXTERNAL (member) = old_extern;
25049 }
25050 }
25051 }
25052
25053 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25054 is set, we pretend that the type was never defined, so we only get the
25055 member DIEs needed by later specification DIEs. */
25056
25057 static void
25058 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25059 enum debug_info_usage usage)
25060 {
25061 if (TREE_ASM_WRITTEN (type))
25062 {
25063 /* Fill in the bounds of variable-length fields in late DWARF if
25064 still incomplete. */
25065 if (!early_dwarf && variably_modified_type_p (type, NULL))
25066 for (tree member = TYPE_FIELDS (type);
25067 member;
25068 member = DECL_CHAIN (member))
25069 fill_variable_array_bounds (TREE_TYPE (member));
25070 return;
25071 }
25072
25073 dw_die_ref type_die = lookup_type_die (type);
25074 dw_die_ref scope_die = 0;
25075 int nested = 0;
25076 int complete = (TYPE_SIZE (type)
25077 && (! TYPE_STUB_DECL (type)
25078 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25079 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25080 complete = complete && should_emit_struct_debug (type, usage);
25081
25082 if (type_die && ! complete)
25083 return;
25084
25085 if (TYPE_CONTEXT (type) != NULL_TREE
25086 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25087 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25088 nested = 1;
25089
25090 scope_die = scope_die_for (type, context_die);
25091
25092 /* Generate child DIEs for template parameters. */
25093 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25094 schedule_generic_params_dies_gen (type);
25095
25096 if (! type_die || (nested && is_cu_die (scope_die)))
25097 /* First occurrence of type or toplevel definition of nested class. */
25098 {
25099 dw_die_ref old_die = type_die;
25100
25101 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25102 ? record_type_tag (type) : DW_TAG_union_type,
25103 scope_die, type);
25104 equate_type_number_to_die (type, type_die);
25105 if (old_die)
25106 add_AT_specification (type_die, old_die);
25107 else
25108 add_name_attribute (type_die, type_tag (type));
25109 }
25110 else
25111 remove_AT (type_die, DW_AT_declaration);
25112
25113 /* If this type has been completed, then give it a byte_size attribute and
25114 then give a list of members. */
25115 if (complete && !ns_decl)
25116 {
25117 /* Prevent infinite recursion in cases where the type of some member of
25118 this type is expressed in terms of this type itself. */
25119 TREE_ASM_WRITTEN (type) = 1;
25120 add_byte_size_attribute (type_die, type);
25121 add_alignment_attribute (type_die, type);
25122 if (TYPE_STUB_DECL (type) != NULL_TREE)
25123 {
25124 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25125 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25126 }
25127
25128 /* If the first reference to this type was as the return type of an
25129 inline function, then it may not have a parent. Fix this now. */
25130 if (type_die->die_parent == NULL)
25131 add_child_die (scope_die, type_die);
25132
25133 gen_member_die (type, type_die);
25134
25135 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25136 if (TYPE_ARTIFICIAL (type))
25137 add_AT_flag (type_die, DW_AT_artificial, 1);
25138
25139 /* GNU extension: Record what type our vtable lives in. */
25140 if (TYPE_VFIELD (type))
25141 {
25142 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25143
25144 gen_type_die (vtype, context_die);
25145 add_AT_die_ref (type_die, DW_AT_containing_type,
25146 lookup_type_die (vtype));
25147 }
25148 }
25149 else
25150 {
25151 add_AT_flag (type_die, DW_AT_declaration, 1);
25152
25153 /* We don't need to do this for function-local types. */
25154 if (TYPE_STUB_DECL (type)
25155 && ! decl_function_context (TYPE_STUB_DECL (type)))
25156 vec_safe_push (incomplete_types, type);
25157 }
25158
25159 if (get_AT (type_die, DW_AT_name))
25160 add_pubtype (type, type_die);
25161 }
25162
25163 /* Generate a DIE for a subroutine _type_. */
25164
25165 static void
25166 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25167 {
25168 tree return_type = TREE_TYPE (type);
25169 dw_die_ref subr_die
25170 = new_die (DW_TAG_subroutine_type,
25171 scope_die_for (type, context_die), type);
25172
25173 equate_type_number_to_die (type, subr_die);
25174 add_prototyped_attribute (subr_die, type);
25175 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25176 context_die);
25177 add_alignment_attribute (subr_die, type);
25178 gen_formal_types_die (type, subr_die);
25179
25180 if (get_AT (subr_die, DW_AT_name))
25181 add_pubtype (type, subr_die);
25182 if ((dwarf_version >= 5 || !dwarf_strict)
25183 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25184 add_AT_flag (subr_die, DW_AT_reference, 1);
25185 if ((dwarf_version >= 5 || !dwarf_strict)
25186 && lang_hooks.types.type_dwarf_attribute (type,
25187 DW_AT_rvalue_reference) != -1)
25188 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25189 }
25190
25191 /* Generate a DIE for a type definition. */
25192
25193 static void
25194 gen_typedef_die (tree decl, dw_die_ref context_die)
25195 {
25196 dw_die_ref type_die;
25197 tree type;
25198
25199 if (TREE_ASM_WRITTEN (decl))
25200 {
25201 if (DECL_ORIGINAL_TYPE (decl))
25202 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25203 return;
25204 }
25205
25206 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25207 checks in process_scope_var and modified_type_die), this should be called
25208 only for original types. */
25209 gcc_assert (decl_ultimate_origin (decl) == NULL
25210 || decl_ultimate_origin (decl) == decl);
25211
25212 TREE_ASM_WRITTEN (decl) = 1;
25213 type_die = new_die (DW_TAG_typedef, context_die, decl);
25214
25215 add_name_and_src_coords_attributes (type_die, decl);
25216 if (DECL_ORIGINAL_TYPE (decl))
25217 {
25218 type = DECL_ORIGINAL_TYPE (decl);
25219 if (type == error_mark_node)
25220 return;
25221
25222 gcc_assert (type != TREE_TYPE (decl));
25223 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25224 }
25225 else
25226 {
25227 type = TREE_TYPE (decl);
25228 if (type == error_mark_node)
25229 return;
25230
25231 if (is_naming_typedef_decl (TYPE_NAME (type)))
25232 {
25233 /* Here, we are in the case of decl being a typedef naming
25234 an anonymous type, e.g:
25235 typedef struct {...} foo;
25236 In that case TREE_TYPE (decl) is not a typedef variant
25237 type and TYPE_NAME of the anonymous type is set to the
25238 TYPE_DECL of the typedef. This construct is emitted by
25239 the C++ FE.
25240
25241 TYPE is the anonymous struct named by the typedef
25242 DECL. As we need the DW_AT_type attribute of the
25243 DW_TAG_typedef to point to the DIE of TYPE, let's
25244 generate that DIE right away. add_type_attribute
25245 called below will then pick (via lookup_type_die) that
25246 anonymous struct DIE. */
25247 if (!TREE_ASM_WRITTEN (type))
25248 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25249
25250 /* This is a GNU Extension. We are adding a
25251 DW_AT_linkage_name attribute to the DIE of the
25252 anonymous struct TYPE. The value of that attribute
25253 is the name of the typedef decl naming the anonymous
25254 struct. This greatly eases the work of consumers of
25255 this debug info. */
25256 add_linkage_name_raw (lookup_type_die (type), decl);
25257 }
25258 }
25259
25260 add_type_attribute (type_die, type, decl_quals (decl), false,
25261 context_die);
25262
25263 if (is_naming_typedef_decl (decl))
25264 /* We want that all subsequent calls to lookup_type_die with
25265 TYPE in argument yield the DW_TAG_typedef we have just
25266 created. */
25267 equate_type_number_to_die (type, type_die);
25268
25269 add_alignment_attribute (type_die, TREE_TYPE (decl));
25270
25271 add_accessibility_attribute (type_die, decl);
25272
25273 if (DECL_ABSTRACT_P (decl))
25274 equate_decl_number_to_die (decl, type_die);
25275
25276 if (get_AT (type_die, DW_AT_name))
25277 add_pubtype (decl, type_die);
25278 }
25279
25280 /* Generate a DIE for a struct, class, enum or union type. */
25281
25282 static void
25283 gen_tagged_type_die (tree type,
25284 dw_die_ref context_die,
25285 enum debug_info_usage usage)
25286 {
25287 if (type == NULL_TREE
25288 || !is_tagged_type (type))
25289 return;
25290
25291 if (TREE_ASM_WRITTEN (type))
25292 ;
25293 /* If this is a nested type whose containing class hasn't been written
25294 out yet, writing it out will cover this one, too. This does not apply
25295 to instantiations of member class templates; they need to be added to
25296 the containing class as they are generated. FIXME: This hurts the
25297 idea of combining type decls from multiple TUs, since we can't predict
25298 what set of template instantiations we'll get. */
25299 else if (TYPE_CONTEXT (type)
25300 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25301 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25302 {
25303 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25304
25305 if (TREE_ASM_WRITTEN (type))
25306 return;
25307
25308 /* If that failed, attach ourselves to the stub. */
25309 context_die = lookup_type_die (TYPE_CONTEXT (type));
25310 }
25311 else if (TYPE_CONTEXT (type) != NULL_TREE
25312 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25313 {
25314 /* If this type is local to a function that hasn't been written
25315 out yet, use a NULL context for now; it will be fixed up in
25316 decls_for_scope. */
25317 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25318 /* A declaration DIE doesn't count; nested types need to go in the
25319 specification. */
25320 if (context_die && is_declaration_die (context_die))
25321 context_die = NULL;
25322 }
25323 else
25324 context_die = declare_in_namespace (type, context_die);
25325
25326 if (TREE_CODE (type) == ENUMERAL_TYPE)
25327 {
25328 /* This might have been written out by the call to
25329 declare_in_namespace. */
25330 if (!TREE_ASM_WRITTEN (type))
25331 gen_enumeration_type_die (type, context_die);
25332 }
25333 else
25334 gen_struct_or_union_type_die (type, context_die, usage);
25335
25336 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25337 it up if it is ever completed. gen_*_type_die will set it for us
25338 when appropriate. */
25339 }
25340
25341 /* Generate a type description DIE. */
25342
25343 static void
25344 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25345 enum debug_info_usage usage)
25346 {
25347 struct array_descr_info info;
25348
25349 if (type == NULL_TREE || type == error_mark_node)
25350 return;
25351
25352 if (flag_checking && type)
25353 verify_type (type);
25354
25355 if (TYPE_NAME (type) != NULL_TREE
25356 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25357 && is_redundant_typedef (TYPE_NAME (type))
25358 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25359 /* The DECL of this type is a typedef we don't want to emit debug
25360 info for but we want debug info for its underlying typedef.
25361 This can happen for e.g, the injected-class-name of a C++
25362 type. */
25363 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25364
25365 /* If TYPE is a typedef type variant, let's generate debug info
25366 for the parent typedef which TYPE is a type of. */
25367 if (typedef_variant_p (type))
25368 {
25369 if (TREE_ASM_WRITTEN (type))
25370 return;
25371
25372 tree name = TYPE_NAME (type);
25373 tree origin = decl_ultimate_origin (name);
25374 if (origin != NULL && origin != name)
25375 {
25376 gen_decl_die (origin, NULL, NULL, context_die);
25377 return;
25378 }
25379
25380 /* Prevent broken recursion; we can't hand off to the same type. */
25381 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25382
25383 /* Give typedefs the right scope. */
25384 context_die = scope_die_for (type, context_die);
25385
25386 TREE_ASM_WRITTEN (type) = 1;
25387
25388 gen_decl_die (name, NULL, NULL, context_die);
25389 return;
25390 }
25391
25392 /* If type is an anonymous tagged type named by a typedef, let's
25393 generate debug info for the typedef. */
25394 if (is_naming_typedef_decl (TYPE_NAME (type)))
25395 {
25396 /* Give typedefs the right scope. */
25397 context_die = scope_die_for (type, context_die);
25398
25399 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25400 return;
25401 }
25402
25403 if (lang_hooks.types.get_debug_type)
25404 {
25405 tree debug_type = lang_hooks.types.get_debug_type (type);
25406
25407 if (debug_type != NULL_TREE && debug_type != type)
25408 {
25409 gen_type_die_with_usage (debug_type, context_die, usage);
25410 return;
25411 }
25412 }
25413
25414 /* We are going to output a DIE to represent the unqualified version
25415 of this type (i.e. without any const or volatile qualifiers) so
25416 get the main variant (i.e. the unqualified version) of this type
25417 now. (Vectors and arrays are special because the debugging info is in the
25418 cloned type itself. Similarly function/method types can contain extra
25419 ref-qualification). */
25420 if (TREE_CODE (type) == FUNCTION_TYPE
25421 || TREE_CODE (type) == METHOD_TYPE)
25422 {
25423 /* For function/method types, can't use type_main_variant here,
25424 because that can have different ref-qualifiers for C++,
25425 but try to canonicalize. */
25426 tree main = TYPE_MAIN_VARIANT (type);
25427 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25428 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25429 && check_base_type (t, main)
25430 && check_lang_type (t, type))
25431 {
25432 type = t;
25433 break;
25434 }
25435 }
25436 else if (TREE_CODE (type) != VECTOR_TYPE
25437 && TREE_CODE (type) != ARRAY_TYPE)
25438 type = type_main_variant (type);
25439
25440 /* If this is an array type with hidden descriptor, handle it first. */
25441 if (!TREE_ASM_WRITTEN (type)
25442 && lang_hooks.types.get_array_descr_info)
25443 {
25444 memset (&info, 0, sizeof (info));
25445 if (lang_hooks.types.get_array_descr_info (type, &info))
25446 {
25447 /* Fortran sometimes emits array types with no dimension. */
25448 gcc_assert (info.ndimensions >= 0
25449 && (info.ndimensions
25450 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25451 gen_descr_array_type_die (type, &info, context_die);
25452 TREE_ASM_WRITTEN (type) = 1;
25453 return;
25454 }
25455 }
25456
25457 if (TREE_ASM_WRITTEN (type))
25458 {
25459 /* Variable-length types may be incomplete even if
25460 TREE_ASM_WRITTEN. For such types, fall through to
25461 gen_array_type_die() and possibly fill in
25462 DW_AT_{upper,lower}_bound attributes. */
25463 if ((TREE_CODE (type) != ARRAY_TYPE
25464 && TREE_CODE (type) != RECORD_TYPE
25465 && TREE_CODE (type) != UNION_TYPE
25466 && TREE_CODE (type) != QUAL_UNION_TYPE)
25467 || !variably_modified_type_p (type, NULL))
25468 return;
25469 }
25470
25471 switch (TREE_CODE (type))
25472 {
25473 case ERROR_MARK:
25474 break;
25475
25476 case POINTER_TYPE:
25477 case REFERENCE_TYPE:
25478 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25479 ensures that the gen_type_die recursion will terminate even if the
25480 type is recursive. Recursive types are possible in Ada. */
25481 /* ??? We could perhaps do this for all types before the switch
25482 statement. */
25483 TREE_ASM_WRITTEN (type) = 1;
25484
25485 /* For these types, all that is required is that we output a DIE (or a
25486 set of DIEs) to represent the "basis" type. */
25487 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25488 DINFO_USAGE_IND_USE);
25489 break;
25490
25491 case OFFSET_TYPE:
25492 /* This code is used for C++ pointer-to-data-member types.
25493 Output a description of the relevant class type. */
25494 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25495 DINFO_USAGE_IND_USE);
25496
25497 /* Output a description of the type of the object pointed to. */
25498 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25499 DINFO_USAGE_IND_USE);
25500
25501 /* Now output a DIE to represent this pointer-to-data-member type
25502 itself. */
25503 gen_ptr_to_mbr_type_die (type, context_die);
25504 break;
25505
25506 case FUNCTION_TYPE:
25507 /* Force out return type (in case it wasn't forced out already). */
25508 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25509 DINFO_USAGE_DIR_USE);
25510 gen_subroutine_type_die (type, context_die);
25511 break;
25512
25513 case METHOD_TYPE:
25514 /* Force out return type (in case it wasn't forced out already). */
25515 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25516 DINFO_USAGE_DIR_USE);
25517 gen_subroutine_type_die (type, context_die);
25518 break;
25519
25520 case ARRAY_TYPE:
25521 case VECTOR_TYPE:
25522 gen_array_type_die (type, context_die);
25523 break;
25524
25525 case ENUMERAL_TYPE:
25526 case RECORD_TYPE:
25527 case UNION_TYPE:
25528 case QUAL_UNION_TYPE:
25529 gen_tagged_type_die (type, context_die, usage);
25530 return;
25531
25532 case VOID_TYPE:
25533 case INTEGER_TYPE:
25534 case REAL_TYPE:
25535 case FIXED_POINT_TYPE:
25536 case COMPLEX_TYPE:
25537 case BOOLEAN_TYPE:
25538 /* No DIEs needed for fundamental types. */
25539 break;
25540
25541 case NULLPTR_TYPE:
25542 case LANG_TYPE:
25543 /* Just use DW_TAG_unspecified_type. */
25544 {
25545 dw_die_ref type_die = lookup_type_die (type);
25546 if (type_die == NULL)
25547 {
25548 tree name = TYPE_IDENTIFIER (type);
25549 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25550 type);
25551 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25552 equate_type_number_to_die (type, type_die);
25553 }
25554 }
25555 break;
25556
25557 default:
25558 if (is_cxx_auto (type))
25559 {
25560 tree name = TYPE_IDENTIFIER (type);
25561 dw_die_ref *die = (name == get_identifier ("auto")
25562 ? &auto_die : &decltype_auto_die);
25563 if (!*die)
25564 {
25565 *die = new_die (DW_TAG_unspecified_type,
25566 comp_unit_die (), NULL_TREE);
25567 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25568 }
25569 equate_type_number_to_die (type, *die);
25570 break;
25571 }
25572 gcc_unreachable ();
25573 }
25574
25575 TREE_ASM_WRITTEN (type) = 1;
25576 }
25577
25578 static void
25579 gen_type_die (tree type, dw_die_ref context_die)
25580 {
25581 if (type != error_mark_node)
25582 {
25583 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25584 if (flag_checking)
25585 {
25586 dw_die_ref die = lookup_type_die (type);
25587 if (die)
25588 check_die (die);
25589 }
25590 }
25591 }
25592
25593 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25594 things which are local to the given block. */
25595
25596 static void
25597 gen_block_die (tree stmt, dw_die_ref context_die)
25598 {
25599 int must_output_die = 0;
25600 bool inlined_func;
25601
25602 /* Ignore blocks that are NULL. */
25603 if (stmt == NULL_TREE)
25604 return;
25605
25606 inlined_func = inlined_function_outer_scope_p (stmt);
25607
25608 /* If the block is one fragment of a non-contiguous block, do not
25609 process the variables, since they will have been done by the
25610 origin block. Do process subblocks. */
25611 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25612 {
25613 tree sub;
25614
25615 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25616 gen_block_die (sub, context_die);
25617
25618 return;
25619 }
25620
25621 /* Determine if we need to output any Dwarf DIEs at all to represent this
25622 block. */
25623 if (inlined_func)
25624 /* The outer scopes for inlinings *must* always be represented. We
25625 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25626 must_output_die = 1;
25627 else if (BLOCK_DIE (stmt))
25628 /* If we already have a DIE, then it was filled early. Meanwhile
25629 we might have pruned all BLOCK_VARS as optimized out, but we
25630 still want to generate high/low PC attributes, so output it. */
25631 must_output_die = 1;
25632 else if (TREE_USED (stmt)
25633 || TREE_ASM_WRITTEN (stmt)
25634 || BLOCK_ABSTRACT (stmt))
25635 {
25636 /* Determine if this block directly contains any "significant"
25637 local declarations which we will need to output DIEs for. */
25638 if (debug_info_level > DINFO_LEVEL_TERSE)
25639 {
25640 /* We are not in terse mode so any local declaration that
25641 is not ignored for debug purposes counts as being a
25642 "significant" one. */
25643 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25644 must_output_die = 1;
25645 else
25646 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25647 if (!DECL_IGNORED_P (var))
25648 {
25649 must_output_die = 1;
25650 break;
25651 }
25652 }
25653 else if (!dwarf2out_ignore_block (stmt))
25654 must_output_die = 1;
25655 }
25656
25657 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25658 DIE for any block which contains no significant local declarations at
25659 all. Rather, in such cases we just call `decls_for_scope' so that any
25660 needed Dwarf info for any sub-blocks will get properly generated. Note
25661 that in terse mode, our definition of what constitutes a "significant"
25662 local declaration gets restricted to include only inlined function
25663 instances and local (nested) function definitions. */
25664 if (must_output_die)
25665 {
25666 if (inlined_func)
25667 {
25668 /* If STMT block is abstract, that means we have been called
25669 indirectly from dwarf2out_abstract_function.
25670 That function rightfully marks the descendant blocks (of
25671 the abstract function it is dealing with) as being abstract,
25672 precisely to prevent us from emitting any
25673 DW_TAG_inlined_subroutine DIE as a descendant
25674 of an abstract function instance. So in that case, we should
25675 not call gen_inlined_subroutine_die.
25676
25677 Later though, when cgraph asks dwarf2out to emit info
25678 for the concrete instance of the function decl into which
25679 the concrete instance of STMT got inlined, the latter will lead
25680 to the generation of a DW_TAG_inlined_subroutine DIE. */
25681 if (! BLOCK_ABSTRACT (stmt))
25682 gen_inlined_subroutine_die (stmt, context_die);
25683 }
25684 else
25685 gen_lexical_block_die (stmt, context_die);
25686 }
25687 else
25688 decls_for_scope (stmt, context_die);
25689 }
25690
25691 /* Process variable DECL (or variable with origin ORIGIN) within
25692 block STMT and add it to CONTEXT_DIE. */
25693 static void
25694 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25695 {
25696 dw_die_ref die;
25697 tree decl_or_origin = decl ? decl : origin;
25698
25699 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25700 die = lookup_decl_die (decl_or_origin);
25701 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25702 {
25703 if (TYPE_DECL_IS_STUB (decl_or_origin))
25704 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25705 else
25706 die = lookup_decl_die (decl_or_origin);
25707 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25708 if (! die && ! early_dwarf)
25709 return;
25710 }
25711 else
25712 die = NULL;
25713
25714 /* Avoid creating DIEs for local typedefs and concrete static variables that
25715 will only be pruned later. */
25716 if ((origin || decl_ultimate_origin (decl))
25717 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25718 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25719 {
25720 origin = decl_ultimate_origin (decl_or_origin);
25721 if (decl && VAR_P (decl) && die != NULL)
25722 {
25723 die = lookup_decl_die (origin);
25724 if (die != NULL)
25725 equate_decl_number_to_die (decl, die);
25726 }
25727 return;
25728 }
25729
25730 if (die != NULL && die->die_parent == NULL)
25731 add_child_die (context_die, die);
25732 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25733 {
25734 if (early_dwarf)
25735 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25736 stmt, context_die);
25737 }
25738 else
25739 {
25740 if (decl && DECL_P (decl))
25741 {
25742 die = lookup_decl_die (decl);
25743
25744 /* Early created DIEs do not have a parent as the decls refer
25745 to the function as DECL_CONTEXT rather than the BLOCK. */
25746 if (die && die->die_parent == NULL)
25747 {
25748 gcc_assert (in_lto_p);
25749 add_child_die (context_die, die);
25750 }
25751 }
25752
25753 gen_decl_die (decl, origin, NULL, context_die);
25754 }
25755 }
25756
25757 /* Generate all of the decls declared within a given scope and (recursively)
25758 all of its sub-blocks. */
25759
25760 static void
25761 decls_for_scope (tree stmt, dw_die_ref context_die)
25762 {
25763 tree decl;
25764 unsigned int i;
25765 tree subblocks;
25766
25767 /* Ignore NULL blocks. */
25768 if (stmt == NULL_TREE)
25769 return;
25770
25771 /* Output the DIEs to represent all of the data objects and typedefs
25772 declared directly within this block but not within any nested
25773 sub-blocks. Also, nested function and tag DIEs have been
25774 generated with a parent of NULL; fix that up now. We don't
25775 have to do this if we're at -g1. */
25776 if (debug_info_level > DINFO_LEVEL_TERSE)
25777 {
25778 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25779 process_scope_var (stmt, decl, NULL_TREE, context_die);
25780 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25781 origin - avoid doing this twice as we have no good way to see
25782 if we've done it once already. */
25783 if (! early_dwarf)
25784 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25785 {
25786 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25787 if (decl == current_function_decl)
25788 /* Ignore declarations of the current function: although
25789 they are declarations, gen_subprogram_die would treat
25790 them as definitions again because they are equal to
25791 current_function_decl, and would recurse endlessly. */;
25792 else if (TREE_CODE (decl) == FUNCTION_DECL)
25793 process_scope_var (stmt, decl, NULL_TREE, context_die);
25794 else
25795 process_scope_var (stmt, NULL_TREE, decl, context_die);
25796 }
25797 }
25798
25799 /* Even if we're at -g1, we need to process the subblocks in order to get
25800 inlined call information. */
25801
25802 /* Output the DIEs to represent all sub-blocks (and the items declared
25803 therein) of this block. */
25804 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25805 subblocks != NULL;
25806 subblocks = BLOCK_CHAIN (subblocks))
25807 gen_block_die (subblocks, context_die);
25808 }
25809
25810 /* Is this a typedef we can avoid emitting? */
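/* For instance (an illustrative case, not an exhaustive list): the
   artificial member TYPE_DECL the C++ front end creates for the
   injected-class-name, as in

     struct S { };   // S implicitly declares a member type named S

   has the same DECL_NAME as the TYPE_NAME of its DECL_CONTEXT and is
   therefore treated as redundant below.  */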
25811
25812 bool
25813 is_redundant_typedef (const_tree decl)
25814 {
25815 if (TYPE_DECL_IS_STUB (decl))
25816 return true;
25817
25818 if (DECL_ARTIFICIAL (decl)
25819 && DECL_CONTEXT (decl)
25820 && is_tagged_type (DECL_CONTEXT (decl))
25821 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25822 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25823 /* Also ignore the artificial member typedef for the class name. */
25824 return true;
25825
25826 return false;
25827 }
25828
25829 /* Return TRUE if TYPE is a typedef that names a type for linkage
25830 purposes. This kind of typedef is produced by the C++ FE for
25831 constructs like:
25832
25833 typedef struct {...} foo;
25834
25835 In that case, there is no typedef variant type produced for foo.
25836 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25837 struct type. */
25838
25839 static bool
25840 is_naming_typedef_decl (const_tree decl)
25841 {
25842 if (decl == NULL_TREE
25843 || TREE_CODE (decl) != TYPE_DECL
25844 || DECL_NAMELESS (decl)
25845 || !is_tagged_type (TREE_TYPE (decl))
25846 || DECL_IS_BUILTIN (decl)
25847 || is_redundant_typedef (decl)
25848 /* It looks like Ada produces TYPE_DECLs that are very similar
25849 to C++ naming typedefs but that have different
25850 semantics. Let's be specific to C++ for now. */
25851 || !is_cxx (decl))
25852 return FALSE;
25853
25854 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25855 && TYPE_NAME (TREE_TYPE (decl)) == decl
25856 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25857 != TYPE_NAME (TREE_TYPE (decl))));
25858 }
25859
25860 /* Looks up the DIE for a context. */
25861
25862 static inline dw_die_ref
25863 lookup_context_die (tree context)
25864 {
25865 if (context)
25866 {
25867 /* Find die that represents this context. */
25868 if (TYPE_P (context))
25869 {
25870 context = TYPE_MAIN_VARIANT (context);
25871 dw_die_ref ctx = lookup_type_die (context);
25872 if (!ctx)
25873 return NULL;
25874 return strip_naming_typedef (context, ctx);
25875 }
25876 else
25877 return lookup_decl_die (context);
25878 }
25879 return comp_unit_die ();
25880 }
25881
25882 /* Returns the DIE for a context. */
25883
25884 static inline dw_die_ref
25885 get_context_die (tree context)
25886 {
25887 if (context)
25888 {
25889 /* Find die that represents this context. */
25890 if (TYPE_P (context))
25891 {
25892 context = TYPE_MAIN_VARIANT (context);
25893 return strip_naming_typedef (context, force_type_die (context));
25894 }
25895 else
25896 return force_decl_die (context);
25897 }
25898 return comp_unit_die ();
25899 }
25900
25901 /* Returns the DIE for decl. A DIE will always be returned. */
25902
25903 static dw_die_ref
25904 force_decl_die (tree decl)
25905 {
25906 dw_die_ref decl_die;
25907 unsigned saved_external_flag;
25908 tree save_fn = NULL_TREE;
25909 decl_die = lookup_decl_die (decl);
25910 if (!decl_die)
25911 {
25912 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25913
25914 decl_die = lookup_decl_die (decl);
25915 if (decl_die)
25916 return decl_die;
25917
25918 switch (TREE_CODE (decl))
25919 {
25920 case FUNCTION_DECL:
25921 /* Clear current_function_decl, so that gen_subprogram_die thinks
25922 that this is a declaration. At this point, we just want to force
25923 a declaration DIE. */
25924 save_fn = current_function_decl;
25925 current_function_decl = NULL_TREE;
25926 gen_subprogram_die (decl, context_die);
25927 current_function_decl = save_fn;
25928 break;
25929
25930 case VAR_DECL:
25931 /* Set external flag to force declaration die. Restore it after
25932 gen_decl_die() call. */
25933 saved_external_flag = DECL_EXTERNAL (decl);
25934 DECL_EXTERNAL (decl) = 1;
25935 gen_decl_die (decl, NULL, NULL, context_die);
25936 DECL_EXTERNAL (decl) = saved_external_flag;
25937 break;
25938
25939 case NAMESPACE_DECL:
25940 if (dwarf_version >= 3 || !dwarf_strict)
25941 dwarf2out_decl (decl);
25942 else
25943 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25944 decl_die = comp_unit_die ();
25945 break;
25946
25947 case TRANSLATION_UNIT_DECL:
25948 decl_die = comp_unit_die ();
25949 break;
25950
25951 default:
25952 gcc_unreachable ();
25953 }
25954
25955 /* We should be able to find the DIE now. */
25956 if (!decl_die)
25957 decl_die = lookup_decl_die (decl);
25958 gcc_assert (decl_die);
25959 }
25960
25961 return decl_die;
25962 }
25963
25964 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25965 always returned. */
25966
25967 static dw_die_ref
25968 force_type_die (tree type)
25969 {
25970 dw_die_ref type_die;
25971
25972 type_die = lookup_type_die (type);
25973 if (!type_die)
25974 {
25975 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25976
25977 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25978 false, context_die);
25979 gcc_assert (type_die);
25980 }
25981 return type_die;
25982 }
25983
25984 /* Force out any required namespaces to be able to output DECL,
25985 and return the new context_die for it, if it's changed. */
25986
25987 static dw_die_ref
25988 setup_namespace_context (tree thing, dw_die_ref context_die)
25989 {
25990 tree context = (DECL_P (thing)
25991 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25992 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25993 /* Force out the namespace. */
25994 context_die = force_decl_die (context);
25995
25996 return context_die;
25997 }
25998
25999 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26000 type) within its namespace, if appropriate.
26001
26002 For compatibility with older debuggers, namespace DIEs only contain
26003 declarations; all definitions are emitted at CU scope, with
26004 DW_AT_specification pointing to the declaration (like with class
26005 members). */
26006
26007 static dw_die_ref
26008 declare_in_namespace (tree thing, dw_die_ref context_die)
26009 {
26010 dw_die_ref ns_context;
26011
26012 if (debug_info_level <= DINFO_LEVEL_TERSE)
26013 return context_die;
26014
26015 /* External declarations in the local scope only need to be emitted
26016 once, not once in the namespace and once in the scope.
26017
26018 This avoids declaring the `extern' below in the
26019 namespace DIE as well as in the innermost scope:
26020
26021 namespace S
26022 {
26023 int i=5;
26024 int foo()
26025 {
26026 int i=8;
26027 extern int i;
26028 return i;
26029 }
26030 }
26031 */
26032 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26033 return context_die;
26034
26035 /* If this decl is from an inlined function, then don't try to emit it in its
26036 namespace, as we will get confused. It would have already been emitted
26037 when the abstract instance of the inline function was emitted anyway. */
26038 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26039 return context_die;
26040
26041 ns_context = setup_namespace_context (thing, context_die);
26042
26043 if (ns_context != context_die)
26044 {
26045 if (is_fortran ())
26046 return ns_context;
26047 if (DECL_P (thing))
26048 gen_decl_die (thing, NULL, NULL, ns_context);
26049 else
26050 gen_type_die (thing, ns_context);
26051 }
26052 return context_die;
26053 }
26054
26055 /* Generate a DIE for a namespace or namespace alias. */
26056
26057 static void
26058 gen_namespace_die (tree decl, dw_die_ref context_die)
26059 {
26060 dw_die_ref namespace_die;
26061
26062 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26063 they are an alias of. */
26064 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26065 {
26066 /* Output a real namespace or module. */
26067 context_die = setup_namespace_context (decl, comp_unit_die ());
26068 namespace_die = new_die (is_fortran ()
26069 ? DW_TAG_module : DW_TAG_namespace,
26070 context_die, decl);
26071 /* For Fortran modules defined in a different CU, don't add src coords. */
26072 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26073 {
26074 const char *name = dwarf2_name (decl, 0);
26075 if (name)
26076 add_name_attribute (namespace_die, name);
26077 }
26078 else
26079 add_name_and_src_coords_attributes (namespace_die, decl);
26080 if (DECL_EXTERNAL (decl))
26081 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26082 equate_decl_number_to_die (decl, namespace_die);
26083 }
26084 else
26085 {
26086 /* Output a namespace alias. */
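/* For example (illustrative only), a C++ declaration such as
   "namespace N = Some::Other::NS;" reaches this path.  */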
26087
26088 /* Force out the namespace we are an alias of, if necessary. */
26089 dw_die_ref origin_die
26090 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26091
26092 if (DECL_FILE_SCOPE_P (decl)
26093 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26094 context_die = setup_namespace_context (decl, comp_unit_die ());
26095 /* Now create the namespace alias DIE. */
26096 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26097 add_name_and_src_coords_attributes (namespace_die, decl);
26098 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26099 equate_decl_number_to_die (decl, namespace_die);
26100 }
26101 if ((dwarf_version >= 5 || !dwarf_strict)
26102 && lang_hooks.decls.decl_dwarf_attribute (decl,
26103 DW_AT_export_symbols) == 1)
26104 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26105
26106 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26107 if (want_pubnames ())
26108 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26109 }
26110
26111 /* Generate Dwarf debug information for a decl described by DECL.
26112 The return value is currently only meaningful for PARM_DECLs,
26113 for all other decls it returns NULL.
26114
26115 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26116 It can be NULL otherwise. */
26117
26118 static dw_die_ref
26119 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26120 dw_die_ref context_die)
26121 {
26122 tree decl_or_origin = decl ? decl : origin;
26123 tree class_origin = NULL, ultimate_origin;
26124
26125 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26126 return NULL;
26127
26128 switch (TREE_CODE (decl_or_origin))
26129 {
26130 case ERROR_MARK:
26131 break;
26132
26133 case CONST_DECL:
26134 if (!is_fortran () && !is_ada ())
26135 {
26136 /* The individual enumerators of an enum type get output when we output
26137 the Dwarf representation of the relevant enum type itself. */
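/* For illustration (not part of the original comment): given
   "enum e { RED, GREEN };", the DIEs for RED and GREEN are emitted
   as DW_TAG_enumerator children of the DW_TAG_enumeration_type
   DIE for e, so nothing needs to be done here.  */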
26138 break;
26139 }
26140
26141 /* Emit its type. */
26142 gen_type_die (TREE_TYPE (decl), context_die);
26143
26144 /* And its containing namespace. */
26145 context_die = declare_in_namespace (decl, context_die);
26146
26147 gen_const_die (decl, context_die);
26148 break;
26149
26150 case FUNCTION_DECL:
26151 #if 0
26152 /* FIXME */
26153 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26154 on local redeclarations of global functions. That seems broken. */
26155 if (current_function_decl != decl)
26156 /* This is only a declaration. */;
26157 #endif
26158
26159 /* We should have abstract copies already and should not generate
26160 stray type DIEs in late LTO dumping. */
26161 if (! early_dwarf)
26162 ;
26163
26164 /* If we're emitting a clone, emit info for the abstract instance. */
26165 else if (origin || DECL_ORIGIN (decl) != decl)
26166 dwarf2out_abstract_function (origin
26167 ? DECL_ORIGIN (origin)
26168 : DECL_ABSTRACT_ORIGIN (decl));
26169
26170 /* If we're emitting a possibly inlined function emit it as
26171 abstract instance. */
26172 else if (cgraph_function_possibly_inlined_p (decl)
26173 && ! DECL_ABSTRACT_P (decl)
26174 && ! class_or_namespace_scope_p (context_die)
26175 /* dwarf2out_abstract_function won't emit a die if this is just
26176 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26177 that case, because that works only if we have a die. */
26178 && DECL_INITIAL (decl) != NULL_TREE)
26179 dwarf2out_abstract_function (decl);
26180
26181 /* Otherwise we're emitting the primary DIE for this decl. */
26182 else if (debug_info_level > DINFO_LEVEL_TERSE)
26183 {
26184 /* Before we describe the FUNCTION_DECL itself, make sure that we
26185 have its containing type. */
26186 if (!origin)
26187 origin = decl_class_context (decl);
26188 if (origin != NULL_TREE)
26189 gen_type_die (origin, context_die);
26190
26191 /* And its return type. */
26192 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26193
26194 /* And its virtual context. */
26195 if (DECL_VINDEX (decl) != NULL_TREE)
26196 gen_type_die (DECL_CONTEXT (decl), context_die);
26197
26198 /* Make sure we have a member DIE for decl. */
26199 if (origin != NULL_TREE)
26200 gen_type_die_for_member (origin, decl, context_die);
26201
26202 /* And its containing namespace. */
26203 context_die = declare_in_namespace (decl, context_die);
26204 }
26205
26206 /* Now output a DIE to represent the function itself. */
26207 if (decl)
26208 gen_subprogram_die (decl, context_die);
26209 break;
26210
26211 case TYPE_DECL:
26212 /* If we are in terse mode, don't generate any DIEs to represent any
26213 actual typedefs. */
26214 if (debug_info_level <= DINFO_LEVEL_TERSE)
26215 break;
26216
26217 /* In the special case of a TYPE_DECL node representing the declaration
26218 of some type tag, if the given TYPE_DECL is marked as having been
26219 instantiated from some other (original) TYPE_DECL node (e.g. one which
26220 was generated within the original definition of an inline function) we
26221 used to generate a special (abbreviated) DW_TAG_structure_type,
26222 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26223 should actually be referencing those DIEs, as variable DIEs with that
26224 type would already have been emitted in the abstract origin, so it was
26225 always removed during unused type pruning. Don't add anything in this
26226 case. */
26227 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26228 break;
26229
26230 if (is_redundant_typedef (decl))
26231 gen_type_die (TREE_TYPE (decl), context_die);
26232 else
26233 /* Output a DIE to represent the typedef itself. */
26234 gen_typedef_die (decl, context_die);
26235 break;
26236
26237 case LABEL_DECL:
26238 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26239 gen_label_die (decl, context_die);
26240 break;
26241
26242 case VAR_DECL:
26243 case RESULT_DECL:
26244 /* If we are in terse mode, don't generate any DIEs to represent any
26245 variable declarations or definitions. */
26246 if (debug_info_level <= DINFO_LEVEL_TERSE)
26247 break;
26248
26249 /* Avoid generating stray type DIEs during late dwarf dumping.
26250 All types have been dumped early. */
26251 if (early_dwarf
26252 /* ??? But in LTRANS we cannot annotate early created variably
26253 modified type DIEs without copying them and adjusting all
26254 references to them. Dump them again as happens for inlining
26255 which copies both the decl and the types. */
26256 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26257 in VLA bound information for example. */
26258 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26259 current_function_decl)))
26260 {
26261 /* Output any DIEs that are needed to specify the type of this data
26262 object. */
26263 if (decl_by_reference_p (decl_or_origin))
26264 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26265 else
26266 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26267 }
26268
26269 if (early_dwarf)
26270 {
26271 /* And its containing type. */
26272 class_origin = decl_class_context (decl_or_origin);
26273 if (class_origin != NULL_TREE)
26274 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26275
26276 /* And its containing namespace. */
26277 context_die = declare_in_namespace (decl_or_origin, context_die);
26278 }
26279
26280 /* Now output the DIE to represent the data object itself. This gets
26281 complicated because of the possibility that the VAR_DECL really
26282 represents an inlined instance of a formal parameter for an inline
26283 function. */
26284 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26285 if (ultimate_origin != NULL_TREE
26286 && TREE_CODE (ultimate_origin) == PARM_DECL)
26287 gen_formal_parameter_die (decl, origin,
26288 true /* Emit name attribute. */,
26289 context_die);
26290 else
26291 gen_variable_die (decl, origin, context_die);
26292 break;
26293
26294 case FIELD_DECL:
26295 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26296 /* Ignore the nameless fields that are used to skip bits but handle C++
26297 anonymous unions and structs. */
26298 if (DECL_NAME (decl) != NULL_TREE
26299 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26300 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26301 {
26302 gen_type_die (member_declared_type (decl), context_die);
26303 gen_field_die (decl, ctx, context_die);
26304 }
26305 break;
26306
26307 case PARM_DECL:
26308 /* Avoid generating stray type DIEs during late dwarf dumping.
26309 All types have been dumped early. */
26310 if (early_dwarf
26311 /* ??? But in LTRANS we cannot annotate early created variably
26312 modified type DIEs without copying them and adjusting all
26313 references to them. Dump them again as happens for inlining
26314 which copies both the decl and the types. */
26315 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26316 in VLA bound information for example. */
26317 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26318 current_function_decl)))
26319 {
26320 if (DECL_BY_REFERENCE (decl_or_origin))
26321 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26322 else
26323 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26324 }
26325 return gen_formal_parameter_die (decl, origin,
26326 true /* Emit name attribute. */,
26327 context_die);
26328
26329 case NAMESPACE_DECL:
26330 if (dwarf_version >= 3 || !dwarf_strict)
26331 gen_namespace_die (decl, context_die);
26332 break;
26333
26334 case IMPORTED_DECL:
26335 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26336 DECL_CONTEXT (decl), context_die);
26337 break;
26338
26339 case NAMELIST_DECL:
26340 gen_namelist_decl (DECL_NAME (decl), context_die,
26341 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26342 break;
26343
26344 default:
26345 /* Probably some frontend-internal decl. Assume we don't care. */
26346 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26347 break;
26348 }
26349
26350 return NULL;
26351 }
26352 \f
26353 /* Output initial debug information for global DECL. Called at the
26354 end of the parsing process.
26355
26356 This is the initial debug generation process. As such, the DIEs
26357 generated may be incomplete. A later debug generation pass
26358 (dwarf2out_late_global_decl) will augment the information generated
26359 in this pass (e.g., with complete location info). */
26360
26361 static void
26362 dwarf2out_early_global_decl (tree decl)
26363 {
26364 set_early_dwarf s;
26365
26366 /* gen_decl_die() will set DECL_ABSTRACT because
26367 cgraph_function_possibly_inlined_p() returns true. This in
26368 turn will cause DW_AT_inline attributes to be set.
26369
26370 This happens because at early dwarf generation, there is no
26371 cgraph information, causing cgraph_function_possibly_inlined_p()
26372 to return true. Trick cgraph_function_possibly_inlined_p()
26373 while we generate dwarf early. */
26374 bool save = symtab->global_info_ready;
26375 symtab->global_info_ready = true;
26376
26377 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26378 other DECLs and they can point to template types or other things
26379 that dwarf2out can't handle when done via dwarf2out_decl. */
26380 if (TREE_CODE (decl) != TYPE_DECL
26381 && TREE_CODE (decl) != PARM_DECL)
26382 {
26383 if (TREE_CODE (decl) == FUNCTION_DECL)
26384 {
26385 tree save_fndecl = current_function_decl;
26386
26387 /* For nested functions, make sure we have DIEs for the parents first
26388 so that all nested DIEs are generated at the proper scope in the
26389 first shot. */
26390 tree context = decl_function_context (decl);
26391 if (context != NULL)
26392 {
26393 dw_die_ref context_die = lookup_decl_die (context);
26394 current_function_decl = context;
26395
26396 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26397 enough so that it lands in its own context. This avoids type
26398 pruning issues later on. */
26399 if (context_die == NULL || is_declaration_die (context_die))
26400 dwarf2out_decl (context);
26401 }
26402
26403 /* Emit the abstract origin of a function first. This happens
26404 with C++ constructor clones, for example, and keeps
26405 dwarf2out_abstract_function happy, since it requires the early
26406 DIE of the abstract instance to be present. */
26407 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26408 dw_die_ref origin_die;
26409 if (origin != NULL
26410 /* Do not emit the DIE multiple times but make sure to
26411 process it fully here in case we just saw a declaration. */
26412 && ((origin_die = lookup_decl_die (origin)) == NULL
26413 || is_declaration_die (origin_die)))
26414 {
26415 current_function_decl = origin;
26416 dwarf2out_decl (origin);
26417 }
26418
26419 /* Emit the DIE for decl but avoid doing that multiple times. */
26420 dw_die_ref old_die;
26421 if ((old_die = lookup_decl_die (decl)) == NULL
26422 || is_declaration_die (old_die))
26423 {
26424 current_function_decl = decl;
26425 dwarf2out_decl (decl);
26426 }
26427
26428 current_function_decl = save_fndecl;
26429 }
26430 else
26431 dwarf2out_decl (decl);
26432 }
26433 symtab->global_info_ready = save;
26434 }
26435
26436 /* Return whether EXPR is an expression with the following pattern:
26437 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
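/* In source terms this is roughly a dereference of an integer constant
   cast to a pointer, e.g. "*(int *) 0x1234" (an illustrative sketch,
   not an exhaustive characterization).  */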
26438
26439 static bool
26440 is_trivial_indirect_ref (tree expr)
26441 {
26442 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26443 return false;
26444
26445 tree nop = TREE_OPERAND (expr, 0);
26446 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26447 return false;
26448
26449 tree int_cst = TREE_OPERAND (nop, 0);
26450 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26451 }
26452
26453 /* Output debug information for global decl DECL. Called from
26454 toplev.c after compilation proper has finished. */
26455
26456 static void
26457 dwarf2out_late_global_decl (tree decl)
26458 {
26459 /* Fill-in any location information we were unable to determine
26460 on the first pass. */
26461 if (VAR_P (decl))
26462 {
26463 dw_die_ref die = lookup_decl_die (decl);
26464
26465 /* We may have to generate early debug late for LTO in case debug
26466 was not enabled at compile-time or the target doesn't support
26467 the LTO early debug scheme. */
26468 if (! die && in_lto_p)
26469 {
26470 dwarf2out_decl (decl);
26471 die = lookup_decl_die (decl);
26472 }
26473
26474 if (die)
26475 {
26476 /* We get called via the symtab code invoking late_global_decl
26477 for symbols that are optimized out.
26478
26479 Do not add locations for those, except if they have a
26480 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26481 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26482 INDIRECT_REF expression, as this could generate relocations to
26483 text symbols in LTO object files, which is invalid. */
26484 varpool_node *node = varpool_node::get (decl);
26485 if ((! node || ! node->definition)
26486 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26487 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26488 tree_add_const_value_attribute_for_decl (die, decl);
26489 else
26490 add_location_or_const_value_attribute (die, decl, false);
26491 }
26492 }
26493 }
26494
26495 /* Output debug information for type decl DECL. Called from toplev.c
26496 and from language front ends (to record built-in types). */
26497 static void
26498 dwarf2out_type_decl (tree decl, int local)
26499 {
26500 if (!local)
26501 {
26502 set_early_dwarf s;
26503 dwarf2out_decl (decl);
26504 }
26505 }
26506
26507 /* Output debug information for imported module or decl DECL.
26508 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26509 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26510 that DECL belongs to.
26511 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26512 static void
26513 dwarf2out_imported_module_or_decl_1 (tree decl,
26514 tree name,
26515 tree lexical_block,
26516 dw_die_ref lexical_block_die)
26517 {
26518 expanded_location xloc;
26519 dw_die_ref imported_die = NULL;
26520 dw_die_ref at_import_die;
26521
26522 if (TREE_CODE (decl) == IMPORTED_DECL)
26523 {
26524 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26525 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26526 gcc_assert (decl);
26527 }
26528 else
26529 xloc = expand_location (input_location);
26530
26531 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26532 {
26533 at_import_die = force_type_die (TREE_TYPE (decl));
26534 /* For namespace N { typedef void T; } using N::T; base_type_die
26535 returns NULL, but DW_TAG_imported_declaration requires
26536 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26537 if (!at_import_die)
26538 {
26539 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26540 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26541 at_import_die = lookup_type_die (TREE_TYPE (decl));
26542 gcc_assert (at_import_die);
26543 }
26544 }
26545 else
26546 {
26547 at_import_die = lookup_decl_die (decl);
26548 if (!at_import_die)
26549 {
26550 /* If we're trying to avoid duplicate debug info, we may not have
26551 emitted the member decl for this field. Emit it now. */
26552 if (TREE_CODE (decl) == FIELD_DECL)
26553 {
26554 tree type = DECL_CONTEXT (decl);
26555
26556 if (TYPE_CONTEXT (type)
26557 && TYPE_P (TYPE_CONTEXT (type))
26558 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26559 DINFO_USAGE_DIR_USE))
26560 return;
26561 gen_type_die_for_member (type, decl,
26562 get_context_die (TYPE_CONTEXT (type)));
26563 }
26564 if (TREE_CODE (decl) == NAMELIST_DECL)
26565 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26566 get_context_die (DECL_CONTEXT (decl)),
26567 NULL_TREE);
26568 else
26569 at_import_die = force_decl_die (decl);
26570 }
26571 }
26572
26573 if (TREE_CODE (decl) == NAMESPACE_DECL)
26574 {
26575 if (dwarf_version >= 3 || !dwarf_strict)
26576 imported_die = new_die (DW_TAG_imported_module,
26577 lexical_block_die,
26578 lexical_block);
26579 else
26580 return;
26581 }
26582 else
26583 imported_die = new_die (DW_TAG_imported_declaration,
26584 lexical_block_die,
26585 lexical_block);
26586
26587 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26588 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26589 if (debug_column_info && xloc.column)
26590 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26591 if (name)
26592 add_AT_string (imported_die, DW_AT_name,
26593 IDENTIFIER_POINTER (name));
26594 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26595 }
26596
26597 /* Output debug information for imported module or decl DECL.
26598 NAME is the non-NULL name in context if the decl has been renamed.
26599 CHILD is true if decl is one of the renamed decls as part of
26600 importing a whole module.
26601 IMPLICIT is set if this hook is called for an implicit import
26602 such as inline namespace. */
26603
26604 static void
26605 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26606 bool child, bool implicit)
26607 {
26608 /* dw_die_ref at_import_die; */
26609 dw_die_ref scope_die;
26610
26611 if (debug_info_level <= DINFO_LEVEL_TERSE)
26612 return;
26613
26614 gcc_assert (decl);
26615
26616 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26617 should be enough. For DWARF4 and older, even if we emit the
26618 DW_AT_export_symbols extension, add the implicit DW_TAG_imported_module
26619 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26620 if (implicit
26621 && dwarf_version >= 5
26622 && lang_hooks.decls.decl_dwarf_attribute (decl,
26623 DW_AT_export_symbols) == 1)
26624 return;
26625
26626 set_early_dwarf s;
26627
26628 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need two
26629 DIEs: the decl DIE for the reference and the scope DIE. First, get the DIE
26630 for the decl itself. */
26631
26632 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26633 module or decl. If no DIE is found for non-globals, force a new DIE. */
26634 if (context
26635 && TYPE_P (context)
26636 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26637 return;
26638
26639 scope_die = get_context_die (context);
26640
26641 if (child)
26642 {
26643 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26644 there is nothing we can do here. */
26645 if (dwarf_version < 3 && dwarf_strict)
26646 return;
26647
26648 gcc_assert (scope_die->die_child);
26649 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26650 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26651 scope_die = scope_die->die_child;
26652 }
26653
26654 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26655 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26656 }
26657
26658 /* Output debug information for namelists. */
26659
26660 static dw_die_ref
26661 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26662 {
26663 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26664 tree value;
26665 unsigned i;
26666
26667 if (debug_info_level <= DINFO_LEVEL_TERSE)
26668 return NULL;
26669
26670 gcc_assert (scope_die != NULL);
26671 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26672 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26673
26674 /* If there are no item_decls, we have a nondefining namelist, e.g.
26675 with USE association; hence, set DW_AT_declaration. */
26676 if (item_decls == NULL_TREE)
26677 {
26678 add_AT_flag (nml_die, DW_AT_declaration, 1);
26679 return nml_die;
26680 }
26681
26682 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26683 {
26684 nml_item_ref_die = lookup_decl_die (value);
26685 if (!nml_item_ref_die)
26686 nml_item_ref_die = force_decl_die (value);
26687
26688 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26689 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26690 }
26691 return nml_die;
26692 }
26693
26694
26695 /* Write the debugging output for DECL. */
26696
26697 static void
26698 dwarf2out_decl (tree decl)
26699 {
26700 dw_die_ref context_die = comp_unit_die ();
26701
26702 switch (TREE_CODE (decl))
26703 {
26704 case ERROR_MARK:
26705 return;
26706
26707 case FUNCTION_DECL:
26708 /* If we're a nested function, initially use a parent of NULL; if we're
26709 a plain function, this will be fixed up in decls_for_scope. If
26710 we're a method, it will be ignored, since we already have a DIE.
26711 Avoid doing this late though since clones of class methods may
26712 otherwise end up in limbo and create type DIEs late. */
26713 if (early_dwarf
26714 && decl_function_context (decl)
26715 /* But if we're in terse mode, we don't care about scope. */
26716 && debug_info_level > DINFO_LEVEL_TERSE)
26717 context_die = NULL;
26718 break;
26719
26720 case VAR_DECL:
26721 /* For local statics, look up the proper context DIE. */
26722 if (local_function_static (decl))
26723 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26724
26725 /* If we are in terse mode, don't generate any DIEs to represent any
26726 variable declarations or definitions. */
26727 if (debug_info_level <= DINFO_LEVEL_TERSE)
26728 return;
26729 break;
26730
26731 case CONST_DECL:
26732 if (debug_info_level <= DINFO_LEVEL_TERSE)
26733 return;
26734 if (!is_fortran () && !is_ada ())
26735 return;
26736 if (TREE_STATIC (decl) && decl_function_context (decl))
26737 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26738 break;
26739
26740 case NAMESPACE_DECL:
26741 case IMPORTED_DECL:
26742 if (debug_info_level <= DINFO_LEVEL_TERSE)
26743 return;
26744 if (lookup_decl_die (decl) != NULL)
26745 return;
26746 break;
26747
26748 case TYPE_DECL:
26749 /* Don't emit stubs for types unless they are needed by other DIEs. */
26750 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26751 return;
26752
26753 /* Don't bother trying to generate any DIEs to represent any of the
26754 normal built-in types for the language we are compiling. */
26755 if (DECL_IS_BUILTIN (decl))
26756 return;
26757
26758 /* If we are in terse mode, don't generate any DIEs for types. */
26759 if (debug_info_level <= DINFO_LEVEL_TERSE)
26760 return;
26761
26762 /* If we're a function-scope tag, initially use a parent of NULL;
26763 this will be fixed up in decls_for_scope. */
26764 if (decl_function_context (decl))
26765 context_die = NULL;
26766
26767 break;
26768
26769 case NAMELIST_DECL:
26770 break;
26771
26772 default:
26773 return;
26774 }
26775
26776 gen_decl_die (decl, NULL, NULL, context_die);
26777
26778 if (flag_checking)
26779 {
26780 dw_die_ref die = lookup_decl_die (decl);
26781 if (die)
26782 check_die (die);
26783 }
26784 }
26785
26786 /* Write the debugging output for DECL. */
26787
26788 static void
26789 dwarf2out_function_decl (tree decl)
26790 {
26791 dwarf2out_decl (decl);
26792 call_arg_locations = NULL;
26793 call_arg_loc_last = NULL;
26794 call_site_count = -1;
26795 tail_call_site_count = -1;
26796 decl_loc_table->empty ();
26797 cached_dw_loc_list_table->empty ();
26798 }
26799
26800 /* Output a marker (i.e. a label) for the beginning of the generated code for
26801 a lexical block. */
26802
26803 static void
26804 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26805 unsigned int blocknum)
26806 {
26807 switch_to_section (current_function_section ());
26808 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26809 }
26810
26811 /* Output a marker (i.e. a label) for the end of the generated code for a
26812 lexical block. */
26813
26814 static void
26815 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26816 {
26817 switch_to_section (current_function_section ());
26818 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26819 }
26820
26821 /* Returns nonzero if it is appropriate not to emit any debugging
26822 information for BLOCK, because it doesn't contain any instructions.
26823
26824 Don't allow this for blocks with nested functions or local classes
26825 as we would end up with orphans, and in the presence of scheduling
26826 we may end up calling them anyway. */
26827
26828 static bool
26829 dwarf2out_ignore_block (const_tree block)
26830 {
26831 tree decl;
26832 unsigned int i;
26833
26834 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26835 if (TREE_CODE (decl) == FUNCTION_DECL
26836 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26837 return 0;
26838 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26839 {
26840 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26841 if (TREE_CODE (decl) == FUNCTION_DECL
26842 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26843 return 0;
26844 }
26845
26846 return 1;
26847 }
26848
26849 /* Hash table routines for file_hash. */
26850
26851 bool
26852 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26853 {
26854 return filename_cmp (p1->filename, p2) == 0;
26855 }
26856
26857 hashval_t
26858 dwarf_file_hasher::hash (dwarf_file_data *p)
26859 {
26860 return htab_hash_string (p->filename);
26861 }
26862
26863 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26864 dwarf2out.c) and return its "index". The index of each (known) filename is
26865 just a unique number which is associated with only that one filename. We
26866 need such numbers for the sake of generating labels (in the .debug_sfnames
26867 section) and references to those files numbers (in the .debug_srcinfo
26868 and .debug_macinfo sections). If the filename given as an argument is not
26869 found in our current list, add it to the list and assign it the next
26870 available unique index number. */
26871
26872 static struct dwarf_file_data *
26873 lookup_filename (const char *file_name)
26874 {
26875 struct dwarf_file_data * created;
26876
26877 if (!file_name)
26878 return NULL;
26879
26880 dwarf_file_data **slot
26881 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26882 INSERT);
26883 if (*slot)
26884 return *slot;
26885
26886 created = ggc_alloc<dwarf_file_data> ();
26887 created->filename = file_name;
26888 created->emitted_number = 0;
26889 *slot = created;
26890 return created;
26891 }
26892
26893 /* If the assembler will construct the file table, then translate the compiler
26894 internal file table number into the assembler file table number, and emit
26895 a .file directive if we haven't already emitted one yet. The file table
26896 numbers are different because we prune debug info for unused variables and
26897 types, which may include filenames. */
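/* For illustration (with an assumed filename and number), the code below
   emits a directive of the form
	.file 2 "src/foo.c"
   where the quoting comes from output_quoted_string.  */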
26898
26899 static int
26900 maybe_emit_file (struct dwarf_file_data * fd)
26901 {
26902 if (! fd->emitted_number)
26903 {
26904 if (last_emitted_file)
26905 fd->emitted_number = last_emitted_file->emitted_number + 1;
26906 else
26907 fd->emitted_number = 1;
26908 last_emitted_file = fd;
26909
26910 if (output_asm_line_debug_info ())
26911 {
26912 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26913 output_quoted_string (asm_out_file,
26914 remap_debug_filename (fd->filename));
26915 fputc ('\n', asm_out_file);
26916 }
26917 }
26918
26919 return fd->emitted_number;
26920 }
26921
26922 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26923 That generation should happen after function debug info has been
26924 generated. The value of the attribute is the constant value of ARG. */
26925
26926 static void
26927 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26928 {
26929 die_arg_entry entry;
26930
26931 if (!die || !arg)
26932 return;
26933
26934 gcc_assert (early_dwarf);
26935
26936 if (!tmpl_value_parm_die_table)
26937 vec_alloc (tmpl_value_parm_die_table, 32);
26938
26939 entry.die = die;
26940 entry.arg = arg;
26941 vec_safe_push (tmpl_value_parm_die_table, entry);
26942 }
26943
26944 /* Return TRUE if T is an instance of a generic type, FALSE
26945 otherwise. */
26946
26947 static bool
26948 generic_type_p (tree t)
26949 {
26950 if (t == NULL_TREE || !TYPE_P (t))
26951 return false;
26952 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26953 }
26954
26955 /* Schedule the generation of the generic parameter dies for the
26956 instance of generic type T. The proper generation itself is later
26957 done by gen_scheduled_generic_parms_dies. */
26958
26959 static void
26960 schedule_generic_params_dies_gen (tree t)
26961 {
26962 if (!generic_type_p (t))
26963 return;
26964
26965 gcc_assert (early_dwarf);
26966
26967 if (!generic_type_instances)
26968 vec_alloc (generic_type_instances, 256);
26969
26970 vec_safe_push (generic_type_instances, t);
26971 }
26972
26973 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26974 by append_entry_to_tmpl_value_parm_die_table. This function must
26975 be called after function DIEs have been generated. */
26976
26977 static void
26978 gen_remaining_tmpl_value_param_die_attribute (void)
26979 {
26980 if (tmpl_value_parm_die_table)
26981 {
26982 unsigned i, j;
26983 die_arg_entry *e;
26984
26985 /* We do this in two phases - first get the cases we can
26986 handle during early-finish, preserving those we cannot
26987 (containing symbolic constants where we don't yet know
26988 whether we are going to output the referenced symbols).
26989 For those we try again at late-finish. */
26990 j = 0;
26991 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26992 {
26993 if (!e->die->removed
26994 && !tree_add_const_value_attribute (e->die, e->arg))
26995 {
26996 dw_loc_descr_ref loc = NULL;
26997 if (! early_dwarf
26998 && (dwarf_version >= 5 || !dwarf_strict))
26999 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27000 if (loc)
27001 add_AT_loc (e->die, DW_AT_location, loc);
27002 else
27003 (*tmpl_value_parm_die_table)[j++] = *e;
27004 }
27005 }
27006 tmpl_value_parm_die_table->truncate (j);
27007 }
27008 }
27009
27010 /* Generate generic parameters DIEs for instances of generic types
27011 that have been previously scheduled by
27012 schedule_generic_params_dies_gen. This function must be called
27013 after all the types of the CU have been laid out. */
27014
27015 static void
27016 gen_scheduled_generic_parms_dies (void)
27017 {
27018 unsigned i;
27019 tree t;
27020
27021 if (!generic_type_instances)
27022 return;
27023
27024 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27025 if (COMPLETE_TYPE_P (t))
27026 gen_generic_params_dies (t);
27027
27028 generic_type_instances = NULL;
27029 }
27030
27031
27032 /* Replace DW_AT_name for the decl with name. */
27033
27034 static void
27035 dwarf2out_set_name (tree decl, tree name)
27036 {
27037 dw_die_ref die;
27038 dw_attr_node *attr;
27039 const char *dname;
27040
27041 die = TYPE_SYMTAB_DIE (decl);
27042 if (!die)
27043 return;
27044
27045 dname = dwarf2_name (name, 0);
27046 if (!dname)
27047 return;
27048
27049 attr = get_AT (die, DW_AT_name);
27050 if (attr)
27051 {
27052 struct indirect_string_node *node;
27053
27054 node = find_AT_string (dname);
27055 /* Replace the string. */
27056 attr->dw_attr_val.v.val_str = node;
27057 }
27058
27059 else
27060 add_name_attribute (die, dname);
27061 }
27062
27063 /* True if before or during processing of the first function being emitted. */
27064 static bool in_first_function_p = true;
27065 /* True if loc_note during dwarf2out_var_location call might still be
27066 before first real instruction at address equal to .Ltext0. */
27067 static bool maybe_at_text_label_p = true;
27068 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27069 static unsigned int first_loclabel_num_not_at_text_label;
27070
27071 /* Look ahead for a real insn, or for a begin stmt marker. */
27072
27073 static rtx_insn *
27074 dwarf2out_next_real_insn (rtx_insn *loc_note)
27075 {
27076 rtx_insn *next_real = NEXT_INSN (loc_note);
27077
27078 while (next_real)
27079 if (INSN_P (next_real))
27080 break;
27081 else
27082 next_real = NEXT_INSN (next_real);
27083
27084 return next_real;
27085 }
27086
27087 /* Called by the final INSN scan whenever we see a var location. We
27088 use it to drop labels in the right places, and throw the location in
27089 our lookup table. */
27090
27091 static void
27092 dwarf2out_var_location (rtx_insn *loc_note)
27093 {
27094 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27095 struct var_loc_node *newloc;
27096 rtx_insn *next_real, *next_note;
27097 rtx_insn *call_insn = NULL;
27098 static const char *last_label;
27099 static const char *last_postcall_label;
27100 static bool last_in_cold_section_p;
27101 static rtx_insn *expected_next_loc_note;
27102 tree decl;
27103 bool var_loc_p;
27104 var_loc_view view = 0;
27105
27106 if (!NOTE_P (loc_note))
27107 {
27108 if (CALL_P (loc_note))
27109 {
27110 maybe_reset_location_view (loc_note, cur_line_info_table);
27111 call_site_count++;
27112 if (SIBLING_CALL_P (loc_note))
27113 tail_call_site_count++;
27114 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27115 {
27116 call_insn = loc_note;
27117 loc_note = NULL;
27118 var_loc_p = false;
27119
27120 next_real = dwarf2out_next_real_insn (call_insn);
27121 next_note = NULL;
27122 cached_next_real_insn = NULL;
27123 goto create_label;
27124 }
27125 if (optimize == 0 && !flag_var_tracking)
27126 {
27127 /* When the var-tracking pass is not running, there is no note
27128 for indirect calls whose target is compile-time known. In this
27129 case, process such calls specifically so that we generate call
27130 sites for them anyway. */
27131 rtx x = PATTERN (loc_note);
27132 if (GET_CODE (x) == PARALLEL)
27133 x = XVECEXP (x, 0, 0);
27134 if (GET_CODE (x) == SET)
27135 x = SET_SRC (x);
27136 if (GET_CODE (x) == CALL)
27137 x = XEXP (x, 0);
27138 if (!MEM_P (x)
27139 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27140 || !SYMBOL_REF_DECL (XEXP (x, 0))
27141 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27142 != FUNCTION_DECL))
27143 {
27144 call_insn = loc_note;
27145 loc_note = NULL;
27146 var_loc_p = false;
27147
27148 next_real = dwarf2out_next_real_insn (call_insn);
27149 next_note = NULL;
27150 cached_next_real_insn = NULL;
27151 goto create_label;
27152 }
27153 }
27154 }
27155 else if (!debug_variable_location_views)
27156 gcc_unreachable ();
27157 else
27158 maybe_reset_location_view (loc_note, cur_line_info_table);
27159
27160 return;
27161 }
27162
27163 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27164 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27165 return;
27166
27167 /* Optimize processing a large consecutive sequence of location
27168 notes so we don't spend too much time in next_real_insn. If the
27169 next insn is another location note, remember the next_real_insn
27170 calculation for next time. */
27171 next_real = cached_next_real_insn;
27172 if (next_real)
27173 {
27174 if (expected_next_loc_note != loc_note)
27175 next_real = NULL;
27176 }
27177
27178 next_note = NEXT_INSN (loc_note);
27179 if (! next_note
27180 || next_note->deleted ()
27181 || ! NOTE_P (next_note)
27182 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27183 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27184 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27185 next_note = NULL;
27186
27187 if (! next_real)
27188 next_real = dwarf2out_next_real_insn (loc_note);
27189
27190 if (next_note)
27191 {
27192 expected_next_loc_note = next_note;
27193 cached_next_real_insn = next_real;
27194 }
27195 else
27196 cached_next_real_insn = NULL;
27197
27198 /* If there are no instructions which would be affected by this note,
27199 don't do anything. */
27200 if (var_loc_p
27201 && next_real == NULL_RTX
27202 && !NOTE_DURING_CALL_P (loc_note))
27203 return;
27204
27205 create_label:
27206
27207 if (next_real == NULL_RTX)
27208 next_real = get_last_insn ();
27209
27210 /* If there were any real insns between the note we processed last time
27211 and this note (or if it is the first note), clear
27212 last_{,postcall_}label so that they are not reused this time. */
27213 if (last_var_location_insn == NULL_RTX
27214 || last_var_location_insn != next_real
27215 || last_in_cold_section_p != in_cold_section_p)
27216 {
27217 last_label = NULL;
27218 last_postcall_label = NULL;
27219 }
27220
27221 if (var_loc_p)
27222 {
27223 const char *label
27224 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27225 view = cur_line_info_table->view;
27226 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27227 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27228 if (newloc == NULL)
27229 return;
27230 }
27231 else
27232 {
27233 decl = NULL_TREE;
27234 newloc = NULL;
27235 }
27236
27237 /* If there were no real insns between the note we processed last time
27238 and this note, use the label we emitted last time. Otherwise
27239 create a new label and emit it. */
27240 if (last_label == NULL)
27241 {
27242 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27243 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27244 loclabel_num++;
27245 last_label = ggc_strdup (loclabel);
27246 /* See if loclabel might be equal to .Ltext0. If yes,
27247 bump first_loclabel_num_not_at_text_label. */
27248 if (!have_multiple_function_sections
27249 && in_first_function_p
27250 && maybe_at_text_label_p)
27251 {
27252 static rtx_insn *last_start;
27253 rtx_insn *insn;
27254 for (insn = loc_note; insn; insn = previous_insn (insn))
27255 if (insn == last_start)
27256 break;
27257 else if (!NONDEBUG_INSN_P (insn))
27258 continue;
27259 else
27260 {
27261 rtx body = PATTERN (insn);
27262 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27263 continue;
27264 /* Inline asm could occupy zero bytes. */
27265 else if (GET_CODE (body) == ASM_INPUT
27266 || asm_noperands (body) >= 0)
27267 continue;
27268 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27269 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27270 continue;
27271 #endif
27272 else
27273 {
27274 /* Assume insn has non-zero length. */
27275 maybe_at_text_label_p = false;
27276 break;
27277 }
27278 }
27279 if (maybe_at_text_label_p)
27280 {
27281 last_start = loc_note;
27282 first_loclabel_num_not_at_text_label = loclabel_num;
27283 }
27284 }
27285 }
27286
27287 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27288 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27289
27290 if (!var_loc_p)
27291 {
27292 struct call_arg_loc_node *ca_loc
27293 = ggc_cleared_alloc<call_arg_loc_node> ();
27294 rtx_insn *prev = call_insn;
27295
27296 ca_loc->call_arg_loc_note
27297 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27298 ca_loc->next = NULL;
27299 ca_loc->label = last_label;
27300 gcc_assert (prev
27301 && (CALL_P (prev)
27302 || (NONJUMP_INSN_P (prev)
27303 && GET_CODE (PATTERN (prev)) == SEQUENCE
27304 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27305 if (!CALL_P (prev))
27306 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27307 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27308
27309 /* Look for a SYMBOL_REF in the "prev" instruction. */
27310 rtx x = get_call_rtx_from (PATTERN (prev));
27311 if (x)
27312 {
27313 /* Try to get the call symbol, if any. */
27314 if (MEM_P (XEXP (x, 0)))
27315 x = XEXP (x, 0);
27316 /* First, look for a memory access to a symbol_ref. */
27317 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27318 && SYMBOL_REF_DECL (XEXP (x, 0))
27319 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27320 ca_loc->symbol_ref = XEXP (x, 0);
27321 /* Otherwise, look at a compile-time known user-level function
27322 declaration. */
27323 else if (MEM_P (x)
27324 && MEM_EXPR (x)
27325 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27326 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27327 }
27328
27329 ca_loc->block = insn_scope (prev);
27330 if (call_arg_locations)
27331 call_arg_loc_last->next = ca_loc;
27332 else
27333 call_arg_locations = ca_loc;
27334 call_arg_loc_last = ca_loc;
27335 }
27336 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27337 {
27338 newloc->label = last_label;
27339 newloc->view = view;
27340 }
27341 else
27342 {
27343 if (!last_postcall_label)
27344 {
27345 sprintf (loclabel, "%s-1", last_label);
27346 last_postcall_label = ggc_strdup (loclabel);
27347 }
27348 newloc->label = last_postcall_label;
27349 /* ??? This view is at last_label, not last_label-1, but we
27350 could only assume view at last_label-1 is zero if we could
27351 assume calls always have length greater than one. This is
27352 probably true in general, though there might be a rare
27353 exception to this rule, e.g. if a call insn is optimized out
27354 by target magic. Then, even the -1 in the label will be
27355 wrong, which might invalidate the range. Anyway, using view,
27356 though technically possibly incorrect, will work as far as
27357 ranges go: since L-1 is in the middle of the call insn,
27358 (L-1).0 and (L-1).V shouldn't make any difference, and having
27359 the loclist entry refer to the .loc entry might be useful, so
27360 leave it like this. */
27361 newloc->view = view;
27362 }
27363
27364 if (var_loc_p && flag_debug_asm)
27365 {
27366 const char *name, *sep, *patstr;
27367 if (decl && DECL_NAME (decl))
27368 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27369 else
27370 name = "";
27371 if (NOTE_VAR_LOCATION_LOC (loc_note))
27372 {
27373 sep = " => ";
27374 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27375 }
27376 else
27377 {
27378 sep = " ";
27379 patstr = "RESET";
27380 }
27381 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27382 name, sep, patstr);
27383 }
27384
27385 last_var_location_insn = next_real;
27386 last_in_cold_section_p = in_cold_section_p;
27387 }
27388
27389 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27390 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27391 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27392 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27393 BLOCK_FRAGMENT_ORIGIN links. */
27394 static bool
27395 block_within_block_p (tree block, tree outer, bool bothways)
27396 {
27397 if (block == outer)
27398 return true;
27399
27400 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27401 for (tree context = BLOCK_SUPERCONTEXT (block);
27402 context != outer;
27403 context = BLOCK_SUPERCONTEXT (context))
27404 if (!context || TREE_CODE (context) != BLOCK)
27405 return false;
27406
27407 if (!bothways)
27408 return true;
27409
27410 /* Now check that each block is actually referenced by its
27411 parent. */
27412 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27413 context = BLOCK_SUPERCONTEXT (context))
27414 {
27415 if (BLOCK_FRAGMENT_ORIGIN (context))
27416 {
27417 gcc_assert (!BLOCK_SUBBLOCKS (context));
27418 context = BLOCK_FRAGMENT_ORIGIN (context);
27419 }
27420 for (tree sub = BLOCK_SUBBLOCKS (context);
27421 sub != block;
27422 sub = BLOCK_CHAIN (sub))
27423 if (!sub)
27424 return false;
27425 if (context == outer)
27426 return true;
27427 else
27428 block = context;
27429 }
27430 }
27431
27432 /* Called during final while assembling the marker of the entry point
27433 for an inlined function. */
27434
27435 static void
27436 dwarf2out_inline_entry (tree block)
27437 {
27438 gcc_assert (debug_inline_points);
27439
27440 /* If we can't represent it, don't bother. */
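  /* (That is, give up only when emitting strict DWARF older than
     version 3.)  */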
27441 if (!(dwarf_version >= 3 || !dwarf_strict))
27442 return;
27443
27444 gcc_assert (DECL_P (block_ultimate_origin (block)));
27445
27446 /* Sanity check the block tree. This would catch a case in which
27447 BLOCK got removed from the tree reachable from the outermost
27448 lexical block, but got retained in markers. It would still link
27449 back to its parents, but some ancestor would be missing a link
27450 down the path to the sub BLOCK. If the block got removed, its
27451 BLOCK_NUMBER will not be a usable value. */
27452 if (flag_checking)
27453 gcc_assert (block_within_block_p (block,
27454 DECL_INITIAL (current_function_decl),
27455 true));
27456
27457 gcc_assert (inlined_function_outer_scope_p (block));
27458 gcc_assert (!BLOCK_DIE (block));
27459
27460 if (BLOCK_FRAGMENT_ORIGIN (block))
27461 block = BLOCK_FRAGMENT_ORIGIN (block);
27462 /* Can the entry point ever not be at the beginning of an
27463 unfragmented lexical block? */
27464 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27465 || (cur_line_info_table
27466 && !ZERO_VIEW_P (cur_line_info_table->view))))
27467 return;
27468
27469 if (!inline_entry_data_table)
27470 inline_entry_data_table
27471 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27472
27473
27474 inline_entry_data **iedp
27475 = inline_entry_data_table->find_slot_with_hash (block,
27476 htab_hash_pointer (block),
27477 INSERT);
27478 if (*iedp)
27479 /* ??? Ideally, we'd record all entry points for the same inlined
27480 function (some may have been duplicated by e.g. unrolling), but
27481 we have no way to represent that ATM. */
27482 return;
27483
27484 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27485 ied->block = block;
27486 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27487 ied->label_num = BLOCK_NUMBER (block);
27488 if (cur_line_info_table)
27489 ied->view = cur_line_info_table->view;
27490
27491 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27492
27493 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27494 BLOCK_NUMBER (block));
27495 ASM_OUTPUT_LABEL (asm_out_file, label);
27496 }
27497
27498 /* Called from finalize_size_functions for size functions so that their body
27499 can be encoded in the debug info to describe the layout of variable-length
27500 structures. */
27501
27502 static void
27503 dwarf2out_size_function (tree decl)
27504 {
27505 function_to_dwarf_procedure (decl);
27506 }
27507
27508 /* Note in one location list that text section has changed. */
27509
27510 int
27511 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27512 {
27513 var_loc_list *list = *slot;
27514 if (list->first)
27515 list->last_before_switch
27516 = list->last->next ? list->last->next : list->last;
27517 return 1;
27518 }
27519
27520 /* Note in all location lists that text section has changed. */
27521
27522 static void
27523 var_location_switch_text_section (void)
27524 {
27525 if (decl_loc_table == NULL)
27526 return;
27527
27528 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27529 }
27530
27531 /* Create a new line number table. */
27532
27533 static dw_line_info_table *
27534 new_line_info_table (void)
27535 {
27536 dw_line_info_table *table;
27537
27538 table = ggc_cleared_alloc<dw_line_info_table> ();
27539 table->file_num = 1;
27540 table->line_num = 1;
27541 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
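  /* Start each new table with a forced view reset, so its first row is
     known to get view number zero.  */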
27542 FORCE_RESET_NEXT_VIEW (table->view);
27543 table->symviews_since_reset = 0;
27544
27545 return table;
27546 }
27547
27548 /* Look up the "current" table into which we emit line info, so
27549 that we don't have to do it for every source line. */
27550
27551 static void
27552 set_cur_line_info_table (section *sec)
27553 {
27554 dw_line_info_table *table;
27555
27556 if (sec == text_section)
27557 table = text_section_line_info;
27558 else if (sec == cold_text_section)
27559 {
27560 table = cold_text_section_line_info;
27561 if (!table)
27562 {
27563 cold_text_section_line_info = table = new_line_info_table ();
27564 table->end_label = cold_end_label;
27565 }
27566 }
27567 else
27568 {
27569 const char *end_label;
27570
27571 if (crtl->has_bb_partition)
27572 {
27573 if (in_cold_section_p)
27574 end_label = crtl->subsections.cold_section_end_label;
27575 else
27576 end_label = crtl->subsections.hot_section_end_label;
27577 }
27578 else
27579 {
27580 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27581 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27582 current_function_funcdef_no);
27583 end_label = ggc_strdup (label);
27584 }
27585
27586 table = new_line_info_table ();
27587 table->end_label = end_label;
27588
27589 vec_safe_push (separate_line_info, table);
27590 }
27591
27592 if (output_asm_line_debug_info ())
27593 table->is_stmt = (cur_line_info_table
27594 ? cur_line_info_table->is_stmt
27595 : DWARF_LINE_DEFAULT_IS_STMT_START);
27596 cur_line_info_table = table;
27597 }
27598
27599
27600 /* We need to reset the locations at the beginning of each
27601 function. We can't do this in the end_function hook, because the
27602 declarations that use the locations won't have been output when
27603 that hook is called. Also compute have_multiple_function_sections here. */
27604
27605 static void
27606 dwarf2out_begin_function (tree fun)
27607 {
27608 section *sec = function_section (fun);
27609
27610 if (sec != text_section)
27611 have_multiple_function_sections = true;
27612
27613 if (crtl->has_bb_partition && !cold_text_section)
27614 {
27615 gcc_assert (current_function_decl == fun);
27616 cold_text_section = unlikely_text_section ();
27617 switch_to_section (cold_text_section);
27618 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27619 switch_to_section (sec);
27620 }
27621
27622 dwarf2out_note_section_used ();
27623 call_site_count = 0;
27624 tail_call_site_count = 0;
27625
27626 set_cur_line_info_table (sec);
27627 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27628 }
27629
27630 /* Helper function of dwarf2out_end_function, called only after emitting
27631 the very first function into assembly. Check if some .debug_loc range
27632 might end with a .LVL* label that could be equal to .Ltext0.
27633 In that case we must force using absolute addresses in .debug_loc ranges,
27634 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27635 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27636 list terminator.
27637 Set have_multiple_function_sections to true in that case and
27638 terminate htab traversal. */
27639
27640 int
27641 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27642 {
27643 var_loc_list *entry = *slot;
27644 struct var_loc_node *node;
27645
27646 node = entry->first;
27647 if (node && node->next && node->next->label)
27648 {
27649 unsigned int i;
27650 const char *label = node->next->label;
27651 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27652
27653 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27654 {
27655 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27656 if (strcmp (label, loclabel) == 0)
27657 {
27658 have_multiple_function_sections = true;
27659 return 0;
27660 }
27661 }
27662 }
27663 return 1;
27664 }
27665
27666 /* Hook called after emitting a function into assembly.
27667 This does something only for the very first function emitted. */
27668
27669 static void
27670 dwarf2out_end_function (unsigned int)
27671 {
27672 if (in_first_function_p
27673 && !have_multiple_function_sections
27674 && first_loclabel_num_not_at_text_label
27675 && decl_loc_table)
27676 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27677 in_first_function_p = false;
27678 maybe_at_text_label_p = false;
27679 }
27680
27681 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27682 front-ends register a translation unit even before dwarf2out_init is
27683 called. */
27684 static tree main_translation_unit = NULL_TREE;
27685
27686 /* Hook called by front-ends after they have built their main translation
27687    unit.  Associate comp_unit_die with UNIT.  */
27688
27689 static void
27690 dwarf2out_register_main_translation_unit (tree unit)
27691 {
27692 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27693 && main_translation_unit == NULL_TREE);
27694 main_translation_unit = unit;
27695 /* If dwarf2out_init has not been called yet, it will perform the association
27696 itself looking at main_translation_unit. */
27697 if (decl_die_table != NULL)
27698 equate_decl_number_to_die (unit, comp_unit_die ());
27699 }
27700
27701 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27702
27703 static void
27704 push_dw_line_info_entry (dw_line_info_table *table,
27705 enum dw_line_info_opcode opcode, unsigned int val)
27706 {
27707 dw_line_info_entry e;
27708 e.opcode = opcode;
27709 e.val = val;
27710 vec_safe_push (table->entries, e);
27711 }
27712
27713 /* Output a label to mark the beginning of a source code line entry
27714 and record information relating to this source line, in
27715 'line_info_table' for later output of the .debug_line section. */
27716 /* ??? The discriminator parameter ought to be unsigned. */
27717
27718 static void
27719 dwarf2out_source_line (unsigned int line, unsigned int column,
27720 const char *filename,
27721 int discriminator, bool is_stmt)
27722 {
27723 unsigned int file_num;
27724 dw_line_info_table *table;
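  /* Monotonically increasing id used to number the .LVU view labels we
     hand out whenever the assembler is left to compute the actual view
     values.  */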
27725 static var_loc_view lvugid;
27726
27727 if (debug_info_level < DINFO_LEVEL_TERSE)
27728 return;
27729
27730 table = cur_line_info_table;
27731
27732 if (line == 0)
27733 {
27734 if (debug_variable_location_views
27735 && output_asm_line_debug_info ()
27736 && table && !RESETTING_VIEW_P (table->view))
27737 {
27738 /* If we're using the assembler to compute view numbers, we
27739 can't issue a .loc directive for line zero, so we can't
27740 get a view number at this point. We might attempt to
27741 compute it from the previous view, or equate it to a
27742 subsequent view (though it might not be there!), but
27743 since we're omitting the line number entry, we might as
27744 well omit the view number as well. That means pretending
27745 it's a view number zero, which might very well turn out
27746 to be correct. ??? Extend the assembler so that the
27747 compiler could emit e.g. ".locview .LVU#", to output a
27748 view without changing line number information. We'd then
27749 have to count it in symviews_since_reset; when it's omitted,
27750 it doesn't count. */
27751 if (!zero_view_p)
27752 zero_view_p = BITMAP_GGC_ALLOC ();
27753 bitmap_set_bit (zero_view_p, table->view);
27754 if (flag_debug_asm)
27755 {
27756 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27757 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27758 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27759 ASM_COMMENT_START);
27760 assemble_name (asm_out_file, label);
27761 putc ('\n', asm_out_file);
27762 }
27763 table->view = ++lvugid;
27764 }
27765 return;
27766 }
27767
27768   /* The discriminator column was added in dwarf4.  Simplify the code
27769      below by removing it if we're not supposed to output it.  */
27770 if (dwarf_version < 4 && dwarf_strict)
27771 discriminator = 0;
27772
27773 if (!debug_column_info)
27774 column = 0;
27775
27776 file_num = maybe_emit_file (lookup_filename (filename));
27777
27778 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27779 the debugger has used the second (possibly duplicate) line number
27780 at the beginning of the function to mark the end of the prologue.
27781 We could eliminate any other duplicates within the function. For
27782 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27783 that second line number entry. */
27784 /* Recall that this end-of-prologue indication is *not* the same thing
27785 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27786 to which the hook corresponds, follows the last insn that was
27787 emitted by gen_prologue. What we need is to precede the first insn
27788 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27789 insn that corresponds to something the user wrote. These may be
27790 very different locations once scheduling is enabled. */
27791
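  /* Note that this duplicate-elision check is currently disabled by the
     "0 &&" below; see the TODO above.  */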
27792 if (0 && file_num == table->file_num
27793 && line == table->line_num
27794 && column == table->column_num
27795 && discriminator == table->discrim_num
27796 && is_stmt == table->is_stmt)
27797 return;
27798
27799 switch_to_section (current_function_section ());
27800
27801 /* If requested, emit something human-readable. */
27802 if (flag_debug_asm)
27803 {
27804 if (debug_column_info)
27805 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27806 filename, line, column);
27807 else
27808 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27809 filename, line);
27810 }
27811
27812 if (output_asm_line_debug_info ())
27813 {
27814 /* Emit the .loc directive understood by GNU as. */
27815       /* "\t.loc %u %u %u is_stmt %u discriminator %u",
27816 	 file_num, line, column, is_stmt, discriminator */
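      /* With purely hypothetical values, the emitted directive might
	 look like
	   .loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5
	 where the is_stmt, discriminator and view operands are only
	 appended when the code below decides they are needed.  */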
27817 fputs ("\t.loc ", asm_out_file);
27818 fprint_ul (asm_out_file, file_num);
27819 putc (' ', asm_out_file);
27820 fprint_ul (asm_out_file, line);
27821 putc (' ', asm_out_file);
27822 fprint_ul (asm_out_file, column);
27823
27824 if (is_stmt != table->is_stmt)
27825 {
27826 fputs (" is_stmt ", asm_out_file);
27827 putc (is_stmt ? '1' : '0', asm_out_file);
27828 }
27829 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27830 {
27831 gcc_assert (discriminator > 0);
27832 fputs (" discriminator ", asm_out_file);
27833 fprint_ul (asm_out_file, (unsigned long) discriminator);
27834 }
27835 if (debug_variable_location_views)
27836 {
27837 if (!RESETTING_VIEW_P (table->view))
27838 {
27839 table->symviews_since_reset++;
27840 if (table->symviews_since_reset > symview_upper_bound)
27841 symview_upper_bound = table->symviews_since_reset;
27842 /* When we're using the assembler to compute view
27843 numbers, we output symbolic labels after "view" in
27844 .loc directives, and the assembler will set them for
27845 us, so that we can refer to the view numbers in
27846 location lists. The only exceptions are when we know
27847 a view will be zero: "-0" is a forced reset, used
27848 e.g. in the beginning of functions, whereas "0" tells
27849 the assembler to check that there was a PC change
27850 since the previous view, in a way that implicitly
27851 resets the next view. */
27852 fputs (" view ", asm_out_file);
27853 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27854 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27855 assemble_name (asm_out_file, label);
27856 table->view = ++lvugid;
27857 }
27858 else
27859 {
27860 table->symviews_since_reset = 0;
27861 if (FORCE_RESETTING_VIEW_P (table->view))
27862 fputs (" view -0", asm_out_file);
27863 else
27864 fputs (" view 0", asm_out_file);
27865 /* Mark the present view as a zero view. Earlier debug
27866 binds may have already added its id to loclists to be
27867 emitted later, so we can't reuse the id for something
27868 else. However, it's good to know whether a view is
27869 known to be zero, because then we may be able to
27870 optimize out locviews that are all zeros, so take
27871 note of it in zero_view_p. */
27872 if (!zero_view_p)
27873 zero_view_p = BITMAP_GGC_ALLOC ();
27874 bitmap_set_bit (zero_view_p, lvugid);
27875 table->view = ++lvugid;
27876 }
27877 }
27878 putc ('\n', asm_out_file);
27879 }
27880 else
27881 {
27882 unsigned int label_num = ++line_info_label_num;
27883
27884 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27885
27886 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27887 push_dw_line_info_entry (table, LI_adv_address, label_num);
27888 else
27889 push_dw_line_info_entry (table, LI_set_address, label_num);
27890 if (debug_variable_location_views)
27891 {
27892 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27893 if (resetting)
27894 table->view = 0;
27895
27896 if (flag_debug_asm)
27897 fprintf (asm_out_file, "\t%s view %s%d\n",
27898 ASM_COMMENT_START,
27899 resetting ? "-" : "",
27900 table->view);
27901
27902 table->view++;
27903 }
27904 if (file_num != table->file_num)
27905 push_dw_line_info_entry (table, LI_set_file, file_num);
27906 if (discriminator != table->discrim_num)
27907 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27908 if (is_stmt != table->is_stmt)
27909 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27910 push_dw_line_info_entry (table, LI_set_line, line);
27911 if (debug_column_info)
27912 push_dw_line_info_entry (table, LI_set_column, column);
27913 }
27914
27915 table->file_num = file_num;
27916 table->line_num = line;
27917 table->column_num = column;
27918 table->discrim_num = discriminator;
27919 table->is_stmt = is_stmt;
27920 table->in_use = true;
27921 }
27922
27923 /* Record the beginning of a new source file. */
27924
27925 static void
27926 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27927 {
27928 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27929 {
27930 macinfo_entry e;
27931 e.code = DW_MACINFO_start_file;
27932 e.lineno = lineno;
27933 e.info = ggc_strdup (filename);
27934 vec_safe_push (macinfo_table, e);
27935 }
27936 }
27937
27938 /* Record the end of a source file. */
27939
27940 static void
27941 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27942 {
27943 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27944 {
27945 macinfo_entry e;
27946 e.code = DW_MACINFO_end_file;
27947 e.lineno = lineno;
27948 e.info = NULL;
27949 vec_safe_push (macinfo_table, e);
27950 }
27951 }
27952
27953 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27954 the tail part of the directive line, i.e. the part which is past the
27955 initial whitespace, #, whitespace, directive-name, whitespace part. */
27956
27957 static void
27958 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27959 const char *buffer ATTRIBUTE_UNUSED)
27960 {
27961 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27962 {
27963 macinfo_entry e;
27964 /* Insert a dummy first entry to be able to optimize the whole
27965 predefined macro block using DW_MACRO_import. */
27966 if (macinfo_table->is_empty () && lineno <= 1)
27967 {
27968 e.code = 0;
27969 e.lineno = 0;
27970 e.info = NULL;
27971 vec_safe_push (macinfo_table, e);
27972 }
27973 e.code = DW_MACINFO_define;
27974 e.lineno = lineno;
27975 e.info = ggc_strdup (buffer);
27976 vec_safe_push (macinfo_table, e);
27977 }
27978 }
27979
27980 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27981 the tail part of the directive line, i.e. the part which is past the
27982 initial whitespace, #, whitespace, directive-name, whitespace part. */
27983
27984 static void
27985 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27986 const char *buffer ATTRIBUTE_UNUSED)
27987 {
27988 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27989 {
27990 macinfo_entry e;
27991 /* Insert a dummy first entry to be able to optimize the whole
27992 predefined macro block using DW_MACRO_import. */
27993 if (macinfo_table->is_empty () && lineno <= 1)
27994 {
27995 e.code = 0;
27996 e.lineno = 0;
27997 e.info = NULL;
27998 vec_safe_push (macinfo_table, e);
27999 }
28000 e.code = DW_MACINFO_undef;
28001 e.lineno = lineno;
28002 e.info = ggc_strdup (buffer);
28003 vec_safe_push (macinfo_table, e);
28004 }
28005 }
28006
28007 /* Helpers to manipulate the hash table of macinfo entries.  */
28008
28009 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28010 {
28011 static inline hashval_t hash (const macinfo_entry *);
28012 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28013 };
28014
28015 inline hashval_t
28016 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28017 {
28018 return htab_hash_string (entry->info);
28019 }
28020
28021 inline bool
28022 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28023 const macinfo_entry *entry2)
28024 {
28025 return !strcmp (entry1->info, entry2->info);
28026 }
28027
28028 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28029
28030 /* Output a single .debug_macinfo entry. */
28031
28032 static void
28033 output_macinfo_op (macinfo_entry *ref)
28034 {
28035 int file_num;
28036 size_t len;
28037 struct indirect_string_node *node;
28038 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28039 struct dwarf_file_data *fd;
28040
28041 switch (ref->code)
28042 {
28043 case DW_MACINFO_start_file:
28044 fd = lookup_filename (ref->info);
28045 file_num = maybe_emit_file (fd);
28046 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28047 dw2_asm_output_data_uleb128 (ref->lineno,
28048 "Included from line number %lu",
28049 (unsigned long) ref->lineno);
28050 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28051 break;
28052 case DW_MACINFO_end_file:
28053 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28054 break;
28055 case DW_MACINFO_define:
28056 case DW_MACINFO_undef:
28057 len = strlen (ref->info) + 1;
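      /* Prefer the indirect _strp forms only when not in strict DWARF
	 mode, when the target supports indirect strings, when the
	 linker can merge .debug_str, and when the string (including its
	 terminating NUL) is longer than the DWARF_OFFSET_SIZE reference
	 that would replace it; a shorter string could not save space.  */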
28058 if (!dwarf_strict
28059 && len > DWARF_OFFSET_SIZE
28060 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28061 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28062 {
28063 ref->code = ref->code == DW_MACINFO_define
28064 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28065 output_macinfo_op (ref);
28066 return;
28067 }
28068 dw2_asm_output_data (1, ref->code,
28069 ref->code == DW_MACINFO_define
28070 ? "Define macro" : "Undefine macro");
28071 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28072 (unsigned long) ref->lineno);
28073 dw2_asm_output_nstring (ref->info, -1, "The macro");
28074 break;
28075 case DW_MACRO_define_strp:
28076 case DW_MACRO_undef_strp:
28077 node = find_AT_string (ref->info);
28078 gcc_assert (node
28079 && (node->form == DW_FORM_strp
28080 || node->form == dwarf_FORM (DW_FORM_strx)));
28081 dw2_asm_output_data (1, ref->code,
28082 ref->code == DW_MACRO_define_strp
28083 ? "Define macro strp"
28084 : "Undefine macro strp");
28085 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28086 (unsigned long) ref->lineno);
28087 if (node->form == DW_FORM_strp)
28088 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28089 debug_str_section, "The macro: \"%s\"",
28090 ref->info);
28091 else
28092 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28093 ref->info);
28094 break;
28095 case DW_MACRO_import:
28096 dw2_asm_output_data (1, ref->code, "Import");
28097 ASM_GENERATE_INTERNAL_LABEL (label,
28098 DEBUG_MACRO_SECTION_LABEL,
28099 ref->lineno + macinfo_label_base);
28100 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28101 break;
28102 default:
28103 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28104 ASM_COMMENT_START, (unsigned long) ref->code);
28105 break;
28106 }
28107 }
28108
28109 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28110    the .debug_macinfo sections of other compilation units.  IDX is the
28111    index of the first define/undef op; return the number of ops that
28112    should be emitted in a comdat .debug_macinfo section and emit
28113    a DW_MACRO_import entry referencing it.
28114    If the define/undef entry should be emitted normally, return 0.  */
28115
28116 static unsigned
28117 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28118 macinfo_hash_type **macinfo_htab)
28119 {
28120 macinfo_entry *first, *second, *cur, *inc;
28121 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28122 unsigned char checksum[16];
28123 struct md5_ctx ctx;
28124 char *grp_name, *tail;
28125 const char *base;
28126 unsigned int i, count, encoded_filename_len, linebuf_len;
28127 macinfo_entry **slot;
28128
28129 first = &(*macinfo_table)[idx];
28130 second = &(*macinfo_table)[idx + 1];
28131
28132   /* Optimize only if there are at least two consecutive define/undef ops,
28133      and either all of them are before the first DW_MACINFO_start_file
28134      with lineno {0,1} (i.e. the predefined macro block), or all of them
28135      are in some included header file.  */
28136 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28137 return 0;
28138 if (vec_safe_is_empty (files))
28139 {
28140 if (first->lineno > 1 || second->lineno > 1)
28141 return 0;
28142 }
28143 else if (first->lineno == 0)
28144 return 0;
28145
28146   /* Find the last define/undef entry that can be grouped together
28147      with FIRST and at the same time compute the MD5 checksum of their
28148      codes, line numbers and strings.  */
28149 md5_init_ctx (&ctx);
28150 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28151 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28152 break;
28153 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28154 break;
28155 else
28156 {
28157 unsigned char code = cur->code;
28158 md5_process_bytes (&code, 1, &ctx);
28159 checksum_uleb128 (cur->lineno, &ctx);
28160 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28161 }
28162 md5_finish_ctx (&ctx, checksum);
28163 count = i - idx;
28164
28165 /* From the containing include filename (if any) pick up just
28166 usable characters from its basename. */
28167 if (vec_safe_is_empty (files))
28168 base = "";
28169 else
28170 base = lbasename (files->last ().info);
28171 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28172 if (ISIDNUM (base[i]) || base[i] == '.')
28173 encoded_filename_len++;
28174   /* Count the '.' that will be appended after the filename.  */
28175 if (encoded_filename_len)
28176 encoded_filename_len++;
28177
28178 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28179 linebuf_len = strlen (linebuf);
28180
28181 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
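  /* Purely illustrative example: with 4-byte DWARF offsets, a run of
     defines included from "stdio.h" whose first op is at line 27 would
     get a name of the form "wm4.stdio.h.27." followed by the 32 hex
     digits of the MD5 checksum computed above.  */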
28182 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28183 + 16 * 2 + 1);
28184 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28185 tail = grp_name + 4;
28186 if (encoded_filename_len)
28187 {
28188 for (i = 0; base[i]; i++)
28189 if (ISIDNUM (base[i]) || base[i] == '.')
28190 *tail++ = base[i];
28191 *tail++ = '.';
28192 }
28193 memcpy (tail, linebuf, linebuf_len);
28194 tail += linebuf_len;
28195 *tail++ = '.';
28196 for (i = 0; i < 16; i++)
28197 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28198
28199 /* Construct a macinfo_entry for DW_MACRO_import
28200 in the empty vector entry before the first define/undef. */
28201 inc = &(*macinfo_table)[idx - 1];
28202 inc->code = DW_MACRO_import;
28203 inc->lineno = 0;
28204 inc->info = ggc_strdup (grp_name);
28205 if (!*macinfo_htab)
28206 *macinfo_htab = new macinfo_hash_type (10);
28207 /* Avoid emitting duplicates. */
28208 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28209 if (*slot != NULL)
28210 {
28211 inc->code = 0;
28212 inc->info = NULL;
28213 /* If such an entry has been used before, just emit
28214 a DW_MACRO_import op. */
28215 inc = *slot;
28216 output_macinfo_op (inc);
28217 /* And clear all macinfo_entry in the range to avoid emitting them
28218 in the second pass. */
28219 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28220 {
28221 cur->code = 0;
28222 cur->info = NULL;
28223 }
28224 }
28225 else
28226 {
28227 *slot = inc;
28228 inc->lineno = (*macinfo_htab)->elements ();
28229 output_macinfo_op (inc);
28230 }
28231 return count;
28232 }
28233
28234 /* Save any strings needed by the macinfo table in the debug str
28235 table. All strings must be collected into the table by the time
28236 index_string is called. */
28237
28238 static void
28239 save_macinfo_strings (void)
28240 {
28241 unsigned len;
28242 unsigned i;
28243 macinfo_entry *ref;
28244
28245 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28246 {
28247 switch (ref->code)
28248 {
28249 /* Match the logic in output_macinfo_op to decide on
28250 indirect strings. */
28251 case DW_MACINFO_define:
28252 case DW_MACINFO_undef:
28253 len = strlen (ref->info) + 1;
28254 if (!dwarf_strict
28255 && len > DWARF_OFFSET_SIZE
28256 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28257 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28258 set_indirect_string (find_AT_string (ref->info));
28259 break;
28260 case DW_MACRO_define_strp:
28261 case DW_MACRO_undef_strp:
28262 set_indirect_string (find_AT_string (ref->info));
28263 break;
28264 default:
28265 break;
28266 }
28267 }
28268 }
28269
28270 /* Output macinfo section(s). */
28271
28272 static void
28273 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28274 {
28275 unsigned i;
28276 unsigned long length = vec_safe_length (macinfo_table);
28277 macinfo_entry *ref;
28278 vec<macinfo_entry, va_gc> *files = NULL;
28279 macinfo_hash_type *macinfo_htab = NULL;
28280 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28281
28282 if (! length)
28283 return;
28284
28285 /* output_macinfo* uses these interchangeably. */
28286 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28287 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28288 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28289 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28290
28291 /* AIX Assembler inserts the length, so adjust the reference to match the
28292 offset expected by debuggers. */
28293 strcpy (dl_section_ref, debug_line_label);
28294 if (XCOFF_DEBUGGING_INFO)
28295 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28296
28297 /* For .debug_macro emit the section header. */
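  /* Per the DWARF 5 .debug_macro header layout, the header is a version
     number followed by a flags byte in which bit 0 says offsets are
     64-bit and bit 1 says an offset into the .debug_line section
     follows, matching the "Flags" annotations below.  */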
28298 if (!dwarf_strict || dwarf_version >= 5)
28299 {
28300 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28301 "DWARF macro version number");
28302 if (DWARF_OFFSET_SIZE == 8)
28303 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28304 else
28305 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28306 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28307 debug_line_section, NULL);
28308 }
28309
28310   /* The first loop emits the primary .debug_macinfo section and
28311      clears each macinfo_entry after its op has been emitted.
28312      If a longer range of define/undef ops can be optimized using
28313      DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28314      the vector before the first define/undef in the range, and the
28315      whole range of define/undef ops is kept unemitted for the second loop.  */
28316 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28317 {
28318 switch (ref->code)
28319 {
28320 case DW_MACINFO_start_file:
28321 vec_safe_push (files, *ref);
28322 break;
28323 case DW_MACINFO_end_file:
28324 if (!vec_safe_is_empty (files))
28325 files->pop ();
28326 break;
28327 case DW_MACINFO_define:
28328 case DW_MACINFO_undef:
28329 if ((!dwarf_strict || dwarf_version >= 5)
28330 && HAVE_COMDAT_GROUP
28331 && vec_safe_length (files) != 1
28332 && i > 0
28333 && i + 1 < length
28334 && (*macinfo_table)[i - 1].code == 0)
28335 {
28336 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28337 if (count)
28338 {
28339 i += count - 1;
28340 continue;
28341 }
28342 }
28343 break;
28344 case 0:
28345 /* A dummy entry may be inserted at the beginning to be able
28346 to optimize the whole block of predefined macros. */
28347 if (i == 0)
28348 continue;
28349 default:
28350 break;
28351 }
28352 output_macinfo_op (ref);
28353 ref->info = NULL;
28354 ref->code = 0;
28355 }
28356
28357 if (!macinfo_htab)
28358 return;
28359
28360 /* Save the number of transparent includes so we can adjust the
28361 label number for the fat LTO object DWARF. */
28362 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28363
28364 delete macinfo_htab;
28365 macinfo_htab = NULL;
28366
28367   /* If any DW_MACRO_import ops were used, then at each such entry
28368      terminate the current chain, switch to a new comdat .debug_macinfo
28369      section and emit the define/undef entries within it.  */
28370 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28371 switch (ref->code)
28372 {
28373 case 0:
28374 continue;
28375 case DW_MACRO_import:
28376 {
28377 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28378 tree comdat_key = get_identifier (ref->info);
28379 /* Terminate the previous .debug_macinfo section. */
28380 dw2_asm_output_data (1, 0, "End compilation unit");
28381 targetm.asm_out.named_section (debug_macinfo_section_name,
28382 SECTION_DEBUG
28383 | SECTION_LINKONCE
28384 | (early_lto_debug
28385 ? SECTION_EXCLUDE : 0),
28386 comdat_key);
28387 ASM_GENERATE_INTERNAL_LABEL (label,
28388 DEBUG_MACRO_SECTION_LABEL,
28389 ref->lineno + macinfo_label_base);
28390 ASM_OUTPUT_LABEL (asm_out_file, label);
28391 ref->code = 0;
28392 ref->info = NULL;
28393 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28394 "DWARF macro version number");
28395 if (DWARF_OFFSET_SIZE == 8)
28396 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28397 else
28398 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28399 }
28400 break;
28401 case DW_MACINFO_define:
28402 case DW_MACINFO_undef:
28403 output_macinfo_op (ref);
28404 ref->code = 0;
28405 ref->info = NULL;
28406 break;
28407 default:
28408 gcc_unreachable ();
28409 }
28410
28411 macinfo_label_base += macinfo_label_base_adj;
28412 }
28413
28414 /* Initialize the various sections and labels for dwarf output; when
28415    EARLY_LTO_DEBUG is true, use the early LTO debug sections.  Returns
28416    the generation (zero-based number of times the function was called).  */
28417
28418 static unsigned
28419 init_sections_and_labels (bool early_lto_debug)
28420 {
28421   /* As we may get called multiple times, keep a generation count for
28422      labels.  */
28423 static unsigned generation = 0;
28424
28425 if (early_lto_debug)
28426 {
28427 if (!dwarf_split_debug_info)
28428 {
28429 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28430 SECTION_DEBUG | SECTION_EXCLUDE,
28431 NULL);
28432 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28433 SECTION_DEBUG | SECTION_EXCLUDE,
28434 NULL);
28435 debug_macinfo_section_name
28436 = ((dwarf_strict && dwarf_version < 5)
28437 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28438 debug_macinfo_section = get_section (debug_macinfo_section_name,
28439 SECTION_DEBUG
28440 | SECTION_EXCLUDE, NULL);
28441 }
28442 else
28443 {
28444 /* ??? Which of the following do we need early? */
28445 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28446 SECTION_DEBUG | SECTION_EXCLUDE,
28447 NULL);
28448 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28449 SECTION_DEBUG | SECTION_EXCLUDE,
28450 NULL);
28451 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28452 SECTION_DEBUG
28453 | SECTION_EXCLUDE, NULL);
28454 debug_skeleton_abbrev_section
28455 = get_section (DEBUG_LTO_ABBREV_SECTION,
28456 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28457 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28458 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28459 generation);
28460
28461 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28462 stay in the main .o, but the skeleton_line goes into the split
28463 off dwo. */
28464 debug_skeleton_line_section
28465 = get_section (DEBUG_LTO_LINE_SECTION,
28466 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28467 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28468 DEBUG_SKELETON_LINE_SECTION_LABEL,
28469 generation);
28470 debug_str_offsets_section
28471 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28472 SECTION_DEBUG | SECTION_EXCLUDE,
28473 NULL);
28474 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28475 DEBUG_SKELETON_INFO_SECTION_LABEL,
28476 generation);
28477 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28478 DEBUG_STR_DWO_SECTION_FLAGS,
28479 NULL);
28480 debug_macinfo_section_name
28481 = ((dwarf_strict && dwarf_version < 5)
28482 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28483 debug_macinfo_section = get_section (debug_macinfo_section_name,
28484 SECTION_DEBUG | SECTION_EXCLUDE,
28485 NULL);
28486 }
28487 /* For macro info and the file table we have to refer to a
28488 debug_line section. */
28489 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28490 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28491 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28492 DEBUG_LINE_SECTION_LABEL, generation);
28493
28494 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28495 DEBUG_STR_SECTION_FLAGS
28496 | SECTION_EXCLUDE, NULL);
28497 if (!dwarf_split_debug_info)
28498 debug_line_str_section
28499 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28500 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28501 }
28502 else
28503 {
28504 if (!dwarf_split_debug_info)
28505 {
28506 debug_info_section = get_section (DEBUG_INFO_SECTION,
28507 SECTION_DEBUG, NULL);
28508 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28509 SECTION_DEBUG, NULL);
28510 debug_loc_section = get_section (dwarf_version >= 5
28511 ? DEBUG_LOCLISTS_SECTION
28512 : DEBUG_LOC_SECTION,
28513 SECTION_DEBUG, NULL);
28514 debug_macinfo_section_name
28515 = ((dwarf_strict && dwarf_version < 5)
28516 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28517 debug_macinfo_section = get_section (debug_macinfo_section_name,
28518 SECTION_DEBUG, NULL);
28519 }
28520 else
28521 {
28522 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28523 SECTION_DEBUG | SECTION_EXCLUDE,
28524 NULL);
28525 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28526 SECTION_DEBUG | SECTION_EXCLUDE,
28527 NULL);
28528 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28529 SECTION_DEBUG, NULL);
28530 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28531 SECTION_DEBUG, NULL);
28532 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28533 SECTION_DEBUG, NULL);
28534 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28535 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28536 generation);
28537
28538 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28539 stay in the main .o, but the skeleton_line goes into the
28540 split off dwo. */
28541 debug_skeleton_line_section
28542 = get_section (DEBUG_DWO_LINE_SECTION,
28543 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28544 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28545 DEBUG_SKELETON_LINE_SECTION_LABEL,
28546 generation);
28547 debug_str_offsets_section
28548 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28549 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28550 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28551 DEBUG_SKELETON_INFO_SECTION_LABEL,
28552 generation);
28553 debug_loc_section = get_section (dwarf_version >= 5
28554 ? DEBUG_DWO_LOCLISTS_SECTION
28555 : DEBUG_DWO_LOC_SECTION,
28556 SECTION_DEBUG | SECTION_EXCLUDE,
28557 NULL);
28558 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28559 DEBUG_STR_DWO_SECTION_FLAGS,
28560 NULL);
28561 debug_macinfo_section_name
28562 = ((dwarf_strict && dwarf_version < 5)
28563 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28564 debug_macinfo_section = get_section (debug_macinfo_section_name,
28565 SECTION_DEBUG | SECTION_EXCLUDE,
28566 NULL);
28567 }
28568 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28569 SECTION_DEBUG, NULL);
28570 debug_line_section = get_section (DEBUG_LINE_SECTION,
28571 SECTION_DEBUG, NULL);
28572 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28573 SECTION_DEBUG, NULL);
28574 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28575 SECTION_DEBUG, NULL);
28576 debug_str_section = get_section (DEBUG_STR_SECTION,
28577 DEBUG_STR_SECTION_FLAGS, NULL);
28578 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28579 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28580 DEBUG_STR_SECTION_FLAGS, NULL);
28581
28582 debug_ranges_section = get_section (dwarf_version >= 5
28583 ? DEBUG_RNGLISTS_SECTION
28584 : DEBUG_RANGES_SECTION,
28585 SECTION_DEBUG, NULL);
28586 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28587 SECTION_DEBUG, NULL);
28588 }
28589
28590 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28591 DEBUG_ABBREV_SECTION_LABEL, generation);
28592 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28593 DEBUG_INFO_SECTION_LABEL, generation);
28594 info_section_emitted = false;
28595 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28596 DEBUG_LINE_SECTION_LABEL, generation);
28597 /* There are up to 4 unique ranges labels per generation.
28598 See also output_rnglists. */
28599 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28600 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28601 if (dwarf_version >= 5 && dwarf_split_debug_info)
28602 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28603 DEBUG_RANGES_SECTION_LABEL,
28604 1 + generation * 4);
28605 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28606 DEBUG_ADDR_SECTION_LABEL, generation);
28607 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28608 (dwarf_strict && dwarf_version < 5)
28609 ? DEBUG_MACINFO_SECTION_LABEL
28610 : DEBUG_MACRO_SECTION_LABEL, generation);
28611 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28612 generation);
28613
28614 ++generation;
28615 return generation - 1;
28616 }
28617
28618 /* Set up for Dwarf output at the start of compilation. */
28619
28620 static void
28621 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28622 {
28623 /* Allocate the file_table. */
28624 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28625
28626 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28627 /* Allocate the decl_die_table. */
28628 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28629
28630 /* Allocate the decl_loc_table. */
28631 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28632
28633 /* Allocate the cached_dw_loc_list_table. */
28634 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28635
28636 /* Allocate the initial hunk of the abbrev_die_table. */
28637 vec_alloc (abbrev_die_table, 256);
28638 /* Zero-th entry is allocated, but unused. */
28639 abbrev_die_table->quick_push (NULL);
28640
28641 /* Allocate the dwarf_proc_stack_usage_map. */
28642 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28643
28644 /* Allocate the pubtypes and pubnames vectors. */
28645 vec_alloc (pubname_table, 32);
28646 vec_alloc (pubtype_table, 32);
28647
28648 vec_alloc (incomplete_types, 64);
28649
28650 vec_alloc (used_rtx_array, 32);
28651
28652 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28653 vec_alloc (macinfo_table, 64);
28654 #endif
28655
28656 /* If front-ends already registered a main translation unit but we were not
28657 ready to perform the association, do this now. */
28658 if (main_translation_unit != NULL_TREE)
28659 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28660 }
28661
28662 /* Called before compile () starts outputting functions, variables
28663    and toplevel asms into assembly.  */
28664
28665 static void
28666 dwarf2out_assembly_start (void)
28667 {
28668 if (text_section_line_info)
28669 return;
28670
28671 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28672 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28673 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28674 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28675 COLD_TEXT_SECTION_LABEL, 0);
28676 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28677
28678 switch_to_section (text_section);
28679 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28680 #endif
28681
28682 /* Make sure the line number table for .text always exists. */
28683 text_section_line_info = new_line_info_table ();
28684 text_section_line_info->end_label = text_end_label;
28685
28686 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28687 cur_line_info_table = text_section_line_info;
28688 #endif
28689
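  /* When CFI is emitted via assembler directives and no .eh_frame is
     needed, direct the assembler to put the frame data into
     .debug_frame instead.  */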
28690 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28691 && dwarf2out_do_cfi_asm ()
28692 && !dwarf2out_do_eh_frame ())
28693 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28694 }
28695
28696 /* A helper function for dwarf2out_finish called through
28697 htab_traverse. Assign a string its index. All strings must be
28698 collected into the table by the time index_string is called,
28699 because the indexing code relies on htab_traverse to traverse nodes
28700 in the same order for each run. */
28701
28702 int
28703 index_string (indirect_string_node **h, unsigned int *index)
28704 {
28705 indirect_string_node *node = *h;
28706
28707 find_string_form (node);
28708 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28709 {
28710 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28711 node->index = *index;
28712 *index += 1;
28713 }
28714 return 1;
28715 }
28716
28717 /* A helper function for output_indirect_strings called through
28718 htab_traverse. Output the offset to a string and update the
28719 current offset. */
28720
28721 int
28722 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28723 {
28724 indirect_string_node *node = *h;
28725
28726 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28727 {
28728 /* Assert that this node has been assigned an index. */
28729 gcc_assert (node->index != NO_INDEX_ASSIGNED
28730 && node->index != NOT_INDEXED);
28731 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28732 "indexed string 0x%x: %s", node->index, node->str);
28733 *offset += strlen (node->str) + 1;
28734 }
28735 return 1;
28736 }
28737
28738 /* A helper function for dwarf2out_finish called through
28739 htab_traverse. Output the indexed string. */
28740
28741 int
28742 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28743 {
28744 struct indirect_string_node *node = *h;
28745
28746 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28747 {
28748 /* Assert that the strings are output in the same order as their
28749 indexes were assigned. */
28750 gcc_assert (*cur_idx == node->index);
28751 assemble_string (node->str, strlen (node->str) + 1);
28752 *cur_idx += 1;
28753 }
28754 return 1;
28755 }
28756
28757 /* A helper function for output_indirect_strings.  Counts the number
28758    of indexed string offsets.  Must match the logic of the functions
28759    output_index_string[_offset] above.  */
28760 int
28761 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28762 {
28763 struct indirect_string_node *node = *h;
28764
28765 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28766 *last_idx += 1;
28767 return 1;
28768 }
28769
28770 /* A helper function for dwarf2out_finish called through
28771 htab_traverse. Emit one queued .debug_str string. */
28772
28773 int
28774 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28775 {
28776 struct indirect_string_node *node = *h;
28777
28778 node->form = find_string_form (node);
28779 if (node->form == form && node->refcount > 0)
28780 {
28781 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28782 assemble_string (node->str, strlen (node->str) + 1);
28783 }
28784
28785 return 1;
28786 }
28787
28788 /* Output the indexed string table. */
28789
28790 static void
28791 output_indirect_strings (void)
28792 {
28793 switch_to_section (debug_str_section);
28794 if (!dwarf_split_debug_info)
28795 debug_str_hash->traverse<enum dwarf_form,
28796 output_indirect_string> (DW_FORM_strp);
28797 else
28798 {
28799 unsigned int offset = 0;
28800 unsigned int cur_idx = 0;
28801
28802 if (skeleton_debug_str_hash)
28803 skeleton_debug_str_hash->traverse<enum dwarf_form,
28804 output_indirect_string> (DW_FORM_strp);
28805
28806 switch_to_section (debug_str_offsets_section);
28807 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28808 header. Note that we don't need to generate a label to the
28809 actual index table following the header here, because this is
28810        for the split dwarf case only.  In a .dwo file there is only
28811        one string offsets table (and one debug info section).  But
28812        if we were to start using string offset tables for the main (or
28813        skeleton) unit, then we would have to add a DW_AT_str_offsets_base
28814 pointing to the actual index after the header. Split dwarf
28815 units will never have a string offsets base attribute. When
28816 a split unit is moved into a .dwp file the string offsets can
28817 be found through the .debug_cu_index section table. */
28818 if (dwarf_version >= 5)
28819 {
28820 unsigned int last_idx = 0;
28821 unsigned long str_offsets_length;
28822
28823 debug_str_hash->traverse_noresize
28824 <unsigned int *, count_index_strings> (&last_idx);
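	  /* The unit length excludes the length field itself but covers
	     the 2-byte version, the 2-byte padding and one offset per
	     indexed string, hence the "+ 4" below.  */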
28825 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28826 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28827 dw2_asm_output_data (4, 0xffffffff,
28828 "Escape value for 64-bit DWARF extension");
28829 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28830 "Length of string offsets unit");
28831 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28832 dw2_asm_output_data (2, 0, "Header zero padding");
28833 }
28834 debug_str_hash->traverse_noresize
28835 <unsigned int *, output_index_string_offset> (&offset);
28836 switch_to_section (debug_str_dwo_section);
28837 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28838 (&cur_idx);
28839 }
28840 }
28841
28842 /* Callback for htab_traverse to assign an index to an entry in the
28843 table, and to write that entry to the .debug_addr section. */
28844
28845 int
28846 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28847 {
28848 addr_table_entry *entry = *slot;
28849
28850 if (entry->refcount == 0)
28851 {
28852 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28853 || entry->index == NOT_INDEXED);
28854 return 1;
28855 }
28856
28857 gcc_assert (entry->index == *cur_index);
28858 (*cur_index)++;
28859
28860 switch (entry->kind)
28861 {
28862 case ate_kind_rtx:
28863 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28864 "0x%x", entry->index);
28865 break;
28866 case ate_kind_rtx_dtprel:
28867 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28868 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28869 DWARF2_ADDR_SIZE,
28870 entry->addr.rtl);
28871 fputc ('\n', asm_out_file);
28872 break;
28873 case ate_kind_label:
28874 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28875 "0x%x", entry->index);
28876 break;
28877 default:
28878 gcc_unreachable ();
28879 }
28880 return 1;
28881 }
28882
28883 /* A helper function for dwarf2out_finish.  Counts the number
28884    of indexed addresses.  Must match the logic of the function
28885    output_addr_table_entry above.  */
28886 int
28887 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28888 {
28889 addr_table_entry *entry = *slot;
28890
28891 if (entry->refcount > 0)
28892 *last_idx += 1;
28893 return 1;
28894 }
28895
28896 /* Produce the .debug_addr section. */
28897
28898 static void
28899 output_addr_table (void)
28900 {
28901 unsigned int index = 0;
28902 if (addr_index_table == NULL || addr_index_table->size () == 0)
28903 return;
28904
28905 switch_to_section (debug_addr_section);
28906 addr_index_table
28907 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28908 }
28909
28910 #if ENABLE_ASSERT_CHECKING
28911 /* Verify that all marks are clear. */
28912
28913 static void
28914 verify_marks_clear (dw_die_ref die)
28915 {
28916 dw_die_ref c;
28917
28918 gcc_assert (! die->die_mark);
28919 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28920 }
28921 #endif /* ENABLE_ASSERT_CHECKING */
28922
28923 /* Clear the marks for a die and its children.
28924 Be cool if the mark isn't set. */
28925
28926 static void
28927 prune_unmark_dies (dw_die_ref die)
28928 {
28929 dw_die_ref c;
28930
28931 if (die->die_mark)
28932 die->die_mark = 0;
28933 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28934 }
28935
28936 /* Given LOC that is referenced by a DIE we're marking as used, find all
28937    DWARF procedures it references and mark them as used as well.  */
28938
28939 static void
28940 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28941 {
28942 for (; loc != NULL; loc = loc->dw_loc_next)
28943 switch (loc->dw_loc_opc)
28944 {
28945 case DW_OP_implicit_pointer:
28946 case DW_OP_convert:
28947 case DW_OP_reinterpret:
28948 case DW_OP_GNU_implicit_pointer:
28949 case DW_OP_GNU_convert:
28950 case DW_OP_GNU_reinterpret:
28951 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28952 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28953 break;
28954 case DW_OP_GNU_variable_value:
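      /* The operand may still be a reference to a declaration; if the
	 corresponding DIE is already known, rewrite the operand in
	 place as a DIE reference and fall through to mark it like the
	 other die_ref operands, otherwise there is nothing to mark
	 yet.  */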
28955 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28956 {
28957 dw_die_ref ref
28958 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28959 if (ref == NULL)
28960 break;
28961 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28962 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28963 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28964 }
28965 /* FALLTHRU */
28966 case DW_OP_call2:
28967 case DW_OP_call4:
28968 case DW_OP_call_ref:
28969 case DW_OP_const_type:
28970 case DW_OP_GNU_const_type:
28971 case DW_OP_GNU_parameter_ref:
28972 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28973 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28974 break;
28975 case DW_OP_regval_type:
28976 case DW_OP_deref_type:
28977 case DW_OP_GNU_regval_type:
28978 case DW_OP_GNU_deref_type:
28979 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28980 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28981 break;
28982 case DW_OP_entry_value:
28983 case DW_OP_GNU_entry_value:
28984 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28985 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28986 break;
28987 default:
28988 break;
28989 }
28990 }
28991
28992 /* Given DIE that we're marking as used, find any other dies
28993 it references as attributes and mark them as used. */
28994
28995 static void
28996 prune_unused_types_walk_attribs (dw_die_ref die)
28997 {
28998 dw_attr_node *a;
28999 unsigned ix;
29000
29001 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29002 {
29003 switch (AT_class (a))
29004 {
29005 /* Make sure DWARF procedures referenced by location descriptions will
29006 get emitted. */
29007 case dw_val_class_loc:
29008 prune_unused_types_walk_loc_descr (AT_loc (a));
29009 break;
29010 case dw_val_class_loc_list:
29011 for (dw_loc_list_ref list = AT_loc_list (a);
29012 list != NULL;
29013 list = list->dw_loc_next)
29014 prune_unused_types_walk_loc_descr (list->expr);
29015 break;
29016
29017 case dw_val_class_view_list:
29018 /* This points to a loc_list in another attribute, so it's
29019 already covered. */
29020 break;
29021
29022 case dw_val_class_die_ref:
29023 /* A reference to another DIE.
29024 Make sure that it will get emitted.
29025 If it was broken out into a comdat group, don't follow it. */
29026 if (! AT_ref (a)->comdat_type_p
29027 || a->dw_attr == DW_AT_specification)
29028 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29029 break;
29030
29031 case dw_val_class_str:
29032 /* Set the string's refcount to 0 so that prune_unused_types_mark
29033 accounts properly for it. */
29034 a->dw_attr_val.v.val_str->refcount = 0;
29035 break;
29036
29037 default:
29038 break;
29039 }
29040 }
29041 }
29042
29043 /* Mark the child DIEs of DIE that describe its generic parms and args. */
29044
29045 static void
29046 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29047 {
29048 dw_die_ref c;
29049
29050 if (die == NULL || die->die_child == NULL)
29051 return;
29052 c = die->die_child;
29053 do
29054 {
29055 if (is_template_parameter (c))
29056 prune_unused_types_mark (c, 1);
29057 c = c->die_sib;
29058 } while (c && c != die->die_child);
29059 }
29060
29061 /* Mark DIE as being used. If DOKIDS is true, then walk down
29062 to DIE's children. */
29063
29064 static void
29065 prune_unused_types_mark (dw_die_ref die, int dokids)
29066 {
29067 dw_die_ref c;
29068
29069 if (die->die_mark == 0)
29070 {
29071 /* We haven't done this node yet. Mark it as used. */
29072 die->die_mark = 1;
29073 /* If this is the DIE of a generic type instantiation,
29074 mark the children DIEs that describe its generic parms and
29075 args. */
29076 prune_unused_types_mark_generic_parms_dies (die);
29077
29078 /* We also have to mark its parents as used.
29079 (But we don't want to mark our parent's kids due to this,
29080 unless it is a class.) */
29081 if (die->die_parent)
29082 prune_unused_types_mark (die->die_parent,
29083 class_scope_p (die->die_parent));
29084
29085 /* Mark any referenced nodes. */
29086 prune_unused_types_walk_attribs (die);
29087
29088 /* If this node is a specification,
29089 also mark the definition, if it exists. */
29090 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29091 prune_unused_types_mark (die->die_definition, 1);
29092 }
29093
29094 if (dokids && die->die_mark != 2)
29095 {
29096 /* We need to walk the children, but haven't done so yet.
29097 Remember that we've walked the kids. */
29098 die->die_mark = 2;
29099
29100 /* If this is an array type, we need to make sure our
29101 kids get marked, even if they're types. If we're
29102 breaking out types into comdat sections, do this
29103 for all type definitions. */
29104 if (die->die_tag == DW_TAG_array_type
29105 || (use_debug_types
29106 && is_type_die (die) && ! is_declaration_die (die)))
29107 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29108 else
29109 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29110 }
29111 }
29112
29113 /* For local classes, check whether any static member functions were
29114 emitted and, if so, mark them. */
29115
29116 static void
29117 prune_unused_types_walk_local_classes (dw_die_ref die)
29118 {
29119 dw_die_ref c;
29120
29121 if (die->die_mark == 2)
29122 return;
29123
29124 switch (die->die_tag)
29125 {
29126 case DW_TAG_structure_type:
29127 case DW_TAG_union_type:
29128 case DW_TAG_class_type:
29129 break;
29130
29131 case DW_TAG_subprogram:
29132 if (!get_AT_flag (die, DW_AT_declaration)
29133 || die->die_definition != NULL)
29134 prune_unused_types_mark (die, 1);
29135 return;
29136
29137 default:
29138 return;
29139 }
29140
29141 /* Mark children. */
29142 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29143 }
29144
29145 /* Walk the tree DIE and mark types that we actually use. */
29146
29147 static void
29148 prune_unused_types_walk (dw_die_ref die)
29149 {
29150 dw_die_ref c;
29151
29152 /* Don't do anything if this node is already marked and
29153 children have been marked as well. */
29154 if (die->die_mark == 2)
29155 return;
29156
29157 switch (die->die_tag)
29158 {
29159 case DW_TAG_structure_type:
29160 case DW_TAG_union_type:
29161 case DW_TAG_class_type:
29162 if (die->die_perennial_p)
29163 break;
29164
29165 for (c = die->die_parent; c; c = c->die_parent)
29166 if (c->die_tag == DW_TAG_subprogram)
29167 break;
29168
29169 /* Finding used static member functions inside of classes
29170 is needed just for local classes, because for other classes
29171 static member function DIEs with DW_AT_specification
29172 are emitted outside of the DW_TAG_*_type. If we ever change
29173 it, we'd need to call this even for non-local classes. */
29174 if (c)
29175 prune_unused_types_walk_local_classes (die);
29176
29177 /* It's a type node --- don't mark it. */
29178 return;
29179
29180 case DW_TAG_const_type:
29181 case DW_TAG_packed_type:
29182 case DW_TAG_pointer_type:
29183 case DW_TAG_reference_type:
29184 case DW_TAG_rvalue_reference_type:
29185 case DW_TAG_volatile_type:
29186 case DW_TAG_typedef:
29187 case DW_TAG_array_type:
29188 case DW_TAG_interface_type:
29189 case DW_TAG_friend:
29190 case DW_TAG_enumeration_type:
29191 case DW_TAG_subroutine_type:
29192 case DW_TAG_string_type:
29193 case DW_TAG_set_type:
29194 case DW_TAG_subrange_type:
29195 case DW_TAG_ptr_to_member_type:
29196 case DW_TAG_file_type:
29197 /* Type nodes are useful only when other DIEs reference them --- don't
29198 mark them. */
29199 /* FALLTHROUGH */
29200
29201 case DW_TAG_dwarf_procedure:
29202 /* Likewise for DWARF procedures. */
29203
29204 if (die->die_perennial_p)
29205 break;
29206
29207 return;
29208
29209 default:
29210 /* Mark everything else. */
29211 break;
29212 }
29213
29214 if (die->die_mark == 0)
29215 {
29216 die->die_mark = 1;
29217
29218 /* Now, mark any dies referenced from here. */
29219 prune_unused_types_walk_attribs (die);
29220 }
29221
29222 die->die_mark = 2;
29223
29224 /* Mark children. */
29225 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29226 }
29227
29228 /* Increment the string counts on strings referred to from DIE's
29229 attributes. */
29230
29231 static void
29232 prune_unused_types_update_strings (dw_die_ref die)
29233 {
29234 dw_attr_node *a;
29235 unsigned ix;
29236
29237 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29238 if (AT_class (a) == dw_val_class_str)
29239 {
29240 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29241 s->refcount++;
29242 /* Avoid unnecessarily putting strings that are used less than
29243 twice in the hash table. */
29244 if (s->refcount
29245 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29246 {
29247 indirect_string_node **slot
29248 = debug_str_hash->find_slot_with_hash (s->str,
29249 htab_hash_string (s->str),
29250 INSERT);
29251 gcc_assert (*slot == NULL);
29252 *slot = s;
29253 }
29254 }
29255 }
29256
29257 /* Mark DIE and its children as removed. */
29258
29259 static void
29260 mark_removed (dw_die_ref die)
29261 {
29262 dw_die_ref c;
29263 die->removed = true;
29264 FOR_EACH_CHILD (die, c, mark_removed (c));
29265 }
29266
29267 /* Remove from the tree DIE any dies that aren't marked. */
29268
29269 static void
29270 prune_unused_types_prune (dw_die_ref die)
29271 {
29272 dw_die_ref c;
29273
29274 gcc_assert (die->die_mark);
29275 prune_unused_types_update_strings (die);
29276
29277 if (! die->die_child)
29278 return;
29279
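/* DIE's children form a circular singly-linked list through die_sib,
with die_child pointing at the last child.  Walk that ring, splicing
out unmarked children and recursing into each marked one.  */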
29280 c = die->die_child;
29281 do {
29282 dw_die_ref prev = c, next;
29283 for (c = c->die_sib; ! c->die_mark; c = next)
29284 if (c == die->die_child)
29285 {
29286 /* No marked children between 'prev' and the end of the list. */
29287 if (prev == c)
29288 /* No marked children at all. */
29289 die->die_child = NULL;
29290 else
29291 {
29292 prev->die_sib = c->die_sib;
29293 die->die_child = prev;
29294 }
29295 c->die_sib = NULL;
29296 mark_removed (c);
29297 return;
29298 }
29299 else
29300 {
29301 next = c->die_sib;
29302 c->die_sib = NULL;
29303 mark_removed (c);
29304 }
29305
29306 if (c != prev->die_sib)
29307 prev->die_sib = c;
29308 prune_unused_types_prune (c);
29309 } while (c != die->die_child);
29310 }
29311
29312 /* Remove dies representing declarations that we never use. */
29313
29314 static void
29315 prune_unused_types (void)
29316 {
29317 unsigned int i;
29318 limbo_die_node *node;
29319 comdat_type_node *ctnode;
29320 pubname_entry *pub;
29321 dw_die_ref base_type;
29322
29323 #if ENABLE_ASSERT_CHECKING
29324 /* All the marks should already be clear. */
29325 verify_marks_clear (comp_unit_die ());
29326 for (node = limbo_die_list; node; node = node->next)
29327 verify_marks_clear (node->die);
29328 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29329 verify_marks_clear (ctnode->root_die);
29330 #endif /* ENABLE_ASSERT_CHECKING */
29331
29332 /* Mark types that are used in global variables. */
29333 premark_types_used_by_global_vars ();
29334
29335 /* Set the mark on nodes that are actually used. */
29336 prune_unused_types_walk (comp_unit_die ());
29337 for (node = limbo_die_list; node; node = node->next)
29338 prune_unused_types_walk (node->die);
29339 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29340 {
29341 prune_unused_types_walk (ctnode->root_die);
29342 prune_unused_types_mark (ctnode->type_die, 1);
29343 }
29344
29345 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29346 are unusual in that they are pubnames that are the children of pubtypes.
29347 They should only be marked via their parent DW_TAG_enumeration_type die,
29348 not as roots in themselves. */
29349 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29350 if (pub->die->die_tag != DW_TAG_enumerator)
29351 prune_unused_types_mark (pub->die, 1);
29352 for (i = 0; base_types.iterate (i, &base_type); i++)
29353 prune_unused_types_mark (base_type, 1);
29354
29355 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29356 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29357 callees). */
29358 cgraph_node *cnode;
29359 FOR_EACH_FUNCTION (cnode)
29360 if (cnode->referred_to_p (false))
29361 {
29362 dw_die_ref die = lookup_decl_die (cnode->decl);
29363 if (die == NULL || die->die_mark)
29364 continue;
29365 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29366 if (e->caller != cnode
29367 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29368 {
29369 prune_unused_types_mark (die, 1);
29370 break;
29371 }
29372 }
29373
29374 if (debug_str_hash)
29375 debug_str_hash->empty ();
29376 if (skeleton_debug_str_hash)
29377 skeleton_debug_str_hash->empty ();
29378 prune_unused_types_prune (comp_unit_die ());
29379 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29380 {
29381 node = *pnode;
29382 if (!node->die->die_mark)
29383 *pnode = node->next;
29384 else
29385 {
29386 prune_unused_types_prune (node->die);
29387 pnode = &node->next;
29388 }
29389 }
29390 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29391 prune_unused_types_prune (ctnode->root_die);
29392
29393 /* Leave the marks clear. */
29394 prune_unmark_dies (comp_unit_die ());
29395 for (node = limbo_die_list; node; node = node->next)
29396 prune_unmark_dies (node->die);
29397 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29398 prune_unmark_dies (ctnode->root_die);
29399 }
29400
29401 /* Helpers to manipulate hash table of comdat type units. */
29402
29403 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29404 {
29405 static inline hashval_t hash (const comdat_type_node *);
29406 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29407 };
29408
29409 inline hashval_t
29410 comdat_type_hasher::hash (const comdat_type_node *type_node)
29411 {
29412 hashval_t h;
29413 memcpy (&h, type_node->signature, sizeof (h));
29414 return h;
29415 }
29416
29417 inline bool
29418 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29419 const comdat_type_node *type_node_2)
29420 {
29421 return (! memcmp (type_node_1->signature, type_node_2->signature,
29422 DWARF_TYPE_SIGNATURE_SIZE));
29423 }
29424
29425 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to a dw_die_ref
29426 to the location where it would have been added had we known the
29427 DECL_ASSEMBLER_NAME when we added the other attributes.  This will
29428 probably improve compactness of debug info by removing equivalent
29429 abbrevs, and it hides any differences caused by deferring the
29430 computation of the assembler name, triggered by e.g. PCH. */
29431
29432 static inline void
29433 move_linkage_attr (dw_die_ref die)
29434 {
29435 unsigned ix = vec_safe_length (die->die_attr);
29436 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29437
29438 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29439 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29440
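/* Search backwards for the slot just after DW_AT_name or the
DW_AT_decl_line/DW_AT_decl_column attributes; that is where the
linkage name attribute would normally have been added.  */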
29441 while (--ix > 0)
29442 {
29443 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29444
29445 if (prev->dw_attr == DW_AT_decl_line
29446 || prev->dw_attr == DW_AT_decl_column
29447 || prev->dw_attr == DW_AT_name)
29448 break;
29449 }
29450
29451 if (ix != vec_safe_length (die->die_attr) - 1)
29452 {
29453 die->die_attr->pop ();
29454 die->die_attr->quick_insert (ix, linkage);
29455 }
29456 }
29457
29458 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29459 referenced from typed stack ops and count how often they are used. */
29460
29461 static void
29462 mark_base_types (dw_loc_descr_ref loc)
29463 {
29464 dw_die_ref base_type = NULL;
29465
29466 for (; loc; loc = loc->dw_loc_next)
29467 {
29468 switch (loc->dw_loc_opc)
29469 {
29470 case DW_OP_regval_type:
29471 case DW_OP_deref_type:
29472 case DW_OP_GNU_regval_type:
29473 case DW_OP_GNU_deref_type:
29474 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29475 break;
29476 case DW_OP_convert:
29477 case DW_OP_reinterpret:
29478 case DW_OP_GNU_convert:
29479 case DW_OP_GNU_reinterpret:
29480 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29481 continue;
29482 /* FALLTHRU */
29483 case DW_OP_const_type:
29484 case DW_OP_GNU_const_type:
29485 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29486 break;
29487 case DW_OP_entry_value:
29488 case DW_OP_GNU_entry_value:
29489 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29490 continue;
29491 default:
29492 continue;
29493 }
29494 gcc_assert (base_type->die_parent == comp_unit_die ());
29495 if (base_type->die_mark)
29496 base_type->die_mark++;
29497 else
29498 {
29499 base_types.safe_push (base_type);
29500 base_type->die_mark = 1;
29501 }
29502 }
29503 }
29504
29505 /* Comparison function for sorting marked base types. */
29506
29507 static int
29508 base_type_cmp (const void *x, const void *y)
29509 {
29510 dw_die_ref dx = *(const dw_die_ref *) x;
29511 dw_die_ref dy = *(const dw_die_ref *) y;
29512 unsigned int byte_size1, byte_size2;
29513 unsigned int encoding1, encoding2;
29514 unsigned int align1, align2;
29515 if (dx->die_mark > dy->die_mark)
29516 return -1;
29517 if (dx->die_mark < dy->die_mark)
29518 return 1;
29519 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29520 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29521 if (byte_size1 < byte_size2)
29522 return 1;
29523 if (byte_size1 > byte_size2)
29524 return -1;
29525 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29526 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29527 if (encoding1 < encoding2)
29528 return 1;
29529 if (encoding1 > encoding2)
29530 return -1;
29531 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29532 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29533 if (align1 < align2)
29534 return 1;
29535 if (align1 > align2)
29536 return -1;
29537 return 0;
29538 }
29539
29540 /* Move base types marked by mark_base_types as early as possible
29541 in the CU, sorted by decreasing usage count, both to make the
29542 uleb128 references as small as possible and to make sure they
29543 will have die_offset already computed by calc_die_sizes when
29544 the sizes of typed stack loc ops are computed. */
29545
29546 static void
29547 move_marked_base_types (void)
29548 {
29549 unsigned int i;
29550 dw_die_ref base_type, die, c;
29551
29552 if (base_types.is_empty ())
29553 return;
29554
29555 /* Sort by decreasing usage count; they will be added again in that
29556 order later on. */
29557 base_types.qsort (base_type_cmp);
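/* First detach every marked base type DIE from its current position
among the children of the CU DIE.  */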
29558 die = comp_unit_die ();
29559 c = die->die_child;
29560 do
29561 {
29562 dw_die_ref prev = c;
29563 c = c->die_sib;
29564 while (c->die_mark)
29565 {
29566 remove_child_with_prev (c, prev);
29567 /* As base types got marked, there must be at least
29568 one node other than DW_TAG_base_type. */
29569 gcc_assert (die->die_child != NULL);
29570 c = prev->die_sib;
29571 }
29572 }
29573 while (c != die->die_child);
29574 gcc_assert (die->die_child);
29575 c = die->die_child;
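/* Now re-link the sorted base types right after the last child, i.e.
at the head of the circular sibling list, so they become the earliest
children of the CU.  */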
29576 for (i = 0; base_types.iterate (i, &base_type); i++)
29577 {
29578 base_type->die_mark = 0;
29579 base_type->die_sib = c->die_sib;
29580 c->die_sib = base_type;
29581 c = base_type;
29582 }
29583 }
29584
29585 /* Helper function for resolve_addr: attempt to resolve
29586 one CONST_STRING; return true if successful.  Similarly, verify that
29587 SYMBOL_REFs refer to variables emitted in the current CU. */
29588
29589 static bool
29590 resolve_one_addr (rtx *addr)
29591 {
29592 rtx rtl = *addr;
29593
29594 if (GET_CODE (rtl) == CONST_STRING)
29595 {
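/* Rebuild the STRING_CST and look up its constant pool copy; fail if
the literal was not actually emitted into the object file.  */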
29596 size_t len = strlen (XSTR (rtl, 0)) + 1;
29597 tree t = build_string (len, XSTR (rtl, 0));
29598 tree tlen = size_int (len - 1);
29599 TREE_TYPE (t)
29600 = build_array_type (char_type_node, build_index_type (tlen));
29601 rtl = lookup_constant_def (t);
29602 if (!rtl || !MEM_P (rtl))
29603 return false;
29604 rtl = XEXP (rtl, 0);
29605 if (GET_CODE (rtl) == SYMBOL_REF
29606 && SYMBOL_REF_DECL (rtl)
29607 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29608 return false;
29609 vec_safe_push (used_rtx_array, rtl);
29610 *addr = rtl;
29611 return true;
29612 }
29613
29614 if (GET_CODE (rtl) == SYMBOL_REF
29615 && SYMBOL_REF_DECL (rtl))
29616 {
29617 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29618 {
29619 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29620 return false;
29621 }
29622 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29623 return false;
29624 }
29625
29626 if (GET_CODE (rtl) == CONST)
29627 {
29628 subrtx_ptr_iterator::array_type array;
29629 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29630 if (!resolve_one_addr (*iter))
29631 return false;
29632 }
29633
29634 return true;
29635 }
29636
29637 /* For STRING_CST T, return the SYMBOL_REF of its constant pool entry,
29638 if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29639 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29640
29641 static rtx
29642 string_cst_pool_decl (tree t)
29643 {
29644 rtx rtl = output_constant_def (t, 1);
29645 unsigned char *array;
29646 dw_loc_descr_ref l;
29647 tree decl;
29648 size_t len;
29649 dw_die_ref ref;
29650
29651 if (!rtl || !MEM_P (rtl))
29652 return NULL_RTX;
29653 rtl = XEXP (rtl, 0);
29654 if (GET_CODE (rtl) != SYMBOL_REF
29655 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29656 return NULL_RTX;
29657
29658 decl = SYMBOL_REF_DECL (rtl);
29659 if (!lookup_decl_die (decl))
29660 {
29661 len = TREE_STRING_LENGTH (t);
29662 vec_safe_push (used_rtx_array, rtl);
29663 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29664 array = ggc_vec_alloc<unsigned char> (len);
29665 memcpy (array, TREE_STRING_POINTER (t), len);
29666 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29667 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29668 l->dw_loc_oprnd2.v.val_vec.length = len;
29669 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29670 l->dw_loc_oprnd2.v.val_vec.array = array;
29671 add_AT_loc (ref, DW_AT_location, l);
29672 equate_decl_number_to_die (decl, ref);
29673 }
29674 return rtl;
29675 }
29676
29677 /* Helper function of resolve_addr_in_expr.  LOC is
29678 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29679 of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29680 resolved.  Replace it (both DW_OP_addr and DW_OP_stack_value)
29681 with DW_OP_implicit_pointer if possible
29682 and return true; if unsuccessful, return false. */
29683
29684 static bool
29685 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29686 {
29687 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29688 HOST_WIDE_INT offset = 0;
29689 dw_die_ref ref = NULL;
29690 tree decl;
29691
29692 if (GET_CODE (rtl) == CONST
29693 && GET_CODE (XEXP (rtl, 0)) == PLUS
29694 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29695 {
29696 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29697 rtl = XEXP (XEXP (rtl, 0), 0);
29698 }
29699 if (GET_CODE (rtl) == CONST_STRING)
29700 {
29701 size_t len = strlen (XSTR (rtl, 0)) + 1;
29702 tree t = build_string (len, XSTR (rtl, 0));
29703 tree tlen = size_int (len - 1);
29704
29705 TREE_TYPE (t)
29706 = build_array_type (char_type_node, build_index_type (tlen));
29707 rtl = string_cst_pool_decl (t);
29708 if (!rtl)
29709 return false;
29710 }
29711 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29712 {
29713 decl = SYMBOL_REF_DECL (rtl);
29714 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29715 {
29716 ref = lookup_decl_die (decl);
29717 if (ref && (get_AT (ref, DW_AT_location)
29718 || get_AT (ref, DW_AT_const_value)))
29719 {
29720 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29721 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29722 loc->dw_loc_oprnd1.val_entry = NULL;
29723 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29724 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
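/* The single DW_OP_implicit_pointer replaces both original ops, so
unlink the following DW_OP_stack_value.  */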
29725 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29726 loc->dw_loc_oprnd2.v.val_int = offset;
29727 return true;
29728 }
29729 }
29730 }
29731 return false;
29732 }
29733
29734 /* Helper function for resolve_addr: handle one location
29735 expression.  Return false if at least one CONST_STRING or SYMBOL_REF in
29736 the location list couldn't be resolved. */
29737
29738 static bool
29739 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29740 {
29741 dw_loc_descr_ref keep = NULL;
29742 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29743 switch (loc->dw_loc_opc)
29744 {
29745 case DW_OP_addr:
29746 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29747 {
29748 if ((prev == NULL
29749 || prev->dw_loc_opc == DW_OP_piece
29750 || prev->dw_loc_opc == DW_OP_bit_piece)
29751 && loc->dw_loc_next
29752 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29753 && (!dwarf_strict || dwarf_version >= 5)
29754 && optimize_one_addr_into_implicit_ptr (loc))
29755 break;
29756 return false;
29757 }
29758 break;
29759 case DW_OP_GNU_addr_index:
29760 case DW_OP_addrx:
29761 case DW_OP_GNU_const_index:
29762 case DW_OP_constx:
29763 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29764 || loc->dw_loc_opc == DW_OP_addrx)
29765 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29766 || loc->dw_loc_opc == DW_OP_constx)
29767 && loc->dtprel))
29768 {
29769 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29770 if (!resolve_one_addr (&rtl))
29771 return false;
29772 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29773 loc->dw_loc_oprnd1.val_entry
29774 = add_addr_table_entry (rtl, ate_kind_rtx);
29775 }
29776 break;
29777 case DW_OP_const4u:
29778 case DW_OP_const8u:
29779 if (loc->dtprel
29780 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29781 return false;
29782 break;
29783 case DW_OP_plus_uconst:
29784 if (size_of_loc_descr (loc)
29785 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29786 + 1
29787 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29788 {
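/* Here an integer constant op followed by DW_OP_plus encodes more
compactly than DW_OP_plus_uconst, so splice that sequence in its
place.  */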
29789 dw_loc_descr_ref repl
29790 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29791 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29792 add_loc_descr (&repl, loc->dw_loc_next);
29793 *loc = *repl;
29794 }
29795 break;
29796 case DW_OP_implicit_value:
29797 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29798 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29799 return false;
29800 break;
29801 case DW_OP_implicit_pointer:
29802 case DW_OP_GNU_implicit_pointer:
29803 case DW_OP_GNU_parameter_ref:
29804 case DW_OP_GNU_variable_value:
29805 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29806 {
29807 dw_die_ref ref
29808 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29809 if (ref == NULL)
29810 return false;
29811 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29812 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29813 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29814 }
29815 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29816 {
29817 if (prev == NULL
29818 && loc->dw_loc_next == NULL
29819 && AT_class (a) == dw_val_class_loc)
29820 switch (a->dw_attr)
29821 {
29822 /* The following attributes allow both exprloc and reference,
29823 so if the whole expression is DW_OP_GNU_variable_value
29824 alone we can transform it into a reference. */
29825 case DW_AT_byte_size:
29826 case DW_AT_bit_size:
29827 case DW_AT_lower_bound:
29828 case DW_AT_upper_bound:
29829 case DW_AT_bit_stride:
29830 case DW_AT_count:
29831 case DW_AT_allocated:
29832 case DW_AT_associated:
29833 case DW_AT_byte_stride:
29834 a->dw_attr_val.val_class = dw_val_class_die_ref;
29835 a->dw_attr_val.val_entry = NULL;
29836 a->dw_attr_val.v.val_die_ref.die
29837 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29838 a->dw_attr_val.v.val_die_ref.external = 0;
29839 return true;
29840 default:
29841 break;
29842 }
29843 if (dwarf_strict)
29844 return false;
29845 }
29846 break;
29847 case DW_OP_const_type:
29848 case DW_OP_regval_type:
29849 case DW_OP_deref_type:
29850 case DW_OP_convert:
29851 case DW_OP_reinterpret:
29852 case DW_OP_GNU_const_type:
29853 case DW_OP_GNU_regval_type:
29854 case DW_OP_GNU_deref_type:
29855 case DW_OP_GNU_convert:
29856 case DW_OP_GNU_reinterpret:
29857 while (loc->dw_loc_next
29858 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29859 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29860 {
29861 dw_die_ref base1, base2;
29862 unsigned enc1, enc2, size1, size2;
29863 if (loc->dw_loc_opc == DW_OP_regval_type
29864 || loc->dw_loc_opc == DW_OP_deref_type
29865 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29866 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29867 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29868 else if (loc->dw_loc_oprnd1.val_class
29869 == dw_val_class_unsigned_const)
29870 break;
29871 else
29872 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29873 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29874 == dw_val_class_unsigned_const)
29875 break;
29876 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29877 gcc_assert (base1->die_tag == DW_TAG_base_type
29878 && base2->die_tag == DW_TAG_base_type);
29879 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29880 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29881 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29882 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29883 if (size1 == size2
29884 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29885 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29886 && loc != keep)
29887 || enc1 == enc2))
29888 {
29889 /* Optimize away next DW_OP_convert after
29890 adjusting LOC's base type die reference. */
29891 if (loc->dw_loc_opc == DW_OP_regval_type
29892 || loc->dw_loc_opc == DW_OP_deref_type
29893 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29894 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29895 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29896 else
29897 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29898 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29899 continue;
29900 }
29901 /* Don't change integer DW_OP_convert after e.g. floating
29902 point typed stack entry. */
29903 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29904 keep = loc->dw_loc_next;
29905 break;
29906 }
29907 break;
29908 default:
29909 break;
29910 }
29911 return true;
29912 }
29913
29914 /* Helper function of resolve_addr.  DIE had a DW_AT_location of
29915 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
29916 and the DW_OP_addr couldn't be resolved.  resolve_addr has already
29917 removed the DW_AT_location attribute.  This function attempts to
29918 add to DIE either a new DW_AT_location attribute using
29919 DW_OP_implicit_pointer or a DW_AT_const_value attribute, if possible. */
29920
29921 static void
29922 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29923 {
29924 if (!VAR_P (decl)
29925 || lookup_decl_die (decl) != die
29926 || DECL_EXTERNAL (decl)
29927 || !TREE_STATIC (decl)
29928 || DECL_INITIAL (decl) == NULL_TREE
29929 || DECL_P (DECL_INITIAL (decl))
29930 || get_AT (die, DW_AT_const_value))
29931 return;
29932
29933 tree init = DECL_INITIAL (decl);
29934 HOST_WIDE_INT offset = 0;
29935 /* For variables that have been optimized away and thus
29936 don't have a memory location, see if we can emit
29937 DW_AT_const_value instead. */
29938 if (tree_add_const_value_attribute (die, init))
29939 return;
29940 if (dwarf_strict && dwarf_version < 5)
29941 return;
29942 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29943 and ADDR_EXPR refers to a decl that has DW_AT_location or
29944 DW_AT_const_value (but isn't addressable, otherwise
29945 resolving the original DW_OP_addr wouldn't fail), see if
29946 we can add DW_OP_implicit_pointer. */
29947 STRIP_NOPS (init);
29948 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29949 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29950 {
29951 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29952 init = TREE_OPERAND (init, 0);
29953 STRIP_NOPS (init);
29954 }
29955 if (TREE_CODE (init) != ADDR_EXPR)
29956 return;
29957 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29958 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29959 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29960 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29961 && TREE_OPERAND (init, 0) != decl))
29962 {
29963 dw_die_ref ref;
29964 dw_loc_descr_ref l;
29965
29966 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29967 {
29968 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29969 if (!rtl)
29970 return;
29971 decl = SYMBOL_REF_DECL (rtl);
29972 }
29973 else
29974 decl = TREE_OPERAND (init, 0);
29975 ref = lookup_decl_die (decl);
29976 if (ref == NULL
29977 || (!get_AT (ref, DW_AT_location)
29978 && !get_AT (ref, DW_AT_const_value)))
29979 return;
29980 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29981 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29982 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29983 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29984 add_AT_loc (die, DW_AT_location, l);
29985 }
29986 }
29987
29988 /* Return NULL if L is a DWARF expression, or the first op that is not
29989 valid in a DWARF expression. */
29990
29991 static dw_loc_descr_ref
29992 non_dwarf_expression (dw_loc_descr_ref l)
29993 {
29994 while (l)
29995 {
29996 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29997 return l;
29998 switch (l->dw_loc_opc)
29999 {
30000 case DW_OP_regx:
30001 case DW_OP_implicit_value:
30002 case DW_OP_stack_value:
30003 case DW_OP_implicit_pointer:
30004 case DW_OP_GNU_implicit_pointer:
30005 case DW_OP_GNU_parameter_ref:
30006 case DW_OP_piece:
30007 case DW_OP_bit_piece:
30008 return l;
30009 default:
30010 break;
30011 }
30012 l = l->dw_loc_next;
30013 }
30014 return NULL;
30015 }
30016
30017 /* Return an adjusted copy of EXPR:
30018 If it is an empty DWARF expression, return it.
30019 If it is a valid non-empty DWARF expression,
30020 return a copy of EXPR with DW_OP_deref appended to it.
30021 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30022 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30023 If it is a DWARF expression followed by DW_OP_stack_value, return a
30024 copy of the DWARF expression with nothing appended.
30025 Otherwise, return NULL. */
30026
30027 static dw_loc_descr_ref
30028 copy_deref_exprloc (dw_loc_descr_ref expr)
30029 {
30030 dw_loc_descr_ref tail = NULL;
30031
30032 if (expr == NULL)
30033 return NULL;
30034
30035 dw_loc_descr_ref l = non_dwarf_expression (expr);
30036 if (l && l->dw_loc_next)
30037 return NULL;
30038
30039 if (l)
30040 {
30041 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30042 tail = new_loc_descr ((enum dwarf_location_atom)
30043 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30044 0, 0);
30045 else
30046 switch (l->dw_loc_opc)
30047 {
30048 case DW_OP_regx:
30049 tail = new_loc_descr (DW_OP_bregx,
30050 l->dw_loc_oprnd1.v.val_unsigned, 0);
30051 break;
30052 case DW_OP_stack_value:
30053 break;
30054 default:
30055 return NULL;
30056 }
30057 }
30058 else
30059 tail = new_loc_descr (DW_OP_deref, 0, 0);
30060
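/* Copy the expression up to (but not including) the terminating op
found by non_dwarf_expression above, then append the replacement
tail computed above, if any.  */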
30061 dw_loc_descr_ref ret = NULL, *p = &ret;
30062 while (expr != l)
30063 {
30064 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30065 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30066 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30067 p = &(*p)->dw_loc_next;
30068 expr = expr->dw_loc_next;
30069 }
30070 *p = tail;
30071 return ret;
30072 }
30073
30074 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30075 reference to a variable or argument, adjust it if needed and return:
30076 -1 if the DW_AT_string_length attribute and, if present, the
30077 DW_AT_{string_length_,}byte_size attribute should be removed;
30078 0 to keep the attribute, perhaps with minor modifications (no rescan needed);
30079 1 if the attribute has been successfully adjusted. */
30080
30081 static int
30082 optimize_string_length (dw_attr_node *a)
30083 {
30084 dw_loc_descr_ref l = AT_loc (a), lv;
30085 dw_die_ref die;
30086 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30087 {
30088 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30089 die = lookup_decl_die (decl);
30090 if (die)
30091 {
30092 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30093 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30094 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30095 }
30096 else
30097 return -1;
30098 }
30099 else
30100 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30101
30102 /* DWARF5 allows reference class, so we can then reference the DIE.
30103 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30104 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30105 {
30106 a->dw_attr_val.val_class = dw_val_class_die_ref;
30107 a->dw_attr_val.val_entry = NULL;
30108 a->dw_attr_val.v.val_die_ref.die = die;
30109 a->dw_attr_val.v.val_die_ref.external = 0;
30110 return 0;
30111 }
30112
30113 dw_attr_node *av = get_AT (die, DW_AT_location);
30114 dw_loc_list_ref d;
30115 bool non_dwarf_expr = false;
30116
30117 if (av == NULL)
30118 return dwarf_strict ? -1 : 0;
30119 switch (AT_class (av))
30120 {
30121 case dw_val_class_loc_list:
30122 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30123 if (d->expr && non_dwarf_expression (d->expr))
30124 non_dwarf_expr = true;
30125 break;
30126 case dw_val_class_view_list:
30127 gcc_unreachable ();
30128 case dw_val_class_loc:
30129 lv = AT_loc (av);
30130 if (lv == NULL)
30131 return dwarf_strict ? -1 : 0;
30132 if (non_dwarf_expression (lv))
30133 non_dwarf_expr = true;
30134 break;
30135 default:
30136 return dwarf_strict ? -1 : 0;
30137 }
30138
30139 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30140 into DW_OP_call4 or DW_OP_GNU_variable_value into
30141 DW_OP_call4 DW_OP_deref, do so. */
30142 if (!non_dwarf_expr
30143 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30144 {
30145 l->dw_loc_opc = DW_OP_call4;
30146 if (l->dw_loc_next)
30147 l->dw_loc_next = NULL;
30148 else
30149 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30150 return 0;
30151 }
30152
30153 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30154 copy over the DW_AT_location attribute from die to a. */
30155 if (l->dw_loc_next != NULL)
30156 {
30157 a->dw_attr_val = av->dw_attr_val;
30158 return 1;
30159 }
30160
30161 dw_loc_list_ref list, *p;
30162 switch (AT_class (av))
30163 {
30164 case dw_val_class_loc_list:
30165 p = &list;
30166 list = NULL;
30167 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30168 {
30169 lv = copy_deref_exprloc (d->expr);
30170 if (lv)
30171 {
30172 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30173 p = &(*p)->dw_loc_next;
30174 }
30175 else if (!dwarf_strict && d->expr)
30176 return 0;
30177 }
30178 if (list == NULL)
30179 return dwarf_strict ? -1 : 0;
30180 a->dw_attr_val.val_class = dw_val_class_loc_list;
30181 gen_llsym (list);
30182 *AT_loc_list_ptr (a) = list;
30183 return 1;
30184 case dw_val_class_loc:
30185 lv = copy_deref_exprloc (AT_loc (av));
30186 if (lv == NULL)
30187 return dwarf_strict ? -1 : 0;
30188 a->dw_attr_val.v.val_loc = lv;
30189 return 1;
30190 default:
30191 gcc_unreachable ();
30192 }
30193 }
30194
30195 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30196 an address in the .rodata section if the string literal is emitted
30197 there, or otherwise remove the containing location list or replace
30198 DW_AT_const_value with DW_AT_location and an empty location expression
30199 if it isn't found in .rodata.  Similarly for SYMBOL_REFs, keep only
30200 those that refer to something that has been emitted in the current CU. */
30201
30202 static void
30203 resolve_addr (dw_die_ref die)
30204 {
30205 dw_die_ref c;
30206 dw_attr_node *a;
30207 dw_loc_list_ref *curr, *start, loc;
30208 unsigned ix;
30209 bool remove_AT_byte_size = false;
30210
30211 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30212 switch (AT_class (a))
30213 {
30214 case dw_val_class_loc_list:
30215 start = curr = AT_loc_list_ptr (a);
30216 loc = *curr;
30217 gcc_assert (loc);
30218 /* The same list can be referenced more than once. See if we have
30219 already recorded the result from a previous pass. */
30220 if (loc->replaced)
30221 *curr = loc->dw_loc_next;
30222 else if (!loc->resolved_addr)
30223 {
30224 /* As things stand, we do not expect or allow one die to
30225 reference a suffix of another die's location list chain.
30226 References must be identical or completely separate.
30227 There is therefore no need to cache the result of this
30228 pass on any list other than the first; doing so
30229 would lead to unnecessary writes. */
30230 while (*curr)
30231 {
30232 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30233 if (!resolve_addr_in_expr (a, (*curr)->expr))
30234 {
30235 dw_loc_list_ref next = (*curr)->dw_loc_next;
30236 dw_loc_descr_ref l = (*curr)->expr;
30237
30238 if (next && (*curr)->ll_symbol)
30239 {
30240 gcc_assert (!next->ll_symbol);
30241 next->ll_symbol = (*curr)->ll_symbol;
30242 next->vl_symbol = (*curr)->vl_symbol;
30243 }
30244 if (dwarf_split_debug_info)
30245 remove_loc_list_addr_table_entries (l);
30246 *curr = next;
30247 }
30248 else
30249 {
30250 mark_base_types ((*curr)->expr);
30251 curr = &(*curr)->dw_loc_next;
30252 }
30253 }
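/* Cache the outcome on the original list head: either it survived
(resolved_addr) or it was removed and now forwards to the new head
through dw_loc_next (replaced).  */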
30254 if (loc == *start)
30255 loc->resolved_addr = 1;
30256 else
30257 {
30258 loc->replaced = 1;
30259 loc->dw_loc_next = *start;
30260 }
30261 }
30262 if (!*start)
30263 {
30264 remove_AT (die, a->dw_attr);
30265 ix--;
30266 }
30267 break;
30268 case dw_val_class_view_list:
30269 {
30270 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30271 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30272 dw_val_node *llnode
30273 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30274 /* If we no longer have a loclist, or it no longer needs
30275 views, drop this attribute. */
30276 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30277 {
30278 remove_AT (die, a->dw_attr);
30279 ix--;
30280 }
30281 break;
30282 }
30283 case dw_val_class_loc:
30284 {
30285 dw_loc_descr_ref l = AT_loc (a);
30286 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30287 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30288 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30289 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30290 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30291 with DW_FORM_ref referencing the same DIE as
30292 DW_OP_GNU_variable_value used to reference. */
30293 if (a->dw_attr == DW_AT_string_length
30294 && l
30295 && l->dw_loc_opc == DW_OP_GNU_variable_value
30296 && (l->dw_loc_next == NULL
30297 || (l->dw_loc_next->dw_loc_next == NULL
30298 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30299 {
30300 switch (optimize_string_length (a))
30301 {
30302 case -1:
30303 remove_AT (die, a->dw_attr);
30304 ix--;
30305 /* If we drop DW_AT_string_length, we also need to drop
30306 DW_AT_{string_length_,}byte_size. */
30307 remove_AT_byte_size = true;
30308 continue;
30309 default:
30310 break;
30311 case 1:
30312 /* Even if we keep the optimized DW_AT_string_length,
30313 it might have changed AT_class, so process it again. */
30314 ix--;
30315 continue;
30316 }
30317 }
30318 /* For -gdwarf-2 don't attempt to optimize
30319 DW_AT_data_member_location containing
30320 DW_OP_plus_uconst - older consumers might
30321 rely on it being that op instead of a more complex,
30322 but shorter, location description. */
30323 if ((dwarf_version > 2
30324 || a->dw_attr != DW_AT_data_member_location
30325 || l == NULL
30326 || l->dw_loc_opc != DW_OP_plus_uconst
30327 || l->dw_loc_next != NULL)
30328 && !resolve_addr_in_expr (a, l))
30329 {
30330 if (dwarf_split_debug_info)
30331 remove_loc_list_addr_table_entries (l);
30332 if (l != NULL
30333 && l->dw_loc_next == NULL
30334 && l->dw_loc_opc == DW_OP_addr
30335 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30336 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30337 && a->dw_attr == DW_AT_location)
30338 {
30339 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30340 remove_AT (die, a->dw_attr);
30341 ix--;
30342 optimize_location_into_implicit_ptr (die, decl);
30343 break;
30344 }
30345 if (a->dw_attr == DW_AT_string_length)
30346 /* If we drop DW_AT_string_length, we also need to drop
30347 DW_AT_{string_length_,}byte_size. */
30348 remove_AT_byte_size = true;
30349 remove_AT (die, a->dw_attr);
30350 ix--;
30351 }
30352 else
30353 mark_base_types (l);
30354 }
30355 break;
30356 case dw_val_class_addr:
30357 if (a->dw_attr == DW_AT_const_value
30358 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30359 {
30360 if (AT_index (a) != NOT_INDEXED)
30361 remove_addr_table_entry (a->dw_attr_val.val_entry);
30362 remove_AT (die, a->dw_attr);
30363 ix--;
30364 }
30365 if ((die->die_tag == DW_TAG_call_site
30366 && a->dw_attr == DW_AT_call_origin)
30367 || (die->die_tag == DW_TAG_GNU_call_site
30368 && a->dw_attr == DW_AT_abstract_origin))
30369 {
30370 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30371 dw_die_ref tdie = lookup_decl_die (tdecl);
30372 dw_die_ref cdie;
30373 if (tdie == NULL
30374 && DECL_EXTERNAL (tdecl)
30375 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30376 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30377 {
30378 dw_die_ref pdie = cdie;
30379 /* Make sure we don't add these DIEs into type units.
30380 We could emit skeleton DIEs for context (namespaces,
30381 outer structs/classes) and a skeleton DIE for the
30382 innermost context with DW_AT_signature pointing to the
30383 type unit. See PR78835. */
30384 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30385 pdie = pdie->die_parent;
30386 if (pdie == NULL)
30387 {
30388 /* Creating a full DIE for tdecl is overly expensive and,
30389 at this point, even wrong when in the LTO phase, as it
30390 can end up generating new type DIEs that we didn't
30391 output, and thus optimize_external_refs will crash. */
30392 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30393 add_AT_flag (tdie, DW_AT_external, 1);
30394 add_AT_flag (tdie, DW_AT_declaration, 1);
30395 add_linkage_attr (tdie, tdecl);
30396 add_name_and_src_coords_attributes (tdie, tdecl, true);
30397 equate_decl_number_to_die (tdecl, tdie);
30398 }
30399 }
30400 if (tdie)
30401 {
30402 a->dw_attr_val.val_class = dw_val_class_die_ref;
30403 a->dw_attr_val.v.val_die_ref.die = tdie;
30404 a->dw_attr_val.v.val_die_ref.external = 0;
30405 }
30406 else
30407 {
30408 if (AT_index (a) != NOT_INDEXED)
30409 remove_addr_table_entry (a->dw_attr_val.val_entry);
30410 remove_AT (die, a->dw_attr);
30411 ix--;
30412 }
30413 }
30414 break;
30415 default:
30416 break;
30417 }
30418
30419 if (remove_AT_byte_size)
30420 remove_AT (die, dwarf_version >= 5
30421 ? DW_AT_string_length_byte_size
30422 : DW_AT_byte_size);
30423
30424 FOR_EACH_CHILD (die, c, resolve_addr (c));
30425 }
30426 \f
30427 /* Helper routines for optimize_location_lists.
30428 This pass tries to share identical location lists in the
30429 .debug_loc section. */
30430
30431 /* Iteratively hash operands of LOC opcode into HSTATE. */
30432
30433 static void
30434 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30435 {
30436 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30437 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30438
30439 switch (loc->dw_loc_opc)
30440 {
30441 case DW_OP_const4u:
30442 case DW_OP_const8u:
30443 if (loc->dtprel)
30444 goto hash_addr;
30445 /* FALLTHRU */
30446 case DW_OP_const1u:
30447 case DW_OP_const1s:
30448 case DW_OP_const2u:
30449 case DW_OP_const2s:
30450 case DW_OP_const4s:
30451 case DW_OP_const8s:
30452 case DW_OP_constu:
30453 case DW_OP_consts:
30454 case DW_OP_pick:
30455 case DW_OP_plus_uconst:
30456 case DW_OP_breg0:
30457 case DW_OP_breg1:
30458 case DW_OP_breg2:
30459 case DW_OP_breg3:
30460 case DW_OP_breg4:
30461 case DW_OP_breg5:
30462 case DW_OP_breg6:
30463 case DW_OP_breg7:
30464 case DW_OP_breg8:
30465 case DW_OP_breg9:
30466 case DW_OP_breg10:
30467 case DW_OP_breg11:
30468 case DW_OP_breg12:
30469 case DW_OP_breg13:
30470 case DW_OP_breg14:
30471 case DW_OP_breg15:
30472 case DW_OP_breg16:
30473 case DW_OP_breg17:
30474 case DW_OP_breg18:
30475 case DW_OP_breg19:
30476 case DW_OP_breg20:
30477 case DW_OP_breg21:
30478 case DW_OP_breg22:
30479 case DW_OP_breg23:
30480 case DW_OP_breg24:
30481 case DW_OP_breg25:
30482 case DW_OP_breg26:
30483 case DW_OP_breg27:
30484 case DW_OP_breg28:
30485 case DW_OP_breg29:
30486 case DW_OP_breg30:
30487 case DW_OP_breg31:
30488 case DW_OP_regx:
30489 case DW_OP_fbreg:
30490 case DW_OP_piece:
30491 case DW_OP_deref_size:
30492 case DW_OP_xderef_size:
30493 hstate.add_object (val1->v.val_int);
30494 break;
30495 case DW_OP_skip:
30496 case DW_OP_bra:
30497 {
30498 int offset;
30499
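/* Hash the branch target as an offset relative to the end of this op;
the +3 accounts for the 1-byte opcode and the 2-byte operand of
DW_OP_skip/DW_OP_bra.  */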
30500 gcc_assert (val1->val_class == dw_val_class_loc);
30501 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30502 hstate.add_object (offset);
30503 }
30504 break;
30505 case DW_OP_implicit_value:
30506 hstate.add_object (val1->v.val_unsigned);
30507 switch (val2->val_class)
30508 {
30509 case dw_val_class_const:
30510 hstate.add_object (val2->v.val_int);
30511 break;
30512 case dw_val_class_vec:
30513 {
30514 unsigned int elt_size = val2->v.val_vec.elt_size;
30515 unsigned int len = val2->v.val_vec.length;
30516
30517 hstate.add_int (elt_size);
30518 hstate.add_int (len);
30519 hstate.add (val2->v.val_vec.array, len * elt_size);
30520 }
30521 break;
30522 case dw_val_class_const_double:
30523 hstate.add_object (val2->v.val_double.low);
30524 hstate.add_object (val2->v.val_double.high);
30525 break;
30526 case dw_val_class_wide_int:
30527 hstate.add (val2->v.val_wide->get_val (),
30528 get_full_len (*val2->v.val_wide)
30529 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30530 break;
30531 case dw_val_class_addr:
30532 inchash::add_rtx (val2->v.val_addr, hstate);
30533 break;
30534 default:
30535 gcc_unreachable ();
30536 }
30537 break;
30538 case DW_OP_bregx:
30539 case DW_OP_bit_piece:
30540 hstate.add_object (val1->v.val_int);
30541 hstate.add_object (val2->v.val_int);
30542 break;
30543 case DW_OP_addr:
30544 hash_addr:
30545 if (loc->dtprel)
30546 {
30547 unsigned char dtprel = 0xd1;
30548 hstate.add_object (dtprel);
30549 }
30550 inchash::add_rtx (val1->v.val_addr, hstate);
30551 break;
30552 case DW_OP_GNU_addr_index:
30553 case DW_OP_addrx:
30554 case DW_OP_GNU_const_index:
30555 case DW_OP_constx:
30556 {
30557 if (loc->dtprel)
30558 {
30559 unsigned char dtprel = 0xd1;
30560 hstate.add_object (dtprel);
30561 }
30562 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30563 }
30564 break;
30565 case DW_OP_implicit_pointer:
30566 case DW_OP_GNU_implicit_pointer:
30567 hstate.add_int (val2->v.val_int);
30568 break;
30569 case DW_OP_entry_value:
30570 case DW_OP_GNU_entry_value:
30571 hstate.add_object (val1->v.val_loc);
30572 break;
30573 case DW_OP_regval_type:
30574 case DW_OP_deref_type:
30575 case DW_OP_GNU_regval_type:
30576 case DW_OP_GNU_deref_type:
30577 {
30578 unsigned int byte_size
30579 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30580 unsigned int encoding
30581 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30582 hstate.add_object (val1->v.val_int);
30583 hstate.add_object (byte_size);
30584 hstate.add_object (encoding);
30585 }
30586 break;
30587 case DW_OP_convert:
30588 case DW_OP_reinterpret:
30589 case DW_OP_GNU_convert:
30590 case DW_OP_GNU_reinterpret:
30591 if (val1->val_class == dw_val_class_unsigned_const)
30592 {
30593 hstate.add_object (val1->v.val_unsigned);
30594 break;
30595 }
30596 /* FALLTHRU */
30597 case DW_OP_const_type:
30598 case DW_OP_GNU_const_type:
30599 {
30600 unsigned int byte_size
30601 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30602 unsigned int encoding
30603 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30604 hstate.add_object (byte_size);
30605 hstate.add_object (encoding);
30606 if (loc->dw_loc_opc != DW_OP_const_type
30607 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30608 break;
30609 hstate.add_object (val2->val_class);
30610 switch (val2->val_class)
30611 {
30612 case dw_val_class_const:
30613 hstate.add_object (val2->v.val_int);
30614 break;
30615 case dw_val_class_vec:
30616 {
30617 unsigned int elt_size = val2->v.val_vec.elt_size;
30618 unsigned int len = val2->v.val_vec.length;
30619
30620 hstate.add_object (elt_size);
30621 hstate.add_object (len);
30622 hstate.add (val2->v.val_vec.array, len * elt_size);
30623 }
30624 break;
30625 case dw_val_class_const_double:
30626 hstate.add_object (val2->v.val_double.low);
30627 hstate.add_object (val2->v.val_double.high);
30628 break;
30629 case dw_val_class_wide_int:
30630 hstate.add (val2->v.val_wide->get_val (),
30631 get_full_len (*val2->v.val_wide)
30632 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30633 break;
30634 default:
30635 gcc_unreachable ();
30636 }
30637 }
30638 break;
30639
30640 default:
30641 /* Other codes have no operands. */
30642 break;
30643 }
30644 }
30645
30646 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30647
30648 static inline void
30649 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30650 {
30651 dw_loc_descr_ref l;
30652 bool sizes_computed = false;
30653 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30654 size_of_locs (loc);
30655
30656 for (l = loc; l != NULL; l = l->dw_loc_next)
30657 {
30658 enum dwarf_location_atom opc = l->dw_loc_opc;
30659 hstate.add_object (opc);
30660 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30661 {
30662 size_of_locs (loc);
30663 sizes_computed = true;
30664 }
30665 hash_loc_operands (l, hstate);
30666 }
30667 }
30668
30669 /* Compute hash of the whole location list LIST_HEAD. */
30670
30671 static inline void
30672 hash_loc_list (dw_loc_list_ref list_head)
30673 {
30674 dw_loc_list_ref curr = list_head;
30675 inchash::hash hstate;
30676
30677 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30678 {
30679 hstate.add (curr->begin, strlen (curr->begin) + 1);
30680 hstate.add (curr->end, strlen (curr->end) + 1);
30681 hstate.add_object (curr->vbegin);
30682 hstate.add_object (curr->vend);
30683 if (curr->section)
30684 hstate.add (curr->section, strlen (curr->section) + 1);
30685 hash_locs (curr->expr, hstate);
30686 }
30687 list_head->hash = hstate.end ();
30688 }
30689
30690 /* Return true if X and Y opcodes have the same operands. */
30691
30692 static inline bool
30693 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30694 {
30695 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30696 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30697 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30698 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30699
30700 switch (x->dw_loc_opc)
30701 {
30702 case DW_OP_const4u:
30703 case DW_OP_const8u:
30704 if (x->dtprel)
30705 goto hash_addr;
30706 /* FALLTHRU */
30707 case DW_OP_const1u:
30708 case DW_OP_const1s:
30709 case DW_OP_const2u:
30710 case DW_OP_const2s:
30711 case DW_OP_const4s:
30712 case DW_OP_const8s:
30713 case DW_OP_constu:
30714 case DW_OP_consts:
30715 case DW_OP_pick:
30716 case DW_OP_plus_uconst:
30717 case DW_OP_breg0:
30718 case DW_OP_breg1:
30719 case DW_OP_breg2:
30720 case DW_OP_breg3:
30721 case DW_OP_breg4:
30722 case DW_OP_breg5:
30723 case DW_OP_breg6:
30724 case DW_OP_breg7:
30725 case DW_OP_breg8:
30726 case DW_OP_breg9:
30727 case DW_OP_breg10:
30728 case DW_OP_breg11:
30729 case DW_OP_breg12:
30730 case DW_OP_breg13:
30731 case DW_OP_breg14:
30732 case DW_OP_breg15:
30733 case DW_OP_breg16:
30734 case DW_OP_breg17:
30735 case DW_OP_breg18:
30736 case DW_OP_breg19:
30737 case DW_OP_breg20:
30738 case DW_OP_breg21:
30739 case DW_OP_breg22:
30740 case DW_OP_breg23:
30741 case DW_OP_breg24:
30742 case DW_OP_breg25:
30743 case DW_OP_breg26:
30744 case DW_OP_breg27:
30745 case DW_OP_breg28:
30746 case DW_OP_breg29:
30747 case DW_OP_breg30:
30748 case DW_OP_breg31:
30749 case DW_OP_regx:
30750 case DW_OP_fbreg:
30751 case DW_OP_piece:
30752 case DW_OP_deref_size:
30753 case DW_OP_xderef_size:
30754 return valx1->v.val_int == valy1->v.val_int;
30755 case DW_OP_skip:
30756 case DW_OP_bra:
30757 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30758 can cause irrelevant differences in dw_loc_addr. */
30759 gcc_assert (valx1->val_class == dw_val_class_loc
30760 && valy1->val_class == dw_val_class_loc
30761 && (dwarf_split_debug_info
30762 || x->dw_loc_addr == y->dw_loc_addr));
30763 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30764 case DW_OP_implicit_value:
30765 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30766 || valx2->val_class != valy2->val_class)
30767 return false;
30768 switch (valx2->val_class)
30769 {
30770 case dw_val_class_const:
30771 return valx2->v.val_int == valy2->v.val_int;
30772 case dw_val_class_vec:
30773 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30774 && valx2->v.val_vec.length == valy2->v.val_vec.length
30775 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30776 valx2->v.val_vec.elt_size
30777 * valx2->v.val_vec.length) == 0;
30778 case dw_val_class_const_double:
30779 return valx2->v.val_double.low == valy2->v.val_double.low
30780 && valx2->v.val_double.high == valy2->v.val_double.high;
30781 case dw_val_class_wide_int:
30782 return *valx2->v.val_wide == *valy2->v.val_wide;
30783 case dw_val_class_addr:
30784 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30785 default:
30786 gcc_unreachable ();
30787 }
30788 case DW_OP_bregx:
30789 case DW_OP_bit_piece:
30790 return valx1->v.val_int == valy1->v.val_int
30791 && valx2->v.val_int == valy2->v.val_int;
30792 case DW_OP_addr:
30793 hash_addr:
30794 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30795 case DW_OP_GNU_addr_index:
30796 case DW_OP_addrx:
30797 case DW_OP_GNU_const_index:
30798 case DW_OP_constx:
30799 {
30800 rtx ax1 = valx1->val_entry->addr.rtl;
30801 rtx ay1 = valy1->val_entry->addr.rtl;
30802 return rtx_equal_p (ax1, ay1);
30803 }
30804 case DW_OP_implicit_pointer:
30805 case DW_OP_GNU_implicit_pointer:
30806 return valx1->val_class == dw_val_class_die_ref
30807 && valx1->val_class == valy1->val_class
30808 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30809 && valx2->v.val_int == valy2->v.val_int;
30810 case DW_OP_entry_value:
30811 case DW_OP_GNU_entry_value:
30812 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30813 case DW_OP_const_type:
30814 case DW_OP_GNU_const_type:
30815 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30816 || valx2->val_class != valy2->val_class)
30817 return false;
30818 switch (valx2->val_class)
30819 {
30820 case dw_val_class_const:
30821 return valx2->v.val_int == valy2->v.val_int;
30822 case dw_val_class_vec:
30823 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30824 && valx2->v.val_vec.length == valy2->v.val_vec.length
30825 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30826 valx2->v.val_vec.elt_size
30827 * valx2->v.val_vec.length) == 0;
30828 case dw_val_class_const_double:
30829 return valx2->v.val_double.low == valy2->v.val_double.low
30830 && valx2->v.val_double.high == valy2->v.val_double.high;
30831 case dw_val_class_wide_int:
30832 return *valx2->v.val_wide == *valy2->v.val_wide;
30833 default:
30834 gcc_unreachable ();
30835 }
30836 case DW_OP_regval_type:
30837 case DW_OP_deref_type:
30838 case DW_OP_GNU_regval_type:
30839 case DW_OP_GNU_deref_type:
30840 return valx1->v.val_int == valy1->v.val_int
30841 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30842 case DW_OP_convert:
30843 case DW_OP_reinterpret:
30844 case DW_OP_GNU_convert:
30845 case DW_OP_GNU_reinterpret:
30846 if (valx1->val_class != valy1->val_class)
30847 return false;
30848 if (valx1->val_class == dw_val_class_unsigned_const)
30849 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30850 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30851 case DW_OP_GNU_parameter_ref:
30852 return valx1->val_class == dw_val_class_die_ref
30853 && valx1->val_class == valy1->val_class
30854 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30855 default:
30856 /* Other codes have no operands. */
30857 return true;
30858 }
30859 }
30860
30861 /* Return true if DWARF location expressions X and Y are the same. */
30862
30863 static inline bool
30864 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30865 {
30866 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30867 if (x->dw_loc_opc != y->dw_loc_opc
30868 || x->dtprel != y->dtprel
30869 || !compare_loc_operands (x, y))
30870 break;
30871 return x == NULL && y == NULL;
30872 }
30873
30874 /* Hashtable helpers. */
30875
30876 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30877 {
30878 static inline hashval_t hash (const dw_loc_list_struct *);
30879 static inline bool equal (const dw_loc_list_struct *,
30880 const dw_loc_list_struct *);
30881 };
30882
30883 /* Return precomputed hash of location list X. */
30884
30885 inline hashval_t
30886 loc_list_hasher::hash (const dw_loc_list_struct *x)
30887 {
30888 return x->hash;
30889 }
30890
30891 /* Return true if location lists A and B are the same. */
30892
30893 inline bool
30894 loc_list_hasher::equal (const dw_loc_list_struct *a,
30895 const dw_loc_list_struct *b)
30896 {
30897 if (a == b)
30898 return 1;
30899 if (a->hash != b->hash)
30900 return 0;
30901 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30902 if (strcmp (a->begin, b->begin) != 0
30903 || strcmp (a->end, b->end) != 0
30904 || (a->section == NULL) != (b->section == NULL)
30905 || (a->section && strcmp (a->section, b->section) != 0)
30906 || a->vbegin != b->vbegin || a->vend != b->vend
30907 || !compare_locs (a->expr, b->expr))
30908 break;
30909 return a == NULL && b == NULL;
30910 }
30911
30912 typedef hash_table<loc_list_hasher> loc_list_hash_type;
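/* hash_loc_list computes and caches the hash of a list before it is
   inserted, so loc_list_hasher::hash just returns that cached value,
   while loc_list_hasher::equal walks both lists and compares every
   range, section and expression.  optimize_location_lists_1 below uses
   this table to share structurally identical lists between DIEs.  */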
30913
30914
30915 /* Recursively optimize location lists referenced from DIE
30916 children and share them whenever possible. */
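/* The sharing scheme: the first list with given contents claims the
   hash table slot, and any later attribute whose list compares equal
   is simply repointed at that canonical copy.  If a list carries a
   location-view symbol but either has no views of its own or is
   merged into a canonical copy without one, the DIE's
   DW_AT_GNU_locviews attribute is removed, since those views will
   not be emitted.  */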
30917
30918 static void
30919 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30920 {
30921 dw_die_ref c;
30922 dw_attr_node *a;
30923 unsigned ix;
30924 dw_loc_list_struct **slot;
30925 bool drop_locviews = false;
30926 bool has_locviews = false;
30927
30928 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30929 if (AT_class (a) == dw_val_class_loc_list)
30930 {
30931 dw_loc_list_ref list = AT_loc_list (a);
30932 /* TODO: perform some optimizations here, before hashing
30933 it and storing into the hash table. */
30934 hash_loc_list (list);
30935 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30936 if (*slot == NULL)
30937 {
30938 *slot = list;
30939 if (loc_list_has_views (list))
30940 gcc_assert (list->vl_symbol);
30941 else if (list->vl_symbol)
30942 {
30943 drop_locviews = true;
30944 list->vl_symbol = NULL;
30945 }
30946 }
30947 else
30948 {
30949 if (list->vl_symbol && !(*slot)->vl_symbol)
30950 drop_locviews = true;
30951 a->dw_attr_val.v.val_loc_list = *slot;
30952 }
30953 }
30954 else if (AT_class (a) == dw_val_class_view_list)
30955 {
30956 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30957 has_locviews = true;
30958 }
30959
30960
30961 if (drop_locviews && has_locviews)
30962 remove_AT (die, DW_AT_GNU_locviews);
30963
30964 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30965 }
30966
30967
30968 /* Recursively assign each location list a unique index into the debug_addr
30969 section. */
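/* These indexes feed the .debug_addr table used by -gsplit-dwarf:
   each list entry's begin label gets an address table entry here so
   that the entry can later be referenced by an index into .debug_addr
   rather than by a relocated address.  Entries that are already
   indexed, or that skip_loc_list_entry says won't be output, are left
   alone (see the comment inside the loop below).  */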
30970
30971 static void
30972 index_location_lists (dw_die_ref die)
30973 {
30974 dw_die_ref c;
30975 dw_attr_node *a;
30976 unsigned ix;
30977
30978 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30979 if (AT_class (a) == dw_val_class_loc_list)
30980 {
30981 dw_loc_list_ref list = AT_loc_list (a);
30982 dw_loc_list_ref curr;
30983 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30984 {
30985 /* Don't index an entry that has already been indexed
30986 or won't be output. Make sure skip_loc_list_entry doesn't
30987 call size_of_locs, because that could create a circular
30988 dependency: index_location_lists would then need the address
30989 table indexes to be computed, yet it is still adding new
30990 entries through add_addr_table_entry, and the index computation
30991 requires that no new additions be made to the hash table. In
30992 the rare case of a DWARF[234] location expression of 64KB or
30993 more, we'll just waste an unused address table entry for it. */
30994 if (curr->begin_entry != NULL
30995 || skip_loc_list_entry (curr))
30996 continue;
30997
30998 curr->begin_entry
30999 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31000 }
31001 }
31002
31003 FOR_EACH_CHILD (die, c, index_location_lists (c));
31004 }
31005
31006 /* Optimize location lists referenced from DIE
31007 children and share them whenever possible. */
31008
31009 static void
31010 optimize_location_lists (dw_die_ref die)
31011 {
31012 loc_list_hash_type htab (500);
31013 optimize_location_lists_1 (die, &htab);
31014 }
31015 \f
31016 /* Traverse the limbo die list, and add parent/child links. The only
31017 dies without parents that should be here are concrete instances of
31018 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31019 For concrete instances, we can get the parent die from the abstract
31020 instance. */
31021
31022 static void
31023 flush_limbo_die_list (void)
31024 {
31025 limbo_die_node *node;
31026
31027 /* get_context_die calls force_decl_die, which can put new DIEs on the
31028 limbo list in LTO mode when nested functions are put in a different
31029 partition than that of their parent function. */
31030 while ((node = limbo_die_list))
31031 {
31032 dw_die_ref die = node->die;
31033 limbo_die_list = node->next;
31034
31035 if (die->die_parent == NULL)
31036 {
31037 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31038
31039 if (origin && origin->die_parent)
31040 add_child_die (origin->die_parent, die);
31041 else if (is_cu_die (die))
31042 ;
31043 else if (seen_error ())
31044 /* It's OK to be confused by errors in the input. */
31045 add_child_die (comp_unit_die (), die);
31046 else
31047 {
31048 /* In certain situations, the lexical block containing a
31049 nested function can be optimized away, which results
31050 in the nested function DIE being orphaned. Likewise
31051 with the return type of that nested function. Force
31052 this to be a child of the containing function.
31053
31054 It may happen that even the containing function got fully
31055 inlined and optimized out. In that case we are lost and
31056 fall back to the compile unit DIE. This should not be a
31057 big issue, as the function is likely unreachable too. */
31058 gcc_assert (node->created_for);
31059
31060 if (DECL_P (node->created_for))
31061 origin = get_context_die (DECL_CONTEXT (node->created_for));
31062 else if (TYPE_P (node->created_for))
31063 origin = scope_die_for (node->created_for, comp_unit_die ());
31064 else
31065 origin = comp_unit_die ();
31066
31067 add_child_die (origin, die);
31068 }
31069 }
31070 }
31071 }
31072
31073 /* Reset DIEs so we can output them again. */
31074
31075 static void
31076 reset_dies (dw_die_ref die)
31077 {
31078 dw_die_ref c;
31079
31080 /* Remove stuff we re-generate. */
31081 die->die_mark = 0;
31082 die->die_offset = 0;
31083 die->die_abbrev = 0;
31084 remove_AT (die, DW_AT_sibling);
31085
31086 FOR_EACH_CHILD (die, c, reset_dies (c));
31087 }
31088
31089 /* Output stuff that dwarf requires at the end of every file,
31090 and generate the DWARF-2 debugging info. */
31091
31092 static void
31093 dwarf2out_finish (const char *)
31094 {
31095 comdat_type_node *ctnode;
31096 dw_die_ref main_comp_unit_die;
31097 unsigned char checksum[16];
31098 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31099
31100 /* Flush out any latecomers to the limbo party. */
31101 flush_limbo_die_list ();
31102
31103 if (inline_entry_data_table)
31104 gcc_assert (inline_entry_data_table->elements () == 0);
31105
31106 if (flag_checking)
31107 {
31108 verify_die (comp_unit_die ());
31109 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31110 verify_die (node->die);
31111 }
31112
31113 /* We shouldn't have any symbols with delayed asm names for
31114 DIEs generated after early finish. */
31115 gcc_assert (deferred_asm_name == NULL);
31116
31117 gen_remaining_tmpl_value_param_die_attribute ();
31118
31119 if (flag_generate_lto || flag_generate_offload)
31120 {
31121 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31122
31123 /* Prune stuff so that dwarf2out_finish runs successfully
31124 for the fat part of the object. */
31125 reset_dies (comp_unit_die ());
31126 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31127 reset_dies (node->die);
31128
31129 hash_table<comdat_type_hasher> comdat_type_table (100);
31130 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31131 {
31132 comdat_type_node **slot
31133 = comdat_type_table.find_slot (ctnode, INSERT);
31134
31135 /* Don't reset types twice. */
31136 if (*slot != HTAB_EMPTY_ENTRY)
31137 continue;
31138
31139 /* Remove the pointer to the line table. */
31140 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31141
31142 if (debug_info_level >= DINFO_LEVEL_TERSE)
31143 reset_dies (ctnode->root_die);
31144
31145 *slot = ctnode;
31146 }
31147
31148 /* Reset die CU symbol so we don't output it twice. */
31149 comp_unit_die ()->die_id.die_symbol = NULL;
31150
31151 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31152 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31153 if (have_macinfo)
31154 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31155
31156 /* Remove indirect string decisions. */
31157 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31158 if (debug_line_str_hash)
31159 {
31160 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31161 debug_line_str_hash = NULL;
31162 }
31163 }
31164
31165 #if ENABLE_ASSERT_CHECKING
31166 {
31167 dw_die_ref die = comp_unit_die (), c;
31168 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31169 }
31170 #endif
31171 resolve_addr (comp_unit_die ());
31172 move_marked_base_types ();
31173
31174 /* Initialize sections and labels used for actual assembler output. */
31175 unsigned generation = init_sections_and_labels (false);
31176
31177 /* Traverse the DIEs and add sibling attributes to those DIEs that
31178 have children. */
31179 add_sibling_attributes (comp_unit_die ());
31180 limbo_die_node *node;
31181 for (node = cu_die_list; node; node = node->next)
31182 add_sibling_attributes (node->die);
31183 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31184 add_sibling_attributes (ctnode->root_die);
31185
31186 /* When splitting DWARF info, we put some attributes in the
31187 skeleton compile_unit DIE that remains in the .o, while
31188 most attributes go in the DWO compile_unit_die. */
31189 if (dwarf_split_debug_info)
31190 {
31191 limbo_die_node *cu;
31192 main_comp_unit_die = gen_compile_unit_die (NULL);
31193 if (dwarf_version >= 5)
31194 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31195 cu = limbo_die_list;
31196 gcc_assert (cu->die == main_comp_unit_die);
31197 limbo_die_list = limbo_die_list->next;
31198 cu->next = cu_die_list;
31199 cu_die_list = cu;
31200 }
31201 else
31202 main_comp_unit_die = comp_unit_die ();
31203
31204 /* Output a terminator label for the .text section. */
31205 switch_to_section (text_section);
31206 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31207 if (cold_text_section)
31208 {
31209 switch_to_section (cold_text_section);
31210 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31211 }
31212
31213 /* We can only use the low/high_pc attributes if all of the code was
31214 in .text. */
31215 if (!have_multiple_function_sections
31216 || (dwarf_version < 3 && dwarf_strict))
31217 {
31218 /* Don't add if the CU has no associated code. */
31219 if (text_section_used)
31220 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31221 text_end_label, true);
31222 }
31223 else
31224 {
31225 unsigned fde_idx;
31226 dw_fde_ref fde;
31227 bool range_list_added = false;
31228
31229 if (text_section_used)
31230 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31231 text_end_label, &range_list_added, true);
31232 if (cold_text_section_used)
31233 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31234 cold_end_label, &range_list_added, true);
31235
31236 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31237 {
31238 if (DECL_IGNORED_P (fde->decl))
31239 continue;
31240 if (!fde->in_std_section)
31241 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31242 fde->dw_fde_end, &range_list_added,
31243 true);
31244 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31245 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31246 fde->dw_fde_second_end, &range_list_added,
31247 true);
31248 }
31249
31250 if (range_list_added)
31251 {
31252 /* We need to give .debug_loc and .debug_ranges an appropriate
31253 "base address". Use zero so that these addresses become
31254 absolute. Historically, we've emitted the unexpected
31255 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31256 Emit both to give time for other tools to adapt. */
31257 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31258 if (! dwarf_strict && dwarf_version < 4)
31259 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31260
31261 add_ranges (NULL);
31262 }
31263 }
31264
31265 /* AIX Assembler inserts the length, so adjust the reference to match the
31266 offset expected by debuggers. */
31267 strcpy (dl_section_ref, debug_line_section_label);
31268 if (XCOFF_DEBUGGING_INFO)
31269 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31270
31271 if (debug_info_level >= DINFO_LEVEL_TERSE)
31272 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31273 dl_section_ref);
31274
31275 if (have_macinfo)
31276 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31277 macinfo_section_label);
31278
31279 if (dwarf_split_debug_info)
31280 {
31281 if (have_location_lists)
31282 {
31283 /* Since we generate the loclists in the split DWARF .dwo
31284 file itself, we don't need to generate a loclists_base
31285 attribute for the split compile unit DIE. That attribute
31286 (and using relocatable sec_offset FORMs) isn't allowed
31287 for a split compile unit. Only if the .debug_loclists
31288 section were in the main file would we need to generate a
31289 loclists_base attribute here (for the full or skeleton
31290 unit DIE). */
31291
31292 /* optimize_location_lists calculates the size of the lists,
31293 so index them first, and assign indices to the entries.
31294 Although optimize_location_lists will remove entries from
31295 the table, it only does so for duplicates, and therefore
31296 only reduces ref_counts to 1. */
31297 index_location_lists (comp_unit_die ());
31298 }
31299
31300 if (addr_index_table != NULL)
31301 {
31302 unsigned int index = 0;
31303 addr_index_table
31304 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31305 (&index);
31306 }
31307 }
31308
31309 loc_list_idx = 0;
31310 if (have_location_lists)
31311 {
31312 optimize_location_lists (comp_unit_die ());
31313 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31314 if (dwarf_version >= 5 && dwarf_split_debug_info)
31315 assign_location_list_indexes (comp_unit_die ());
31316 }
31317
31318 save_macinfo_strings ();
31319
31320 if (dwarf_split_debug_info)
31321 {
31322 unsigned int index = 0;
31323
31324 /* Add attributes common to skeleton compile_units and
31325 type_units. Because these attributes include strings, it
31326 must be done before freezing the string table. Top-level
31327 skeleton die attrs are added when the skeleton type unit is
31328 created, so ensure it is created by this point. */
31329 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31330 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31331 }
31332
31333 /* Output all of the compilation units. We put the main one last so that
31334 the offsets are available to output_pubnames. */
31335 for (node = cu_die_list; node; node = node->next)
31336 output_comp_unit (node->die, 0, NULL);
31337
31338 hash_table<comdat_type_hasher> comdat_type_table (100);
31339 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31340 {
31341 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31342
31343 /* Don't output duplicate types. */
31344 if (*slot != HTAB_EMPTY_ENTRY)
31345 continue;
31346
31347 /* Add a pointer to the line table for the main compilation unit
31348 so that the debugger can make sense of DW_AT_decl_file
31349 attributes. */
31350 if (debug_info_level >= DINFO_LEVEL_TERSE)
31351 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31352 (!dwarf_split_debug_info
31353 ? dl_section_ref
31354 : debug_skeleton_line_section_label));
31355
31356 output_comdat_type_unit (ctnode);
31357 *slot = ctnode;
31358 }
31359
31360 if (dwarf_split_debug_info)
31361 {
31362 int mark;
31363 struct md5_ctx ctx;
31364
31365 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31366 index_rnglists ();
31367
31368 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31369 md5_init_ctx (&ctx);
31370 mark = 0;
31371 die_checksum (comp_unit_die (), &ctx, &mark);
31372 unmark_all_dies (comp_unit_die ());
31373 md5_finish_ctx (&ctx, checksum);
31374
31375 if (dwarf_version < 5)
31376 {
31377 /* Use the first 8 bytes of the checksum as the dwo_id,
31378 and add it to both comp-unit DIEs. */
31379 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31380 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31381 }
31382
31383 /* Add the base offset of the ranges table to the skeleton
31384 comp-unit DIE. */
31385 if (!vec_safe_is_empty (ranges_table))
31386 {
31387 if (dwarf_version >= 5)
31388 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31389 ranges_base_label);
31390 else
31391 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31392 ranges_section_label);
31393 }
31394
31395 switch_to_section (debug_addr_section);
31396 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31397 which GCC uses to implement -gsplit-dwarf as a GNU extension
31398 before DWARF5, didn't have a header for .debug_addr units.
31399 DWARF5 specifies a small header when address tables are used. */
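/* Roughly, the header emitted below for DWARF5 lays out as:

       0xffffffff escape        (only for 64-bit DWARF)
       unit length              (DWARF_OFFSET_SIZE bytes)
       version = 5              (2 bytes)
       address size             (1 byte, DWARF2_ADDR_SIZE)
       segment selector size    (1 byte, 0)

   followed by the address entries themselves.  The unit length counts
   the 4 bytes of version/address-size/segment-size plus the addresses,
   which is why addrs_length below adds 4 to last_idx * DWARF2_ADDR_SIZE.  */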
31400 if (dwarf_version >= 5)
31401 {
31402 unsigned int last_idx = 0;
31403 unsigned long addrs_length;
31404
31405 addr_index_table->traverse_noresize
31406 <unsigned int *, count_index_addrs> (&last_idx);
31407 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31408
31409 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31410 dw2_asm_output_data (4, 0xffffffff,
31411 "Escape value for 64-bit DWARF extension");
31412 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31413 "Length of Address Unit");
31414 dw2_asm_output_data (2, 5, "DWARF addr version");
31415 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31416 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31417 }
31418 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31419 output_addr_table ();
31420 }
31421
31422 /* Output the main compilation unit if non-empty or if .debug_macinfo
31423 or .debug_macro will be emitted. */
31424 output_comp_unit (comp_unit_die (), have_macinfo,
31425 dwarf_split_debug_info ? checksum : NULL);
31426
31427 if (dwarf_split_debug_info && info_section_emitted)
31428 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31429
31430 /* Output the abbreviation table. */
31431 if (vec_safe_length (abbrev_die_table) != 1)
31432 {
31433 switch_to_section (debug_abbrev_section);
31434 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31435 output_abbrev_section ();
31436 }
31437
31438 /* Output location list section if necessary. */
31439 if (have_location_lists)
31440 {
31441 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31442 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31443 /* Output the location lists info. */
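/* For DWARF5 this starts with a .debug_loclists header: unit length,
   version 5, address size, segment selector size and an offset entry
   count that is only non-zero for -gsplit-dwarf.  When splitting,
   output_loclists_offsets then emits the offset table those indexes
   refer to, before the location list entries themselves.  */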
31444 switch_to_section (debug_loc_section);
31445 if (dwarf_version >= 5)
31446 {
31447 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31448 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31449 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31450 dw2_asm_output_data (4, 0xffffffff,
31451 "Initial length escape value indicating "
31452 "64-bit DWARF extension");
31453 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31454 "Length of Location Lists");
31455 ASM_OUTPUT_LABEL (asm_out_file, l1);
31456 output_dwarf_version ();
31457 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31458 dw2_asm_output_data (1, 0, "Segment Size");
31459 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31460 "Offset Entry Count");
31461 }
31462 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31463 if (dwarf_version >= 5 && dwarf_split_debug_info)
31464 {
31465 unsigned int save_loc_list_idx = loc_list_idx;
31466 loc_list_idx = 0;
31467 output_loclists_offsets (comp_unit_die ());
31468 gcc_assert (save_loc_list_idx == loc_list_idx);
31469 }
31470 output_location_lists (comp_unit_die ());
31471 if (dwarf_version >= 5)
31472 ASM_OUTPUT_LABEL (asm_out_file, l2);
31473 }
31474
31475 output_pubtables ();
31476
31477 /* Output the address range information if a CU (.debug_info section)
31478 was emitted. We output an empty table even if we had no functions
31479 to put in it. This is because the consumer has no way to tell the
31480 difference between an empty table that we omitted and failure to
31481 generate a table that would have contained data. */
31482 if (info_section_emitted)
31483 {
31484 switch_to_section (debug_aranges_section);
31485 output_aranges ();
31486 }
31487
31488 /* Output ranges section if necessary. */
31489 if (!vec_safe_is_empty (ranges_table))
31490 {
31491 if (dwarf_version >= 5)
31492 output_rnglists (generation);
31493 else
31494 output_ranges ();
31495 }
31496
31497 /* Have to end the macro section. */
31498 if (have_macinfo)
31499 {
31500 switch_to_section (debug_macinfo_section);
31501 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31502 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31503 : debug_skeleton_line_section_label, false);
31504 dw2_asm_output_data (1, 0, "End compilation unit");
31505 }
31506
31507 /* Output the source line correspondence table. We must do this
31508 even if there is no line information. Otherwise, on an empty
31509 translation unit, we will generate a present, but empty,
31510 .debug_info section. IRIX 6.5 `nm' will then complain when
31511 examining the file. This is done late so that any filenames
31512 used by the debug_info section are marked as 'used'. */
31513 switch_to_section (debug_line_section);
31514 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31515 if (! output_asm_line_debug_info ())
31516 output_line_info (false);
31517
31518 if (dwarf_split_debug_info && info_section_emitted)
31519 {
31520 switch_to_section (debug_skeleton_line_section);
31521 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31522 output_line_info (true);
31523 }
31524
31525 /* If we emitted any indirect strings, output the string table too. */
31526 if (debug_str_hash || skeleton_debug_str_hash)
31527 output_indirect_strings ();
31528 if (debug_line_str_hash)
31529 {
31530 switch_to_section (debug_line_str_section);
31531 const enum dwarf_form form = DW_FORM_line_strp;
31532 debug_line_str_hash->traverse<enum dwarf_form,
31533 output_indirect_string> (form);
31534 }
31535
31536 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31537 symview_upper_bound = 0;
31538 if (zero_view_p)
31539 bitmap_clear (zero_view_p);
31540 }
31541
31542 /* Returns a hash value for X (which really is a variable_value_struct). */
31543
31544 inline hashval_t
31545 variable_value_hasher::hash (variable_value_struct *x)
31546 {
31547 return (hashval_t) x->decl_id;
31548 }
31549
31550 /* Return nonzero if decl_id of variable_value_struct X is the same as
31551 UID of decl Y. */
31552
31553 inline bool
31554 variable_value_hasher::equal (variable_value_struct *x, tree y)
31555 {
31556 return x->decl_id == DECL_UID (y);
31557 }
31558
31559 /* Helper function for resolve_variable_value, handle
31560 DW_OP_GNU_variable_value in one location expression.
31561 Return true if exprloc has been changed into loclist. */
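/* Roughly, each DW_OP_GNU_variable_value referring to a decl of the
   current function is handled in one of three ways: if the decl
   already has a DIE, the operand is rewritten into a DIE reference;
   if loc_list_from_tree yields a single expression, that expression
   is spliced in place of the operator; if it yields a multi-entry
   location list, the whole attribute is turned into a location list
   when its class allows that (the "return true" case), and otherwise
   a DW_TAG_variable DIE is forced so it can be referenced.  */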
31562
31563 static bool
31564 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31565 {
31566 dw_loc_descr_ref next;
31567 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31568 {
31569 next = loc->dw_loc_next;
31570 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31571 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31572 continue;
31573
31574 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31575 if (DECL_CONTEXT (decl) != current_function_decl)
31576 continue;
31577
31578 dw_die_ref ref = lookup_decl_die (decl);
31579 if (ref)
31580 {
31581 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31582 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31583 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31584 continue;
31585 }
31586 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31587 if (l == NULL)
31588 continue;
31589 if (l->dw_loc_next)
31590 {
31591 if (AT_class (a) != dw_val_class_loc)
31592 continue;
31593 switch (a->dw_attr)
31594 {
31595 /* The following attributes allow both exprloc and loclist
31596 classes, so we can change them into a loclist. */
31597 case DW_AT_location:
31598 case DW_AT_string_length:
31599 case DW_AT_return_addr:
31600 case DW_AT_data_member_location:
31601 case DW_AT_frame_base:
31602 case DW_AT_segment:
31603 case DW_AT_static_link:
31604 case DW_AT_use_location:
31605 case DW_AT_vtable_elem_location:
31606 if (prev)
31607 {
31608 prev->dw_loc_next = NULL;
31609 prepend_loc_descr_to_each (l, AT_loc (a));
31610 }
31611 if (next)
31612 add_loc_descr_to_each (l, next);
31613 a->dw_attr_val.val_class = dw_val_class_loc_list;
31614 a->dw_attr_val.val_entry = NULL;
31615 a->dw_attr_val.v.val_loc_list = l;
31616 have_location_lists = true;
31617 return true;
31618 /* The following attributes allow both exprloc and reference,
31619 so if the whole expression is DW_OP_GNU_variable_value alone
31620 we could transform it into a reference. */
31621 case DW_AT_byte_size:
31622 case DW_AT_bit_size:
31623 case DW_AT_lower_bound:
31624 case DW_AT_upper_bound:
31625 case DW_AT_bit_stride:
31626 case DW_AT_count:
31627 case DW_AT_allocated:
31628 case DW_AT_associated:
31629 case DW_AT_byte_stride:
31630 if (prev == NULL && next == NULL)
31631 break;
31632 /* FALLTHRU */
31633 default:
31634 if (dwarf_strict)
31635 continue;
31636 break;
31637 }
31638 /* Create DW_TAG_variable that we can refer to. */
31639 gen_decl_die (decl, NULL_TREE, NULL,
31640 lookup_decl_die (current_function_decl));
31641 ref = lookup_decl_die (decl);
31642 if (ref)
31643 {
31644 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31645 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31646 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31647 }
31648 continue;
31649 }
31650 if (prev)
31651 {
31652 prev->dw_loc_next = l->expr;
31653 add_loc_descr (&prev->dw_loc_next, next);
31654 free_loc_descr (loc, NULL);
31655 next = prev->dw_loc_next;
31656 }
31657 else
31658 {
31659 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31660 add_loc_descr (&loc, next);
31661 next = loc;
31662 }
31663 loc = prev;
31664 }
31665 return false;
31666 }
31667
31668 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31669
31670 static void
31671 resolve_variable_value (dw_die_ref die)
31672 {
31673 dw_attr_node *a;
31674 dw_loc_list_ref loc;
31675 unsigned ix;
31676
31677 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31678 switch (AT_class (a))
31679 {
31680 case dw_val_class_loc:
31681 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31682 break;
31683 /* FALLTHRU */
31684 case dw_val_class_loc_list:
31685 loc = AT_loc_list (a);
31686 gcc_assert (loc);
31687 for (; loc; loc = loc->dw_loc_next)
31688 resolve_variable_value_in_expr (a, loc->expr);
31689 break;
31690 default:
31691 break;
31692 }
31693 }
31694
31695 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31696 temporaries in the current function. */
31697
31698 static void
31699 resolve_variable_values (void)
31700 {
31701 if (!variable_value_hash || !current_function_decl)
31702 return;
31703
31704 struct variable_value_struct *node
31705 = variable_value_hash->find_with_hash (current_function_decl,
31706 DECL_UID (current_function_decl));
31707
31708 if (node == NULL)
31709 return;
31710
31711 unsigned int i;
31712 dw_die_ref die;
31713 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31714 resolve_variable_value (die);
31715 }
31716
31717 /* Helper function for note_variable_value, handle one location
31718 expression. */
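/* When a DW_OP_GNU_variable_value operand still refers to a decl
   rather than a DIE, two cases are handled below: for LTO or offload
   output the referenced DIE is forced immediately, since early LTO
   output needs the dw_val_class_decl_ref fully resolved; otherwise,
   if the decl is a variable local to a function that already has a
   DIE, the containing DIE is recorded in variable_value_hash under
   that function's DECL_UID so that resolve_variable_values can retry
   the resolution while that function is being compiled.  */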
31719
31720 static void
31721 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31722 {
31723 for (; loc; loc = loc->dw_loc_next)
31724 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31725 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31726 {
31727 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31728 dw_die_ref ref = lookup_decl_die (decl);
31729 if (! ref && (flag_generate_lto || flag_generate_offload))
31730 {
31731 /* ??? This is somewhat of a hack, because we do not create DIEs
31732 for variables not in BLOCK trees early, but when generating
31733 early LTO output we need the dw_val_class_decl_ref to be
31734 fully resolved. For fat LTO objects we'd also like to
31735 undo this after LTO dwarf output. */
31736 gcc_assert (DECL_CONTEXT (decl));
31737 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31738 gcc_assert (ctx != NULL);
31739 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31740 ref = lookup_decl_die (decl);
31741 gcc_assert (ref != NULL);
31742 }
31743 if (ref)
31744 {
31745 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31746 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31747 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31748 continue;
31749 }
31750 if (VAR_P (decl)
31751 && DECL_CONTEXT (decl)
31752 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31753 && lookup_decl_die (DECL_CONTEXT (decl)))
31754 {
31755 if (!variable_value_hash)
31756 variable_value_hash
31757 = hash_table<variable_value_hasher>::create_ggc (10);
31758
31759 tree fndecl = DECL_CONTEXT (decl);
31760 struct variable_value_struct *node;
31761 struct variable_value_struct **slot
31762 = variable_value_hash->find_slot_with_hash (fndecl,
31763 DECL_UID (fndecl),
31764 INSERT);
31765 if (*slot == NULL)
31766 {
31767 node = ggc_cleared_alloc<variable_value_struct> ();
31768 node->decl_id = DECL_UID (fndecl);
31769 *slot = node;
31770 }
31771 else
31772 node = *slot;
31773
31774 vec_safe_push (node->dies, die);
31775 }
31776 }
31777 }
31778
31779 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31780 with dw_val_class_decl_ref operand. */
31781
31782 static void
31783 note_variable_value (dw_die_ref die)
31784 {
31785 dw_die_ref c;
31786 dw_attr_node *a;
31787 dw_loc_list_ref loc;
31788 unsigned ix;
31789
31790 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31791 switch (AT_class (a))
31792 {
31793 case dw_val_class_loc_list:
31794 loc = AT_loc_list (a);
31795 gcc_assert (loc);
31796 if (!loc->noted_variable_value)
31797 {
31798 loc->noted_variable_value = 1;
31799 for (; loc; loc = loc->dw_loc_next)
31800 note_variable_value_in_expr (die, loc->expr);
31801 }
31802 break;
31803 case dw_val_class_loc:
31804 note_variable_value_in_expr (die, AT_loc (a));
31805 break;
31806 default:
31807 break;
31808 }
31809
31810 /* Mark children. */
31811 FOR_EACH_CHILD (die, c, note_variable_value (c));
31812 }
31813
31814 /* Perform any cleanups needed after the early debug generation pass
31815 has run. */
31816
31817 static void
31818 dwarf2out_early_finish (const char *filename)
31819 {
31820 set_early_dwarf s;
31821 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31822
31823 /* PCH might result in the DW_AT_producer string being restored from
31824 the header compilation, so always fill it with an empty string
31825 initially and overwrite it only here. */
31826 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31827 producer_string = gen_producer_string ();
31828 producer->dw_attr_val.v.val_str->refcount--;
31829 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31830
31831 /* Add the name for the main input file now. We delayed this from
31832 dwarf2out_init to avoid complications with PCH. */
31833 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31834 add_comp_dir_attribute (comp_unit_die ());
31835
31836 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31837 DW_AT_comp_dir into .debug_line_str section. */
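/* Only strings longer than DWARF_OFFSET_SIZE are worth moving: a
   short string emitted inline as DW_FORM_string occupies strlen + 1
   bytes, no more than the 4- or 8-byte DW_FORM_line_strp offset
   would, hence the size check below.  */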
31838 if (!output_asm_line_debug_info ()
31839 && dwarf_version >= 5
31840 && DWARF5_USE_DEBUG_LINE_STR)
31841 {
31842 for (int i = 0; i < 2; i++)
31843 {
31844 dw_attr_node *a = get_AT (comp_unit_die (),
31845 i ? DW_AT_comp_dir : DW_AT_name);
31846 if (a == NULL
31847 || AT_class (a) != dw_val_class_str
31848 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31849 continue;
31850
31851 if (! debug_line_str_hash)
31852 debug_line_str_hash
31853 = hash_table<indirect_string_hasher>::create_ggc (10);
31854
31855 struct indirect_string_node *node
31856 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31857 set_indirect_string (node);
31858 node->form = DW_FORM_line_strp;
31859 a->dw_attr_val.v.val_str->refcount--;
31860 a->dw_attr_val.v.val_str = node;
31861 }
31862 }
31863
31864 /* With LTO early dwarf was really finished at compile-time, so make
31865 sure to adjust the phase after annotating the LTRANS CU DIE. */
31866 if (in_lto_p)
31867 {
31868 early_dwarf_finished = true;
31869 return;
31870 }
31871
31872 /* Walk through the list of incomplete types again, trying once more to
31873 emit full debugging info for them. */
31874 retry_incomplete_types ();
31875
31876 /* The point here is to flush out the limbo list so that it is empty
31877 and we don't need to stream it for LTO. */
31878 flush_limbo_die_list ();
31879
31880 gen_scheduled_generic_parms_dies ();
31881 gen_remaining_tmpl_value_param_die_attribute ();
31882
31883 /* Add DW_AT_linkage_name for all deferred DIEs. */
31884 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31885 {
31886 tree decl = node->created_for;
31887 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31888 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31889 ended up in deferred_asm_name before we knew it was
31890 constant and never written to disk. */
31891 && DECL_ASSEMBLER_NAME (decl))
31892 {
31893 add_linkage_attr (node->die, decl);
31894 move_linkage_attr (node->die);
31895 }
31896 }
31897 deferred_asm_name = NULL;
31898
31899 if (flag_eliminate_unused_debug_types)
31900 prune_unused_types ();
31901
31902 /* Generate separate COMDAT sections for type DIEs. */
31903 if (use_debug_types)
31904 {
31905 break_out_comdat_types (comp_unit_die ());
31906
31907 /* Each new type_unit DIE was added to the limbo die list when created.
31908 Since these have all been added to comdat_type_list, clear the
31909 limbo die list. */
31910 limbo_die_list = NULL;
31911
31912 /* For each new comdat type unit, copy declarations for incomplete
31913 types to make the new unit self-contained (i.e., no direct
31914 references to the main compile unit). */
31915 for (comdat_type_node *ctnode = comdat_type_list;
31916 ctnode != NULL; ctnode = ctnode->next)
31917 copy_decls_for_unworthy_types (ctnode->root_die);
31918 copy_decls_for_unworthy_types (comp_unit_die ());
31919
31920 /* In the process of copying declarations from one unit to another,
31921 we may have left some declarations behind that are no longer
31922 referenced. Prune them. */
31923 prune_unused_types ();
31924 }
31925
31926 /* Traverse the DIEs and note those with DW_OP_GNU_variable_value still
31927 with dw_val_class_decl_ref operand. */
31928 note_variable_value (comp_unit_die ());
31929 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31930 note_variable_value (node->die);
31931 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31932 ctnode = ctnode->next)
31933 note_variable_value (ctnode->root_die);
31934 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31935 note_variable_value (node->die);
31936
31937 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31938 both the main_cu and all skeleton TUs. Making this call unconditional
31939 would end up either adding a second copy of the AT_pubnames attribute, or
31940 requiring a special case in add_top_level_skeleton_die_attrs. */
31941 if (!dwarf_split_debug_info)
31942 add_AT_pubnames (comp_unit_die ());
31943
31944 /* The early debug phase is now finished. */
31945 early_dwarf_finished = true;
31946
31947 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31948 if ((!flag_generate_lto && !flag_generate_offload)
31949 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
31950 copy_lto_debug_sections operation of the simple object support in
31951 libiberty is not implemented for them yet. */
31952 || TARGET_PECOFF || TARGET_COFF)
31953 return;
31954
31955 /* Now that we are going to output for LTO, initialize sections and
31956 labels to the LTO variants. We don't need a random-seed postfix as
31957 for other LTO sections, since linking the LTO debug sections into
31958 one in a partial link is fine. */
31959 init_sections_and_labels (true);
31960
31961 /* The output below is modeled after dwarf2out_finish with all
31962 location related output removed and some LTO specific changes.
31963 Some refactoring might make both smaller and easier to match up. */
31964
31965 /* Traverse the DIEs and add sibling attributes to those DIEs
31966 that have children. */
31967 add_sibling_attributes (comp_unit_die ());
31968 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31969 add_sibling_attributes (node->die);
31970 for (comdat_type_node *ctnode = comdat_type_list;
31971 ctnode != NULL; ctnode = ctnode->next)
31972 add_sibling_attributes (ctnode->root_die);
31973
31974 /* AIX Assembler inserts the length, so adjust the reference to match the
31975 offset expected by debuggers. */
31976 strcpy (dl_section_ref, debug_line_section_label);
31977 if (XCOFF_DEBUGGING_INFO)
31978 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31979
31980 if (debug_info_level >= DINFO_LEVEL_TERSE)
31981 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
31982
31983 if (have_macinfo)
31984 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31985 macinfo_section_label);
31986
31987 save_macinfo_strings ();
31988
31989 if (dwarf_split_debug_info)
31990 {
31991 unsigned int index = 0;
31992 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31993 }
31994
31995 /* Output all of the compilation units. We put the main one last so that
31996 the offsets are available to output_pubnames. */
31997 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31998 output_comp_unit (node->die, 0, NULL);
31999
32000 hash_table<comdat_type_hasher> comdat_type_table (100);
32001 for (comdat_type_node *ctnode = comdat_type_list;
32002 ctnode != NULL; ctnode = ctnode->next)
32003 {
32004 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32005
32006 /* Don't output duplicate types. */
32007 if (*slot != HTAB_EMPTY_ENTRY)
32008 continue;
32009
32010 /* Add a pointer to the line table for the main compilation unit
32011 so that the debugger can make sense of DW_AT_decl_file
32012 attributes. */
32013 if (debug_info_level >= DINFO_LEVEL_TERSE)
32014 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32015 (!dwarf_split_debug_info
32016 ? debug_line_section_label
32017 : debug_skeleton_line_section_label));
32018
32019 output_comdat_type_unit (ctnode);
32020 *slot = ctnode;
32021 }
32022
32023 /* Attach a unique symbol to the main debuginfo section. */
32024 compute_comp_unit_symbol (comp_unit_die ());
32025
32026 /* Output the main compilation unit. We always need it if only for
32027 the CU symbol. */
32028 output_comp_unit (comp_unit_die (), true, NULL);
32029
32030 /* Output the abbreviation table. */
32031 if (vec_safe_length (abbrev_die_table) != 1)
32032 {
32033 switch_to_section (debug_abbrev_section);
32034 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32035 output_abbrev_section ();
32036 }
32037
32038 /* Have to end the macro section. */
32039 if (have_macinfo)
32040 {
32041 /* We have to save macinfo state if we need to output it again
32042 for the FAT part of the object. */
32043 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32044 if (flag_fat_lto_objects)
32045 macinfo_table = macinfo_table->copy ();
32046
32047 switch_to_section (debug_macinfo_section);
32048 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32049 output_macinfo (debug_line_section_label, true);
32050 dw2_asm_output_data (1, 0, "End compilation unit");
32051
32052 if (flag_fat_lto_objects)
32053 {
32054 vec_free (macinfo_table);
32055 macinfo_table = saved_macinfo_table;
32056 }
32057 }
32058
32059 /* Emit a skeleton debug_line section. */
32060 switch_to_section (debug_line_section);
32061 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32062 output_line_info (true);
32063
32064 /* If we emitted any indirect strings, output the string table too. */
32065 if (debug_str_hash || skeleton_debug_str_hash)
32066 output_indirect_strings ();
32067 if (debug_line_str_hash)
32068 {
32069 switch_to_section (debug_line_str_section);
32070 const enum dwarf_form form = DW_FORM_line_strp;
32071 debug_line_str_hash->traverse<enum dwarf_form,
32072 output_indirect_string> (form);
32073 }
32074
32075 /* Switch back to the text section. */
32076 switch_to_section (text_section);
32077 }
32078
32079 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32080 within the same process. For use by toplev::finalize. */
32081
32082 void
32083 dwarf2out_c_finalize (void)
32084 {
32085 last_var_location_insn = NULL;
32086 cached_next_real_insn = NULL;
32087 used_rtx_array = NULL;
32088 incomplete_types = NULL;
32089 debug_info_section = NULL;
32090 debug_skeleton_info_section = NULL;
32091 debug_abbrev_section = NULL;
32092 debug_skeleton_abbrev_section = NULL;
32093 debug_aranges_section = NULL;
32094 debug_addr_section = NULL;
32095 debug_macinfo_section = NULL;
32096 debug_line_section = NULL;
32097 debug_skeleton_line_section = NULL;
32098 debug_loc_section = NULL;
32099 debug_pubnames_section = NULL;
32100 debug_pubtypes_section = NULL;
32101 debug_str_section = NULL;
32102 debug_line_str_section = NULL;
32103 debug_str_dwo_section = NULL;
32104 debug_str_offsets_section = NULL;
32105 debug_ranges_section = NULL;
32106 debug_frame_section = NULL;
32107 fde_vec = NULL;
32108 debug_str_hash = NULL;
32109 debug_line_str_hash = NULL;
32110 skeleton_debug_str_hash = NULL;
32111 dw2_string_counter = 0;
32112 have_multiple_function_sections = false;
32113 text_section_used = false;
32114 cold_text_section_used = false;
32115 cold_text_section = NULL;
32116 current_unit_personality = NULL;
32117
32118 early_dwarf = false;
32119 early_dwarf_finished = false;
32120
32121 next_die_offset = 0;
32122 single_comp_unit_die = NULL;
32123 comdat_type_list = NULL;
32124 limbo_die_list = NULL;
32125 file_table = NULL;
32126 decl_die_table = NULL;
32127 common_block_die_table = NULL;
32128 decl_loc_table = NULL;
32129 call_arg_locations = NULL;
32130 call_arg_loc_last = NULL;
32131 call_site_count = -1;
32132 tail_call_site_count = -1;
32133 cached_dw_loc_list_table = NULL;
32134 abbrev_die_table = NULL;
32135 delete dwarf_proc_stack_usage_map;
32136 dwarf_proc_stack_usage_map = NULL;
32137 line_info_label_num = 0;
32138 cur_line_info_table = NULL;
32139 text_section_line_info = NULL;
32140 cold_text_section_line_info = NULL;
32141 separate_line_info = NULL;
32142 info_section_emitted = false;
32143 pubname_table = NULL;
32144 pubtype_table = NULL;
32145 macinfo_table = NULL;
32146 ranges_table = NULL;
32147 ranges_by_label = NULL;
32148 rnglist_idx = 0;
32149 have_location_lists = false;
32150 loclabel_num = 0;
32151 poc_label_num = 0;
32152 last_emitted_file = NULL;
32153 label_num = 0;
32154 tmpl_value_parm_die_table = NULL;
32155 generic_type_instances = NULL;
32156 frame_pointer_fb_offset = 0;
32157 frame_pointer_fb_offset_valid = false;
32158 base_types.release ();
32159 XDELETEVEC (producer_string);
32160 producer_string = NULL;
32161 }
32162
32163 #include "gt-dwarf2out.h"