1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2020 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
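/* Editor's illustration (not part of the original file): on x86_64, for
   example, the CFA on entry to a function is SP + 8, since the call insn
   has pushed the return address; after a "push %rbp" that same CFA is
   SP + 16, which the CFI stream records as DW_CFA_def_cfa_offset 16.  */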
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types_list needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
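/* Editor's illustration: DWARF_ROUND (5, 4) == 8 and DWARF_ROUND (8, 4) == 8.  */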
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
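/* Editor's illustration: on a host with 64-bit HOST_WIDE_INTs, a wide_int
   whose value needs 70 bits of precision yields get_full_len () == 2,
   i.e. ceil (prec / HOST_BITS_PER_WIDE_INT).  */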
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 if (debug_info_level <= DINFO_LEVEL_TERSE)
403 return false;
404
405 enum debug_struct_file criterion;
406 tree type_decl;
407 bool generic = lang_hooks.types.generic_p (type);
408
409 if (generic)
410 criterion = debug_struct_generic[usage];
411 else
412 criterion = debug_struct_ordinary[usage];
413
414 if (criterion == DINFO_STRUCT_FILE_NONE)
415 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
416 if (criterion == DINFO_STRUCT_FILE_ANY)
417 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
418
419 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
420
421 if (type_decl != NULL)
422 {
423 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
425
426 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
427 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
428 }
429
430 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
431 }
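/* Editor's note: the DINFO_STRUCT_FILE_* criteria above implement the
   -femit-struct-debug-{baseonly,reduced,detailed} family of options; with
   -femit-struct-debug-baseonly, for instance, a struct is described only
   when it is defined in a file whose base name matches that of the main
   compilation unit (the matches_main_base test above).  */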
432 \f
433 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
434 switch to the data section instead, and write out a synthetic start label
435 for collect2 the first time around. */
436
437 static void
438 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
439 {
440 if (eh_frame_section == 0)
441 {
442 int flags;
443
444 if (EH_TABLES_CAN_BE_READ_ONLY)
445 {
446 int fde_encoding;
447 int per_encoding;
448 int lsda_encoding;
449
450 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
451 /*global=*/0);
452 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
453 /*global=*/1);
454 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
455 /*global=*/0);
456 flags = ((! flag_pic
457 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
458 && (fde_encoding & 0x70) != DW_EH_PE_aligned
459 && (per_encoding & 0x70) != DW_EH_PE_absptr
460 && (per_encoding & 0x70) != DW_EH_PE_aligned
461 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
462 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
463 ? 0 : SECTION_WRITE);
464 }
465 else
466 flags = SECTION_WRITE;
467
468 #ifdef EH_FRAME_SECTION_NAME
469 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
470 #else
471 eh_frame_section = ((flags == SECTION_WRITE)
472 ? data_section : readonly_data_section);
473 #endif /* EH_FRAME_SECTION_NAME */
474 }
475
476 switch_to_section (eh_frame_section);
477
478 #ifdef EH_FRAME_THROUGH_COLLECT2
479 /* We have no special eh_frame section. Emit special labels to guide
480 collect2. */
481 if (!back)
482 {
483 tree label = get_file_function_name ("F");
484 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
485 targetm.asm_out.globalize_label (asm_out_file,
486 IDENTIFIER_POINTER (label));
487 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
488 }
489 #endif
490 }
491
492 /* Switch [BACK] to the eh or debug frame table section, depending on
493 FOR_EH. */
494
495 static void
496 switch_to_frame_table_section (int for_eh, bool back)
497 {
498 if (for_eh)
499 switch_to_eh_frame_section (back);
500 else
501 {
502 if (!debug_frame_section)
503 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
504 SECTION_DEBUG, NULL);
505 switch_to_section (debug_frame_section);
506 }
507 }
508
509 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
510
511 enum dw_cfi_oprnd_type
512 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
513 {
514 switch (cfi)
515 {
516 case DW_CFA_nop:
517 case DW_CFA_GNU_window_save:
518 case DW_CFA_remember_state:
519 case DW_CFA_restore_state:
520 return dw_cfi_oprnd_unused;
521
522 case DW_CFA_set_loc:
523 case DW_CFA_advance_loc1:
524 case DW_CFA_advance_loc2:
525 case DW_CFA_advance_loc4:
526 case DW_CFA_MIPS_advance_loc8:
527 return dw_cfi_oprnd_addr;
528
529 case DW_CFA_offset:
530 case DW_CFA_offset_extended:
531 case DW_CFA_def_cfa:
532 case DW_CFA_offset_extended_sf:
533 case DW_CFA_def_cfa_sf:
534 case DW_CFA_restore:
535 case DW_CFA_restore_extended:
536 case DW_CFA_undefined:
537 case DW_CFA_same_value:
538 case DW_CFA_def_cfa_register:
539 case DW_CFA_register:
540 case DW_CFA_expression:
541 case DW_CFA_val_expression:
542 return dw_cfi_oprnd_reg_num;
543
544 case DW_CFA_def_cfa_offset:
545 case DW_CFA_GNU_args_size:
546 case DW_CFA_def_cfa_offset_sf:
547 return dw_cfi_oprnd_offset;
548
549 case DW_CFA_def_cfa_expression:
550 return dw_cfi_oprnd_loc;
551
552 default:
553 gcc_unreachable ();
554 }
555 }
556
557 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
558
559 enum dw_cfi_oprnd_type
560 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
561 {
562 switch (cfi)
563 {
564 case DW_CFA_def_cfa:
565 case DW_CFA_def_cfa_sf:
566 case DW_CFA_offset:
567 case DW_CFA_offset_extended_sf:
568 case DW_CFA_offset_extended:
569 return dw_cfi_oprnd_offset;
570
571 case DW_CFA_register:
572 return dw_cfi_oprnd_reg_num;
573
574 case DW_CFA_expression:
575 case DW_CFA_val_expression:
576 return dw_cfi_oprnd_loc;
577
578 case DW_CFA_def_cfa_expression:
579 return dw_cfi_oprnd_cfa_loc;
580
581 default:
582 return dw_cfi_oprnd_unused;
583 }
584 }
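/* Editor's illustration: taken together, the two descriptors above say,
   for example, that DW_CFA_offset_extended carries a register number in
   operand 1 and an offset in operand 2, while DW_CFA_def_cfa_offset has an
   offset in operand 1 and no second operand at all.  */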
585
586 /* Output one FDE. */
587
588 static void
589 output_fde (dw_fde_ref fde, bool for_eh, bool second,
590 char *section_start_label, int fde_encoding, char *augmentation,
591 bool any_lsda_needed, int lsda_encoding)
592 {
593 const char *begin, *end;
594 static unsigned int j;
595 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
596
597 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
598 /* empty */ 0);
599 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
600 for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
602 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
603 if (!XCOFF_DEBUGGING_INFO || for_eh)
604 {
605 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
606 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
607 " indicating 64-bit DWARF extension");
608 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
609 "FDE Length");
610 }
611 ASM_OUTPUT_LABEL (asm_out_file, l1);
612
613 if (for_eh)
614 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
615 else
616 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
617 debug_frame_section, "FDE CIE offset");
618
619 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
620 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
621
622 if (for_eh)
623 {
624 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
625 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
626 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
627 "FDE initial location");
628 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
629 end, begin, "FDE address range");
630 }
631 else
632 {
633 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
634 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
635 }
636
637 if (augmentation[0])
638 {
639 if (any_lsda_needed)
640 {
641 int size = size_of_encoded_value (lsda_encoding);
642
643 if (lsda_encoding == DW_EH_PE_aligned)
644 {
645 int offset = ( 4 /* Length */
646 + 4 /* CIE offset */
647 + 2 * size_of_encoded_value (fde_encoding)
648 + 1 /* Augmentation size */ );
649 int pad = -offset & (PTR_SIZE - 1);
650
651 size += pad;
652 gcc_assert (size_of_uleb128 (size) == 1);
653 }
654
655 dw2_asm_output_data_uleb128 (size, "Augmentation size");
656
657 if (fde->uses_eh_lsda)
658 {
659 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
660 fde->funcdef_number);
661 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
662 gen_rtx_SYMBOL_REF (Pmode, l1),
663 false,
664 "Language Specific Data Area");
665 }
666 else
667 {
668 if (lsda_encoding == DW_EH_PE_aligned)
669 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
670 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
671 "Language Specific Data Area (none)");
672 }
673 }
674 else
675 dw2_asm_output_data_uleb128 (0, "Augmentation size");
676 }
677
678 /* Loop through the Call Frame Instructions associated with this FDE. */
679 fde->dw_fde_current_label = begin;
680 {
681 size_t from, until, i;
682
683 from = 0;
684 until = vec_safe_length (fde->dw_fde_cfi);
685
686 if (fde->dw_fde_second_begin == NULL)
687 ;
688 else if (!second)
689 until = fde->dw_fde_switch_cfi_index;
690 else
691 from = fde->dw_fde_switch_cfi_index;
692
693 for (i = from; i < until; i++)
694 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
695 }
696
697 /* If we are to emit a ref/link from function bodies to their frame tables,
698 do it now. This is typically performed to make sure that tables
699 associated with functions are dragged with them and not discarded in
700 garbage collecting links. We need to do this on a per function basis to
701 cope with -ffunction-sections. */
702
703 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
704 /* Switch to the function section, emit the ref to the tables, and
705 switch *back* into the table section. */
706 switch_to_section (function_section (fde->decl));
707 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
708 switch_to_frame_table_section (for_eh, true);
709 #endif
710
711 /* Pad the FDE out to an address sized boundary. */
712 ASM_OUTPUT_ALIGN (asm_out_file,
713 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
714 ASM_OUTPUT_LABEL (asm_out_file, l2);
715
716 j += 2;
717 }
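/* Editor's summary of the layout emitted by output_fde above: the FDE
   length, the CIE pointer/offset, the initial location and address range,
   optional augmentation data (the LSDA pointer), the CFI stream for the
   function, and finally padding to an address-sized boundary.  */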
718
719 /* Return true if frame description entry FDE is needed for EH. */
720
721 static bool
722 fde_needed_for_eh_p (dw_fde_ref fde)
723 {
724 if (flag_asynchronous_unwind_tables)
725 return true;
726
727 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
728 return true;
729
730 if (fde->uses_eh_lsda)
731 return true;
732
733 /* If exceptions are enabled, we have collected nothrow info. */
734 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
735 return false;
736
737 return true;
738 }
739
740 /* Output the call frame information used to record information
741 that relates to calculating the frame pointer, and records the
742 location of saved registers. */
743
744 static void
745 output_call_frame_info (int for_eh)
746 {
747 unsigned int i;
748 dw_fde_ref fde;
749 dw_cfi_ref cfi;
750 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
751 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
752 bool any_lsda_needed = false;
753 char augmentation[6];
754 int augmentation_size;
755 int fde_encoding = DW_EH_PE_absptr;
756 int per_encoding = DW_EH_PE_absptr;
757 int lsda_encoding = DW_EH_PE_absptr;
758 int return_reg;
759 rtx personality = NULL;
760 int dw_cie_version;
761
762 /* Don't emit a CIE if there won't be any FDEs. */
763 if (!fde_vec)
764 return;
765
766 /* Nothing to do if the assembler's doing it all. */
767 if (dwarf2out_do_cfi_asm ())
768 return;
769
770 /* If we don't have any functions we'll want to unwind out of, don't emit
771 any EH unwind information. If we make FDEs linkonce, we may have to
772 emit an empty label for an FDE that wouldn't otherwise be emitted. We
773 want to avoid having an FDE kept around when the function it refers to
774 is discarded. Example where this matters: a primary function template
775 in C++ requires EH information, an explicit specialization doesn't. */
776 if (for_eh)
777 {
778 bool any_eh_needed = false;
779
780 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
781 {
782 if (fde->uses_eh_lsda)
783 any_eh_needed = any_lsda_needed = true;
784 else if (fde_needed_for_eh_p (fde))
785 any_eh_needed = true;
786 else if (TARGET_USES_WEAK_UNWIND_INFO)
787 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
788 }
789
790 if (!any_eh_needed)
791 return;
792 }
793
794 /* We're going to be generating comments, so turn on app. */
795 if (flag_debug_asm)
796 app_enable ();
797
798 /* Switch to the proper frame section, first time. */
799 switch_to_frame_table_section (for_eh, false);
800
801 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
802 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
803
804 /* Output the CIE. */
805 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
806 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
807 if (!XCOFF_DEBUGGING_INFO || for_eh)
808 {
809 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
810 dw2_asm_output_data (4, 0xffffffff,
811 "Initial length escape value indicating 64-bit DWARF extension");
812 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
813 "Length of Common Information Entry");
814 }
815 ASM_OUTPUT_LABEL (asm_out_file, l1);
816
817 /* Now that the CIE pointer is PC-relative for EH,
818 use 0 to identify the CIE. */
819 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
820 (for_eh ? 0 : DWARF_CIE_ID),
821 "CIE Identifier Tag");
822
823 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
824 use CIE version 1, unless that would produce incorrect results
825 due to overflowing the return register column. */
826 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
827 dw_cie_version = 1;
828 if (return_reg >= 256 || dwarf_version > 2)
829 dw_cie_version = 3;
830 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
831
832 augmentation[0] = 0;
833 augmentation_size = 0;
834
835 personality = current_unit_personality;
836 if (for_eh)
837 {
838 char *p;
839
840 /* Augmentation:
841 z Indicates that a uleb128 is present to size the
842 augmentation section.
843 L Indicates the encoding (and thus presence) of
844 an LSDA pointer in the FDE augmentation.
845 R Indicates a non-default pointer encoding for
846 FDE code pointers.
847 P Indicates the presence of an encoding + language
848 personality routine in the CIE augmentation. */
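      /* Editor's note: a CIE that has a personality routine, an LSDA and a
         non-default FDE encoding thus ends up with the augmentation string
         "zPLR", following the order built below.  */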
849
850 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
851 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
852 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
853
854 p = augmentation + 1;
855 if (personality)
856 {
857 *p++ = 'P';
858 augmentation_size += 1 + size_of_encoded_value (per_encoding);
859 assemble_external_libcall (personality);
860 }
861 if (any_lsda_needed)
862 {
863 *p++ = 'L';
864 augmentation_size += 1;
865 }
866 if (fde_encoding != DW_EH_PE_absptr)
867 {
868 *p++ = 'R';
869 augmentation_size += 1;
870 }
871 if (p > augmentation + 1)
872 {
873 augmentation[0] = 'z';
874 *p = '\0';
875 }
876
877 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
878 if (personality && per_encoding == DW_EH_PE_aligned)
879 {
880 int offset = ( 4 /* Length */
881 + 4 /* CIE Id */
882 + 1 /* CIE version */
883 + strlen (augmentation) + 1 /* Augmentation */
884 + size_of_uleb128 (1) /* Code alignment */
885 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
886 + 1 /* RA column */
887 + 1 /* Augmentation size */
888 + 1 /* Personality encoding */ );
889 int pad = -offset & (PTR_SIZE - 1);
890
891 augmentation_size += pad;
892
893 /* Augmentations should be small, so there's scarce need to
894 iterate for a solution. Die if we exceed one uleb128 byte. */
895 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
896 }
897 }
898
899 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
900 if (dw_cie_version >= 4)
901 {
902 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
903 dw2_asm_output_data (1, 0, "CIE Segment Size");
904 }
905 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
906 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
907 "CIE Data Alignment Factor");
908
909 if (dw_cie_version == 1)
910 dw2_asm_output_data (1, return_reg, "CIE RA Column");
911 else
912 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
913
914 if (augmentation[0])
915 {
916 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
917 if (personality)
918 {
919 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
920 eh_data_format_name (per_encoding));
921 dw2_asm_output_encoded_addr_rtx (per_encoding,
922 personality,
923 true, NULL);
924 }
925
926 if (any_lsda_needed)
927 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
928 eh_data_format_name (lsda_encoding));
929
930 if (fde_encoding != DW_EH_PE_absptr)
931 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
932 eh_data_format_name (fde_encoding));
933 }
934
935 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
936 output_cfi (cfi, NULL, for_eh);
937
938 /* Pad the CIE out to an address sized boundary. */
939 ASM_OUTPUT_ALIGN (asm_out_file,
940 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
941 ASM_OUTPUT_LABEL (asm_out_file, l2);
942
943 /* Loop through all of the FDE's. */
944 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
945 {
946 unsigned int k;
947
948 /* Don't emit EH unwind info for leaf functions that don't need it. */
949 if (for_eh && !fde_needed_for_eh_p (fde))
950 continue;
951
952 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
953 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
954 augmentation, any_lsda_needed, lsda_encoding);
955 }
956
957 if (for_eh && targetm.terminate_dw2_eh_frame_info)
958 dw2_asm_output_data (4, 0, "End of Table");
959
960 /* Turn off app to make assembly quicker. */
961 if (flag_debug_asm)
962 app_disable ();
963 }
964
965 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
966
967 static void
968 dwarf2out_do_cfi_startproc (bool second)
969 {
970 int enc;
971 rtx ref;
972
973 fprintf (asm_out_file, "\t.cfi_startproc\n");
974
975 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
976
977 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
978 eh unwinders. */
979 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
980 return;
981
982 rtx personality = get_personality_function (current_function_decl);
983
984 if (personality)
985 {
986 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
987 ref = personality;
988
989 /* ??? The GAS support isn't entirely consistent. We have to
990 handle indirect support ourselves, but PC-relative is done
991 in the assembler. Further, the assembler can't handle any
992 of the weirder relocation types. */
993 if (enc & DW_EH_PE_indirect)
994 {
995 if (targetm.asm_out.make_eh_symbol_indirect != NULL)
996 ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
997 else
998 ref = dw2_force_const_mem (ref, true);
999 }
1000
1001 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
1002 output_addr_const (asm_out_file, ref);
1003 fputc ('\n', asm_out_file);
1004 }
1005
1006 if (crtl->uses_eh_lsda)
1007 {
1008 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1009
1010 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1011 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1012 current_function_funcdef_no);
1013 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1014 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1015
1016 if (enc & DW_EH_PE_indirect)
1017 {
1018 if (targetm.asm_out.make_eh_symbol_indirect != NULL)
1019 ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
1020 else
1021 ref = dw2_force_const_mem (ref, true);
1022 }
1023
1024 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1025 output_addr_const (asm_out_file, ref);
1026 fputc ('\n', asm_out_file);
1027 }
1028 }
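/* Editor's illustration of typical directives emitted by the function
   above on x86_64 GNU/Linux (encodings, symbol and label names are
   target- and function-dependent):

     .cfi_startproc
     .cfi_personality 0x9b,DW.ref.__gxx_personality_v0
     .cfi_lsda 0x1b,.LLSDA12

   where 0x9b is DW_EH_PE_indirect|DW_EH_PE_pcrel|DW_EH_PE_sdata4 and
   0x1b is DW_EH_PE_pcrel|DW_EH_PE_sdata4.  */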
1029
1030 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1031 this allocation may be done before pass_final. */
1032
1033 dw_fde_ref
1034 dwarf2out_alloc_current_fde (void)
1035 {
1036 dw_fde_ref fde;
1037
1038 fde = ggc_cleared_alloc<dw_fde_node> ();
1039 fde->decl = current_function_decl;
1040 fde->funcdef_number = current_function_funcdef_no;
1041 fde->fde_index = vec_safe_length (fde_vec);
1042 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1043 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1044 fde->nothrow = crtl->nothrow;
1045 fde->drap_reg = INVALID_REGNUM;
1046 fde->vdrap_reg = INVALID_REGNUM;
1047
1048 /* Record the FDE associated with this function. */
1049 cfun->fde = fde;
1050 vec_safe_push (fde_vec, fde);
1051
1052 return fde;
1053 }
1054
1055 /* Output a marker (i.e. a label) for the beginning of a function, before
1056 the prologue. */
1057
1058 void
1059 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1060 unsigned int column ATTRIBUTE_UNUSED,
1061 const char *file ATTRIBUTE_UNUSED)
1062 {
1063 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1064 char * dup_label;
1065 dw_fde_ref fde;
1066 section *fnsec;
1067 bool do_frame;
1068
1069 current_function_func_begin_label = NULL;
1070
1071 do_frame = dwarf2out_do_frame ();
1072
1073 /* ??? current_function_func_begin_label is also used by except.c for
1074 call-site information. We must emit this label if it might be used. */
1075 if (!do_frame
1076 && (!flag_exceptions
1077 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1078 return;
1079
1080 fnsec = function_section (current_function_decl);
1081 switch_to_section (fnsec);
1082 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1083 current_function_funcdef_no);
1084 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1085 current_function_funcdef_no);
1086 dup_label = xstrdup (label);
1087 current_function_func_begin_label = dup_label;
1088
1089 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1090 if (!do_frame)
1091 return;
1092
1093 /* Unlike the debug version, the EH version of frame unwind info is a per-
1094 function setting so we need to record whether we need it for the unit. */
1095 do_eh_frame |= dwarf2out_do_eh_frame ();
1096
1097 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1098 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1099 would include pass_dwarf2_frame. If we've not created the FDE yet,
1100 do so now. */
1101 fde = cfun->fde;
1102 if (fde == NULL)
1103 fde = dwarf2out_alloc_current_fde ();
1104
1105 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1106 fde->dw_fde_begin = dup_label;
1107 fde->dw_fde_current_label = dup_label;
1108 fde->in_std_section = (fnsec == text_section
1109 || (cold_text_section && fnsec == cold_text_section));
1110
1111 /* We only want to output line number information for the genuine dwarf2
1112 prologue case, not the eh frame case. */
1113 #ifdef DWARF2_DEBUGGING_INFO
1114 if (file)
1115 dwarf2out_source_line (line, column, file, 0, true);
1116 #endif
1117
1118 if (dwarf2out_do_cfi_asm ())
1119 dwarf2out_do_cfi_startproc (false);
1120 else
1121 {
1122 rtx personality = get_personality_function (current_function_decl);
1123 if (!current_unit_personality)
1124 current_unit_personality = personality;
1125
1126 /* We cannot keep a current personality per function as without CFI
1127 asm, at the point where we emit the CFI data, there is no current
1128 function anymore. */
1129 if (personality && current_unit_personality != personality)
1130 sorry ("multiple EH personalities are supported only with assemblers "
1131 "supporting %<.cfi_personality%> directive");
1132 }
1133 }
1134
1135 /* Output a marker (i.e. a label) for the end of the generated code
1136 for a function prologue. This gets called *after* the prologue code has
1137 been generated. */
1138
1139 void
1140 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1141 const char *file ATTRIBUTE_UNUSED)
1142 {
1143 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1144
1145 /* Output a label to mark the end of the prologue code generated for this
1146 function. */
1147 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1148 current_function_funcdef_no);
1149 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1150 current_function_funcdef_no);
1151 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1152 }
1153
1154 /* Output a marker (i.e. a label) for the beginning of the generated code
1155 for a function epilogue. This gets called *before* the epilogue code has
1156 been generated. */
1157
1158 void
1159 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1160 const char *file ATTRIBUTE_UNUSED)
1161 {
1162 dw_fde_ref fde = cfun->fde;
1163 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1164
1165 if (fde->dw_fde_vms_begin_epilogue)
1166 return;
1167
1168 /* Output a label to mark the start of the epilogue code generated for this
1169 function. */
1170 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1171 current_function_funcdef_no);
1172 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1173 current_function_funcdef_no);
1174 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1175 }
1176
1177 /* Output a marker (i.e. a label) for the absolute end of the generated code
1178 for a function definition. This gets called *after* the epilogue code has
1179 been generated. */
1180
1181 void
1182 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1183 const char *file ATTRIBUTE_UNUSED)
1184 {
1185 dw_fde_ref fde;
1186 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1187
1188 last_var_location_insn = NULL;
1189 cached_next_real_insn = NULL;
1190
1191 if (dwarf2out_do_cfi_asm ())
1192 fprintf (asm_out_file, "\t.cfi_endproc\n");
1193
1194 /* Output a label to mark the endpoint of the code generated for this
1195 function. */
1196 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1197 current_function_funcdef_no);
1198 ASM_OUTPUT_LABEL (asm_out_file, label);
1199 fde = cfun->fde;
1200 gcc_assert (fde != NULL);
1201 if (fde->dw_fde_second_begin == NULL)
1202 fde->dw_fde_end = xstrdup (label);
1203 }
1204
1205 void
1206 dwarf2out_frame_finish (void)
1207 {
1208 /* Output call frame information. */
1209 if (targetm.debug_unwind_info () == UI_DWARF2)
1210 output_call_frame_info (0);
1211
1212 /* Output another copy for the unwinder. */
1213 if (do_eh_frame)
1214 output_call_frame_info (1);
1215 }
1216
1217 /* Note that the current function section is being used for code. */
1218
1219 static void
1220 dwarf2out_note_section_used (void)
1221 {
1222 section *sec = current_function_section ();
1223 if (sec == text_section)
1224 text_section_used = true;
1225 else if (sec == cold_text_section)
1226 cold_text_section_used = true;
1227 }
1228
1229 static void var_location_switch_text_section (void);
1230 static void set_cur_line_info_table (section *);
1231
1232 void
1233 dwarf2out_switch_text_section (void)
1234 {
1235 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1236 section *sect;
1237 dw_fde_ref fde = cfun->fde;
1238
1239 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1240
1241 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1242 current_function_funcdef_no);
1243
1244 fde->dw_fde_second_begin = ggc_strdup (label);
1245 if (!in_cold_section_p)
1246 {
1247 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1248 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1249 }
1250 else
1251 {
1252 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1253 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1254 }
1255 have_multiple_function_sections = true;
1256
1257 /* There is no need to mark used sections when not debugging. */
1258 if (cold_text_section != NULL)
1259 dwarf2out_note_section_used ();
1260
1261 if (dwarf2out_do_cfi_asm ())
1262 fprintf (asm_out_file, "\t.cfi_endproc\n");
1263
1264 /* Now do the real section switch. */
1265 sect = current_function_section ();
1266 switch_to_section (sect);
1267
1268 fde->second_in_std_section
1269 = (sect == text_section
1270 || (cold_text_section && sect == cold_text_section));
1271
1272 if (dwarf2out_do_cfi_asm ())
1273 dwarf2out_do_cfi_startproc (true);
1274
1275 var_location_switch_text_section ();
1276
1277 if (cold_text_section != NULL)
1278 set_cur_line_info_table (sect);
1279 }
1280 \f
1281 /* And now, the subset of the debugging information support code necessary
1282 for emitting location expressions. */
1283
1284 /* Data about a single source file. */
1285 struct GTY((for_user)) dwarf_file_data {
1286 const char * filename;
1287 int emitted_number;
1288 };
1289
1290 /* Describe an entry into the .debug_addr section. */
1291
1292 enum ate_kind {
1293 ate_kind_rtx,
1294 ate_kind_rtx_dtprel,
1295 ate_kind_label
1296 };
1297
1298 struct GTY((for_user)) addr_table_entry {
1299 enum ate_kind kind;
1300 unsigned int refcount;
1301 unsigned int index;
1302 union addr_table_entry_struct_union
1303 {
1304 rtx GTY ((tag ("0"))) rtl;
1305 char * GTY ((tag ("1"))) label;
1306 }
1307 GTY ((desc ("%1.kind"))) addr;
1308 };
1309
1310 typedef unsigned int var_loc_view;
1311
1312 /* Location lists are ranges + location descriptions for that range,
1313 so you can track variables that are in different places over
1314 their entire life. */
1315 typedef struct GTY(()) dw_loc_list_struct {
1316 dw_loc_list_ref dw_loc_next;
1317 const char *begin; /* Label and addr_entry for start of range */
1318 addr_table_entry *begin_entry;
1319 const char *end; /* Label for end of range */
1320 char *ll_symbol; /* Label for beginning of location list.
1321 Only on head of list. */
1322 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1323 const char *section; /* Section this loclist is relative to */
1324 dw_loc_descr_ref expr;
1325 var_loc_view vbegin, vend;
1326 hashval_t hash;
1327 /* True if all addresses in this and subsequent lists are known to be
1328 resolved. */
1329 bool resolved_addr;
1330 /* True if this list has been replaced by dw_loc_next. */
1331 bool replaced;
1332 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1333 section. */
1334 unsigned char emitted : 1;
1335 /* True if hash field is index rather than hash value. */
1336 unsigned char num_assigned : 1;
1337 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1338 unsigned char offset_emitted : 1;
1339 /* True if note_variable_value_in_expr has been called on it. */
1340 unsigned char noted_variable_value : 1;
1341 /* True if the range should be emitted even if begin and end
1342 are the same. */
1343 bool force;
1344 } dw_loc_list_node;
1345
1346 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1347 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1348
1349 /* Convert a DWARF stack opcode into its string name. */
1350
1351 static const char *
1352 dwarf_stack_op_name (unsigned int op)
1353 {
1354 const char *name = get_DW_OP_name (op);
1355
1356 if (name != NULL)
1357 return name;
1358
1359 return "OP_<unknown>";
1360 }
1361
1362 /* Return TRUE iff we're to output location view lists as a separate
1363 attribute next to the location lists, as an extension compatible
1364 with DWARF 2 and above. */
1365
1366 static inline bool
1367 dwarf2out_locviews_in_attribute ()
1368 {
1369 return debug_variable_location_views == 1;
1370 }
1371
1372 /* Return TRUE iff we're to output location view lists as part of the
1373 location lists, as proposed for standardization after DWARF 5. */
1374
1375 static inline bool
1376 dwarf2out_locviews_in_loclist ()
1377 {
1378 #ifndef DW_LLE_view_pair
1379 return false;
1380 #else
1381 return debug_variable_location_views == -1;
1382 #endif
1383 }
1384
1385 /* Return a pointer to a newly allocated location description. Location
1386 descriptions are simple expression terms that can be strung
1387 together to form more complicated location (address) descriptions. */
1388
1389 static inline dw_loc_descr_ref
1390 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1391 unsigned HOST_WIDE_INT oprnd2)
1392 {
1393 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1394
1395 descr->dw_loc_opc = op;
1396 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1397 descr->dw_loc_oprnd1.val_entry = NULL;
1398 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1399 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1400 descr->dw_loc_oprnd2.val_entry = NULL;
1401 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1402
1403 return descr;
1404 }
1405
1406 /* Add a location description term to a location description expression. */
1407
1408 static inline void
1409 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1410 {
1411 dw_loc_descr_ref *d;
1412
1413 /* Find the end of the chain. */
1414 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1415 ;
1416
1417 *d = descr;
1418 }
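/* Editor's sketch of typical use (hypothetical operand values): the
   two-term expression "DW_OP_constu 16; DW_OP_plus" could be built as

     dw_loc_descr_ref l = new_loc_descr (DW_OP_constu, 16, 0);
     add_loc_descr (&l, new_loc_descr (DW_OP_plus, 0, 0));

   much as loc_descr_plus_const does below when the offset is not a
   compile-time constant.  */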
1419
1420 /* Compare two location operands for exact equality. */
1421
1422 static bool
1423 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1424 {
1425 if (a->val_class != b->val_class)
1426 return false;
1427 switch (a->val_class)
1428 {
1429 case dw_val_class_none:
1430 return true;
1431 case dw_val_class_addr:
1432 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1433
1434 case dw_val_class_offset:
1435 case dw_val_class_unsigned_const:
1436 case dw_val_class_const:
1437 case dw_val_class_unsigned_const_implicit:
1438 case dw_val_class_const_implicit:
1439 case dw_val_class_range_list:
1440 /* These are all HOST_WIDE_INT, signed or unsigned. */
1441 return a->v.val_unsigned == b->v.val_unsigned;
1442
1443 case dw_val_class_loc:
1444 return a->v.val_loc == b->v.val_loc;
1445 case dw_val_class_loc_list:
1446 return a->v.val_loc_list == b->v.val_loc_list;
1447 case dw_val_class_view_list:
1448 return a->v.val_view_list == b->v.val_view_list;
1449 case dw_val_class_die_ref:
1450 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1451 case dw_val_class_fde_ref:
1452 return a->v.val_fde_index == b->v.val_fde_index;
1453 case dw_val_class_symview:
1454 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1455 case dw_val_class_lbl_id:
1456 case dw_val_class_lineptr:
1457 case dw_val_class_macptr:
1458 case dw_val_class_loclistsptr:
1459 case dw_val_class_high_pc:
1460 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1461 case dw_val_class_str:
1462 return a->v.val_str == b->v.val_str;
1463 case dw_val_class_flag:
1464 return a->v.val_flag == b->v.val_flag;
1465 case dw_val_class_file:
1466 case dw_val_class_file_implicit:
1467 return a->v.val_file == b->v.val_file;
1468 case dw_val_class_decl_ref:
1469 return a->v.val_decl_ref == b->v.val_decl_ref;
1470
1471 case dw_val_class_const_double:
1472 return (a->v.val_double.high == b->v.val_double.high
1473 && a->v.val_double.low == b->v.val_double.low);
1474
1475 case dw_val_class_wide_int:
1476 return *a->v.val_wide == *b->v.val_wide;
1477
1478 case dw_val_class_vec:
1479 {
1480 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1481 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1482
1483 return (a_len == b_len
1484 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1485 }
1486
1487 case dw_val_class_data8:
1488 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1489
1490 case dw_val_class_vms_delta:
1491 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1492 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1493
1494 case dw_val_class_discr_value:
1495 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1496 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1497 case dw_val_class_discr_list:
1498 /* It makes no sense comparing two discriminant value lists. */
1499 return false;
1500 }
1501 gcc_unreachable ();
1502 }
1503
1504 /* Compare two location atoms for exact equality. */
1505
1506 static bool
1507 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1508 {
1509 if (a->dw_loc_opc != b->dw_loc_opc)
1510 return false;
1511
1512 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1513 address size, but since we always allocate cleared storage it
1514 should be zero for other types of locations. */
1515 if (a->dtprel != b->dtprel)
1516 return false;
1517
1518 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1519 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1520 }
1521
1522 /* Compare two complete location expressions for exact equality. */
1523
1524 bool
1525 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1526 {
1527 while (1)
1528 {
1529 if (a == b)
1530 return true;
1531 if (a == NULL || b == NULL)
1532 return false;
1533 if (!loc_descr_equal_p_1 (a, b))
1534 return false;
1535
1536 a = a->dw_loc_next;
1537 b = b->dw_loc_next;
1538 }
1539 }
1540
1541
1542 /* Add a constant POLY_OFFSET to a location expression. */
1543
1544 static void
1545 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1546 {
1547 dw_loc_descr_ref loc;
1548 HOST_WIDE_INT *p;
1549
1550 gcc_assert (*list_head != NULL);
1551
1552 if (known_eq (poly_offset, 0))
1553 return;
1554
1555 /* Find the end of the chain. */
1556 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1557 ;
1558
1559 HOST_WIDE_INT offset;
1560 if (!poly_offset.is_constant (&offset))
1561 {
1562 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1563 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1564 return;
1565 }
1566
1567 p = NULL;
1568 if (loc->dw_loc_opc == DW_OP_fbreg
1569 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1570 p = &loc->dw_loc_oprnd1.v.val_int;
1571 else if (loc->dw_loc_opc == DW_OP_bregx)
1572 p = &loc->dw_loc_oprnd2.v.val_int;
1573
1574 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1575 offset. Don't optimize if a signed integer overflow would happen. */
1576 if (p != NULL
1577 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1578 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1579 *p += offset;
1580
1581 else if (offset > 0)
1582 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1583
1584 else
1585 {
1586 loc->dw_loc_next
1587 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1588 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1589 }
1590 }
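/* Editor's illustration: adding 8 to an expression ending in
   DW_OP_fbreg -24 simply rewrites that last term to DW_OP_fbreg -16;
   only when such an in-place adjustment is impossible does the code
   above append DW_OP_plus_uconst or DW_OP_constu/DW_OP_minus terms.  */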
1591
1592 /* Return a pointer to a newly allocated location description for
1593 REG and OFFSET. */
1594
1595 static inline dw_loc_descr_ref
1596 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1597 {
1598 HOST_WIDE_INT const_offset;
1599 if (offset.is_constant (&const_offset))
1600 {
1601 if (reg <= 31)
1602 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1603 const_offset, 0);
1604 else
1605 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1606 }
1607 else
1608 {
1609 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1610 loc_descr_plus_const (&ret, offset);
1611 return ret;
1612 }
1613 }
1614
1615 /* Add a constant OFFSET to a location list. */
1616
1617 static void
1618 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1619 {
1620 dw_loc_list_ref d;
1621 for (d = list_head; d != NULL; d = d->dw_loc_next)
1622 loc_descr_plus_const (&d->expr, offset);
1623 }
1624
1625 #define DWARF_REF_SIZE \
1626 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1627
1628 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1629 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1630 DW_FORM_data16 with 128 bits. */
1631 #define DWARF_LARGEST_DATA_FORM_BITS \
1632 (dwarf_version >= 5 ? 128 : 64)
1633
1634 /* Utility inline function for construction of ops that were GNU extensions
1635 before DWARF 5. */
1636 static inline enum dwarf_location_atom
1637 dwarf_OP (enum dwarf_location_atom op)
1638 {
1639 switch (op)
1640 {
1641 case DW_OP_implicit_pointer:
1642 if (dwarf_version < 5)
1643 return DW_OP_GNU_implicit_pointer;
1644 break;
1645
1646 case DW_OP_entry_value:
1647 if (dwarf_version < 5)
1648 return DW_OP_GNU_entry_value;
1649 break;
1650
1651 case DW_OP_const_type:
1652 if (dwarf_version < 5)
1653 return DW_OP_GNU_const_type;
1654 break;
1655
1656 case DW_OP_regval_type:
1657 if (dwarf_version < 5)
1658 return DW_OP_GNU_regval_type;
1659 break;
1660
1661 case DW_OP_deref_type:
1662 if (dwarf_version < 5)
1663 return DW_OP_GNU_deref_type;
1664 break;
1665
1666 case DW_OP_convert:
1667 if (dwarf_version < 5)
1668 return DW_OP_GNU_convert;
1669 break;
1670
1671 case DW_OP_reinterpret:
1672 if (dwarf_version < 5)
1673 return DW_OP_GNU_reinterpret;
1674 break;
1675
1676 case DW_OP_addrx:
1677 if (dwarf_version < 5)
1678 return DW_OP_GNU_addr_index;
1679 break;
1680
1681 case DW_OP_constx:
1682 if (dwarf_version < 5)
1683 return DW_OP_GNU_const_index;
1684 break;
1685
1686 default:
1687 break;
1688 }
1689 return op;
1690 }
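/* Editor's illustration: with -gdwarf-4 (dwarf_version == 4),
   dwarf_OP (DW_OP_entry_value) returns DW_OP_GNU_entry_value, while with
   -gdwarf-5 the standard opcode is returned unchanged.  */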
1691
1692 /* Similarly for attributes. */
1693 static inline enum dwarf_attribute
1694 dwarf_AT (enum dwarf_attribute at)
1695 {
1696 switch (at)
1697 {
1698 case DW_AT_call_return_pc:
1699 if (dwarf_version < 5)
1700 return DW_AT_low_pc;
1701 break;
1702
1703 case DW_AT_call_tail_call:
1704 if (dwarf_version < 5)
1705 return DW_AT_GNU_tail_call;
1706 break;
1707
1708 case DW_AT_call_origin:
1709 if (dwarf_version < 5)
1710 return DW_AT_abstract_origin;
1711 break;
1712
1713 case DW_AT_call_target:
1714 if (dwarf_version < 5)
1715 return DW_AT_GNU_call_site_target;
1716 break;
1717
1718 case DW_AT_call_target_clobbered:
1719 if (dwarf_version < 5)
1720 return DW_AT_GNU_call_site_target_clobbered;
1721 break;
1722
1723 case DW_AT_call_parameter:
1724 if (dwarf_version < 5)
1725 return DW_AT_abstract_origin;
1726 break;
1727
1728 case DW_AT_call_value:
1729 if (dwarf_version < 5)
1730 return DW_AT_GNU_call_site_value;
1731 break;
1732
1733 case DW_AT_call_data_value:
1734 if (dwarf_version < 5)
1735 return DW_AT_GNU_call_site_data_value;
1736 break;
1737
1738 case DW_AT_call_all_calls:
1739 if (dwarf_version < 5)
1740 return DW_AT_GNU_all_call_sites;
1741 break;
1742
1743 case DW_AT_call_all_tail_calls:
1744 if (dwarf_version < 5)
1745 return DW_AT_GNU_all_tail_call_sites;
1746 break;
1747
1748 case DW_AT_dwo_name:
1749 if (dwarf_version < 5)
1750 return DW_AT_GNU_dwo_name;
1751 break;
1752
1753 case DW_AT_addr_base:
1754 if (dwarf_version < 5)
1755 return DW_AT_GNU_addr_base;
1756 break;
1757
1758 default:
1759 break;
1760 }
1761 return at;
1762 }
1763
1764 /* And similarly for tags. */
1765 static inline enum dwarf_tag
1766 dwarf_TAG (enum dwarf_tag tag)
1767 {
1768 switch (tag)
1769 {
1770 case DW_TAG_call_site:
1771 if (dwarf_version < 5)
1772 return DW_TAG_GNU_call_site;
1773 break;
1774
1775 case DW_TAG_call_site_parameter:
1776 if (dwarf_version < 5)
1777 return DW_TAG_GNU_call_site_parameter;
1778 break;
1779
1780 default:
1781 break;
1782 }
1783 return tag;
1784 }
1785
1786 /* And similarly for forms. */
1787 static inline enum dwarf_form
1788 dwarf_FORM (enum dwarf_form form)
1789 {
1790 switch (form)
1791 {
1792 case DW_FORM_addrx:
1793 if (dwarf_version < 5)
1794 return DW_FORM_GNU_addr_index;
1795 break;
1796
1797 case DW_FORM_strx:
1798 if (dwarf_version < 5)
1799 return DW_FORM_GNU_str_index;
1800 break;
1801
1802 default:
1803 break;
1804 }
1805 return form;
1806 }
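
/* For illustration: when emitting DWARF 4, the helpers above fall back to
   the pre-standard GNU codes, while for DWARF 5 or later the standard codes
   are returned unchanged.  A sketch of the mapping:

     dwarf_OP (DW_OP_entry_value)  -> DW_OP_GNU_entry_value  (dwarf_version < 5)
     dwarf_TAG (DW_TAG_call_site)  -> DW_TAG_GNU_call_site   (dwarf_version < 5)
     dwarf_FORM (DW_FORM_strx)     -> DW_FORM_GNU_str_index  (dwarf_version < 5)
     dwarf_OP (DW_OP_entry_value)  -> DW_OP_entry_value      (dwarf_version >= 5)

   Codes with no GNU counterpart are passed through unchanged.  */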
1807
1808 static unsigned long int get_base_type_offset (dw_die_ref);
1809
1810 /* Return the size of a location descriptor. */
1811
1812 static unsigned long
1813 size_of_loc_descr (dw_loc_descr_ref loc)
1814 {
1815 unsigned long size = 1;
1816
1817 switch (loc->dw_loc_opc)
1818 {
1819 case DW_OP_addr:
1820 size += DWARF2_ADDR_SIZE;
1821 break;
1822 case DW_OP_GNU_addr_index:
1823 case DW_OP_addrx:
1824 case DW_OP_GNU_const_index:
1825 case DW_OP_constx:
1826 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1827 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1828 break;
1829 case DW_OP_const1u:
1830 case DW_OP_const1s:
1831 size += 1;
1832 break;
1833 case DW_OP_const2u:
1834 case DW_OP_const2s:
1835 size += 2;
1836 break;
1837 case DW_OP_const4u:
1838 case DW_OP_const4s:
1839 size += 4;
1840 break;
1841 case DW_OP_const8u:
1842 case DW_OP_const8s:
1843 size += 8;
1844 break;
1845 case DW_OP_constu:
1846 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1847 break;
1848 case DW_OP_consts:
1849 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1850 break;
1851 case DW_OP_pick:
1852 size += 1;
1853 break;
1854 case DW_OP_plus_uconst:
1855 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1856 break;
1857 case DW_OP_skip:
1858 case DW_OP_bra:
1859 size += 2;
1860 break;
1861 case DW_OP_breg0:
1862 case DW_OP_breg1:
1863 case DW_OP_breg2:
1864 case DW_OP_breg3:
1865 case DW_OP_breg4:
1866 case DW_OP_breg5:
1867 case DW_OP_breg6:
1868 case DW_OP_breg7:
1869 case DW_OP_breg8:
1870 case DW_OP_breg9:
1871 case DW_OP_breg10:
1872 case DW_OP_breg11:
1873 case DW_OP_breg12:
1874 case DW_OP_breg13:
1875 case DW_OP_breg14:
1876 case DW_OP_breg15:
1877 case DW_OP_breg16:
1878 case DW_OP_breg17:
1879 case DW_OP_breg18:
1880 case DW_OP_breg19:
1881 case DW_OP_breg20:
1882 case DW_OP_breg21:
1883 case DW_OP_breg22:
1884 case DW_OP_breg23:
1885 case DW_OP_breg24:
1886 case DW_OP_breg25:
1887 case DW_OP_breg26:
1888 case DW_OP_breg27:
1889 case DW_OP_breg28:
1890 case DW_OP_breg29:
1891 case DW_OP_breg30:
1892 case DW_OP_breg31:
1893 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1894 break;
1895 case DW_OP_regx:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 break;
1898 case DW_OP_fbreg:
1899 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1900 break;
1901 case DW_OP_bregx:
1902 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1903 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1904 break;
1905 case DW_OP_piece:
1906 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1907 break;
1908 case DW_OP_bit_piece:
1909 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1910 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1911 break;
1912 case DW_OP_deref_size:
1913 case DW_OP_xderef_size:
1914 size += 1;
1915 break;
1916 case DW_OP_call2:
1917 size += 2;
1918 break;
1919 case DW_OP_call4:
1920 size += 4;
1921 break;
1922 case DW_OP_call_ref:
1923 case DW_OP_GNU_variable_value:
1924 size += DWARF_REF_SIZE;
1925 break;
1926 case DW_OP_implicit_value:
1927 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1928 + loc->dw_loc_oprnd1.v.val_unsigned;
1929 break;
1930 case DW_OP_implicit_pointer:
1931 case DW_OP_GNU_implicit_pointer:
1932 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1933 break;
1934 case DW_OP_entry_value:
1935 case DW_OP_GNU_entry_value:
1936 {
1937 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1938 size += size_of_uleb128 (op_size) + op_size;
1939 break;
1940 }
1941 case DW_OP_const_type:
1942 case DW_OP_GNU_const_type:
1943 {
1944 unsigned long o
1945 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1946 size += size_of_uleb128 (o) + 1;
1947 switch (loc->dw_loc_oprnd2.val_class)
1948 {
1949 case dw_val_class_vec:
1950 size += loc->dw_loc_oprnd2.v.val_vec.length
1951 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1952 break;
1953 case dw_val_class_const:
1954 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1955 break;
1956 case dw_val_class_const_double:
1957 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1958 break;
1959 case dw_val_class_wide_int:
1960 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1961 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1962 break;
1963 default:
1964 gcc_unreachable ();
1965 }
1966 break;
1967 }
1968 case DW_OP_regval_type:
1969 case DW_OP_GNU_regval_type:
1970 {
1971 unsigned long o
1972 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1973 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1974 + size_of_uleb128 (o);
1975 }
1976 break;
1977 case DW_OP_deref_type:
1978 case DW_OP_GNU_deref_type:
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1982 size += 1 + size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_convert:
1986 case DW_OP_reinterpret:
1987 case DW_OP_GNU_convert:
1988 case DW_OP_GNU_reinterpret:
1989 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1990 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1991 else
1992 {
1993 unsigned long o
1994 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1995 size += size_of_uleb128 (o);
1996 }
1997 break;
1998 case DW_OP_GNU_parameter_ref:
1999 size += 4;
2000 break;
2001 default:
2002 break;
2003 }
2004
2005 return size;
2006 }
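
/* A worked example (illustrative only): operand sizes follow the LEB128
   encoding, where unsigned values 0..127 and signed values -64..63 need one
   byte, the next 7 bits of magnitude need a second byte, and so on.  Thus a
   DW_OP_fbreg with offset -16 occupies 1 + 1 = 2 bytes, while a
   DW_OP_plus_uconst with operand 200 occupies 1 + 2 = 3 bytes (opcode plus
   a two-byte ULEB128).  */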
2007
2008 /* Return the size of a series of location descriptors. */
2009
2010 unsigned long
2011 size_of_locs (dw_loc_descr_ref loc)
2012 {
2013 dw_loc_descr_ref l;
2014 unsigned long size;
2015
2016 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2017 field, to avoid writing to a PCH file. */
2018 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2019 {
2020 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2021 break;
2022 size += size_of_loc_descr (l);
2023 }
2024 if (! l)
2025 return size;
2026
2027 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2028 {
2029 l->dw_loc_addr = size;
2030 size += size_of_loc_descr (l);
2031 }
2032
2033 return size;
2034 }
2035
2036 /* Return the size of the value in a DW_AT_discr_value attribute. */
2037
2038 static int
2039 size_of_discr_value (dw_discr_value *discr_value)
2040 {
2041 if (discr_value->pos)
2042 return size_of_uleb128 (discr_value->v.uval);
2043 else
2044 return size_of_sleb128 (discr_value->v.sval);
2045 }
2046
2047 /* Return the size of the value in a DW_AT_discr_list attribute. */
2048
2049 static int
2050 size_of_discr_list (dw_discr_list_ref discr_list)
2051 {
2052 int size = 0;
2053
2054 for (dw_discr_list_ref list = discr_list;
2055 list != NULL;
2056 list = list->dw_discr_next)
2057 {
2058 /* One byte for the discriminant value descriptor, and then one or two
2059 LEB128 numbers, depending on whether it's a single case label or a
2060 range label. */
2061 size += 1;
2062 size += size_of_discr_value (&list->dw_discr_lower_bound);
2063 if (list->dw_discr_range != 0)
2064 size += size_of_discr_value (&list->dw_discr_upper_bound);
2065 }
2066 return size;
2067 }
2068
2069 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2070 static void get_ref_die_offset_label (char *, dw_die_ref);
2071 static unsigned long int get_ref_die_offset (dw_die_ref);
2072
2073 /* Output location description stack opcode's operands (if any).
2074 The for_eh_or_skip parameter controls whether register numbers are
2075 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2076 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2077 info). This should be suppressed for the cases that have not been converted
2078 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2079
2080 static void
2081 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2082 {
2083 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2084 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2085
2086 switch (loc->dw_loc_opc)
2087 {
2088 #ifdef DWARF2_DEBUGGING_INFO
2089 case DW_OP_const2u:
2090 case DW_OP_const2s:
2091 dw2_asm_output_data (2, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const4u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const4s:
2104 dw2_asm_output_data (4, val1->v.val_int, NULL);
2105 break;
2106 case DW_OP_const8u:
2107 if (loc->dtprel)
2108 {
2109 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2110 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2111 val1->v.val_addr);
2112 fputc ('\n', asm_out_file);
2113 break;
2114 }
2115 /* FALLTHRU */
2116 case DW_OP_const8s:
2117 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2118 dw2_asm_output_data (8, val1->v.val_int, NULL);
2119 break;
2120 case DW_OP_skip:
2121 case DW_OP_bra:
2122 {
2123 int offset;
2124
2125 gcc_assert (val1->val_class == dw_val_class_loc);
2126 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2127
2128 dw2_asm_output_data (2, offset, NULL);
2129 }
2130 break;
2131 case DW_OP_implicit_value:
2132 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2133 switch (val2->val_class)
2134 {
2135 case dw_val_class_const:
2136 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2137 break;
2138 case dw_val_class_vec:
2139 {
2140 unsigned int elt_size = val2->v.val_vec.elt_size;
2141 unsigned int len = val2->v.val_vec.length;
2142 unsigned int i;
2143 unsigned char *p;
2144
2145 if (elt_size > sizeof (HOST_WIDE_INT))
2146 {
2147 elt_size /= 2;
2148 len *= 2;
2149 }
2150 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2151 i < len;
2152 i++, p += elt_size)
2153 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2154 "fp or vector constant word %u", i);
2155 }
2156 break;
2157 case dw_val_class_const_double:
2158 {
2159 unsigned HOST_WIDE_INT first, second;
2160
2161 if (WORDS_BIG_ENDIAN)
2162 {
2163 first = val2->v.val_double.high;
2164 second = val2->v.val_double.low;
2165 }
2166 else
2167 {
2168 first = val2->v.val_double.low;
2169 second = val2->v.val_double.high;
2170 }
2171 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2172 first, NULL);
2173 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2174 second, NULL);
2175 }
2176 break;
2177 case dw_val_class_wide_int:
2178 {
2179 int i;
2180 int len = get_full_len (*val2->v.val_wide);
2181 if (WORDS_BIG_ENDIAN)
2182 for (i = len - 1; i >= 0; --i)
2183 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2184 val2->v.val_wide->elt (i), NULL);
2185 else
2186 for (i = 0; i < len; ++i)
2187 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2188 val2->v.val_wide->elt (i), NULL);
2189 }
2190 break;
2191 case dw_val_class_addr:
2192 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2193 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2194 break;
2195 default:
2196 gcc_unreachable ();
2197 }
2198 break;
2199 #else
2200 case DW_OP_const2u:
2201 case DW_OP_const2s:
2202 case DW_OP_const4u:
2203 case DW_OP_const4s:
2204 case DW_OP_const8u:
2205 case DW_OP_const8s:
2206 case DW_OP_skip:
2207 case DW_OP_bra:
2208 case DW_OP_implicit_value:
2209 /* We currently don't make any attempt to make sure these are
2210 aligned properly like we do for the main unwind info, so
2211 don't support emitting things larger than a byte if we're
2212 only doing unwinding. */
2213 gcc_unreachable ();
2214 #endif
2215 case DW_OP_const1u:
2216 case DW_OP_const1s:
2217 dw2_asm_output_data (1, val1->v.val_int, NULL);
2218 break;
2219 case DW_OP_constu:
2220 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2221 break;
2222 case DW_OP_consts:
2223 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2224 break;
2225 case DW_OP_pick:
2226 dw2_asm_output_data (1, val1->v.val_int, NULL);
2227 break;
2228 case DW_OP_plus_uconst:
2229 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2230 break;
2231 case DW_OP_breg0:
2232 case DW_OP_breg1:
2233 case DW_OP_breg2:
2234 case DW_OP_breg3:
2235 case DW_OP_breg4:
2236 case DW_OP_breg5:
2237 case DW_OP_breg6:
2238 case DW_OP_breg7:
2239 case DW_OP_breg8:
2240 case DW_OP_breg9:
2241 case DW_OP_breg10:
2242 case DW_OP_breg11:
2243 case DW_OP_breg12:
2244 case DW_OP_breg13:
2245 case DW_OP_breg14:
2246 case DW_OP_breg15:
2247 case DW_OP_breg16:
2248 case DW_OP_breg17:
2249 case DW_OP_breg18:
2250 case DW_OP_breg19:
2251 case DW_OP_breg20:
2252 case DW_OP_breg21:
2253 case DW_OP_breg22:
2254 case DW_OP_breg23:
2255 case DW_OP_breg24:
2256 case DW_OP_breg25:
2257 case DW_OP_breg26:
2258 case DW_OP_breg27:
2259 case DW_OP_breg28:
2260 case DW_OP_breg29:
2261 case DW_OP_breg30:
2262 case DW_OP_breg31:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_regx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 }
2274 break;
2275 case DW_OP_fbreg:
2276 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2277 break;
2278 case DW_OP_bregx:
2279 {
2280 unsigned r = val1->v.val_unsigned;
2281 if (for_eh_or_skip >= 0)
2282 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2283 gcc_assert (size_of_uleb128 (r)
2284 == size_of_uleb128 (val1->v.val_unsigned));
2285 dw2_asm_output_data_uleb128 (r, NULL);
2286 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2287 }
2288 break;
2289 case DW_OP_piece:
2290 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2291 break;
2292 case DW_OP_bit_piece:
2293 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2294 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2295 break;
2296 case DW_OP_deref_size:
2297 case DW_OP_xderef_size:
2298 dw2_asm_output_data (1, val1->v.val_int, NULL);
2299 break;
2300
2301 case DW_OP_addr:
2302 if (loc->dtprel)
2303 {
2304 if (targetm.asm_out.output_dwarf_dtprel)
2305 {
2306 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2307 DWARF2_ADDR_SIZE,
2308 val1->v.val_addr);
2309 fputc ('\n', asm_out_file);
2310 }
2311 else
2312 gcc_unreachable ();
2313 }
2314 else
2315 {
2316 #ifdef DWARF2_DEBUGGING_INFO
2317 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2318 #else
2319 gcc_unreachable ();
2320 #endif
2321 }
2322 break;
2323
2324 case DW_OP_GNU_addr_index:
2325 case DW_OP_addrx:
2326 case DW_OP_GNU_const_index:
2327 case DW_OP_constx:
2328 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2329 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2330 "(index into .debug_addr)");
2331 break;
2332
2333 case DW_OP_call2:
2334 case DW_OP_call4:
2335 {
2336 unsigned long die_offset
2337 = get_ref_die_offset (val1->v.val_die_ref.die);
2338 /* Make sure the offset has been computed and that we can encode it as
2339 an operand. */
2340 gcc_assert (die_offset > 0
2341 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2342 ? 0xffff
2343 : 0xffffffff));
2344 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2345 die_offset, NULL);
2346 }
2347 break;
2348
2349 case DW_OP_call_ref:
2350 case DW_OP_GNU_variable_value:
2351 {
2352 char label[MAX_ARTIFICIAL_LABEL_BYTES
2353 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2354 gcc_assert (val1->val_class == dw_val_class_die_ref);
2355 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2356 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2357 }
2358 break;
2359
2360 case DW_OP_implicit_pointer:
2361 case DW_OP_GNU_implicit_pointer:
2362 {
2363 char label[MAX_ARTIFICIAL_LABEL_BYTES
2364 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2365 gcc_assert (val1->val_class == dw_val_class_die_ref);
2366 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2367 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2368 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2369 }
2370 break;
2371
2372 case DW_OP_entry_value:
2373 case DW_OP_GNU_entry_value:
2374 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2375 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2376 break;
2377
2378 case DW_OP_const_type:
2379 case DW_OP_GNU_const_type:
2380 {
2381 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2382 gcc_assert (o);
2383 dw2_asm_output_data_uleb128 (o, NULL);
2384 switch (val2->val_class)
2385 {
2386 case dw_val_class_const:
2387 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2388 dw2_asm_output_data (1, l, NULL);
2389 dw2_asm_output_data (l, val2->v.val_int, NULL);
2390 break;
2391 case dw_val_class_vec:
2392 {
2393 unsigned int elt_size = val2->v.val_vec.elt_size;
2394 unsigned int len = val2->v.val_vec.length;
2395 unsigned int i;
2396 unsigned char *p;
2397
2398 l = len * elt_size;
2399 dw2_asm_output_data (1, l, NULL);
2400 if (elt_size > sizeof (HOST_WIDE_INT))
2401 {
2402 elt_size /= 2;
2403 len *= 2;
2404 }
2405 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2406 i < len;
2407 i++, p += elt_size)
2408 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2409 "fp or vector constant word %u", i);
2410 }
2411 break;
2412 case dw_val_class_const_double:
2413 {
2414 unsigned HOST_WIDE_INT first, second;
2415 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2416
2417 dw2_asm_output_data (1, 2 * l, NULL);
2418 if (WORDS_BIG_ENDIAN)
2419 {
2420 first = val2->v.val_double.high;
2421 second = val2->v.val_double.low;
2422 }
2423 else
2424 {
2425 first = val2->v.val_double.low;
2426 second = val2->v.val_double.high;
2427 }
2428 dw2_asm_output_data (l, first, NULL);
2429 dw2_asm_output_data (l, second, NULL);
2430 }
2431 break;
2432 case dw_val_class_wide_int:
2433 {
2434 int i;
2435 int len = get_full_len (*val2->v.val_wide);
2436 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2437
2438 dw2_asm_output_data (1, len * l, NULL);
2439 if (WORDS_BIG_ENDIAN)
2440 for (i = len - 1; i >= 0; --i)
2441 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2442 else
2443 for (i = 0; i < len; ++i)
2444 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2445 }
2446 break;
2447 default:
2448 gcc_unreachable ();
2449 }
2450 }
2451 break;
2452 case DW_OP_regval_type:
2453 case DW_OP_GNU_regval_type:
2454 {
2455 unsigned r = val1->v.val_unsigned;
2456 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2457 gcc_assert (o);
2458 if (for_eh_or_skip >= 0)
2459 {
2460 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2461 gcc_assert (size_of_uleb128 (r)
2462 == size_of_uleb128 (val1->v.val_unsigned));
2463 }
2464 dw2_asm_output_data_uleb128 (r, NULL);
2465 dw2_asm_output_data_uleb128 (o, NULL);
2466 }
2467 break;
2468 case DW_OP_deref_type:
2469 case DW_OP_GNU_deref_type:
2470 {
2471 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2472 gcc_assert (o);
2473 dw2_asm_output_data (1, val1->v.val_int, NULL);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477 case DW_OP_convert:
2478 case DW_OP_reinterpret:
2479 case DW_OP_GNU_convert:
2480 case DW_OP_GNU_reinterpret:
2481 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2482 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2483 else
2484 {
2485 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2486 gcc_assert (o);
2487 dw2_asm_output_data_uleb128 (o, NULL);
2488 }
2489 break;
2490
2491 case DW_OP_GNU_parameter_ref:
2492 {
2493 unsigned long o;
2494 gcc_assert (val1->val_class == dw_val_class_die_ref);
2495 o = get_ref_die_offset (val1->v.val_die_ref.die);
2496 dw2_asm_output_data (4, o, NULL);
2497 }
2498 break;
2499
2500 default:
2501 /* Other codes have no operands. */
2502 break;
2503 }
2504 }
2505
2506 /* Output a sequence of location operations.
2507 The for_eh_or_skip parameter controls whether register numbers are
2508 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2509 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2510 info). This should be suppressed for the cases that have not been converted
2511 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2512
2513 void
2514 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2515 {
2516 for (; loc != NULL; loc = loc->dw_loc_next)
2517 {
2518 enum dwarf_location_atom opc = loc->dw_loc_opc;
2519 /* Output the opcode. */
2520 if (for_eh_or_skip >= 0
2521 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2522 {
2523 unsigned r = (opc - DW_OP_breg0);
2524 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2525 gcc_assert (r <= 31);
2526 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2527 }
2528 else if (for_eh_or_skip >= 0
2529 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2530 {
2531 unsigned r = (opc - DW_OP_reg0);
2532 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2533 gcc_assert (r <= 31);
2534 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2535 }
2536
2537 dw2_asm_output_data (1, opc,
2538 "%s", dwarf_stack_op_name (opc));
2539
2540 /* Output the operand(s) (if any). */
2541 output_loc_operands (loc, for_eh_or_skip);
2542 }
2543 }
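
/* For instance, a single-element location expression describing "frame
   base - 16" -- a descriptor with dw_loc_opc == DW_OP_fbreg and
   dw_loc_oprnd1.v.val_int == -16 -- is emitted as the opcode byte 0x91
   (DW_OP_fbreg) followed by the one-byte SLEB128 encoding 0x70 of -16.
   With for_eh_or_skip negative, no DWARF2_FRAME_REG_OUT remapping of the
   register-based opcodes takes place.  */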
2544
2545 /* Output location description stack opcode's operands (if any).
2546 The output is single bytes on a line, suitable for .cfi_escape. */
2547
2548 static void
2549 output_loc_operands_raw (dw_loc_descr_ref loc)
2550 {
2551 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2552 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2553
2554 switch (loc->dw_loc_opc)
2555 {
2556 case DW_OP_addr:
2557 case DW_OP_GNU_addr_index:
2558 case DW_OP_addrx:
2559 case DW_OP_GNU_const_index:
2560 case DW_OP_constx:
2561 case DW_OP_implicit_value:
2562 /* We cannot output addresses in .cfi_escape, only bytes. */
2563 gcc_unreachable ();
2564
2565 case DW_OP_const1u:
2566 case DW_OP_const1s:
2567 case DW_OP_pick:
2568 case DW_OP_deref_size:
2569 case DW_OP_xderef_size:
2570 fputc (',', asm_out_file);
2571 dw2_asm_output_data_raw (1, val1->v.val_int);
2572 break;
2573
2574 case DW_OP_const2u:
2575 case DW_OP_const2s:
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (2, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_const4u:
2581 case DW_OP_const4s:
2582 fputc (',', asm_out_file);
2583 dw2_asm_output_data_raw (4, val1->v.val_int);
2584 break;
2585
2586 case DW_OP_const8u:
2587 case DW_OP_const8s:
2588 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2589 fputc (',', asm_out_file);
2590 dw2_asm_output_data_raw (8, val1->v.val_int);
2591 break;
2592
2593 case DW_OP_skip:
2594 case DW_OP_bra:
2595 {
2596 int offset;
2597
2598 gcc_assert (val1->val_class == dw_val_class_loc);
2599 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2600
2601 fputc (',', asm_out_file);
2602 dw2_asm_output_data_raw (2, offset);
2603 }
2604 break;
2605
2606 case DW_OP_regx:
2607 {
2608 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2609 gcc_assert (size_of_uleb128 (r)
2610 == size_of_uleb128 (val1->v.val_unsigned));
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (r);
2613 }
2614 break;
2615
2616 case DW_OP_constu:
2617 case DW_OP_plus_uconst:
2618 case DW_OP_piece:
2619 fputc (',', asm_out_file);
2620 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2621 break;
2622
2623 case DW_OP_bit_piece:
2624 fputc (',', asm_out_file);
2625 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2626 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2627 break;
2628
2629 case DW_OP_consts:
2630 case DW_OP_breg0:
2631 case DW_OP_breg1:
2632 case DW_OP_breg2:
2633 case DW_OP_breg3:
2634 case DW_OP_breg4:
2635 case DW_OP_breg5:
2636 case DW_OP_breg6:
2637 case DW_OP_breg7:
2638 case DW_OP_breg8:
2639 case DW_OP_breg9:
2640 case DW_OP_breg10:
2641 case DW_OP_breg11:
2642 case DW_OP_breg12:
2643 case DW_OP_breg13:
2644 case DW_OP_breg14:
2645 case DW_OP_breg15:
2646 case DW_OP_breg16:
2647 case DW_OP_breg17:
2648 case DW_OP_breg18:
2649 case DW_OP_breg19:
2650 case DW_OP_breg20:
2651 case DW_OP_breg21:
2652 case DW_OP_breg22:
2653 case DW_OP_breg23:
2654 case DW_OP_breg24:
2655 case DW_OP_breg25:
2656 case DW_OP_breg26:
2657 case DW_OP_breg27:
2658 case DW_OP_breg28:
2659 case DW_OP_breg29:
2660 case DW_OP_breg30:
2661 case DW_OP_breg31:
2662 case DW_OP_fbreg:
2663 fputc (',', asm_out_file);
2664 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2665 break;
2666
2667 case DW_OP_bregx:
2668 {
2669 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2670 gcc_assert (size_of_uleb128 (r)
2671 == size_of_uleb128 (val1->v.val_unsigned));
2672 fputc (',', asm_out_file);
2673 dw2_asm_output_data_uleb128_raw (r);
2674 fputc (',', asm_out_file);
2675 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2676 }
2677 break;
2678
2679 case DW_OP_implicit_pointer:
2680 case DW_OP_entry_value:
2681 case DW_OP_const_type:
2682 case DW_OP_regval_type:
2683 case DW_OP_deref_type:
2684 case DW_OP_convert:
2685 case DW_OP_reinterpret:
2686 case DW_OP_GNU_implicit_pointer:
2687 case DW_OP_GNU_entry_value:
2688 case DW_OP_GNU_const_type:
2689 case DW_OP_GNU_regval_type:
2690 case DW_OP_GNU_deref_type:
2691 case DW_OP_GNU_convert:
2692 case DW_OP_GNU_reinterpret:
2693 case DW_OP_GNU_parameter_ref:
2694 gcc_unreachable ();
2695 break;
2696
2697 default:
2698 /* Other codes have no operands. */
2699 break;
2700 }
2701 }
2702
2703 void
2704 output_loc_sequence_raw (dw_loc_descr_ref loc)
2705 {
2706 while (1)
2707 {
2708 enum dwarf_location_atom opc = loc->dw_loc_opc;
2709 /* Output the opcode. */
2710 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2711 {
2712 unsigned r = (opc - DW_OP_breg0);
2713 r = DWARF2_FRAME_REG_OUT (r, 1);
2714 gcc_assert (r <= 31);
2715 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2716 }
2717 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2718 {
2719 unsigned r = (opc - DW_OP_reg0);
2720 r = DWARF2_FRAME_REG_OUT (r, 1);
2721 gcc_assert (r <= 31);
2722 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2723 }
2724 /* Output the opcode. */
2725 fprintf (asm_out_file, "%#x", opc);
2726 output_loc_operands_raw (loc);
2727
2728 if (!loc->dw_loc_next)
2729 break;
2730 loc = loc->dw_loc_next;
2731
2732 fputc (',', asm_out_file);
2733 }
2734 }
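
/* This raw form feeds .cfi_escape, so everything must be expressible as
   comma-separated byte values.  As a sketch, on a target where
   DWARF2_FRAME_REG_OUT is the identity mapping, a DW_OP_breg7 descriptor
   with offset 16 comes out roughly as "0x77,0x10": the (possibly remapped)
   opcode byte followed by the SLEB128 bytes of the offset.  */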
2735
2736 /* This function builds a dwarf location descriptor sequence from a
2737 dw_cfa_location, adding the given OFFSET to the result of the
2738 expression. */
2739
2740 struct dw_loc_descr_node *
2741 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2742 {
2743 struct dw_loc_descr_node *head, *tmp;
2744
2745 offset += cfa->offset;
2746
2747 if (cfa->indirect)
2748 {
2749 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2750 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2751 head->dw_loc_oprnd1.val_entry = NULL;
2752 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2753 add_loc_descr (&head, tmp);
2754 loc_descr_plus_const (&head, offset);
2755 }
2756 else
2757 head = new_reg_loc_descr (cfa->reg, offset);
2758
2759 return head;
2760 }
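
/* As an example, if the CFA is simply a register plus a constant, say
   DWARF register 7 plus 16, and OFFSET is 8, the result is conceptually
   the single descriptor produced by new_reg_loc_descr (7, 24), i.e. a
   DW_OP_breg7 with offset 24.  In the indirect case the register-relative
   address is first dereferenced with DW_OP_deref and the combined offset
   is then added via loc_descr_plus_const.  */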
2761
2762 /* This function builds a dwarf location descriptor sequence for
2763 the address at OFFSET from the CFA when stack is aligned to
2764 ALIGNMENT byte. */
2765
2766 struct dw_loc_descr_node *
2767 build_cfa_aligned_loc (dw_cfa_location *cfa,
2768 poly_int64 offset, HOST_WIDE_INT alignment)
2769 {
2770 struct dw_loc_descr_node *head;
2771 unsigned int dwarf_fp
2772 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2773
2774 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2775 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2776 {
2777 head = new_reg_loc_descr (dwarf_fp, 0);
2778 add_loc_descr (&head, int_loc_descriptor (alignment));
2779 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2780 loc_descr_plus_const (&head, offset);
2781 }
2782 else
2783 head = new_reg_loc_descr (dwarf_fp, offset);
2784 return head;
2785 }
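
/* Sketch of the aligned case: when the CFA is the hard frame pointer and
   not indirect, the emitted expression has the shape

     DW_OP_breg<fp> 0; <alignment constant>; DW_OP_and; <add OFFSET>

   i.e. the frame pointer value is masked to emulate the dynamic stack
   realignment before the offset is applied; otherwise a plain
   register-plus-offset descriptor is used.  */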
2786 \f
2787 /* And now, the support for symbolic debugging information. */
2788
2789 /* .debug_str support. */
2790
2791 static void dwarf2out_init (const char *);
2792 static void dwarf2out_finish (const char *);
2793 static void dwarf2out_early_finish (const char *);
2794 static void dwarf2out_assembly_start (void);
2795 static void dwarf2out_define (unsigned int, const char *);
2796 static void dwarf2out_undef (unsigned int, const char *);
2797 static void dwarf2out_start_source_file (unsigned, const char *);
2798 static void dwarf2out_end_source_file (unsigned);
2799 static void dwarf2out_function_decl (tree);
2800 static void dwarf2out_begin_block (unsigned, unsigned);
2801 static void dwarf2out_end_block (unsigned, unsigned);
2802 static bool dwarf2out_ignore_block (const_tree);
2803 static void dwarf2out_early_global_decl (tree);
2804 static void dwarf2out_late_global_decl (tree);
2805 static void dwarf2out_type_decl (tree, int);
2806 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2807 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2808 dw_die_ref);
2809 static void dwarf2out_abstract_function (tree);
2810 static void dwarf2out_var_location (rtx_insn *);
2811 static void dwarf2out_inline_entry (tree);
2812 static void dwarf2out_size_function (tree);
2813 static void dwarf2out_begin_function (tree);
2814 static void dwarf2out_end_function (unsigned int);
2815 static void dwarf2out_register_main_translation_unit (tree unit);
2816 static void dwarf2out_set_name (tree, tree);
2817 static void dwarf2out_register_external_die (tree decl, const char *sym,
2818 unsigned HOST_WIDE_INT off);
2819 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2820 unsigned HOST_WIDE_INT *off);
2821
2822 /* The debug hooks structure. */
2823
2824 const struct gcc_debug_hooks dwarf2_debug_hooks =
2825 {
2826 dwarf2out_init,
2827 dwarf2out_finish,
2828 dwarf2out_early_finish,
2829 dwarf2out_assembly_start,
2830 dwarf2out_define,
2831 dwarf2out_undef,
2832 dwarf2out_start_source_file,
2833 dwarf2out_end_source_file,
2834 dwarf2out_begin_block,
2835 dwarf2out_end_block,
2836 dwarf2out_ignore_block,
2837 dwarf2out_source_line,
2838 dwarf2out_begin_prologue,
2839 #if VMS_DEBUGGING_INFO
2840 dwarf2out_vms_end_prologue,
2841 dwarf2out_vms_begin_epilogue,
2842 #else
2843 debug_nothing_int_charstar,
2844 debug_nothing_int_charstar,
2845 #endif
2846 dwarf2out_end_epilogue,
2847 dwarf2out_begin_function,
2848 dwarf2out_end_function, /* end_function */
2849 dwarf2out_register_main_translation_unit,
2850 dwarf2out_function_decl, /* function_decl */
2851 dwarf2out_early_global_decl,
2852 dwarf2out_late_global_decl,
2853 dwarf2out_type_decl, /* type_decl */
2854 dwarf2out_imported_module_or_decl,
2855 dwarf2out_die_ref_for_decl,
2856 dwarf2out_register_external_die,
2857 debug_nothing_tree, /* deferred_inline_function */
2858 /* The DWARF 2 backend tries to reduce debugging bloat by not
2859 emitting the abstract description of inline functions until
2860 something tries to reference them. */
2861 dwarf2out_abstract_function, /* outlining_inline_function */
2862 debug_nothing_rtx_code_label, /* label */
2863 debug_nothing_int, /* handle_pch */
2864 dwarf2out_var_location,
2865 dwarf2out_inline_entry, /* inline_entry */
2866 dwarf2out_size_function, /* size_function */
2867 dwarf2out_switch_text_section,
2868 dwarf2out_set_name,
2869 1, /* start_end_main_source_file */
2870 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2871 };
2872
2873 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2874 {
2875 dwarf2out_init,
2876 debug_nothing_charstar,
2877 debug_nothing_charstar,
2878 dwarf2out_assembly_start,
2879 debug_nothing_int_charstar,
2880 debug_nothing_int_charstar,
2881 debug_nothing_int_charstar,
2882 debug_nothing_int,
2883 debug_nothing_int_int, /* begin_block */
2884 debug_nothing_int_int, /* end_block */
2885 debug_true_const_tree, /* ignore_block */
2886 dwarf2out_source_line, /* source_line */
2887 debug_nothing_int_int_charstar, /* begin_prologue */
2888 debug_nothing_int_charstar, /* end_prologue */
2889 debug_nothing_int_charstar, /* begin_epilogue */
2890 debug_nothing_int_charstar, /* end_epilogue */
2891 debug_nothing_tree, /* begin_function */
2892 debug_nothing_int, /* end_function */
2893 debug_nothing_tree, /* register_main_translation_unit */
2894 debug_nothing_tree, /* function_decl */
2895 debug_nothing_tree, /* early_global_decl */
2896 debug_nothing_tree, /* late_global_decl */
2897 debug_nothing_tree_int, /* type_decl */
2898 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2899 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2900 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2901 debug_nothing_tree, /* deferred_inline_function */
2902 debug_nothing_tree, /* outlining_inline_function */
2903 debug_nothing_rtx_code_label, /* label */
2904 debug_nothing_int, /* handle_pch */
2905 debug_nothing_rtx_insn, /* var_location */
2906 debug_nothing_tree, /* inline_entry */
2907 debug_nothing_tree, /* size_function */
2908 debug_nothing_void, /* switch_text_section */
2909 debug_nothing_tree_tree, /* set_name */
2910 0, /* start_end_main_source_file */
2911 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2912 };
2913 \f
2914 /* NOTE: In the comments in this file, many references are made to
2915 "Debugging Information Entries". This term is abbreviated as `DIE'
2916 throughout the remainder of this file. */
2917
2918 /* An internal representation of the DWARF output is built, and then
2919 walked to generate the DWARF debugging info. The walk of the internal
2920 representation is done after the entire program has been compiled.
2921 The types below are used to describe the internal representation. */
2922
2923 /* Whether to put type DIEs into their own section .debug_types instead
2924 of making them part of the .debug_info section. Only supported for
2925 Dwarf V4 or higher, and only if the user didn't disable it through
2926 -fno-debug-types-section. It is more efficient to put them in
2927 separate comdat sections since the linker will then be able to
2928 remove duplicates. But not all tools support .debug_types sections
2929 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2930 instead the DW_UT_type unit type is used in the .debug_info section.
2931 For late LTO debug there should be almost no types emitted, so avoid
2932 enabling -fdebug-types-section there. */
2933
2934 #define use_debug_types (dwarf_version >= 4 \
2935 && flag_debug_types_section \
2936 && !in_lto_p)
2937
2938 /* Various DIE's use offsets relative to the beginning of the
2939 .debug_info section to refer to each other. */
2940
2941 typedef long int dw_offset;
2942
2943 struct comdat_type_node;
2944
2945 /* The entries in the line_info table more-or-less mirror the opcodes
2946 that are used in the real dwarf line table. Arrays of these entries
2947 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2948 supported. */
2949
2950 enum dw_line_info_opcode {
2951 /* Emit DW_LNE_set_address; the operand is the label index. */
2952 LI_set_address,
2953
2954 /* Emit a row to the matrix with the given line. This may be done
2955 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2956 special opcodes. */
2957 LI_set_line,
2958
2959 /* Emit a DW_LNS_set_file. */
2960 LI_set_file,
2961
2962 /* Emit a DW_LNS_set_column. */
2963 LI_set_column,
2964
2965 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2966 LI_negate_stmt,
2967
2968 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2969 LI_set_prologue_end,
2970 LI_set_epilogue_begin,
2971
2972 /* Emit a DW_LNE_set_discriminator. */
2973 LI_set_discriminator,
2974
2975 /* Output a Fixed Advance PC; the target PC is the label index; the
2976 base PC is the previous LI_adv_address or LI_set_address entry.
2977 We only use this when emitting debug views without assembler
2978 support, at explicit user request. Ideally, we should only use
2979 it when the offset might be zero but we can't tell: it's the only
2980 way to maybe change the PC without resetting the view number. */
2981 LI_adv_address
2982 };
2983
2984 typedef struct GTY(()) dw_line_info_struct {
2985 enum dw_line_info_opcode opcode;
2986 unsigned int val;
2987 } dw_line_info_entry;
2988
2989
2990 struct GTY(()) dw_line_info_table {
2991 /* The label that marks the end of this section. */
2992 const char *end_label;
2993
2994 /* The values for the last row of the matrix, as collected in the table.
2995 These are used to minimize the changes to the next row. */
2996 unsigned int file_num;
2997 unsigned int line_num;
2998 unsigned int column_num;
2999 int discrim_num;
3000 bool is_stmt;
3001 bool in_use;
3002
3003 /* This denotes the NEXT view number.
3004
3005 If it is 0, it is known that the NEXT view will be the first view
3006 at the given PC.
3007
3008 If it is -1, we're forcing the view number to be reset, e.g. at a
3009 function entry.
3010
3011 The meaning of other nonzero values depends on whether we're
3012 computing views internally or leaving it for the assembler to do
3013 so. If we're emitting them internally, view denotes the view
3014 number since the last known advance of PC. If we're leaving it
3015 for the assembler, it denotes the LVU label number that we're
3016 going to ask the assembler to assign. */
3017 var_loc_view view;
3018
3019 /* This counts the number of symbolic views emitted in this table
3020 since the latest view reset. Its max value, over all tables,
3021 sets symview_upper_bound. */
3022 var_loc_view symviews_since_reset;
3023
3024 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3025 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3026 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3027 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3028
3029 vec<dw_line_info_entry, va_gc> *entries;
3030 };
3031
3032 /* This is an upper bound for view numbers that the assembler may
3033 assign to symbolic views output in this translation unit. It is used to
3034 decide how big a field to use to represent view numbers in
3035 symview-classed attributes. */
3036
3037 static var_loc_view symview_upper_bound;
3038
3039 /* If we're keeping track of location views and their reset points, and
3040 INSN is a reset point (i.e., it necessarily advances the PC), mark
3041 the next view in TABLE as reset. */
3042
3043 static void
3044 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3045 {
3046 if (!debug_internal_reset_location_views)
3047 return;
3048
3049 /* Maybe turn (part of?) this test into a default target hook. */
3050 int reset = 0;
3051
3052 if (targetm.reset_location_view)
3053 reset = targetm.reset_location_view (insn);
3054
3055 if (reset)
3056 ;
3057 else if (JUMP_TABLE_DATA_P (insn))
3058 reset = 1;
3059 else if (GET_CODE (insn) == USE
3060 || GET_CODE (insn) == CLOBBER
3061 || GET_CODE (insn) == ASM_INPUT
3062 || asm_noperands (insn) >= 0)
3063 ;
3064 else if (get_attr_min_length (insn) > 0)
3065 reset = 1;
3066
3067 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3068 RESET_NEXT_VIEW (table->view);
3069 }
3070
3071 /* Each DIE attribute has a field specifying the attribute kind,
3072 a link to the next attribute in the chain, and an attribute value.
3073 Attributes are typically linked below the DIE they modify. */
3074
3075 typedef struct GTY(()) dw_attr_struct {
3076 enum dwarf_attribute dw_attr;
3077 dw_val_node dw_attr_val;
3078 }
3079 dw_attr_node;
3080
3081
3082 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3083 The children of each node form a circular list linked by
3084 die_sib. die_child points to the node *before* the "first" child node. */
3085
3086 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3087 union die_symbol_or_type_node
3088 {
3089 const char * GTY ((tag ("0"))) die_symbol;
3090 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3091 }
3092 GTY ((desc ("%0.comdat_type_p"))) die_id;
3093 vec<dw_attr_node, va_gc> *die_attr;
3094 dw_die_ref die_parent;
3095 dw_die_ref die_child;
3096 dw_die_ref die_sib;
3097 dw_die_ref die_definition; /* ref from a specification to its definition */
3098 dw_offset die_offset;
3099 unsigned long die_abbrev;
3100 int die_mark;
3101 unsigned int decl_id;
3102 enum dwarf_tag die_tag;
3103 /* Die is used and must not be pruned as unused. */
3104 BOOL_BITFIELD die_perennial_p : 1;
3105 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3106 /* For an external ref to die_symbol if die_offset contains an extra
3107 offset to that symbol. */
3108 BOOL_BITFIELD with_offset : 1;
3109 /* Whether this DIE was removed from the DIE tree, for example via
3110 prune_unused_types. We don't consider those present from the
3111 DIE lookup routines. */
3112 BOOL_BITFIELD removed : 1;
3113 /* Lots of spare bits. */
3114 }
3115 die_node;
3116
3117 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3118 static bool early_dwarf;
3119 static bool early_dwarf_finished;
3120 class set_early_dwarf {
3121 public:
3122 bool saved;
3123 set_early_dwarf () : saved(early_dwarf)
3124 {
3125 gcc_assert (! early_dwarf_finished);
3126 early_dwarf = true;
3127 }
3128 ~set_early_dwarf () { early_dwarf = saved; }
3129 };
3130
3131 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3132 #define FOR_EACH_CHILD(die, c, expr) do { \
3133 c = die->die_child; \
3134 if (c) do { \
3135 c = c->die_sib; \
3136 expr; \
3137 } while (c != die->die_child); \
3138 } while (0)
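
/* A typical use of the macro above, here counting the children of DIE
   (a sketch):

     dw_die_ref c;
     unsigned nchildren = 0;
     FOR_EACH_CHILD (die, c, nchildren++);  */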
3139
3140 /* The pubname structure */
3141
3142 typedef struct GTY(()) pubname_struct {
3143 dw_die_ref die;
3144 const char *name;
3145 }
3146 pubname_entry;
3147
3148
3149 struct GTY(()) dw_ranges {
3150 const char *label;
3151 /* If this is positive, it's a block number, otherwise it's a
3152 bitwise-negated index into dw_ranges_by_label. */
3153 int num;
3154 /* Index for the range list for DW_FORM_rnglistx. */
3155 unsigned int idx : 31;
3156 /* True if this range might be possibly in a different section
3157 from previous entry. */
3158 unsigned int maybe_new_sec : 1;
3159 };
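
/* For example, num == 3 denotes block number 3, while num == -4, i.e. ~3,
   denotes entry 3 of the ranges_by_label vector.  */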
3160
3161 /* A structure to hold a macinfo entry. */
3162
3163 typedef struct GTY(()) macinfo_struct {
3164 unsigned char code;
3165 unsigned HOST_WIDE_INT lineno;
3166 const char *info;
3167 }
3168 macinfo_entry;
3169
3170
3171 struct GTY(()) dw_ranges_by_label {
3172 const char *begin;
3173 const char *end;
3174 };
3175
3176 /* The comdat type node structure. */
3177 struct GTY(()) comdat_type_node
3178 {
3179 dw_die_ref root_die;
3180 dw_die_ref type_die;
3181 dw_die_ref skeleton_die;
3182 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3183 comdat_type_node *next;
3184 };
3185
3186 /* A list of DIEs for which we can't determine ancestry (parent_die
3187 field) just yet. Later in dwarf2out_finish we will fill in the
3188 missing bits. */
3189 typedef struct GTY(()) limbo_die_struct {
3190 dw_die_ref die;
3191 /* The tree for which this DIE was created. We use this to
3192 determine ancestry later. */
3193 tree created_for;
3194 struct limbo_die_struct *next;
3195 }
3196 limbo_die_node;
3197
3198 typedef struct skeleton_chain_struct
3199 {
3200 dw_die_ref old_die;
3201 dw_die_ref new_die;
3202 struct skeleton_chain_struct *parent;
3203 }
3204 skeleton_chain_node;
3205
3206 /* Define a macro which returns nonzero for a TYPE_DECL which was
3207 implicitly generated for a type.
3208
3209 Note that, unlike the C front-end (which generates a NULL named
3210 TYPE_DECL node for each complete tagged type, each array type,
3211 and each function type node created) the C++ front-end generates
3212 a _named_ TYPE_DECL node for each tagged type node created.
3213 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3214 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3215 front-end, but for each type, tagged or not. */
3216
3217 #define TYPE_DECL_IS_STUB(decl) \
3218 (DECL_NAME (decl) == NULL_TREE \
3219 || (DECL_ARTIFICIAL (decl) \
3220 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3221 /* This is necessary for stub decls that \
3222 appear in nested inline functions. */ \
3223 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3224 && (decl_ultimate_origin (decl) \
3225 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3226
3227 /* Information concerning the compilation unit's programming
3228 language, and compiler version. */
3229
3230 /* Fixed size portion of the DWARF compilation unit header. */
3231 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3232 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3233 + (dwarf_version >= 5 ? 4 : 3))
3234
3235 /* Fixed size portion of the DWARF comdat type unit header. */
3236 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3237 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3238 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3239
3240 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3241 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3242 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3243
3244 /* Fixed size portion of public names info. */
3245 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3246
3247 /* Fixed size portion of the address range info. */
3248 #define DWARF_ARANGES_HEADER_SIZE \
3249 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3250 DWARF2_ADDR_SIZE * 2) \
3251 - DWARF_INITIAL_LENGTH_SIZE)
3252
3253 /* Size of padding portion in the address range info. It must be
3254 aligned to twice the pointer size. */
3255 #define DWARF_ARANGES_PAD_SIZE \
3256 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3257 DWARF2_ADDR_SIZE * 2) \
3258 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
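
/* Worked example, assuming 32-bit DWARF (4-byte initial length and offsets)
   and 8-byte addresses: DWARF_COMPILE_UNIT_HEADER_SIZE is 4 + 4 + 4 = 12 for
   DWARF 5 (11 for earlier versions); DWARF_ARANGES_HEADER_SIZE rounds
   4 + 4 + 4 up to the 16-byte address-pair boundary and subtracts the
   initial length, giving 16 - 4 = 12; and DWARF_ARANGES_PAD_SIZE is the
   16 - 12 = 4 bytes of padding before the first address/length pair.  */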
3259
3260 /* Use assembler line directives if available. */
3261 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3262 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3263 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3264 #else
3265 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3266 #endif
3267 #endif
3268
3269 /* Use assembler views in line directives if available. */
3270 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3271 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3272 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3273 #else
3274 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3275 #endif
3276 #endif
3277
3278 /* Return true if GCC configure detected assembler support for .loc. */
3279
3280 bool
3281 dwarf2out_default_as_loc_support (void)
3282 {
3283 return DWARF2_ASM_LINE_DEBUG_INFO;
3284 #if (GCC_VERSION >= 3000)
3285 # undef DWARF2_ASM_LINE_DEBUG_INFO
3286 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3287 #endif
3288 }
3289
3290 /* Return true if GCC configure detected assembler support for views
3291 in .loc directives. */
3292
3293 bool
3294 dwarf2out_default_as_locview_support (void)
3295 {
3296 return DWARF2_ASM_VIEW_DEBUG_INFO;
3297 #if (GCC_VERSION >= 3000)
3298 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3299 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3300 #endif
3301 }
3302
3303 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3304 view computation, and it refers to a view identifier for which we
3305 will not emit a label because it is known to map to a view number
3306 zero. We won't allocate the bitmap if we're not using assembler
3307 support for location views, but we have to make the variable
3308 visible for GGC and for code that will be optimized out for lack of
3309 support but that's still parsed and compiled. We could abstract it
3310 out with macros, but it's not worth it. */
3311 static GTY(()) bitmap zero_view_p;
3312
3313 /* Evaluate to TRUE iff N is known to identify the first location view
3314 at its PC. When not using assembler location view computation,
3315 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3316 and view label numbers recorded in it are the ones known to be
3317 zero. */
3318 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3319 || (N) == (var_loc_view)-1 \
3320 || (zero_view_p \
3321 && bitmap_bit_p (zero_view_p, (N))))
3322
3323 /* Return true iff we're to emit .loc directives for the assembler to
3324 generate line number sections.
3325
3326 When we're not emitting views, all we need from the assembler is
3327 support for .loc directives.
3328
3329 If we are emitting views, we can only use the assembler's .loc
3330 support if it also supports views.
3331
3332 When the compiler is emitting the line number programs and
3333 computing view numbers itself, it resets view numbers at known PC
3334 changes and counts from that, and then it emits view numbers as
3335 literal constants in locviewlists. There are cases in which the
3336 compiler is not sure about PC changes, e.g. when extra alignment is
3337 requested for a label. In these cases, the compiler may not reset
3338 the view counter, and the potential PC advance in the line number
3339 program will use an opcode that does not reset the view counter
3340 even if the PC actually changes, so that compiler and debug info
3341 consumer can keep view numbers in sync.
3342
3343 When the compiler defers view computation to the assembler, it
3344 emits symbolic view numbers in locviewlists, with the exception of
3345 views known to be zero (forced resets, or reset after
3346 compiler-visible PC changes): instead of emitting symbols for
3347 these, we emit literal zero and assert the assembler agrees with
3348 the compiler's assessment. We could use symbolic views everywhere,
3349 instead of special-casing zero views, but then we'd be unable to
3350 optimize out locviewlists that contain only zeros. */
3351
3352 static bool
3353 output_asm_line_debug_info (void)
3354 {
3355 return (dwarf2out_as_loc_support
3356 && (dwarf2out_as_locview_support
3357 || !debug_variable_location_views));
3358 }
3359
3360 static bool asm_outputs_debug_line_str (void);
3361
3362 /* Minimum line offset in a special line info. opcode.
3363 This value was chosen to give a reasonable range of values. */
3364 #define DWARF_LINE_BASE -10
3365
3366 /* First special line opcode - leave room for the standard opcodes. */
3367 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3368
3369 /* Range of line offsets in a special line info. opcode. */
3370 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
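
/* Worked example, assuming the standard DW_LNS_set_isa value of 12:
   DWARF_LINE_OPCODE_BASE is 13 and DWARF_LINE_RANGE is 242, so the special
   opcodes 13..254 encode line advances of -10..231 with no address advance,
   and opcode 255 encodes a line advance of -10 together with an address
   advance of one operation unit.  */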
3371
3372 /* Flag that indicates the initial value of the is_stmt_start flag.
3373 In the present implementation, we do not mark any lines as
3374 the beginning of a source statement, because that information
3375 is not made available by the GCC front-end. */
3376 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3377
3378 /* Maximum number of operations per instruction bundle. */
3379 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3380 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3381 #endif
3382
3383 /* This location is used by calc_die_sizes() to keep track of
3384 the offset of each DIE within the .debug_info section. */
3385 static unsigned long next_die_offset;
3386
3387 /* Record the root of the DIE's built for the current compilation unit. */
3388 static GTY(()) dw_die_ref single_comp_unit_die;
3389
3390 /* A list of type DIEs that have been separated into comdat sections. */
3391 static GTY(()) comdat_type_node *comdat_type_list;
3392
3393 /* A list of CU DIEs that have been separated. */
3394 static GTY(()) limbo_die_node *cu_die_list;
3395
3396 /* A list of DIEs with a NULL parent waiting to be relocated. */
3397 static GTY(()) limbo_die_node *limbo_die_list;
3398
3399 /* A list of DIEs for which we may have to generate
3400 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3401 static GTY(()) limbo_die_node *deferred_asm_name;
3402
3403 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3404 {
3405 typedef const char *compare_type;
3406
3407 static hashval_t hash (dwarf_file_data *);
3408 static bool equal (dwarf_file_data *, const char *);
3409 };
3410
3411 /* Filenames referenced by this compilation unit. */
3412 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3413
3414 struct decl_die_hasher : ggc_ptr_hash<die_node>
3415 {
3416 typedef tree compare_type;
3417
3418 static hashval_t hash (die_node *);
3419 static bool equal (die_node *, tree);
3420 };
3421 /* A hash table of references to DIE's that describe declarations.
3422 The key is a DECL_UID() which is a unique number identifying each decl. */
3423 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3424
3425 struct GTY ((for_user)) variable_value_struct {
3426 unsigned int decl_id;
3427 vec<dw_die_ref, va_gc> *dies;
3428 };
3429
3430 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3431 {
3432 typedef tree compare_type;
3433
3434 static hashval_t hash (variable_value_struct *);
3435 static bool equal (variable_value_struct *, tree);
3436 };
3437 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3438 dw_val_class_decl_ref class, indexed by FUNCTION_DECLs which is
3439 DECL_CONTEXT of the referenced VAR_DECLs. */
3440 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3441
3442 struct block_die_hasher : ggc_ptr_hash<die_struct>
3443 {
3444 static hashval_t hash (die_struct *);
3445 static bool equal (die_struct *, die_struct *);
3446 };
3447
3448 /* A hash table of references to DIE's that describe COMMON blocks.
3449 The key is DECL_UID() ^ die_parent. */
3450 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3451
3452 typedef struct GTY(()) die_arg_entry_struct {
3453 dw_die_ref die;
3454 tree arg;
3455 } die_arg_entry;
3456
3457
3458 /* Node of the variable location list. */
3459 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3460 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3461 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3462 in mode of the EXPR_LIST node and first EXPR_LIST operand
3463 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3464 location or NULL for padding. For larger bitsizes,
3465 mode is 0 and first operand is a CONCAT with bitsize
3466 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3467 NULL as second operand. */
3468 rtx GTY (()) loc;
3469 const char * GTY (()) label;
3470 struct var_loc_node * GTY (()) next;
3471 var_loc_view view;
3472 };
3473
3474 /* Variable location list. */
3475 struct GTY ((for_user)) var_loc_list_def {
3476 struct var_loc_node * GTY (()) first;
3477
3478 /* Pointer to the last but one or last element of the
3479 chained list. If the list is empty, both first and
3480 last are NULL. If the list contains just one node,
3481 or the last node is certainly not redundant, this points
3482 to the last node; otherwise it points to the last but one.
3483 Do not mark it for GC because it is marked through the chain. */
3484 struct var_loc_node * GTY ((skip ("%h"))) last;
3485
3486 /* Pointer to the last element before section switch,
3487 if NULL, either sections weren't switched or first
3488 is after section switch. */
3489 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3490
3491 /* DECL_UID of the variable decl. */
3492 unsigned int decl_id;
3493 };
3494 typedef struct var_loc_list_def var_loc_list;
3495
3496 /* Call argument location list. */
3497 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3498 rtx GTY (()) call_arg_loc_note;
3499 const char * GTY (()) label;
3500 tree GTY (()) block;
3501 bool tail_call_p;
3502 rtx GTY (()) symbol_ref;
3503 struct call_arg_loc_node * GTY (()) next;
3504 };
3505
3506
3507 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3508 {
3509 typedef const_tree compare_type;
3510
3511 static hashval_t hash (var_loc_list *);
3512 static bool equal (var_loc_list *, const_tree);
3513 };
3514
3515 /* Table of decl location linked lists. */
3516 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3517
3518 /* Head and tail of call_arg_loc chain. */
3519 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3520 static struct call_arg_loc_node *call_arg_loc_last;
3521
3522 /* Number of call sites in the current function. */
3523 static int call_site_count = -1;
3524 /* Number of tail call sites in the current function. */
3525 static int tail_call_site_count = -1;
3526
3527 /* A cached location list. */
3528 struct GTY ((for_user)) cached_dw_loc_list_def {
3529 /* The DECL_UID of the decl that this entry describes. */
3530 unsigned int decl_id;
3531
3532 /* The cached location list. */
3533 dw_loc_list_ref loc_list;
3534 };
3535 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3536
3537 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3538 {
3539
3540 typedef const_tree compare_type;
3541
3542 static hashval_t hash (cached_dw_loc_list *);
3543 static bool equal (cached_dw_loc_list *, const_tree);
3544 };
3545
3546 /* Table of cached location lists. */
3547 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3548
3549 /* A vector of references to DIE's that are uniquely identified by their tag,
3550 presence/absence of children DIE's, and list of attribute/value pairs. */
3551 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3552
3553 /* A hash map to remember the stack usage for DWARF procedures. The value
3554 stored is the stack size difference between before the DWARF procedure
3555 invocation and after it returned. In other words, for a DWARF procedure
3556 that consumes N stack slots and that pushes M ones, this stores M - N. */
3557 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
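/* Editorial example (not from the original sources): a DWARF procedure that
   pops two stack entries and pushes one result would be recorded here with a
   stack usage of 1 - 2 = -1.  */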
3558
3559 /* A global counter for generating labels for line number data. */
3560 static unsigned int line_info_label_num;
3561
3562 /* The current table to which we should emit line number information
3563 for the current function. This will be set up at the beginning of
3564 assembly for the function. */
3565 static GTY(()) dw_line_info_table *cur_line_info_table;
3566
3567 /* The two default tables of line number info. */
3568 static GTY(()) dw_line_info_table *text_section_line_info;
3569 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3570
3571 /* The set of all non-default tables of line number info. */
3572 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3573
3574 /* A flag to tell pubnames/types export if there is an info section to
3575 refer to. */
3576 static bool info_section_emitted;
3577
3578 /* A pointer to the base of a table that contains a list of publicly
3579 accessible names. */
3580 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3581
3582 /* A pointer to the base of a table that contains a list of publicly
3583 accessible types. */
3584 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3585
3586 /* A pointer to the base of a table that contains a list of macro
3587 defines/undefines (and file start/end markers). */
3588 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3589
3590 /* True if .debug_macinfo or .debug_macros section is going to be
3591 emitted. */
3592 #define have_macinfo \
3593 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3594 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3595 && !macinfo_table->is_empty ())
3596
3597 /* Vector of dies for which we should generate .debug_ranges info. */
3598 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3599
3600 /* Vector of pairs of labels referenced in ranges_table. */
3601 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3602
3603 /* Whether we have location lists that need outputting. */
3604 static GTY(()) bool have_location_lists;
3605
3606 /* Unique label counter. */
3607 static GTY(()) unsigned int loclabel_num;
3608
3609 /* Unique label counter for point-of-call tables. */
3610 static GTY(()) unsigned int poc_label_num;
3611
3612 /* The last file entry emitted by maybe_emit_file(). */
3613 static GTY(()) struct dwarf_file_data * last_emitted_file;
3614
3615 /* Number of internal labels generated by gen_internal_sym(). */
3616 static GTY(()) int label_num;
3617
3618 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3619
3620 /* Instances of generic types for which we need to generate debug
3621 info that describe their generic parameters and arguments. That
3622 generation needs to happen once all types are properly laid out so
3623 we do it at the end of compilation. */
3624 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3625
3626 /* Offset from the "steady-state frame pointer" to the frame base,
3627 within the current function. */
3628 static poly_int64 frame_pointer_fb_offset;
3629 static bool frame_pointer_fb_offset_valid;
3630
3631 static vec<dw_die_ref> base_types;
3632
3633 /* Flags to represent a set of attribute classes for attributes that represent
3634 a scalar value (bounds, pointers, ...). */
3635 enum dw_scalar_form
3636 {
3637 dw_scalar_form_constant = 0x01,
3638 dw_scalar_form_exprloc = 0x02,
3639 dw_scalar_form_reference = 0x04
3640 };
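/* Illustrative note (editorial): these values are bit flags meant to be
   OR'ed together into a mask describing which forms are acceptable for a
   given attribute, e.g. (dw_scalar_form_constant | dw_scalar_form_exprloc)
   for an attribute that may be emitted either as a constant or as a DWARF
   expression.  */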
3641
3642 /* Forward declarations for functions defined in this file. */
3643
3644 static int is_pseudo_reg (const_rtx);
3645 static tree type_main_variant (tree);
3646 static int is_tagged_type (const_tree);
3647 static const char *dwarf_tag_name (unsigned);
3648 static const char *dwarf_attr_name (unsigned);
3649 static const char *dwarf_form_name (unsigned);
3650 static tree decl_ultimate_origin (const_tree);
3651 static tree decl_class_context (tree);
3652 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3653 static inline enum dw_val_class AT_class (dw_attr_node *);
3654 static inline unsigned int AT_index (dw_attr_node *);
3655 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3656 static inline unsigned AT_flag (dw_attr_node *);
3657 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3658 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3659 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3660 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3661 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3662 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3663 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3664 unsigned int, unsigned char *);
3665 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3666 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3667 static inline const char *AT_string (dw_attr_node *);
3668 static enum dwarf_form AT_string_form (dw_attr_node *);
3669 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3670 static void add_AT_specification (dw_die_ref, dw_die_ref);
3671 static inline dw_die_ref AT_ref (dw_attr_node *);
3672 static inline int AT_ref_external (dw_attr_node *);
3673 static inline void set_AT_ref_external (dw_attr_node *, int);
3674 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3675 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3676 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3677 dw_loc_list_ref);
3678 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3679 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3681 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3682 static void remove_addr_table_entry (addr_table_entry *);
3683 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3684 static inline rtx AT_addr (dw_attr_node *);
3685 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3686 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3687 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3688 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3689 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3690 unsigned long, bool);
3691 static inline const char *AT_lbl (dw_attr_node *);
3692 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3693 static const char *get_AT_low_pc (dw_die_ref);
3694 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3695 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3696 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3697 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3698 static bool is_c (void);
3699 static bool is_cxx (void);
3700 static bool is_cxx (const_tree);
3701 static bool is_fortran (void);
3702 static bool is_ada (void);
3703 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3704 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3705 static void add_child_die (dw_die_ref, dw_die_ref);
3706 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3707 static dw_die_ref lookup_type_die (tree);
3708 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3709 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3710 static void equate_type_number_to_die (tree, dw_die_ref);
3711 static dw_die_ref lookup_decl_die (tree);
3712 static var_loc_list *lookup_decl_loc (const_tree);
3713 static void equate_decl_number_to_die (tree, dw_die_ref);
3714 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3715 static void print_spaces (FILE *);
3716 static void print_die (dw_die_ref, FILE *);
3717 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3718 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3719 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3720 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3721 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3722 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3723 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3724 struct md5_ctx *, int *);
3725 struct checksum_attributes;
3726 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3727 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3728 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3729 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3730 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3731 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3732 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3733 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3734 static int is_type_die (dw_die_ref);
3735 static inline bool is_template_instantiation (dw_die_ref);
3736 static int is_declaration_die (dw_die_ref);
3737 static int should_move_die_to_comdat (dw_die_ref);
3738 static dw_die_ref clone_as_declaration (dw_die_ref);
3739 static dw_die_ref clone_die (dw_die_ref);
3740 static dw_die_ref clone_tree (dw_die_ref);
3741 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3742 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3743 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3744 static dw_die_ref generate_skeleton (dw_die_ref);
3745 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3746 dw_die_ref,
3747 dw_die_ref);
3748 static void break_out_comdat_types (dw_die_ref);
3749 static void copy_decls_for_unworthy_types (dw_die_ref);
3750
3751 static void add_sibling_attributes (dw_die_ref);
3752 static void output_location_lists (dw_die_ref);
3753 static int constant_size (unsigned HOST_WIDE_INT);
3754 static unsigned long size_of_die (dw_die_ref);
3755 static void calc_die_sizes (dw_die_ref);
3756 static void calc_base_type_die_sizes (void);
3757 static void mark_dies (dw_die_ref);
3758 static void unmark_dies (dw_die_ref);
3759 static void unmark_all_dies (dw_die_ref);
3760 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3761 static unsigned long size_of_aranges (void);
3762 static enum dwarf_form value_format (dw_attr_node *);
3763 static void output_value_format (dw_attr_node *);
3764 static void output_abbrev_section (void);
3765 static void output_die_abbrevs (unsigned long, dw_die_ref);
3766 static void output_die (dw_die_ref);
3767 static void output_compilation_unit_header (enum dwarf_unit_type);
3768 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3769 static void output_comdat_type_unit (comdat_type_node *, bool);
3770 static const char *dwarf2_name (tree, int);
3771 static void add_pubname (tree, dw_die_ref);
3772 static void add_enumerator_pubname (const char *, dw_die_ref);
3773 static void add_pubname_string (const char *, dw_die_ref);
3774 static void add_pubtype (tree, dw_die_ref);
3775 static void output_pubnames (vec<pubname_entry, va_gc> *);
3776 static void output_aranges (void);
3777 static unsigned int add_ranges (const_tree, bool = false);
3778 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3779 bool *, bool);
3780 static void output_ranges (void);
3781 static dw_line_info_table *new_line_info_table (void);
3782 static void output_line_info (bool);
3783 static void output_file_names (void);
3784 static dw_die_ref base_type_die (tree, bool);
3785 static int is_base_type (tree);
3786 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3787 static int decl_quals (const_tree);
3788 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3789 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3790 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3791 static unsigned int dbx_reg_number (const_rtx);
3792 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3793 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3794 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3795 enum var_init_status);
3796 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3797 enum var_init_status);
3798 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3799 enum var_init_status);
3800 static int is_based_loc (const_rtx);
3801 static bool resolve_one_addr (rtx *);
3802 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3803 enum var_init_status);
3804 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3805 enum var_init_status);
3806 struct loc_descr_context;
3807 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3808 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3809 static dw_loc_list_ref loc_list_from_tree (tree, int,
3810 struct loc_descr_context *);
3811 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3812 struct loc_descr_context *);
3813 static tree field_type (const_tree);
3814 static unsigned int simple_type_align_in_bits (const_tree);
3815 static unsigned int simple_decl_align_in_bits (const_tree);
3816 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3817 struct vlr_context;
3818 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3819 HOST_WIDE_INT *);
3820 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3821 dw_loc_list_ref);
3822 static void add_data_member_location_attribute (dw_die_ref, tree,
3823 struct vlr_context *);
3824 static bool add_const_value_attribute (dw_die_ref, rtx);
3825 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3826 static void insert_wide_int (const wide_int &, unsigned char *, int);
3827 static void insert_float (const_rtx, unsigned char *);
3828 static rtx rtl_for_decl_location (tree);
3829 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3830 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3831 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3832 static void add_name_attribute (dw_die_ref, const char *);
3833 static void add_desc_attribute (dw_die_ref, tree);
3834 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3835 static void add_comp_dir_attribute (dw_die_ref);
3836 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3837 struct loc_descr_context *);
3838 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3839 struct loc_descr_context *);
3840 static void add_subscript_info (dw_die_ref, tree, bool);
3841 static void add_byte_size_attribute (dw_die_ref, tree);
3842 static void add_alignment_attribute (dw_die_ref, tree);
3843 static void add_bit_offset_attribute (dw_die_ref, tree);
3844 static void add_bit_size_attribute (dw_die_ref, tree);
3845 static void add_prototyped_attribute (dw_die_ref, tree);
3846 static void add_abstract_origin_attribute (dw_die_ref, tree);
3847 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3848 static void add_src_coords_attributes (dw_die_ref, tree);
3849 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3850 static void add_discr_value (dw_die_ref, dw_discr_value *);
3851 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3852 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3853 static dw_die_ref scope_die_for (tree, dw_die_ref);
3854 static inline int local_scope_p (dw_die_ref);
3855 static inline int class_scope_p (dw_die_ref);
3856 static inline int class_or_namespace_scope_p (dw_die_ref);
3857 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3858 static void add_calling_convention_attribute (dw_die_ref, tree);
3859 static const char *type_tag (const_tree);
3860 static tree member_declared_type (const_tree);
3861 #if 0
3862 static const char *decl_start_label (tree);
3863 #endif
3864 static void gen_array_type_die (tree, dw_die_ref);
3865 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3866 #if 0
3867 static void gen_entry_point_die (tree, dw_die_ref);
3868 #endif
3869 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3870 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3871 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3872 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3873 static void gen_formal_types_die (tree, dw_die_ref);
3874 static void gen_subprogram_die (tree, dw_die_ref);
3875 static void gen_variable_die (tree, tree, dw_die_ref);
3876 static void gen_const_die (tree, dw_die_ref);
3877 static void gen_label_die (tree, dw_die_ref);
3878 static void gen_lexical_block_die (tree, dw_die_ref);
3879 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3880 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3881 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3882 static dw_die_ref gen_compile_unit_die (const char *);
3883 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3884 static void gen_member_die (tree, dw_die_ref);
3885 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3886 enum debug_info_usage);
3887 static void gen_subroutine_type_die (tree, dw_die_ref);
3888 static void gen_typedef_die (tree, dw_die_ref);
3889 static void gen_type_die (tree, dw_die_ref);
3890 static void gen_block_die (tree, dw_die_ref);
3891 static void decls_for_scope (tree, dw_die_ref, bool = true);
3892 static bool is_naming_typedef_decl (const_tree);
3893 static inline dw_die_ref get_context_die (tree);
3894 static void gen_namespace_die (tree, dw_die_ref);
3895 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3896 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3897 static dw_die_ref force_decl_die (tree);
3898 static dw_die_ref force_type_die (tree);
3899 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3900 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3901 static struct dwarf_file_data * lookup_filename (const char *);
3902 static void retry_incomplete_types (void);
3903 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3904 static void gen_generic_params_dies (tree);
3905 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3906 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3907 static void splice_child_die (dw_die_ref, dw_die_ref);
3908 static int file_info_cmp (const void *, const void *);
3909 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3910 const char *, var_loc_view, const char *);
3911 static void output_loc_list (dw_loc_list_ref);
3912 static char *gen_internal_sym (const char *);
3913 static bool want_pubnames (void);
3914
3915 static void prune_unmark_dies (dw_die_ref);
3916 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3917 static void prune_unused_types_mark (dw_die_ref, int);
3918 static void prune_unused_types_walk (dw_die_ref);
3919 static void prune_unused_types_walk_attribs (dw_die_ref);
3920 static void prune_unused_types_prune (dw_die_ref);
3921 static void prune_unused_types (void);
3922 static int maybe_emit_file (struct dwarf_file_data *fd);
3923 static inline const char *AT_vms_delta1 (dw_attr_node *);
3924 static inline const char *AT_vms_delta2 (dw_attr_node *);
3925 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3926 const char *, const char *);
3927 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3928 static void gen_remaining_tmpl_value_param_die_attribute (void);
3929 static bool generic_type_p (tree);
3930 static void schedule_generic_params_dies_gen (tree t);
3931 static void gen_scheduled_generic_parms_dies (void);
3932 static void resolve_variable_values (void);
3933
3934 static const char *comp_dir_string (void);
3935
3936 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3937
3938 /* enum for tracking thread-local variables whose address is really an offset
3939 relative to the TLS pointer, which will need link-time relocation, but will
3940 not need relocation by the DWARF consumer. */
3941
3942 enum dtprel_bool
3943 {
3944 dtprel_false = 0,
3945 dtprel_true = 1
3946 };
3947
3948 /* Return the operator to use for an address of a variable. For dtprel_true, we
3949 use DW_OP_const*. For regular variables, which need both link-time
3950 relocation and consumer-level relocation (e.g., to account for shared objects
3951 loaded at a random address), we use DW_OP_addr*. */
3952
3953 static inline enum dwarf_location_atom
3954 dw_addr_op (enum dtprel_bool dtprel)
3955 {
3956 if (dtprel == dtprel_true)
3957 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3958 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3959 else
3960 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3961 }
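/* Summary of dw_addr_op's cases, for illustration only:
     dtprel_true,  split DWARF  -> dwarf_OP (DW_OP_constx)
                                   (typically the GNU const-index extension
                                   before DWARF 5)
     dtprel_true,  no split     -> DW_OP_const4u or DW_OP_const8u, by
                                   address size
     dtprel_false, split DWARF  -> dwarf_OP (DW_OP_addrx)
                                   (typically the GNU addr-index extension
                                   before DWARF 5)
     dtprel_false, no split     -> DW_OP_addr  */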
3962
3963 /* Return a pointer to a newly allocated address location description. If
3964 dwarf_split_debug_info is true, then record the address with the appropriate
3965 relocation. */
3966 static inline dw_loc_descr_ref
3967 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3968 {
3969 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3970
3971 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3972 ref->dw_loc_oprnd1.v.val_addr = addr;
3973 ref->dtprel = dtprel;
3974 if (dwarf_split_debug_info)
3975 ref->dw_loc_oprnd1.val_entry
3976 = add_addr_table_entry (addr,
3977 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3978 else
3979 ref->dw_loc_oprnd1.val_entry = NULL;
3980
3981 return ref;
3982 }
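/* For illustration (editorial note): for an ordinary global variable this
   yields a single operation, conceptually "DW_OP_addr <symbol>"; under split
   DWARF the operand instead refers to an entry in the .debug_addr table
   recorded via add_addr_table_entry above.  */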
3983
3984 /* Section names used to hold DWARF debugging information. */
3985
3986 #ifndef DEBUG_INFO_SECTION
3987 #define DEBUG_INFO_SECTION ".debug_info"
3988 #endif
3989 #ifndef DEBUG_DWO_INFO_SECTION
3990 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3991 #endif
3992 #ifndef DEBUG_LTO_INFO_SECTION
3993 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3994 #endif
3995 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3996 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3997 #endif
3998 #ifndef DEBUG_ABBREV_SECTION
3999 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
4000 #endif
4001 #ifndef DEBUG_LTO_ABBREV_SECTION
4002 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
4003 #endif
4004 #ifndef DEBUG_DWO_ABBREV_SECTION
4005 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
4006 #endif
4007 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
4008 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
4009 #endif
4010 #ifndef DEBUG_ARANGES_SECTION
4011 #define DEBUG_ARANGES_SECTION ".debug_aranges"
4012 #endif
4013 #ifndef DEBUG_ADDR_SECTION
4014 #define DEBUG_ADDR_SECTION ".debug_addr"
4015 #endif
4016 #ifndef DEBUG_MACINFO_SECTION
4017 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4018 #endif
4019 #ifndef DEBUG_LTO_MACINFO_SECTION
4020 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4021 #endif
4022 #ifndef DEBUG_DWO_MACINFO_SECTION
4023 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4024 #endif
4025 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4026 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4027 #endif
4028 #ifndef DEBUG_MACRO_SECTION
4029 #define DEBUG_MACRO_SECTION ".debug_macro"
4030 #endif
4031 #ifndef DEBUG_LTO_MACRO_SECTION
4032 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4033 #endif
4034 #ifndef DEBUG_DWO_MACRO_SECTION
4035 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4036 #endif
4037 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4038 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4039 #endif
4040 #ifndef DEBUG_LINE_SECTION
4041 #define DEBUG_LINE_SECTION ".debug_line"
4042 #endif
4043 #ifndef DEBUG_LTO_LINE_SECTION
4044 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4045 #endif
4046 #ifndef DEBUG_DWO_LINE_SECTION
4047 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4048 #endif
4049 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4050 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4051 #endif
4052 #ifndef DEBUG_LOC_SECTION
4053 #define DEBUG_LOC_SECTION ".debug_loc"
4054 #endif
4055 #ifndef DEBUG_DWO_LOC_SECTION
4056 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4057 #endif
4058 #ifndef DEBUG_LOCLISTS_SECTION
4059 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4060 #endif
4061 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4062 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4063 #endif
4064 #ifndef DEBUG_PUBNAMES_SECTION
4065 #define DEBUG_PUBNAMES_SECTION \
4066 ((debug_generate_pub_sections == 2) \
4067 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4068 #endif
4069 #ifndef DEBUG_PUBTYPES_SECTION
4070 #define DEBUG_PUBTYPES_SECTION \
4071 ((debug_generate_pub_sections == 2) \
4072 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4073 #endif
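/* Descriptive note (editorial): debug_generate_pub_sections == 2 selects the
   GNU variants of the pubnames/pubtypes sections, typically requested with
   -ggnu-pubnames and consumed by .gdb_index-aware tools.  */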
4074 #ifndef DEBUG_STR_OFFSETS_SECTION
4075 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4076 #endif
4077 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4078 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4079 #endif
4080 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4081 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4082 #endif
4083 #ifndef DEBUG_STR_SECTION
4084 #define DEBUG_STR_SECTION ".debug_str"
4085 #endif
4086 #ifndef DEBUG_LTO_STR_SECTION
4087 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4088 #endif
4089 #ifndef DEBUG_STR_DWO_SECTION
4090 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4091 #endif
4092 #ifndef DEBUG_LTO_STR_DWO_SECTION
4093 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4094 #endif
4095 #ifndef DEBUG_RANGES_SECTION
4096 #define DEBUG_RANGES_SECTION ".debug_ranges"
4097 #endif
4098 #ifndef DEBUG_RNGLISTS_SECTION
4099 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4100 #endif
4101 #ifndef DEBUG_LINE_STR_SECTION
4102 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4103 #endif
4104 #ifndef DEBUG_LTO_LINE_STR_SECTION
4105 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4106 #endif
4107
4108 /* Standard ELF section names for compiled code and data. */
4109 #ifndef TEXT_SECTION_NAME
4110 #define TEXT_SECTION_NAME ".text"
4111 #endif
4112
4113 /* Section flags for .debug_str section. */
4114 #define DEBUG_STR_SECTION_FLAGS \
4115 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4116 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4117 : SECTION_DEBUG)
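/* Editorial note: in the mergeable case the trailing "| 1" encodes the entity
   size (1-byte string elements) in the low bits of the section flags, which
   is what lets the linker merge identical debug strings.  */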
4118
4119 /* Section flags for .debug_str.dwo section. */
4120 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4121
4122 /* Attribute used to refer to the macro section. */
4123 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4124 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
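/* Editorial summary: DWARF 5 uses DW_AT_macros (paired with .debug_macro);
   with -gstrict-dwarf and an older DWARF version we fall back to
   DW_AT_macro_info (paired with .debug_macinfo); otherwise the GNU extension
   DW_AT_GNU_macros is used together with the .debug_macro layout.  */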
4125
4126 /* Labels we insert at the beginnings of sections, so we can reference
4127 them instead of the section names themselves. */
4128
4129 #ifndef TEXT_SECTION_LABEL
4130 #define TEXT_SECTION_LABEL "Ltext"
4131 #endif
4132 #ifndef COLD_TEXT_SECTION_LABEL
4133 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4134 #endif
4135 #ifndef DEBUG_LINE_SECTION_LABEL
4136 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4137 #endif
4138 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4139 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4140 #endif
4141 #ifndef DEBUG_INFO_SECTION_LABEL
4142 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4143 #endif
4144 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4145 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4146 #endif
4147 #ifndef DEBUG_ABBREV_SECTION_LABEL
4148 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4149 #endif
4150 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4151 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4152 #endif
4153 #ifndef DEBUG_ADDR_SECTION_LABEL
4154 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4155 #endif
4156 #ifndef DEBUG_LOC_SECTION_LABEL
4157 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4158 #endif
4159 #ifndef DEBUG_RANGES_SECTION_LABEL
4160 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4161 #endif
4162 #ifndef DEBUG_MACINFO_SECTION_LABEL
4163 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4164 #endif
4165 #ifndef DEBUG_MACRO_SECTION_LABEL
4166 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4167 #endif
4168 #define SKELETON_COMP_DIE_ABBREV 1
4169 #define SKELETON_TYPE_DIE_ABBREV 2
4170
4171 /* Definitions of defaults for formats and names of various special
4172 (artificial) labels which may be generated within this file (when the -g
4173 option is used and DWARF2_DEBUGGING_INFO is in effect).
4174 If necessary, these may be overridden from within the tm.h file, but
4175 typically, overriding these defaults is unnecessary. */
4176
4177 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4178 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4179 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4180 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4181 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4182 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4183 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4184 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4185 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4186 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4187 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4188 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4189 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4190 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4191 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4192
4193 #ifndef TEXT_END_LABEL
4194 #define TEXT_END_LABEL "Letext"
4195 #endif
4196 #ifndef COLD_END_LABEL
4197 #define COLD_END_LABEL "Letext_cold"
4198 #endif
4199 #ifndef BLOCK_BEGIN_LABEL
4200 #define BLOCK_BEGIN_LABEL "LBB"
4201 #endif
4202 #ifndef BLOCK_INLINE_ENTRY_LABEL
4203 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4204 #endif
4205 #ifndef BLOCK_END_LABEL
4206 #define BLOCK_END_LABEL "LBE"
4207 #endif
4208 #ifndef LINE_CODE_LABEL
4209 #define LINE_CODE_LABEL "LM"
4210 #endif
4211
4212 \f
4213 /* Return the root of the DIE's built for the current compilation unit. */
4214 static dw_die_ref
4215 comp_unit_die (void)
4216 {
4217 if (!single_comp_unit_die)
4218 single_comp_unit_die = gen_compile_unit_die (NULL);
4219 return single_comp_unit_die;
4220 }
4221
4222 /* We allow a language front-end to designate a function that is to be
4223 called to "demangle" any name before it is put into a DIE. */
4224
4225 static const char *(*demangle_name_func) (const char *);
4226
4227 void
4228 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4229 {
4230 demangle_name_func = func;
4231 }
4232
4233 /* Test if rtl node points to a pseudo register. */
4234
4235 static inline int
4236 is_pseudo_reg (const_rtx rtl)
4237 {
4238 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4239 || (GET_CODE (rtl) == SUBREG
4240 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4241 }
4242
4243 /* Return a reference to a type, with its const and volatile qualifiers
4244 removed. */
4245
4246 static inline tree
4247 type_main_variant (tree type)
4248 {
4249 type = TYPE_MAIN_VARIANT (type);
4250
4251 /* ??? There really should be only one main variant among any group of
4252 variants of a given type (and all of the MAIN_VARIANT values for all
4253 members of the group should point to that one type) but sometimes the C
4254 front-end messes this up for array types, so we work around that bug
4255 here. */
4256 if (TREE_CODE (type) == ARRAY_TYPE)
4257 while (type != TYPE_MAIN_VARIANT (type))
4258 type = TYPE_MAIN_VARIANT (type);
4259
4260 return type;
4261 }
4262
4263 /* Return nonzero if the given type node represents a tagged type. */
4264
4265 static inline int
4266 is_tagged_type (const_tree type)
4267 {
4268 enum tree_code code = TREE_CODE (type);
4269
4270 return (code == RECORD_TYPE || code == UNION_TYPE
4271 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4272 }
4273
4274 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4275
4276 static void
4277 get_ref_die_offset_label (char *label, dw_die_ref ref)
4278 {
4279 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4280 }
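/* Illustrative example (editorial; the exact label spelling is target
   dependent): this produces a "section label + decimal offset" expression,
   conceptually something like "Ldebug_info0+1234", which the assembler
   resolves to the referenced DIE's position within .debug_info.  */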
4281
4282 /* Return die_offset of a DIE reference to a base type. */
4283
4284 static unsigned long int
4285 get_base_type_offset (dw_die_ref ref)
4286 {
4287 if (ref->die_offset)
4288 return ref->die_offset;
4289 if (comp_unit_die ()->die_abbrev)
4290 {
4291 calc_base_type_die_sizes ();
4292 gcc_assert (ref->die_offset);
4293 }
4294 return ref->die_offset;
4295 }
4296
4297 /* Return die_offset of a DIE reference other than base type. */
4298
4299 static unsigned long int
4300 get_ref_die_offset (dw_die_ref ref)
4301 {
4302 gcc_assert (ref->die_offset);
4303 return ref->die_offset;
4304 }
4305
4306 /* Convert a DIE tag into its string name. */
4307
4308 static const char *
4309 dwarf_tag_name (unsigned int tag)
4310 {
4311 const char *name = get_DW_TAG_name (tag);
4312
4313 if (name != NULL)
4314 return name;
4315
4316 return "DW_TAG_<unknown>";
4317 }
4318
4319 /* Convert a DWARF attribute code into its string name. */
4320
4321 static const char *
4322 dwarf_attr_name (unsigned int attr)
4323 {
4324 const char *name;
4325
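/* Editorial note: the cases below are special-cased because the HP (VMS) and
   MIPS vendor attribute extensions reuse the same numeric attribute codes,
   so a single switch cannot list both names; the configuration decides which
   name is reported.  */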
4326 switch (attr)
4327 {
4328 #if VMS_DEBUGGING_INFO
4329 case DW_AT_HP_prologue:
4330 return "DW_AT_HP_prologue";
4331 #else
4332 case DW_AT_MIPS_loop_unroll_factor:
4333 return "DW_AT_MIPS_loop_unroll_factor";
4334 #endif
4335
4336 #if VMS_DEBUGGING_INFO
4337 case DW_AT_HP_epilogue:
4338 return "DW_AT_HP_epilogue";
4339 #else
4340 case DW_AT_MIPS_stride:
4341 return "DW_AT_MIPS_stride";
4342 #endif
4343 }
4344
4345 name = get_DW_AT_name (attr);
4346
4347 if (name != NULL)
4348 return name;
4349
4350 return "DW_AT_<unknown>";
4351 }
4352
4353 /* Convert a DWARF value form code into its string name. */
4354
4355 static const char *
4356 dwarf_form_name (unsigned int form)
4357 {
4358 const char *name = get_DW_FORM_name (form);
4359
4360 if (name != NULL)
4361 return name;
4362
4363 return "DW_FORM_<unknown>";
4364 }
4365 \f
4366 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4367 instance of an inlined instance of a decl which is local to an inline
4368 function, so we have to trace all of the way back through the origin chain
4369 to find out what sort of node actually served as the original seed for the
4370 given block. */
4371
4372 static tree
4373 decl_ultimate_origin (const_tree decl)
4374 {
4375 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4376 return NULL_TREE;
4377
4378 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4379 we're trying to output the abstract instance of this function. */
4380 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4381 return NULL_TREE;
4382
4383 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4384 most distant ancestor, this should never happen. */
4385 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4386
4387 return DECL_ABSTRACT_ORIGIN (decl);
4388 }
4389
4390 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4391 of a virtual function may refer to a base class, so we check the 'this'
4392 parameter. */
4393
4394 static tree
4395 decl_class_context (tree decl)
4396 {
4397 tree context = NULL_TREE;
4398
4399 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4400 context = DECL_CONTEXT (decl);
4401 else
4402 context = TYPE_MAIN_VARIANT
4403 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4404
4405 if (context && !TYPE_P (context))
4406 context = NULL_TREE;
4407
4408 return context;
4409 }
4410 \f
4411 /* Add an attribute/value pair to a DIE. */
4412
4413 static inline void
4414 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4415 {
4416 /* Maybe this should be an assert? */
4417 if (die == NULL)
4418 return;
4419
4420 if (flag_checking)
4421 {
4422 /* Check we do not add duplicate attrs. Can't use get_AT here
4423 because that recurses to the specification/abstract origin DIE. */
4424 dw_attr_node *a;
4425 unsigned ix;
4426 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4427 gcc_assert (a->dw_attr != attr->dw_attr);
4428 }
4429
4430 vec_safe_reserve (die->die_attr, 1);
4431 vec_safe_push (die->die_attr, *attr);
4432 }
4433
4434 static inline enum dw_val_class
4435 AT_class (dw_attr_node *a)
4436 {
4437 return a->dw_attr_val.val_class;
4438 }
4439
4440 /* Return the index for any attribute that will be referenced with a
4441 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4442 indices are stored in dw_attr_val.v.val_str for reference counting
4443 pruning. */
4444
4445 static inline unsigned int
4446 AT_index (dw_attr_node *a)
4447 {
4448 if (AT_class (a) == dw_val_class_str)
4449 return a->dw_attr_val.v.val_str->index;
4450 else if (a->dw_attr_val.val_entry != NULL)
4451 return a->dw_attr_val.val_entry->index;
4452 return NOT_INDEXED;
4453 }
4454
4455 /* Add a flag value attribute to a DIE. */
4456
4457 static inline void
4458 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4459 {
4460 dw_attr_node attr;
4461
4462 attr.dw_attr = attr_kind;
4463 attr.dw_attr_val.val_class = dw_val_class_flag;
4464 attr.dw_attr_val.val_entry = NULL;
4465 attr.dw_attr_val.v.val_flag = flag;
4466 add_dwarf_attr (die, &attr);
4467 }
4468
4469 static inline unsigned
4470 AT_flag (dw_attr_node *a)
4471 {
4472 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4473 return a->dw_attr_val.v.val_flag;
4474 }
4475
4476 /* Add a signed integer attribute value to a DIE. */
4477
4478 static inline void
4479 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4480 {
4481 dw_attr_node attr;
4482
4483 attr.dw_attr = attr_kind;
4484 attr.dw_attr_val.val_class = dw_val_class_const;
4485 attr.dw_attr_val.val_entry = NULL;
4486 attr.dw_attr_val.v.val_int = int_val;
4487 add_dwarf_attr (die, &attr);
4488 }
4489
4490 static inline HOST_WIDE_INT
4491 AT_int (dw_attr_node *a)
4492 {
4493 gcc_assert (a && (AT_class (a) == dw_val_class_const
4494 || AT_class (a) == dw_val_class_const_implicit));
4495 return a->dw_attr_val.v.val_int;
4496 }
4497
4498 /* Add an unsigned integer attribute value to a DIE. */
4499
4500 static inline void
4501 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4502 unsigned HOST_WIDE_INT unsigned_val)
4503 {
4504 dw_attr_node attr;
4505
4506 attr.dw_attr = attr_kind;
4507 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4508 attr.dw_attr_val.val_entry = NULL;
4509 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4510 add_dwarf_attr (die, &attr);
4511 }
4512
4513 static inline unsigned HOST_WIDE_INT
4514 AT_unsigned (dw_attr_node *a)
4515 {
4516 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4517 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4518 return a->dw_attr_val.v.val_unsigned;
4519 }
4520
4521 /* Add an unsigned wide integer attribute value to a DIE. */
4522
4523 static inline void
4524 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4525 const wide_int& w)
4526 {
4527 dw_attr_node attr;
4528
4529 attr.dw_attr = attr_kind;
4530 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4531 attr.dw_attr_val.val_entry = NULL;
4532 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4533 *attr.dw_attr_val.v.val_wide = w;
4534 add_dwarf_attr (die, &attr);
4535 }
4536
4537 /* Add an unsigned double integer attribute value to a DIE. */
4538
4539 static inline void
4540 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4541 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4542 {
4543 dw_attr_node attr;
4544
4545 attr.dw_attr = attr_kind;
4546 attr.dw_attr_val.val_class = dw_val_class_const_double;
4547 attr.dw_attr_val.val_entry = NULL;
4548 attr.dw_attr_val.v.val_double.high = high;
4549 attr.dw_attr_val.v.val_double.low = low;
4550 add_dwarf_attr (die, &attr);
4551 }
4552
4553 /* Add a vector of raw data bytes (e.g. a floating point constant) as an attribute value to a DIE. */
4554
4555 static inline void
4556 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4557 unsigned int length, unsigned int elt_size, unsigned char *array)
4558 {
4559 dw_attr_node attr;
4560
4561 attr.dw_attr = attr_kind;
4562 attr.dw_attr_val.val_class = dw_val_class_vec;
4563 attr.dw_attr_val.val_entry = NULL;
4564 attr.dw_attr_val.v.val_vec.length = length;
4565 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4566 attr.dw_attr_val.v.val_vec.array = array;
4567 add_dwarf_attr (die, &attr);
4568 }
4569
4570 /* Add an 8-byte data attribute value to a DIE. */
4571
4572 static inline void
4573 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4574 unsigned char data8[8])
4575 {
4576 dw_attr_node attr;
4577
4578 attr.dw_attr = attr_kind;
4579 attr.dw_attr_val.val_class = dw_val_class_data8;
4580 attr.dw_attr_val.val_entry = NULL;
4581 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4582 add_dwarf_attr (die, &attr);
4583 }
4584
4585 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4586 dwarf_split_debug_info, address attributes in dies destined for the
4587 final executable have force_direct set to avoid using indexed
4588 references. */
4589
4590 static inline void
4591 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4592 bool force_direct)
4593 {
4594 dw_attr_node attr;
4595 char * lbl_id;
4596
4597 lbl_id = xstrdup (lbl_low);
4598 attr.dw_attr = DW_AT_low_pc;
4599 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4600 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4601 if (dwarf_split_debug_info && !force_direct)
4602 attr.dw_attr_val.val_entry
4603 = add_addr_table_entry (lbl_id, ate_kind_label);
4604 else
4605 attr.dw_attr_val.val_entry = NULL;
4606 add_dwarf_attr (die, &attr);
4607
4608 attr.dw_attr = DW_AT_high_pc;
4609 if (dwarf_version < 4)
4610 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4611 else
4612 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4613 lbl_id = xstrdup (lbl_high);
4614 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4615 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4616 && dwarf_split_debug_info && !force_direct)
4617 attr.dw_attr_val.val_entry
4618 = add_addr_table_entry (lbl_id, ate_kind_label);
4619 else
4620 attr.dw_attr_val.val_entry = NULL;
4621 add_dwarf_attr (die, &attr);
4622 }
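/* Descriptive note (editorial): for dwarf_version >= 4 the high-PC value is
   stored with dw_val_class_high_pc, which permits emitting DW_AT_high_pc as
   a constant offset from DW_AT_low_pc instead of a second relocated address,
   as DWARF 4 and later allow.  */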
4623
4624 /* Hash and equality functions for debug_str_hash. */
4625
4626 hashval_t
4627 indirect_string_hasher::hash (indirect_string_node *x)
4628 {
4629 return htab_hash_string (x->str);
4630 }
4631
4632 bool
4633 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4634 {
4635 return strcmp (x1->str, x2) == 0;
4636 }
4637
4638 /* Add STR to the given string hash table. */
4639
4640 static struct indirect_string_node *
4641 find_AT_string_in_table (const char *str,
4642 hash_table<indirect_string_hasher> *table,
4643 enum insert_option insert = INSERT)
4644 {
4645 struct indirect_string_node *node;
4646
4647 indirect_string_node **slot
4648 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4649 if (*slot == NULL)
4650 {
4651 node = ggc_cleared_alloc<indirect_string_node> ();
4652 node->str = ggc_strdup (str);
4653 *slot = node;
4654 }
4655 else
4656 node = *slot;
4657
4658 node->refcount++;
4659 return node;
4660 }
4661
4662 /* Add STR to the indirect string hash table. */
4663
4664 static struct indirect_string_node *
4665 find_AT_string (const char *str, enum insert_option insert = INSERT)
4666 {
4667 if (! debug_str_hash)
4668 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4669
4670 return find_AT_string_in_table (str, debug_str_hash, insert);
4671 }
4672
4673 /* Add a string attribute value to a DIE. */
4674
4675 static inline void
4676 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4677 {
4678 dw_attr_node attr;
4679 struct indirect_string_node *node;
4680
4681 node = find_AT_string (str);
4682
4683 attr.dw_attr = attr_kind;
4684 attr.dw_attr_val.val_class = dw_val_class_str;
4685 attr.dw_attr_val.val_entry = NULL;
4686 attr.dw_attr_val.v.val_str = node;
4687 add_dwarf_attr (die, &attr);
4688 }
4689
4690 static inline const char *
4691 AT_string (dw_attr_node *a)
4692 {
4693 gcc_assert (a && AT_class (a) == dw_val_class_str);
4694 return a->dw_attr_val.v.val_str->str;
4695 }
4696
4697 /* Call this function directly to bypass AT_string_form's logic to put
4698 the string inline in the die. */
4699
4700 static void
4701 set_indirect_string (struct indirect_string_node *node)
4702 {
4703 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4704 /* Already indirect is a no op. */
4705 if (node->form == DW_FORM_strp
4706 || node->form == DW_FORM_line_strp
4707 || node->form == dwarf_FORM (DW_FORM_strx))
4708 {
4709 gcc_assert (node->label);
4710 return;
4711 }
4712 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4713 ++dw2_string_counter;
4714 node->label = xstrdup (label);
4715
4716 if (!dwarf_split_debug_info)
4717 {
4718 node->form = DW_FORM_strp;
4719 node->index = NOT_INDEXED;
4720 }
4721 else
4722 {
4723 node->form = dwarf_FORM (DW_FORM_strx);
4724 node->index = NO_INDEX_ASSIGNED;
4725 }
4726 }
4727
4728 /* A helper function for dwarf2out_finish, called to reset indirect
4729 string decisions done for early LTO dwarf output before fat object
4730 dwarf output. */
4731
4732 int
4733 reset_indirect_string (indirect_string_node **h, void *)
4734 {
4735 struct indirect_string_node *node = *h;
4736 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4737 {
4738 free (node->label);
4739 node->label = NULL;
4740 node->form = (dwarf_form) 0;
4741 node->index = 0;
4742 }
4743 return 1;
4744 }
4745
4746 /* Add a string representing a file or filepath attribute value to a DIE. */
4747
4748 static inline void
4749 add_filepath_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
4750 const char *str)
4751 {
4752 if (! asm_outputs_debug_line_str ())
4753 add_AT_string (die, attr_kind, str);
4754 else
4755 {
4756 dw_attr_node attr;
4757 struct indirect_string_node *node;
4758
4759 if (!debug_line_str_hash)
4760 debug_line_str_hash
4761 = hash_table<indirect_string_hasher>::create_ggc (10);
4762
4763 node = find_AT_string_in_table (str, debug_line_str_hash);
4764 set_indirect_string (node);
4765 node->form = DW_FORM_line_strp;
4766
4767 attr.dw_attr = attr_kind;
4768 attr.dw_attr_val.val_class = dw_val_class_str;
4769 attr.dw_attr_val.val_entry = NULL;
4770 attr.dw_attr_val.v.val_str = node;
4771 add_dwarf_attr (die, &attr);
4772 }
4773 }
4774
4775 /* Find out whether a string should be output inline in DIE
4776 or out-of-line in .debug_str section. */
4777
4778 static enum dwarf_form
4779 find_string_form (struct indirect_string_node *node)
4780 {
4781 unsigned int len;
4782
4783 if (node->form)
4784 return node->form;
4785
4786 len = strlen (node->str) + 1;
4787
4788 /* If the string is no longer than the size of a reference to it, it is
4789 always better to put it inline. */
4790 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4791 return node->form = DW_FORM_string;
4792
4793 /* If we cannot expect the linker to merge strings in .debug_str
4794 section, only put it into .debug_str if it is worth even in this
4795 single module. */
4796 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4797 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4798 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4799 return node->form = DW_FORM_string;
4800
4801 set_indirect_string (node);
4802
4803 return node->form;
4804 }
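/* Worked example (editorial): with 4-byte DWARF offsets, any string whose
   length including the terminating NUL is at most 4 bytes is emitted inline
   as DW_FORM_string; a longer string that is referenced more than once is
   normally moved out of line to .debug_str when the target supports it.  */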
4805
4806 /* Find out whether the string referenced from the attribute should be
4807 output inline in DIE or out-of-line in .debug_str section. */
4808
4809 static enum dwarf_form
4810 AT_string_form (dw_attr_node *a)
4811 {
4812 gcc_assert (a && AT_class (a) == dw_val_class_str);
4813 return find_string_form (a->dw_attr_val.v.val_str);
4814 }
4815
4816 /* Add a DIE reference attribute value to a DIE. */
4817
4818 static inline void
4819 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4820 {
4821 dw_attr_node attr;
4822 gcc_checking_assert (targ_die != NULL);
4823
4824 /* With LTO we can end up trying to reference something we didn't create
4825 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4826 if (targ_die == NULL)
4827 return;
4828
4829 attr.dw_attr = attr_kind;
4830 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4831 attr.dw_attr_val.val_entry = NULL;
4832 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4833 attr.dw_attr_val.v.val_die_ref.external = 0;
4834 add_dwarf_attr (die, &attr);
4835 }
4836
4837 /* Change DIE reference REF to point to NEW_DIE instead. */
4838
4839 static inline void
4840 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4841 {
4842 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4843 ref->dw_attr_val.v.val_die_ref.die = new_die;
4844 ref->dw_attr_val.v.val_die_ref.external = 0;
4845 }
4846
4847 /* Add an AT_specification attribute to a DIE, and also make the back
4848 pointer from the specification to the definition. */
4849
4850 static inline void
4851 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4852 {
4853 add_AT_die_ref (die, DW_AT_specification, targ_die);
4854 gcc_assert (!targ_die->die_definition);
4855 targ_die->die_definition = die;
4856 }
4857
4858 static inline dw_die_ref
4859 AT_ref (dw_attr_node *a)
4860 {
4861 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4862 return a->dw_attr_val.v.val_die_ref.die;
4863 }
4864
4865 static inline int
4866 AT_ref_external (dw_attr_node *a)
4867 {
4868 if (a && AT_class (a) == dw_val_class_die_ref)
4869 return a->dw_attr_val.v.val_die_ref.external;
4870
4871 return 0;
4872 }
4873
4874 static inline void
4875 set_AT_ref_external (dw_attr_node *a, int i)
4876 {
4877 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4878 a->dw_attr_val.v.val_die_ref.external = i;
4879 }
4880
4881 /* Add a location description attribute value to a DIE. */
4882
4883 static inline void
4884 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4885 {
4886 dw_attr_node attr;
4887
4888 attr.dw_attr = attr_kind;
4889 attr.dw_attr_val.val_class = dw_val_class_loc;
4890 attr.dw_attr_val.val_entry = NULL;
4891 attr.dw_attr_val.v.val_loc = loc;
4892 add_dwarf_attr (die, &attr);
4893 }
4894
4895 static inline dw_loc_descr_ref
4896 AT_loc (dw_attr_node *a)
4897 {
4898 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4899 return a->dw_attr_val.v.val_loc;
4900 }
4901
4902 static inline void
4903 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4904 {
4905 dw_attr_node attr;
4906
4907 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4908 return;
4909
4910 attr.dw_attr = attr_kind;
4911 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4912 attr.dw_attr_val.val_entry = NULL;
4913 attr.dw_attr_val.v.val_loc_list = loc_list;
4914 add_dwarf_attr (die, &attr);
4915 have_location_lists = true;
4916 }
4917
4918 static inline dw_loc_list_ref
4919 AT_loc_list (dw_attr_node *a)
4920 {
4921 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4922 return a->dw_attr_val.v.val_loc_list;
4923 }
4924
4925 /* Add a view list attribute to DIE. It must have a DW_AT_location
4926 attribute, because the view list complements the location list. */
4927
4928 static inline void
4929 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4930 {
4931 dw_attr_node attr;
4932
4933 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4934 return;
4935
4936 attr.dw_attr = attr_kind;
4937 attr.dw_attr_val.val_class = dw_val_class_view_list;
4938 attr.dw_attr_val.val_entry = NULL;
4939 attr.dw_attr_val.v.val_view_list = die;
4940 add_dwarf_attr (die, &attr);
4941 gcc_checking_assert (get_AT (die, DW_AT_location));
4942 gcc_assert (have_location_lists);
4943 }
4944
4945 /* Return a pointer to the location list referenced by the attribute.
4946 If the named attribute is a view list, look up the corresponding
4947 DW_AT_location attribute and return its location list. */
4948
4949 static inline dw_loc_list_ref *
4950 AT_loc_list_ptr (dw_attr_node *a)
4951 {
4952 gcc_assert (a);
4953 switch (AT_class (a))
4954 {
4955 case dw_val_class_loc_list:
4956 return &a->dw_attr_val.v.val_loc_list;
4957 case dw_val_class_view_list:
4958 {
4959 dw_attr_node *l;
4960 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4961 if (!l)
4962 return NULL;
4963 gcc_checking_assert (l + 1 == a);
4964 return AT_loc_list_ptr (l);
4965 }
4966 default:
4967 gcc_unreachable ();
4968 }
4969 }
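/* Descriptive note (editorial): the "l + 1 == a" check above relies on the
   convention that a view-list attribute is added to a DIE immediately after
   its DW_AT_location attribute, so the two dw_attr_nodes sit next to each
   other in the DIE's attribute vector.  */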
4970
4971 /* Return the location attribute value associated with a view list
4972 attribute value. */
4973
4974 static inline dw_val_node *
4975 view_list_to_loc_list_val_node (dw_val_node *val)
4976 {
4977 gcc_assert (val->val_class == dw_val_class_view_list);
4978 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4979 if (!loc)
4980 return NULL;
4981 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4982 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4983 return &loc->dw_attr_val;
4984 }
4985
4986 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4987 {
4988 static hashval_t hash (addr_table_entry *);
4989 static bool equal (addr_table_entry *, addr_table_entry *);
4990 };
4991
4992 /* Table of entries into the .debug_addr section. */
4993
4994 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4995
4996 /* Hash an address_table_entry. */
4997
4998 hashval_t
4999 addr_hasher::hash (addr_table_entry *a)
5000 {
5001 inchash::hash hstate;
5002 switch (a->kind)
5003 {
5004 case ate_kind_rtx:
5005 hstate.add_int (0);
5006 break;
5007 case ate_kind_rtx_dtprel:
5008 hstate.add_int (1);
5009 break;
5010 case ate_kind_label:
5011 return htab_hash_string (a->addr.label);
5012 default:
5013 gcc_unreachable ();
5014 }
5015 inchash::add_rtx (a->addr.rtl, hstate);
5016 return hstate.end ();
5017 }
5018
5019 /* Determine equality for two address_table_entries. */
5020
5021 bool
5022 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
5023 {
5024 if (a1->kind != a2->kind)
5025 return 0;
5026 switch (a1->kind)
5027 {
5028 case ate_kind_rtx:
5029 case ate_kind_rtx_dtprel:
5030 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
5031 case ate_kind_label:
5032 return strcmp (a1->addr.label, a2->addr.label) == 0;
5033 default:
5034 gcc_unreachable ();
5035 }
5036 }
5037
5038 /* Initialize an addr_table_entry. */
5039
5040 void
5041 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5042 {
5043 e->kind = kind;
5044 switch (kind)
5045 {
5046 case ate_kind_rtx:
5047 case ate_kind_rtx_dtprel:
5048 e->addr.rtl = (rtx) addr;
5049 break;
5050 case ate_kind_label:
5051 e->addr.label = (char *) addr;
5052 break;
5053 }
5054 e->refcount = 0;
5055 e->index = NO_INDEX_ASSIGNED;
5056 }
5057
5058 /* Add an address table entry for ADDR of the given kind to the table.
5059 Defer setting an index until output time. */
5060
5061 static addr_table_entry *
5062 add_addr_table_entry (void *addr, enum ate_kind kind)
5063 {
5064 addr_table_entry *node;
5065 addr_table_entry finder;
5066
5067 gcc_assert (dwarf_split_debug_info);
5068 if (! addr_index_table)
5069 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5070 init_addr_table_entry (&finder, kind, addr);
5071 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5072
5073 if (*slot == HTAB_EMPTY_ENTRY)
5074 {
5075 node = ggc_cleared_alloc<addr_table_entry> ();
5076 init_addr_table_entry (node, kind, addr);
5077 *slot = node;
5078 }
5079 else
5080 node = *slot;
5081
5082 node->refcount++;
5083 return node;
5084 }
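/* Usage sketch (illustrative only, assuming -gsplit-dwarf so that
   dwarf_split_debug_info is set): an attribute that wants an indirect
   .debug_addr reference interns its address or label here and keeps the
   returned entry in its dw_val_node, e.g.

       attr.dw_attr_val.val_entry
         = add_addr_table_entry (addr, ate_kind_rtx);

   Entries are reference counted: dropping the attribute later calls
   remove_addr_table_entry on the same entry, and only entries whose
   refcount is still positive are given an index by
   index_addr_table_entry at output time.  */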
5085
5086 /* Remove an entry from the addr table by decrementing its refcount.
5087 Strictly, decrementing the refcount would be enough, but the
5088 assertion that the entry is actually in the table has found
5089 bugs. */
5090
5091 static void
5092 remove_addr_table_entry (addr_table_entry *entry)
5093 {
5094 gcc_assert (dwarf_split_debug_info && addr_index_table);
5095 /* After an index is assigned, the table is frozen. */
5096 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5097 entry->refcount--;
5098 }
5099
5100 /* Given a location list, remove all addresses it refers to from the
5101 address_table. */
5102
5103 static void
5104 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5105 {
5106 for (; descr; descr = descr->dw_loc_next)
5107 if (descr->dw_loc_oprnd1.val_entry != NULL)
5108 {
5109 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5110 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5111 }
5112 }
5113
5114 /* A helper function for dwarf2out_finish called through
5115 htab_traverse. Assign an addr_table_entry its index. All entries
5116 must be collected into the table when this function is called,
5117 because the indexing code relies on htab_traverse to traverse nodes
5118 in the same order for each run. */
5119
5120 int
5121 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5122 {
5123 addr_table_entry *node = *h;
5124
5125 /* Don't index unreferenced nodes. */
5126 if (node->refcount == 0)
5127 return 1;
5128
5129 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5130 node->index = *index;
5131 *index += 1;
5132
5133 return 1;
5134 }
5135
5136 /* Add an address constant attribute value to a DIE. When using
5137 dwarf_split_debug_info, address attributes in dies destined for the
5138 final executable should be direct references--setting the parameter
5139 force_direct ensures this behavior. */
5140
5141 static inline void
5142 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5143 bool force_direct)
5144 {
5145 dw_attr_node attr;
5146
5147 attr.dw_attr = attr_kind;
5148 attr.dw_attr_val.val_class = dw_val_class_addr;
5149 attr.dw_attr_val.v.val_addr = addr;
5150 if (dwarf_split_debug_info && !force_direct)
5151 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5152 else
5153 attr.dw_attr_val.val_entry = NULL;
5154 add_dwarf_attr (die, &attr);
5155 }
5156
5157 /* Get the RTX from an address DIE attribute. */
5158
5159 static inline rtx
5160 AT_addr (dw_attr_node *a)
5161 {
5162 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5163 return a->dw_attr_val.v.val_addr;
5164 }
5165
5166 /* Add a file attribute value to a DIE. */
5167
5168 static inline void
5169 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5170 struct dwarf_file_data *fd)
5171 {
5172 dw_attr_node attr;
5173
5174 attr.dw_attr = attr_kind;
5175 attr.dw_attr_val.val_class = dw_val_class_file;
5176 attr.dw_attr_val.val_entry = NULL;
5177 attr.dw_attr_val.v.val_file = fd;
5178 add_dwarf_attr (die, &attr);
5179 }
5180
5181 /* Get the dwarf_file_data from a file DIE attribute. */
5182
5183 static inline struct dwarf_file_data *
5184 AT_file (dw_attr_node *a)
5185 {
5186 gcc_assert (a && (AT_class (a) == dw_val_class_file
5187 || AT_class (a) == dw_val_class_file_implicit));
5188 return a->dw_attr_val.v.val_file;
5189 }
5190
5191 /* Add a vms delta attribute value to a DIE. */
5192
5193 static inline void
5194 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5195 const char *lbl1, const char *lbl2)
5196 {
5197 dw_attr_node attr;
5198
5199 attr.dw_attr = attr_kind;
5200 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5201 attr.dw_attr_val.val_entry = NULL;
5202 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5203 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5204 add_dwarf_attr (die, &attr);
5205 }
5206
5207 /* Add a symbolic view identifier attribute value to a DIE. */
5208
5209 static inline void
5210 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5211 const char *view_label)
5212 {
5213 dw_attr_node attr;
5214
5215 attr.dw_attr = attr_kind;
5216 attr.dw_attr_val.val_class = dw_val_class_symview;
5217 attr.dw_attr_val.val_entry = NULL;
5218 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5219 add_dwarf_attr (die, &attr);
5220 }
5221
5222 /* Add a label identifier attribute value to a DIE. */
5223
5224 static inline void
5225 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5226 const char *lbl_id)
5227 {
5228 dw_attr_node attr;
5229
5230 attr.dw_attr = attr_kind;
5231 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5232 attr.dw_attr_val.val_entry = NULL;
5233 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5234 if (dwarf_split_debug_info)
5235 attr.dw_attr_val.val_entry
5236 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5237 ate_kind_label);
5238 add_dwarf_attr (die, &attr);
5239 }
5240
5241 /* Add a section offset attribute value to a DIE, an offset into the
5242 debug_line section. */
5243
5244 static inline void
5245 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5246 const char *label)
5247 {
5248 dw_attr_node attr;
5249
5250 attr.dw_attr = attr_kind;
5251 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5252 attr.dw_attr_val.val_entry = NULL;
5253 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5254 add_dwarf_attr (die, &attr);
5255 }
5256
5257 /* Add a section offset attribute value to a DIE, an offset into the
5258 debug_macinfo section. */
5259
5260 static inline void
5261 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5262 const char *label)
5263 {
5264 dw_attr_node attr;
5265
5266 attr.dw_attr = attr_kind;
5267 attr.dw_attr_val.val_class = dw_val_class_macptr;
5268 attr.dw_attr_val.val_entry = NULL;
5269 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5270 add_dwarf_attr (die, &attr);
5271 }
5272
5273 /* Add a range_list attribute value to a DIE. When using
5274 dwarf_split_debug_info, address attributes in dies destined for the
5275 final executable should be direct references--setting the parameter
5276 force_direct ensures this behavior. */
5277
5278 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5279 #define RELOCATED_OFFSET (NULL)
5280
5281 static void
5282 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5283 long unsigned int offset, bool force_direct)
5284 {
5285 dw_attr_node attr;
5286
5287 attr.dw_attr = attr_kind;
5288 attr.dw_attr_val.val_class = dw_val_class_range_list;
5289 /* For the range_list attribute, use val_entry to store whether the
5290 offset should follow split-debug-info or normal semantics. This
5291 value is read in output_range_list_offset. */
5292 if (dwarf_split_debug_info && !force_direct)
5293 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5294 else
5295 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5296 attr.dw_attr_val.v.val_offset = offset;
5297 add_dwarf_attr (die, &attr);
5298 }
5299
5300 /* Return the start label of a delta attribute. */
5301
5302 static inline const char *
5303 AT_vms_delta1 (dw_attr_node *a)
5304 {
5305 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5306 return a->dw_attr_val.v.val_vms_delta.lbl1;
5307 }
5308
5309 /* Return the end label of a delta attribute. */
5310
5311 static inline const char *
5312 AT_vms_delta2 (dw_attr_node *a)
5313 {
5314 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5315 return a->dw_attr_val.v.val_vms_delta.lbl2;
5316 }
5317
5318 static inline const char *
5319 AT_lbl (dw_attr_node *a)
5320 {
5321 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5322 || AT_class (a) == dw_val_class_lineptr
5323 || AT_class (a) == dw_val_class_macptr
5324 || AT_class (a) == dw_val_class_loclistsptr
5325 || AT_class (a) == dw_val_class_high_pc));
5326 return a->dw_attr_val.v.val_lbl_id;
5327 }
5328
5329 /* Get the attribute of type attr_kind. */
5330
5331 static dw_attr_node *
5332 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5333 {
5334 dw_attr_node *a;
5335 unsigned ix;
5336 dw_die_ref spec = NULL;
5337
5338 if (! die)
5339 return NULL;
5340
5341 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5342 if (a->dw_attr == attr_kind)
5343 return a;
5344 else if (a->dw_attr == DW_AT_specification
5345 || a->dw_attr == DW_AT_abstract_origin)
5346 spec = AT_ref (a);
5347
5348 if (spec)
5349 return get_AT (spec, attr_kind);
5350
5351 return NULL;
5352 }
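/* Example of the indirection handled above (illustrative only): for a
   C++ member function

       struct S { void f (); };
       void S::f () { }

   the DIE of the out-of-class definition carries DW_AT_specification
   pointing at the in-class declaration DIE, so a query such as

       get_AT (definition_die, DW_AT_name)

   that finds no DW_AT_name on the definition itself follows the
   specification (or DW_AT_abstract_origin) link and returns the "f"
   name attribute of the declaration instead.  */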
5353
5354 /* Returns the parent of the declaration of DIE. */
5355
5356 static dw_die_ref
5357 get_die_parent (dw_die_ref die)
5358 {
5359 dw_die_ref t;
5360
5361 if (!die)
5362 return NULL;
5363
5364 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5365 || (t = get_AT_ref (die, DW_AT_specification)))
5366 die = t;
5367
5368 return die->die_parent;
5369 }
5370
5371 /* Return the "low pc" attribute value, typically associated with a subprogram
5372 DIE. Return null if the "low pc" attribute is either not present, or if it
5373 cannot be represented as an assembler label identifier. */
5374
5375 static inline const char *
5376 get_AT_low_pc (dw_die_ref die)
5377 {
5378 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5379
5380 return a ? AT_lbl (a) : NULL;
5381 }
5382
5383 /* Return the value of the string attribute designated by ATTR_KIND, or
5384 NULL if it is not present. */
5385
5386 static inline const char *
5387 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5388 {
5389 dw_attr_node *a = get_AT (die, attr_kind);
5390
5391 return a ? AT_string (a) : NULL;
5392 }
5393
5394 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5395 if it is not present. */
5396
5397 static inline int
5398 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5399 {
5400 dw_attr_node *a = get_AT (die, attr_kind);
5401
5402 return a ? AT_flag (a) : 0;
5403 }
5404
5405 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5406 if it is not present. */
5407
5408 static inline unsigned
5409 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5410 {
5411 dw_attr_node *a = get_AT (die, attr_kind);
5412
5413 return a ? AT_unsigned (a) : 0;
5414 }
5415
5416 static inline dw_die_ref
5417 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5418 {
5419 dw_attr_node *a = get_AT (die, attr_kind);
5420
5421 return a ? AT_ref (a) : NULL;
5422 }
5423
5424 static inline struct dwarf_file_data *
5425 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5426 {
5427 dw_attr_node *a = get_AT (die, attr_kind);
5428
5429 return a ? AT_file (a) : NULL;
5430 }
5431
5432 /* Return TRUE if the language is C. */
5433
5434 static inline bool
5435 is_c (void)
5436 {
5437 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5438
5439 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5440 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5443 }
5444
5445 /* Return TRUE if the language is C++. */
5446
5447 static inline bool
5448 is_cxx (void)
5449 {
5450 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5451
5452 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5453 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5454 }
5455
5456 /* Return TRUE if DECL was created by the C++ frontend. */
5457
5458 static bool
5459 is_cxx (const_tree decl)
5460 {
5461 if (in_lto_p)
5462 {
5463 const_tree context = get_ultimate_context (decl);
5464 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5465 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5466 }
5467 return is_cxx ();
5468 }
5469
5470 /* Return TRUE if the language is Fortran. */
5471
5472 static inline bool
5473 is_fortran (void)
5474 {
5475 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5476
5477 return (lang == DW_LANG_Fortran77
5478 || lang == DW_LANG_Fortran90
5479 || lang == DW_LANG_Fortran95
5480 || lang == DW_LANG_Fortran03
5481 || lang == DW_LANG_Fortran08);
5482 }
5483
5484 static inline bool
5485 is_fortran (const_tree decl)
5486 {
5487 if (in_lto_p)
5488 {
5489 const_tree context = get_ultimate_context (decl);
5490 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5491 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5492 "GNU Fortran", 11) == 0
5493 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5494 "GNU F77") == 0);
5495 }
5496 return is_fortran ();
5497 }
5498
5499 /* Return TRUE if the language is Ada. */
5500
5501 static inline bool
5502 is_ada (void)
5503 {
5504 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5505
5506 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5507 }
5508
5509 /* Return TRUE if the language is D. */
5510
5511 static inline bool
5512 is_dlang (void)
5513 {
5514 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5515
5516 return lang == DW_LANG_D;
5517 }
5518
5519 /* Remove the specified attribute if present. Return TRUE if removal
5520 was successful. */
5521
5522 static bool
5523 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5524 {
5525 dw_attr_node *a;
5526 unsigned ix;
5527
5528 if (! die)
5529 return false;
5530
5531 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5532 if (a->dw_attr == attr_kind)
5533 {
5534 if (AT_class (a) == dw_val_class_str)
5535 if (a->dw_attr_val.v.val_str->refcount)
5536 a->dw_attr_val.v.val_str->refcount--;
5537
5538 /* vec::ordered_remove should help reduce the number of abbrevs
5539 that are needed. */
5540 die->die_attr->ordered_remove (ix);
5541 return true;
5542 }
5543 return false;
5544 }
5545
5546 /* Remove CHILD from its parent. PREV must have the property that
5547 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5548
5549 static void
5550 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5551 {
5552 gcc_assert (child->die_parent == prev->die_parent);
5553 gcc_assert (prev->die_sib == child);
5554 if (prev == child)
5555 {
5556 gcc_assert (child->die_parent->die_child == child);
5557 prev = NULL;
5558 }
5559 else
5560 prev->die_sib = child->die_sib;
5561 if (child->die_parent->die_child == child)
5562 child->die_parent->die_child = prev;
5563 child->die_sib = NULL;
5564 }
5565
5566 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5567 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5568
5569 static void
5570 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5571 {
5572 dw_die_ref parent = old_child->die_parent;
5573
5574 gcc_assert (parent == prev->die_parent);
5575 gcc_assert (prev->die_sib == old_child);
5576
5577 new_child->die_parent = parent;
5578 if (prev == old_child)
5579 {
5580 gcc_assert (parent->die_child == old_child);
5581 new_child->die_sib = new_child;
5582 }
5583 else
5584 {
5585 prev->die_sib = new_child;
5586 new_child->die_sib = old_child->die_sib;
5587 }
5588 if (old_child->die_parent->die_child == old_child)
5589 old_child->die_parent->die_child = new_child;
5590 old_child->die_sib = NULL;
5591 }
5592
5593 /* Move all children from OLD_PARENT to NEW_PARENT. */
5594
5595 static void
5596 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5597 {
5598 dw_die_ref c;
5599 new_parent->die_child = old_parent->die_child;
5600 old_parent->die_child = NULL;
5601 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5602 }
5603
5604 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5605 matches TAG. */
5606
5607 static void
5608 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5609 {
5610 dw_die_ref c;
5611
5612 c = die->die_child;
5613 if (c) do {
5614 dw_die_ref prev = c;
5615 c = c->die_sib;
5616 while (c->die_tag == tag)
5617 {
5618 remove_child_with_prev (c, prev);
5619 c->die_parent = NULL;
5620 /* Might have removed every child. */
5621 if (die->die_child == NULL)
5622 return;
5623 c = prev->die_sib;
5624 }
5625 } while (c != die->die_child);
5626 }
5627
5628 /* Add a CHILD_DIE as the last child of DIE. */
5629
5630 static void
5631 add_child_die (dw_die_ref die, dw_die_ref child_die)
5632 {
5633 /* FIXME this should probably be an assert. */
5634 if (! die || ! child_die)
5635 return;
5636 gcc_assert (die != child_die);
5637
5638 child_die->die_parent = die;
5639 if (die->die_child)
5640 {
5641 child_die->die_sib = die->die_child->die_sib;
5642 die->die_child->die_sib = child_die;
5643 }
5644 else
5645 child_die->die_sib = child_die;
5646 die->die_child = child_die;
5647 }
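/* Illustrative note on the child list layout used above (not from the
   original sources): DIE->DIE_CHILD points at the *last* child, and the
   children form a circular singly linked list through DIE_SIB, so the
   first child is DIE->DIE_CHILD->DIE_SIB.  Appending C3 to a parent that
   already has children C1 and C2 therefore goes from

       die_child -> C2,  C1.die_sib -> C2,  C2.die_sib -> C1

   to

       die_child -> C3,  C1.die_sib -> C2,  C2.die_sib -> C3,
       C3.die_sib -> C1

   which is what the assignments in add_child_die implement.  */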
5648
5649 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5650
5651 static void
5652 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5653 dw_die_ref after_die)
5654 {
5655 gcc_assert (die
5656 && child_die
5657 && after_die
5658 && die->die_child
5659 && die != child_die);
5660
5661 child_die->die_parent = die;
5662 child_die->die_sib = after_die->die_sib;
5663 after_die->die_sib = child_die;
5664 if (die->die_child == after_die)
5665 die->die_child = child_die;
5666 }
5667
5668 /* Unassociate CHILD from its parent, and make its parent be
5669 NEW_PARENT. */
5670
5671 static void
5672 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5673 {
5674 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5675 if (p->die_sib == child)
5676 {
5677 remove_child_with_prev (child, p);
5678 break;
5679 }
5680 add_child_die (new_parent, child);
5681 }
5682
5683 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5684 is the specification, to the end of PARENT's list of children.
5685 This is done by removing and re-adding it. */
5686
5687 static void
5688 splice_child_die (dw_die_ref parent, dw_die_ref child)
5689 {
5690 /* We want the declaration DIE from inside the class, not the
5691 specification DIE at toplevel. */
5692 if (child->die_parent != parent)
5693 {
5694 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5695
5696 if (tmp)
5697 child = tmp;
5698 }
5699
5700 gcc_assert (child->die_parent == parent
5701 || (child->die_parent
5702 == get_AT_ref (parent, DW_AT_specification)));
5703
5704 reparent_child (child, parent);
5705 }
5706
5707 /* Create and return a new die with TAG_VALUE as tag. */
5708
5709 static inline dw_die_ref
5710 new_die_raw (enum dwarf_tag tag_value)
5711 {
5712 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5713 die->die_tag = tag_value;
5714 return die;
5715 }
5716
5717 /* Create and return a new die with a parent of PARENT_DIE. If
5718 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5719 associated tree T must be supplied to determine parenthood
5720 later. */
5721
5722 static inline dw_die_ref
5723 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5724 {
5725 dw_die_ref die = new_die_raw (tag_value);
5726
5727 if (parent_die != NULL)
5728 add_child_die (parent_die, die);
5729 else
5730 {
5731 limbo_die_node *limbo_node;
5732
5733 /* No DIEs created after early dwarf should end up in limbo,
5734 because the limbo list should not persist past LTO
5735 streaming. */
5736 if (tag_value != DW_TAG_compile_unit
5737 /* These are allowed because they're generated while
5738 breaking out COMDAT units late. */
5739 && tag_value != DW_TAG_type_unit
5740 && tag_value != DW_TAG_skeleton_unit
5741 && !early_dwarf
5742 /* Allow nested functions to live in limbo because they will
5743 only temporarily live there, as decls_for_scope will fix
5744 them up. */
5745 && (TREE_CODE (t) != FUNCTION_DECL
5746 || !decl_function_context (t))
5747 /* Same as nested functions above but for types. Types that
5748 are local to a function will be fixed in
5749 decls_for_scope. */
5750 && (!RECORD_OR_UNION_TYPE_P (t)
5751 || !TYPE_CONTEXT (t)
5752 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5753 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5754 especially in the ltrans stage, but once we implement LTO
5755 dwarf streaming, we should remove this exception. */
5756 && !in_lto_p)
5757 {
5758 fprintf (stderr, "symbol ended up in limbo too late:");
5759 debug_generic_stmt (t);
5760 gcc_unreachable ();
5761 }
5762
5763 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5764 limbo_node->die = die;
5765 limbo_node->created_for = t;
5766 limbo_node->next = limbo_die_list;
5767 limbo_die_list = limbo_node;
5768 }
5769
5770 return die;
5771 }
5772
5773 /* Return the DIE associated with the given type specifier. */
5774
5775 static inline dw_die_ref
5776 lookup_type_die (tree type)
5777 {
5778 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5779 if (die && die->removed)
5780 {
5781 TYPE_SYMTAB_DIE (type) = NULL;
5782 return NULL;
5783 }
5784 return die;
5785 }
5786
5787 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5788 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5789 anonymous type instead of the one of the naming typedef. */
5790
5791 static inline dw_die_ref
5792 strip_naming_typedef (tree type, dw_die_ref type_die)
5793 {
5794 if (type
5795 && TREE_CODE (type) == RECORD_TYPE
5796 && type_die
5797 && type_die->die_tag == DW_TAG_typedef
5798 && is_naming_typedef_decl (TYPE_NAME (type)))
5799 type_die = get_AT_ref (type_die, DW_AT_type);
5800 return type_die;
5801 }
5802
5803 /* Like lookup_type_die, but if type is an anonymous type named by a
5804 typedef[1], return the DIE of the anonymous type instead of the one of
5805 the naming typedef. This is because in gen_typedef_die, we equated
5806 the anonymous struct named by the typedef with the DIE of
5807 the naming typedef. So by default, lookup_type_die on an anonymous
5808 struct yields the DIE of the naming typedef.
5809
5810 [1]: Read the comment of is_naming_typedef_decl to learn about what
5811 a naming typedef is. */
5812
5813 static inline dw_die_ref
5814 lookup_type_die_strip_naming_typedef (tree type)
5815 {
5816 dw_die_ref die = lookup_type_die (type);
5817 return strip_naming_typedef (type, die);
5818 }
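/* Example of a naming typedef (illustrative only):

       typedef struct { int i; } foo_t;

   The struct itself is anonymous and foo_t is its naming typedef;
   gen_typedef_die equates the struct type with the DW_TAG_typedef DIE,
   so a plain lookup_type_die on the struct returns the typedef DIE.
   The strip_naming_typedef variants above instead follow DW_AT_type on
   that typedef DIE to reach the DIE of the anonymous struct itself.  */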
5819
5820 /* Equate a DIE to a given type specifier. */
5821
5822 static inline void
5823 equate_type_number_to_die (tree type, dw_die_ref type_die)
5824 {
5825 TYPE_SYMTAB_DIE (type) = type_die;
5826 }
5827
5828 static dw_die_ref maybe_create_die_with_external_ref (tree);
5829 struct GTY(()) sym_off_pair
5830 {
5831 const char * GTY((skip)) sym;
5832 unsigned HOST_WIDE_INT off;
5833 };
5834 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5835
5836 /* Returns a hash value for X (which really is a die_struct). */
5837
5838 inline hashval_t
5839 decl_die_hasher::hash (die_node *x)
5840 {
5841 return (hashval_t) x->decl_id;
5842 }
5843
5844 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5845
5846 inline bool
5847 decl_die_hasher::equal (die_node *x, tree y)
5848 {
5849 return (x->decl_id == DECL_UID (y));
5850 }
5851
5852 /* Return the DIE associated with a given declaration. */
5853
5854 static inline dw_die_ref
5855 lookup_decl_die (tree decl)
5856 {
5857 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5858 NO_INSERT);
5859 if (!die)
5860 {
5861 if (in_lto_p)
5862 return maybe_create_die_with_external_ref (decl);
5863 return NULL;
5864 }
5865 if ((*die)->removed)
5866 {
5867 decl_die_table->clear_slot (die);
5868 return NULL;
5869 }
5870 return *die;
5871 }
5872
5873
5874 /* Return the DIE associated with BLOCK. */
5875
5876 static inline dw_die_ref
5877 lookup_block_die (tree block)
5878 {
5879 dw_die_ref die = BLOCK_DIE (block);
5880 if (!die && in_lto_p)
5881 return maybe_create_die_with_external_ref (block);
5882 return die;
5883 }
5884
5885 /* Associate DIE with BLOCK. */
5886
5887 static inline void
5888 equate_block_to_die (tree block, dw_die_ref die)
5889 {
5890 BLOCK_DIE (block) = die;
5891 }
5892 #undef BLOCK_DIE
5893
5894
5895 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5896 style reference. Return true if we found one referring to a DIE for
5897 DECL, otherwise return false. */
5898
5899 static bool
5900 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5901 unsigned HOST_WIDE_INT *off)
5902 {
5903 dw_die_ref die;
5904
5905 if (in_lto_p)
5906 {
5907 /* During WPA stage and incremental linking we use a hash-map
5908 to store the decl <-> label + offset map. */
5909 if (!external_die_map)
5910 return false;
5911 sym_off_pair *desc = external_die_map->get (decl);
5912 if (!desc)
5913 return false;
5914 *sym = desc->sym;
5915 *off = desc->off;
5916 return true;
5917 }
5918
5919 if (TREE_CODE (decl) == BLOCK)
5920 die = lookup_block_die (decl);
5921 else
5922 die = lookup_decl_die (decl);
5923 if (!die)
5924 return false;
5925
5926 /* Similar to get_ref_die_offset_label, but using the "correct"
5927 label. */
5928 *off = die->die_offset;
5929 while (die->die_parent)
5930 die = die->die_parent;
5931 /* For the containing CU DIE we compute a die_symbol in
5932 compute_comp_unit_symbol. */
5933 gcc_assert (die->die_tag == DW_TAG_compile_unit
5934 && die->die_id.die_symbol != NULL);
5935 *sym = die->die_id.die_symbol;
5936 return true;
5937 }
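/* Illustrative note (not from the original sources): a SYMBOL + OFFSET
   reference names a DIE emitted by the early debug pass as

       <symbol of the containing CU> + <offset of the DIE in that CU>

   When in_lto_p the pair is simply looked up in external_die_map;
   otherwise it is reconstructed here by walking from the DIE up to its
   compile-unit DIE (whose die_symbol supplies the symbol) and using the
   DIE's own die_offset as the addend.  */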
5938
5939 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5940
5941 static void
5942 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5943 const char *symbol, HOST_WIDE_INT offset)
5944 {
5945 /* Create a fake DIE that contains the reference. Don't use
5946 new_die because we don't want to end up in the limbo list. */
5947 /* ??? We probably want to share these, thus put a ref to the DIE
5948 we create here to the external_die_map entry. */
5949 dw_die_ref ref = new_die_raw (die->die_tag);
5950 ref->die_id.die_symbol = symbol;
5951 ref->die_offset = offset;
5952 ref->with_offset = 1;
5953 add_AT_die_ref (die, attr_kind, ref);
5954 }
5955
5956 /* Create a DIE for DECL if required and add a reference to a DIE
5957 at SYMBOL + OFFSET which contains attributes dumped early. */
5958
5959 static void
5960 dwarf2out_register_external_die (tree decl, const char *sym,
5961 unsigned HOST_WIDE_INT off)
5962 {
5963 if (debug_info_level == DINFO_LEVEL_NONE)
5964 return;
5965
5966 if (!external_die_map)
5967 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5968 gcc_checking_assert (!external_die_map->get (decl));
5969 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5970 external_die_map->put (decl, p);
5971 }
5972
5973 /* If we have a registered external DIE for DECL return a new DIE for
5974 the concrete instance with an appropriate abstract origin. */
5975
5976 static dw_die_ref
5977 maybe_create_die_with_external_ref (tree decl)
5978 {
5979 if (!external_die_map)
5980 return NULL;
5981 sym_off_pair *desc = external_die_map->get (decl);
5982 if (!desc)
5983 return NULL;
5984
5985 const char *sym = desc->sym;
5986 unsigned HOST_WIDE_INT off = desc->off;
5987 external_die_map->remove (decl);
5988
5989 in_lto_p = false;
5990 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5991 ? lookup_block_die (decl) : lookup_decl_die (decl));
5992 gcc_assert (!die);
5993 in_lto_p = true;
5994
5995 tree ctx;
5996 dw_die_ref parent = NULL;
5997 /* Need to look up a DIE for the decl's context - the containing
5998 function or translation unit. */
5999 if (TREE_CODE (decl) == BLOCK)
6000 {
6001 ctx = BLOCK_SUPERCONTEXT (decl);
6002 /* ??? We do not output DIEs for all scopes thus skip as
6003 many DIEs as needed. */
6004 while (TREE_CODE (ctx) == BLOCK
6005 && !lookup_block_die (ctx))
6006 ctx = BLOCK_SUPERCONTEXT (ctx);
6007 }
6008 else
6009 ctx = DECL_CONTEXT (decl);
6010 /* Peel types in the context stack. */
6011 while (ctx && TYPE_P (ctx))
6012 ctx = TYPE_CONTEXT (ctx);
6013 /* Likewise namespaces in case we do not want to emit DIEs for them. */
6014 if (debug_info_level <= DINFO_LEVEL_TERSE)
6015 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
6016 ctx = DECL_CONTEXT (ctx);
6017 if (ctx)
6018 {
6019 if (TREE_CODE (ctx) == BLOCK)
6020 parent = lookup_block_die (ctx);
6021 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
6022 /* Keep the 1:1 association during WPA. */
6023 && !flag_wpa
6024 && flag_incremental_link != INCREMENTAL_LINK_LTO)
6025 /* Otherwise all late annotations go to the main CU which
6026 imports the original CUs. */
6027 parent = comp_unit_die ();
6028 else if (TREE_CODE (ctx) == FUNCTION_DECL
6029 && TREE_CODE (decl) != FUNCTION_DECL
6030 && TREE_CODE (decl) != PARM_DECL
6031 && TREE_CODE (decl) != RESULT_DECL
6032 && TREE_CODE (decl) != BLOCK)
6033 /* Leave function local entities parent determination to when
6034 we process scope vars. */
6035 ;
6036 else
6037 parent = lookup_decl_die (ctx);
6038 }
6039 else
6040 /* In some cases the FEs fail to set DECL_CONTEXT properly.
6041 Handle this case gracefully by globalizing stuff. */
6042 parent = comp_unit_die ();
6043 /* Create a DIE "stub". */
6044 switch (TREE_CODE (decl))
6045 {
6046 case TRANSLATION_UNIT_DECL:
6047 {
6048 die = comp_unit_die ();
6049 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6050 to create a DIE for the original CUs. */
6051 return die;
6052 }
6053 case NAMESPACE_DECL:
6054 if (is_fortran (decl))
6055 die = new_die (DW_TAG_module, parent, decl);
6056 else
6057 die = new_die (DW_TAG_namespace, parent, decl);
6058 break;
6059 case FUNCTION_DECL:
6060 die = new_die (DW_TAG_subprogram, parent, decl);
6061 break;
6062 case VAR_DECL:
6063 die = new_die (DW_TAG_variable, parent, decl);
6064 break;
6065 case RESULT_DECL:
6066 die = new_die (DW_TAG_variable, parent, decl);
6067 break;
6068 case PARM_DECL:
6069 die = new_die (DW_TAG_formal_parameter, parent, decl);
6070 break;
6071 case CONST_DECL:
6072 die = new_die (DW_TAG_constant, parent, decl);
6073 break;
6074 case LABEL_DECL:
6075 die = new_die (DW_TAG_label, parent, decl);
6076 break;
6077 case BLOCK:
6078 die = new_die (DW_TAG_lexical_block, parent, decl);
6079 break;
6080 default:
6081 gcc_unreachable ();
6082 }
6083 if (TREE_CODE (decl) == BLOCK)
6084 equate_block_to_die (decl, die);
6085 else
6086 equate_decl_number_to_die (decl, die);
6087
6088 add_desc_attribute (die, decl);
6089
6090 /* Add a reference to the DIE providing early debug at $sym + off. */
6091 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6092
6093 return die;
6094 }
6095
6096 /* Returns a hash value for X (which really is a var_loc_list). */
6097
6098 inline hashval_t
6099 decl_loc_hasher::hash (var_loc_list *x)
6100 {
6101 return (hashval_t) x->decl_id;
6102 }
6103
6104 /* Return nonzero if decl_id of var_loc_list X is the same as
6105 UID of decl *Y. */
6106
6107 inline bool
6108 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6109 {
6110 return (x->decl_id == DECL_UID (y));
6111 }
6112
6113 /* Return the var_loc list associated with a given declaration. */
6114
6115 static inline var_loc_list *
6116 lookup_decl_loc (const_tree decl)
6117 {
6118 if (!decl_loc_table)
6119 return NULL;
6120 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6121 }
6122
6123 /* Returns a hash value for X (which really is a cached_dw_loc_list_list). */
6124
6125 inline hashval_t
6126 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6127 {
6128 return (hashval_t) x->decl_id;
6129 }
6130
6131 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6132 UID of decl *Y. */
6133
6134 inline bool
6135 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6136 {
6137 return (x->decl_id == DECL_UID (y));
6138 }
6139
6140 /* Equate a DIE to a particular declaration. */
6141
6142 static void
6143 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6144 {
6145 unsigned int decl_id = DECL_UID (decl);
6146
6147 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6148 decl_die->decl_id = decl_id;
6149 }
6150
6151 /* Return how many bits the PIECE EXPR_LIST covers. */
6152
6153 static HOST_WIDE_INT
6154 decl_piece_bitsize (rtx piece)
6155 {
6156 int ret = (int) GET_MODE (piece);
6157 if (ret)
6158 return ret;
6159 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6160 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6161 return INTVAL (XEXP (XEXP (piece, 0), 0));
6162 }
6163
6164 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6165
6166 static rtx *
6167 decl_piece_varloc_ptr (rtx piece)
6168 {
6169 if ((int) GET_MODE (piece))
6170 return &XEXP (piece, 0);
6171 else
6172 return &XEXP (XEXP (piece, 0), 1);
6173 }
6174
6175 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6176 NEXT is the chain of following piece nodes. */
6177
6178 static rtx_expr_list *
6179 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6180 {
6181 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6182 return alloc_EXPR_LIST (bitsize, loc_note, next);
6183 else
6184 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6185 GEN_INT (bitsize),
6186 loc_note), next);
6187 }
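/* Illustrative note on the piece encoding used by the helpers above (not
   from the original sources): a piece is an EXPR_LIST node whose
   machine-mode field is reused to hold the piece's size in bits when it
   fits, e.g. for a 32-bit piece (assuming the target defines at least
   that many machine modes)

       piece = alloc_EXPR_LIST (32, loc_note, next);
       decl_piece_bitsize (piece)      == 32
       *decl_piece_varloc_ptr (piece)  == loc_note

   For larger sizes the mode field is 0 and operand 0 is instead a CONCAT
   of (const_int bitsize, loc_note), which the two accessors unwrap.  */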
6188
6189 /* Return rtx that should be stored into loc field for
6190 LOC_NOTE and BITPOS/BITSIZE. */
6191
6192 static rtx
6193 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6194 HOST_WIDE_INT bitsize)
6195 {
6196 if (bitsize != -1)
6197 {
6198 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6199 if (bitpos != 0)
6200 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6201 }
6202 return loc_note;
6203 }
6204
6205 /* This function either modifies location piece list *DEST in
6206 place (if SRC and INNER is NULL), or copies location piece list
6207 *SRC to *DEST while modifying it. Location BITPOS is modified
6208 to contain LOC_NOTE, any pieces overlapping it are removed resp.
6209 not copied and if needed some padding around it is added.
6210 When modifying in place, DEST should point to EXPR_LIST where
6211 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6212 to the start of the whole list and INNER points to the EXPR_LIST
6213 where earlier pieces cover PIECE_BITPOS bits. */
6214
6215 static void
6216 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6217 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6218 HOST_WIDE_INT bitsize, rtx loc_note)
6219 {
6220 HOST_WIDE_INT diff;
6221 bool copy = inner != NULL;
6222
6223 if (copy)
6224 {
6225 /* First copy all nodes preceding the current bitpos. */
6226 while (src != inner)
6227 {
6228 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6229 decl_piece_bitsize (*src), NULL_RTX);
6230 dest = &XEXP (*dest, 1);
6231 src = &XEXP (*src, 1);
6232 }
6233 }
6234 /* Add padding if needed. */
6235 if (bitpos != piece_bitpos)
6236 {
6237 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6238 copy ? NULL_RTX : *dest);
6239 dest = &XEXP (*dest, 1);
6240 }
6241 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6242 {
6243 gcc_assert (!copy);
6244 /* A piece with the correct bitpos and bitsize already exists;
6245 just update the location for it and return. */
6246 *decl_piece_varloc_ptr (*dest) = loc_note;
6247 return;
6248 }
6249 /* Add the piece that changed. */
6250 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6251 dest = &XEXP (*dest, 1);
6252 /* Skip over pieces that overlap it. */
6253 diff = bitpos - piece_bitpos + bitsize;
6254 if (!copy)
6255 src = dest;
6256 while (diff > 0 && *src)
6257 {
6258 rtx piece = *src;
6259 diff -= decl_piece_bitsize (piece);
6260 if (copy)
6261 src = &XEXP (piece, 1);
6262 else
6263 {
6264 *src = XEXP (piece, 1);
6265 free_EXPR_LIST_node (piece);
6266 }
6267 }
6268 /* Add padding if needed. */
6269 if (diff < 0 && *src)
6270 {
6271 if (!copy)
6272 dest = src;
6273 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6274 dest = &XEXP (*dest, 1);
6275 }
6276 if (!copy)
6277 return;
6278 /* Finally copy all nodes following it. */
6279 while (*src)
6280 {
6281 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6282 decl_piece_bitsize (*src), NULL_RTX);
6283 dest = &XEXP (*dest, 1);
6284 src = &XEXP (*src, 1);
6285 }
6286 }
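/* Worked example for adjust_piece_list (illustrative only): suppose the
   in-place list *DEST currently describes a 64-bit variable as two
   32-bit pieces

       [0,32) -> NOTE_A   [32,64) -> NOTE_B

   and a new location note NOTE_C arrives for bits [32,64).  With
   BITPOS == PIECE_BITPOS == 32 and BITSIZE == 32 the existing second
   piece already has the right position and size, so only its location
   is replaced, giving

       [0,32) -> NOTE_A   [32,64) -> NOTE_C

   Overlapping pieces of a different shape would instead be dropped and,
   if holes remained, padding pieces with a NULL location inserted.  */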
6287
6288 /* Add a variable location node to the linked list for DECL. */
6289
6290 static struct var_loc_node *
6291 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6292 {
6293 unsigned int decl_id;
6294 var_loc_list *temp;
6295 struct var_loc_node *loc = NULL;
6296 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6297
6298 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6299 {
6300 tree realdecl = DECL_DEBUG_EXPR (decl);
6301 if (handled_component_p (realdecl)
6302 || (TREE_CODE (realdecl) == MEM_REF
6303 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6304 {
6305 bool reverse;
6306 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6307 &bitsize, &reverse);
6308 if (!innerdecl
6309 || !DECL_P (innerdecl)
6310 || DECL_IGNORED_P (innerdecl)
6311 || TREE_STATIC (innerdecl)
6312 || bitsize == 0
6313 || bitpos + bitsize > 256)
6314 return NULL;
6315 decl = innerdecl;
6316 }
6317 }
6318
6319 decl_id = DECL_UID (decl);
6320 var_loc_list **slot
6321 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6322 if (*slot == NULL)
6323 {
6324 temp = ggc_cleared_alloc<var_loc_list> ();
6325 temp->decl_id = decl_id;
6326 *slot = temp;
6327 }
6328 else
6329 temp = *slot;
6330
6331 /* For PARM_DECLs try to keep around the original incoming value,
6332 even if that means we'll emit a zero-range .debug_loc entry. */
6333 if (temp->last
6334 && temp->first == temp->last
6335 && TREE_CODE (decl) == PARM_DECL
6336 && NOTE_P (temp->first->loc)
6337 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6338 && DECL_INCOMING_RTL (decl)
6339 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6340 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6341 == GET_CODE (DECL_INCOMING_RTL (decl))
6342 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6343 && (bitsize != -1
6344 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6345 NOTE_VAR_LOCATION_LOC (loc_note))
6346 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6347 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6348 {
6349 loc = ggc_cleared_alloc<var_loc_node> ();
6350 temp->first->next = loc;
6351 temp->last = loc;
6352 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6353 }
6354 else if (temp->last)
6355 {
6356 struct var_loc_node *last = temp->last, *unused = NULL;
6357 rtx *piece_loc = NULL, last_loc_note;
6358 HOST_WIDE_INT piece_bitpos = 0;
6359 if (last->next)
6360 {
6361 last = last->next;
6362 gcc_assert (last->next == NULL);
6363 }
6364 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6365 {
6366 piece_loc = &last->loc;
6367 do
6368 {
6369 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6370 if (piece_bitpos + cur_bitsize > bitpos)
6371 break;
6372 piece_bitpos += cur_bitsize;
6373 piece_loc = &XEXP (*piece_loc, 1);
6374 }
6375 while (*piece_loc);
6376 }
6377 /* TEMP->LAST here is a pointer either to the last but one or to the
6378 last element in the chained list; LAST is a pointer to the
6379 last element. */
6380 if (label && strcmp (last->label, label) == 0 && last->view == view)
6381 {
6382 /* For SRA optimized variables if there weren't any real
6383 insns since last note, just modify the last node. */
6384 if (piece_loc != NULL)
6385 {
6386 adjust_piece_list (piece_loc, NULL, NULL,
6387 bitpos, piece_bitpos, bitsize, loc_note);
6388 return NULL;
6389 }
6390 /* If the last note doesn't cover any instructions, remove it. */
6391 if (temp->last != last)
6392 {
6393 temp->last->next = NULL;
6394 unused = last;
6395 last = temp->last;
6396 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6397 }
6398 else
6399 {
6400 gcc_assert (temp->first == temp->last
6401 || (temp->first->next == temp->last
6402 && TREE_CODE (decl) == PARM_DECL));
6403 memset (temp->last, '\0', sizeof (*temp->last));
6404 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6405 return temp->last;
6406 }
6407 }
6408 if (bitsize == -1 && NOTE_P (last->loc))
6409 last_loc_note = last->loc;
6410 else if (piece_loc != NULL
6411 && *piece_loc != NULL_RTX
6412 && piece_bitpos == bitpos
6413 && decl_piece_bitsize (*piece_loc) == bitsize)
6414 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6415 else
6416 last_loc_note = NULL_RTX;
6417 /* If the current location is the same as the end of the list,
6418 and either both or neither of the locations is uninitialized,
6419 we have nothing to do. */
6420 if (last_loc_note == NULL_RTX
6421 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6422 NOTE_VAR_LOCATION_LOC (loc_note)))
6423 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6424 != NOTE_VAR_LOCATION_STATUS (loc_note))
6425 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6426 == VAR_INIT_STATUS_UNINITIALIZED)
6427 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6428 == VAR_INIT_STATUS_UNINITIALIZED))))
6429 {
6430 /* Add LOC to the end of list and update LAST. If the last
6431 element of the list has been removed above, reuse its
6432 memory for the new node, otherwise allocate a new one. */
6433 if (unused)
6434 {
6435 loc = unused;
6436 memset (loc, '\0', sizeof (*loc));
6437 }
6438 else
6439 loc = ggc_cleared_alloc<var_loc_node> ();
6440 if (bitsize == -1 || piece_loc == NULL)
6441 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6442 else
6443 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6444 bitpos, piece_bitpos, bitsize, loc_note);
6445 last->next = loc;
6446 /* Ensure TEMP->LAST will point either to the new last but one
6447 element of the chain, or to the last element in it. */
6448 if (last != temp->last)
6449 temp->last = last;
6450 }
6451 else if (unused)
6452 ggc_free (unused);
6453 }
6454 else
6455 {
6456 loc = ggc_cleared_alloc<var_loc_node> ();
6457 temp->first = loc;
6458 temp->last = loc;
6459 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6460 }
6461 return loc;
6462 }
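/* Illustrative summary (not from the original sources): the net effect
   of add_var_loc_to_decl is to keep, per declaration, a chronological
   chain of (label, location) nodes while coalescing redundant entries.
   For an SRA-scalarized variable whose two halves live in different
   registers, successive notes emitted at the same label and view refine
   the piece list of the last node via adjust_piece_list rather than
   appending new nodes, so the eventual location list can still describe
   the whole object as a composition of DW_OP_piece operations.  */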
6463 \f
6464 /* Keep track of the number of spaces used to indent the
6465 output of the debugging routines that print the structure of
6466 the DIE internal representation. */
6467 static int print_indent;
6468
6469 /* Indent the line the number of spaces given by print_indent. */
6470
6471 static inline void
6472 print_spaces (FILE *outfile)
6473 {
6474 fprintf (outfile, "%*s", print_indent, "");
6475 }
6476
6477 /* Print a type signature in hex. */
6478
6479 static inline void
6480 print_signature (FILE *outfile, char *sig)
6481 {
6482 int i;
6483
6484 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6485 fprintf (outfile, "%02x", sig[i] & 0xff);
6486 }
6487
6488 static inline void
6489 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6490 {
6491 if (discr_value->pos)
6492 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6493 else
6494 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6495 }
6496
6497 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6498
6499 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6500 RECURSE, output location descriptor operations. */
6501
6502 static void
6503 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6504 {
6505 switch (val->val_class)
6506 {
6507 case dw_val_class_addr:
6508 fprintf (outfile, "address");
6509 break;
6510 case dw_val_class_offset:
6511 fprintf (outfile, "offset");
6512 break;
6513 case dw_val_class_loc:
6514 fprintf (outfile, "location descriptor");
6515 if (val->v.val_loc == NULL)
6516 fprintf (outfile, " -> <null>\n");
6517 else if (recurse)
6518 {
6519 fprintf (outfile, ":\n");
6520 print_indent += 4;
6521 print_loc_descr (val->v.val_loc, outfile);
6522 print_indent -= 4;
6523 }
6524 else
6525 {
6526 if (flag_dump_noaddr || flag_dump_unnumbered)
6527 fprintf (outfile, " #\n");
6528 else
6529 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6530 }
6531 break;
6532 case dw_val_class_loc_list:
6533 fprintf (outfile, "location list -> label:%s",
6534 val->v.val_loc_list->ll_symbol);
6535 break;
6536 case dw_val_class_view_list:
6537 val = view_list_to_loc_list_val_node (val);
6538 fprintf (outfile, "location list with views -> labels:%s and %s",
6539 val->v.val_loc_list->ll_symbol,
6540 val->v.val_loc_list->vl_symbol);
6541 break;
6542 case dw_val_class_range_list:
6543 fprintf (outfile, "range list");
6544 break;
6545 case dw_val_class_const:
6546 case dw_val_class_const_implicit:
6547 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6548 break;
6549 case dw_val_class_unsigned_const:
6550 case dw_val_class_unsigned_const_implicit:
6551 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6552 break;
6553 case dw_val_class_const_double:
6554 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6555 HOST_WIDE_INT_PRINT_UNSIGNED")",
6556 val->v.val_double.high,
6557 val->v.val_double.low);
6558 break;
6559 case dw_val_class_wide_int:
6560 {
6561 int i = val->v.val_wide->get_len ();
6562 fprintf (outfile, "constant (");
6563 gcc_assert (i > 0);
6564 if (val->v.val_wide->elt (i - 1) == 0)
6565 fprintf (outfile, "0x");
6566 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6567 val->v.val_wide->elt (--i));
6568 while (--i >= 0)
6569 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6570 val->v.val_wide->elt (i));
6571 fprintf (outfile, ")");
6572 break;
6573 }
6574 case dw_val_class_vec:
6575 fprintf (outfile, "floating-point or vector constant");
6576 break;
6577 case dw_val_class_flag:
6578 fprintf (outfile, "%u", val->v.val_flag);
6579 break;
6580 case dw_val_class_die_ref:
6581 if (val->v.val_die_ref.die != NULL)
6582 {
6583 dw_die_ref die = val->v.val_die_ref.die;
6584
6585 if (die->comdat_type_p)
6586 {
6587 fprintf (outfile, "die -> signature: ");
6588 print_signature (outfile,
6589 die->die_id.die_type_node->signature);
6590 }
6591 else if (die->die_id.die_symbol)
6592 {
6593 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6594 if (die->with_offset)
6595 fprintf (outfile, " + %ld", die->die_offset);
6596 }
6597 else
6598 fprintf (outfile, "die -> %ld", die->die_offset);
6599 if (flag_dump_noaddr || flag_dump_unnumbered)
6600 fprintf (outfile, " #");
6601 else
6602 fprintf (outfile, " (%p)", (void *) die);
6603 }
6604 else
6605 fprintf (outfile, "die -> <null>");
6606 break;
6607 case dw_val_class_vms_delta:
6608 fprintf (outfile, "delta: @slotcount(%s-%s)",
6609 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6610 break;
6611 case dw_val_class_symview:
6612 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6613 break;
6614 case dw_val_class_lbl_id:
6615 case dw_val_class_lineptr:
6616 case dw_val_class_macptr:
6617 case dw_val_class_loclistsptr:
6618 case dw_val_class_high_pc:
6619 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6620 break;
6621 case dw_val_class_str:
6622 if (val->v.val_str->str != NULL)
6623 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6624 else
6625 fprintf (outfile, "<null>");
6626 break;
6627 case dw_val_class_file:
6628 case dw_val_class_file_implicit:
6629 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6630 val->v.val_file->emitted_number);
6631 break;
6632 case dw_val_class_data8:
6633 {
6634 int i;
6635
6636 for (i = 0; i < 8; i++)
6637 fprintf (outfile, "%02x", val->v.val_data8[i]);
6638 break;
6639 }
6640 case dw_val_class_discr_value:
6641 print_discr_value (outfile, &val->v.val_discr_value);
6642 break;
6643 case dw_val_class_discr_list:
6644 for (dw_discr_list_ref node = val->v.val_discr_list;
6645 node != NULL;
6646 node = node->dw_discr_next)
6647 {
6648 if (node->dw_discr_range)
6649 {
6650 fprintf (outfile, " .. ");
6651 print_discr_value (outfile, &node->dw_discr_lower_bound);
6652 print_discr_value (outfile, &node->dw_discr_upper_bound);
6653 }
6654 else
6655 print_discr_value (outfile, &node->dw_discr_lower_bound);
6656
6657 if (node->dw_discr_next != NULL)
6658 fprintf (outfile, " | ");
6659 }
6660 default:
6661 break;
6662 }
6663 }
6664
6665 /* Likewise, for a DIE attribute. */
6666
6667 static void
6668 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6669 {
6670 print_dw_val (&a->dw_attr_val, recurse, outfile);
6671 }
6672
6673
6674 /* Print the list of operands in the LOC location description to OUTFILE. This
6675 routine is a debugging aid only. */
6676
6677 static void
6678 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6679 {
6680 dw_loc_descr_ref l = loc;
6681
6682 if (loc == NULL)
6683 {
6684 print_spaces (outfile);
6685 fprintf (outfile, "<null>\n");
6686 return;
6687 }
6688
6689 for (l = loc; l != NULL; l = l->dw_loc_next)
6690 {
6691 print_spaces (outfile);
6692 if (flag_dump_noaddr || flag_dump_unnumbered)
6693 fprintf (outfile, "#");
6694 else
6695 fprintf (outfile, "(%p)", (void *) l);
6696 fprintf (outfile, " %s",
6697 dwarf_stack_op_name (l->dw_loc_opc));
6698 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6699 {
6700 fprintf (outfile, " ");
6701 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6702 }
6703 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6704 {
6705 fprintf (outfile, ", ");
6706 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6707 }
6708 fprintf (outfile, "\n");
6709 }
6710 }
6711
6712 /* Print the information associated with a given DIE, and its children.
6713 This routine is a debugging aid only. */
6714
6715 static void
6716 print_die (dw_die_ref die, FILE *outfile)
6717 {
6718 dw_attr_node *a;
6719 dw_die_ref c;
6720 unsigned ix;
6721
6722 print_spaces (outfile);
6723 fprintf (outfile, "DIE %4ld: %s ",
6724 die->die_offset, dwarf_tag_name (die->die_tag));
6725 if (flag_dump_noaddr || flag_dump_unnumbered)
6726 fprintf (outfile, "#\n");
6727 else
6728 fprintf (outfile, "(%p)\n", (void*) die);
6729 print_spaces (outfile);
6730 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6731 fprintf (outfile, " offset: %ld", die->die_offset);
6732 fprintf (outfile, " mark: %d\n", die->die_mark);
6733
6734 if (die->comdat_type_p)
6735 {
6736 print_spaces (outfile);
6737 fprintf (outfile, " signature: ");
6738 print_signature (outfile, die->die_id.die_type_node->signature);
6739 fprintf (outfile, "\n");
6740 }
6741
6742 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6743 {
6744 print_spaces (outfile);
6745 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6746
6747 print_attribute (a, true, outfile);
6748 fprintf (outfile, "\n");
6749 }
6750
6751 if (die->die_child != NULL)
6752 {
6753 print_indent += 4;
6754 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6755 print_indent -= 4;
6756 }
6757 if (print_indent == 0)
6758 fprintf (outfile, "\n");
6759 }
6760
6761 /* Print the list of operations in the LOC location description. */
6762
6763 DEBUG_FUNCTION void
6764 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6765 {
6766 print_loc_descr (loc, stderr);
6767 }
6768
6769 /* Print the information collected for a given DIE. */
6770
6771 DEBUG_FUNCTION void
6772 debug_dwarf_die (dw_die_ref die)
6773 {
6774 print_die (die, stderr);
6775 }
6776
6777 DEBUG_FUNCTION void
6778 debug (die_struct &ref)
6779 {
6780 print_die (&ref, stderr);
6781 }
6782
6783 DEBUG_FUNCTION void
6784 debug (die_struct *ptr)
6785 {
6786 if (ptr)
6787 debug (*ptr);
6788 else
6789 fprintf (stderr, "<nil>\n");
6790 }
6791
6792
6793 /* Print all DWARF information collected for the compilation unit.
6794 This routine is a debugging aid only. */
6795
6796 DEBUG_FUNCTION void
6797 debug_dwarf (void)
6798 {
6799 print_indent = 0;
6800 print_die (comp_unit_die (), stderr);
6801 }
6802
6803 /* Verify the DIE tree structure. */
6804
6805 DEBUG_FUNCTION void
6806 verify_die (dw_die_ref die)
6807 {
6808 gcc_assert (!die->die_mark);
6809 if (die->die_parent == NULL
6810 && die->die_sib == NULL)
6811 return;
6812 /* Verify the die_sib list is cyclic. */
6813 dw_die_ref x = die;
6814 do
6815 {
6816 x->die_mark = 1;
6817 x = x->die_sib;
6818 }
6819 while (x && !x->die_mark);
6820 gcc_assert (x == die);
6821 x = die;
6822 do
6823 {
6824 /* Verify all dies have the same parent. */
6825 gcc_assert (x->die_parent == die->die_parent);
6826 if (x->die_child)
6827 {
6828 /* Verify the child has the proper parent and recurse. */
6829 gcc_assert (x->die_child->die_parent == x);
6830 verify_die (x->die_child);
6831 }
6832 x->die_mark = 0;
6833 x = x->die_sib;
6834 }
6835 while (x && x->die_mark);
6836 }
6837
6838 /* Sanity checks on DIEs. */
6839
6840 static void
6841 check_die (dw_die_ref die)
6842 {
6843 unsigned ix;
6844 dw_attr_node *a;
6845 bool inline_found = false;
6846 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6847 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6848 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6849 {
6850 switch (a->dw_attr)
6851 {
6852 case DW_AT_inline:
6853 if (a->dw_attr_val.v.val_unsigned)
6854 inline_found = true;
6855 break;
6856 case DW_AT_location:
6857 ++n_location;
6858 break;
6859 case DW_AT_low_pc:
6860 ++n_low_pc;
6861 break;
6862 case DW_AT_high_pc:
6863 ++n_high_pc;
6864 break;
6865 case DW_AT_artificial:
6866 ++n_artificial;
6867 break;
6868 case DW_AT_decl_column:
6869 ++n_decl_column;
6870 break;
6871 case DW_AT_decl_line:
6872 ++n_decl_line;
6873 break;
6874 case DW_AT_decl_file:
6875 ++n_decl_file;
6876 break;
6877 default:
6878 break;
6879 }
6880 }
6881 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6882 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6883 {
6884 fprintf (stderr, "Duplicate attributes in DIE:\n");
6885 debug_dwarf_die (die);
6886 gcc_unreachable ();
6887 }
6888 if (inline_found)
6889 {
6890 /* A debugging information entry that is a member of an abstract
6891 instance tree [that has DW_AT_inline] should not contain any
6892 attributes which describe aspects of the subroutine which vary
6893 between distinct inlined expansions or distinct out-of-line
6894 expansions. */
6895 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6896 gcc_assert (a->dw_attr != DW_AT_low_pc
6897 && a->dw_attr != DW_AT_high_pc
6898 && a->dw_attr != DW_AT_location
6899 && a->dw_attr != DW_AT_frame_base
6900 && a->dw_attr != DW_AT_call_all_calls
6901 && a->dw_attr != DW_AT_GNU_all_call_sites);
6902 }
6903 }
6904 \f
6905 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6906 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6907 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6908
6909 /* Calculate the checksum of a location expression. */
6910
6911 static inline void
6912 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6913 {
6914 int tem;
6915 inchash::hash hstate;
6916 hashval_t hash;
6917
6918 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6919 CHECKSUM (tem);
6920 hash_loc_operands (loc, hstate);
6921 hash = hstate.end();
6922 CHECKSUM (hash);
6923 }
6924
6925 /* Calculate the checksum of an attribute. */
6926
6927 static void
6928 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6929 {
6930 dw_loc_descr_ref loc;
6931 rtx r;
6932
6933 CHECKSUM (at->dw_attr);
6934
6935 /* We don't care that this was compiled with a different compiler
6936 snapshot; if the output is the same, that's what matters. */
6937 if (at->dw_attr == DW_AT_producer)
6938 return;
6939
6940 switch (AT_class (at))
6941 {
6942 case dw_val_class_const:
6943 case dw_val_class_const_implicit:
6944 CHECKSUM (at->dw_attr_val.v.val_int);
6945 break;
6946 case dw_val_class_unsigned_const:
6947 case dw_val_class_unsigned_const_implicit:
6948 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6949 break;
6950 case dw_val_class_const_double:
6951 CHECKSUM (at->dw_attr_val.v.val_double);
6952 break;
6953 case dw_val_class_wide_int:
6954 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6955 get_full_len (*at->dw_attr_val.v.val_wide)
6956 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6957 break;
6958 case dw_val_class_vec:
6959 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6960 (at->dw_attr_val.v.val_vec.length
6961 * at->dw_attr_val.v.val_vec.elt_size));
6962 break;
6963 case dw_val_class_flag:
6964 CHECKSUM (at->dw_attr_val.v.val_flag);
6965 break;
6966 case dw_val_class_str:
6967 CHECKSUM_STRING (AT_string (at));
6968 break;
6969
6970 case dw_val_class_addr:
6971 r = AT_addr (at);
6972 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6973 CHECKSUM_STRING (XSTR (r, 0));
6974 break;
6975
6976 case dw_val_class_offset:
6977 CHECKSUM (at->dw_attr_val.v.val_offset);
6978 break;
6979
6980 case dw_val_class_loc:
6981 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6982 loc_checksum (loc, ctx);
6983 break;
6984
6985 case dw_val_class_die_ref:
6986 die_checksum (AT_ref (at), ctx, mark);
6987 break;
6988
6989 case dw_val_class_fde_ref:
6990 case dw_val_class_vms_delta:
6991 case dw_val_class_symview:
6992 case dw_val_class_lbl_id:
6993 case dw_val_class_lineptr:
6994 case dw_val_class_macptr:
6995 case dw_val_class_loclistsptr:
6996 case dw_val_class_high_pc:
6997 break;
6998
6999 case dw_val_class_file:
7000 case dw_val_class_file_implicit:
7001 CHECKSUM_STRING (AT_file (at)->filename);
7002 break;
7003
7004 case dw_val_class_data8:
7005 CHECKSUM (at->dw_attr_val.v.val_data8);
7006 break;
7007
7008 default:
7009 break;
7010 }
7011 }
7012
7013 /* Calculate the checksum of a DIE. */
7014
7015 static void
7016 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7017 {
7018 dw_die_ref c;
7019 dw_attr_node *a;
7020 unsigned ix;
7021
7022 /* To avoid infinite recursion. */
7023 if (die->die_mark)
7024 {
7025 CHECKSUM (die->die_mark);
7026 return;
7027 }
7028 die->die_mark = ++(*mark);
7029
7030 CHECKSUM (die->die_tag);
7031
7032 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7033 attr_checksum (a, ctx, mark);
7034
7035 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
7036 }
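
/* An illustrative consequence of the die_mark handling above: for a
   self-referential type such as

       struct node { struct node *next; };

   the DW_AT_type reference of the pointer member leads die_checksum back
   to the 'node' DIE itself; the second visit finds die_mark already set
   and feeds only the mark number into the hash instead of recursing,
   which keeps the checksum finite and stable.  */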
7037
7038 #undef CHECKSUM
7039 #undef CHECKSUM_BLOCK
7040 #undef CHECKSUM_STRING
7041
7042 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
7043 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
7044 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7045 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7046 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7047 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
7048 #define CHECKSUM_ATTR(FOO) \
7049 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
7050
7051 /* Calculate the checksum of a number in signed LEB128 format. */
7052
7053 static void
7054 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7055 {
7056 unsigned char byte;
7057 bool more;
7058
7059 while (1)
7060 {
7061 byte = (value & 0x7f);
7062 value >>= 7;
7063 more = !((value == 0 && (byte & 0x40) == 0)
7064 || (value == -1 && (byte & 0x40) != 0));
7065 if (more)
7066 byte |= 0x80;
7067 CHECKSUM (byte);
7068 if (!more)
7069 break;
7070 }
7071 }
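
/* Illustrative byte sequences produced by the loop above (and thus fed to
   the MD5 context): 2 is the single byte 0x02; 64 needs two bytes,
   0xc0 0x00, because bit 6 of a final byte is read back as the sign;
   -129 is 0xff 0x7e.  */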
7072
7073 /* Calculate the checksum of a number in unsigned LEB128 format. */
7074
7075 static void
7076 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7077 {
7078 while (1)
7079 {
7080 unsigned char byte = (value & 0x7f);
7081 value >>= 7;
7082 if (value != 0)
7083 /* More bytes to follow. */
7084 byte |= 0x80;
7085 CHECKSUM (byte);
7086 if (value == 0)
7087 break;
7088 }
7089 }
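
/* Illustrative byte sequences for the unsigned variant: 127 is 0x7f,
   128 is 0x80 0x01, and 624485 is 0xe5 0x8e 0x26 (the classic DWARF
   LEB128 example).  */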
7090
7091 /* Checksum the context of the DIE. This adds the names of any
7092 surrounding namespaces or structures to the checksum. */
7093
7094 static void
7095 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7096 {
7097 const char *name;
7098 dw_die_ref spec;
7099 int tag = die->die_tag;
7100
7101 if (tag != DW_TAG_namespace
7102 && tag != DW_TAG_structure_type
7103 && tag != DW_TAG_class_type)
7104 return;
7105
7106 name = get_AT_string (die, DW_AT_name);
7107
7108 spec = get_AT_ref (die, DW_AT_specification);
7109 if (spec != NULL)
7110 die = spec;
7111
7112 if (die->die_parent != NULL)
7113 checksum_die_context (die->die_parent, ctx);
7114
7115 CHECKSUM_ULEB128 ('C');
7116 CHECKSUM_ULEB128 (tag);
7117 if (name != NULL)
7118 CHECKSUM_STRING (name);
7119 }
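
/* For example, for a struct B defined inside namespace A, calling this on
   B contributes, outermost context first:
       'C' DW_TAG_namespace "A"
       'C' DW_TAG_structure_type "B"
   each piece run through the ULEB128/string helpers above.  */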
7120
7121 /* Calculate the checksum of a location expression. */
7122
7123 static inline void
7124 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7125 {
7126 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7127 were emitted as a DW_FORM_sdata instead of a location expression. */
7128 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7129 {
7130 CHECKSUM_ULEB128 (DW_FORM_sdata);
7131 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7132 return;
7133 }
7134
7135 /* Otherwise, just checksum the raw location expression. */
7136 while (loc != NULL)
7137 {
7138 inchash::hash hstate;
7139 hashval_t hash;
7140
7141 CHECKSUM_ULEB128 (loc->dtprel);
7142 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7143 hash_loc_operands (loc, hstate);
7144 hash = hstate.end ();
7145 CHECKSUM (hash);
7146 loc = loc->dw_loc_next;
7147 }
7148 }
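
/* A practical effect of the special case above: a DW_AT_data_member_location
   given as the lone op "DW_OP_plus_uconst 8" hashes exactly like the same
   attribute emitted as the plain constant 8 in DW_FORM_sdata form, so either
   encoding yields the same type signature.  */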
7149
7150 /* Calculate the checksum of an attribute. */
7151
7152 static void
7153 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7154 struct md5_ctx *ctx, int *mark)
7155 {
7156 dw_loc_descr_ref loc;
7157 rtx r;
7158
7159 if (AT_class (at) == dw_val_class_die_ref)
7160 {
7161 dw_die_ref target_die = AT_ref (at);
7162
7163 /* For pointer and reference types, we checksum only the (qualified)
7164 name of the target type (if there is a name). For friend entries,
7165 we checksum only the (qualified) name of the target type or function.
7166 This allows the checksum to remain the same whether the target type
7167 is complete or not. */
7168 if ((at->dw_attr == DW_AT_type
7169 && (tag == DW_TAG_pointer_type
7170 || tag == DW_TAG_reference_type
7171 || tag == DW_TAG_rvalue_reference_type
7172 || tag == DW_TAG_ptr_to_member_type))
7173 || (at->dw_attr == DW_AT_friend
7174 && tag == DW_TAG_friend))
7175 {
7176 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7177
7178 if (name_attr != NULL)
7179 {
7180 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7181
7182 if (decl == NULL)
7183 decl = target_die;
7184 CHECKSUM_ULEB128 ('N');
7185 CHECKSUM_ULEB128 (at->dw_attr);
7186 if (decl->die_parent != NULL)
7187 checksum_die_context (decl->die_parent, ctx);
7188 CHECKSUM_ULEB128 ('E');
7189 CHECKSUM_STRING (AT_string (name_attr));
7190 return;
7191 }
7192 }
7193
7194 /* For all other references to another DIE, we check to see if the
7195 target DIE has already been visited. If it has, we emit a
7196 backward reference; if not, we descend recursively. */
7197 if (target_die->die_mark > 0)
7198 {
7199 CHECKSUM_ULEB128 ('R');
7200 CHECKSUM_ULEB128 (at->dw_attr);
7201 CHECKSUM_ULEB128 (target_die->die_mark);
7202 }
7203 else
7204 {
7205 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7206
7207 if (decl == NULL)
7208 decl = target_die;
7209 target_die->die_mark = ++(*mark);
7210 CHECKSUM_ULEB128 ('T');
7211 CHECKSUM_ULEB128 (at->dw_attr);
7212 if (decl->die_parent != NULL)
7213 checksum_die_context (decl->die_parent, ctx);
7214 die_checksum_ordered (target_die, ctx, mark);
7215 }
7216 return;
7217 }
7218
7219 CHECKSUM_ULEB128 ('A');
7220 CHECKSUM_ULEB128 (at->dw_attr);
7221
7222 switch (AT_class (at))
7223 {
7224 case dw_val_class_const:
7225 case dw_val_class_const_implicit:
7226 CHECKSUM_ULEB128 (DW_FORM_sdata);
7227 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7228 break;
7229
7230 case dw_val_class_unsigned_const:
7231 case dw_val_class_unsigned_const_implicit:
7232 CHECKSUM_ULEB128 (DW_FORM_sdata);
7233 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7234 break;
7235
7236 case dw_val_class_const_double:
7237 CHECKSUM_ULEB128 (DW_FORM_block);
7238 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7239 CHECKSUM (at->dw_attr_val.v.val_double);
7240 break;
7241
7242 case dw_val_class_wide_int:
7243 CHECKSUM_ULEB128 (DW_FORM_block);
7244 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7245 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7246 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7247 get_full_len (*at->dw_attr_val.v.val_wide)
7248 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7249 break;
7250
7251 case dw_val_class_vec:
7252 CHECKSUM_ULEB128 (DW_FORM_block);
7253 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7254 * at->dw_attr_val.v.val_vec.elt_size);
7255 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7256 (at->dw_attr_val.v.val_vec.length
7257 * at->dw_attr_val.v.val_vec.elt_size));
7258 break;
7259
7260 case dw_val_class_flag:
7261 CHECKSUM_ULEB128 (DW_FORM_flag);
7262 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7263 break;
7264
7265 case dw_val_class_str:
7266 CHECKSUM_ULEB128 (DW_FORM_string);
7267 CHECKSUM_STRING (AT_string (at));
7268 break;
7269
7270 case dw_val_class_addr:
7271 r = AT_addr (at);
7272 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7273 CHECKSUM_ULEB128 (DW_FORM_string);
7274 CHECKSUM_STRING (XSTR (r, 0));
7275 break;
7276
7277 case dw_val_class_offset:
7278 CHECKSUM_ULEB128 (DW_FORM_sdata);
7279 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7280 break;
7281
7282 case dw_val_class_loc:
7283 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7284 loc_checksum_ordered (loc, ctx);
7285 break;
7286
7287 case dw_val_class_fde_ref:
7288 case dw_val_class_symview:
7289 case dw_val_class_lbl_id:
7290 case dw_val_class_lineptr:
7291 case dw_val_class_macptr:
7292 case dw_val_class_loclistsptr:
7293 case dw_val_class_high_pc:
7294 break;
7295
7296 case dw_val_class_file:
7297 case dw_val_class_file_implicit:
7298 CHECKSUM_ULEB128 (DW_FORM_string);
7299 CHECKSUM_STRING (AT_file (at)->filename);
7300 break;
7301
7302 case dw_val_class_data8:
7303 CHECKSUM (at->dw_attr_val.v.val_data8);
7304 break;
7305
7306 default:
7307 break;
7308 }
7309 }
7310
7311 struct checksum_attributes
7312 {
7313 dw_attr_node *at_name;
7314 dw_attr_node *at_type;
7315 dw_attr_node *at_friend;
7316 dw_attr_node *at_accessibility;
7317 dw_attr_node *at_address_class;
7318 dw_attr_node *at_alignment;
7319 dw_attr_node *at_allocated;
7320 dw_attr_node *at_artificial;
7321 dw_attr_node *at_associated;
7322 dw_attr_node *at_binary_scale;
7323 dw_attr_node *at_bit_offset;
7324 dw_attr_node *at_bit_size;
7325 dw_attr_node *at_bit_stride;
7326 dw_attr_node *at_byte_size;
7327 dw_attr_node *at_byte_stride;
7328 dw_attr_node *at_const_value;
7329 dw_attr_node *at_containing_type;
7330 dw_attr_node *at_count;
7331 dw_attr_node *at_data_location;
7332 dw_attr_node *at_data_member_location;
7333 dw_attr_node *at_decimal_scale;
7334 dw_attr_node *at_decimal_sign;
7335 dw_attr_node *at_default_value;
7336 dw_attr_node *at_digit_count;
7337 dw_attr_node *at_discr;
7338 dw_attr_node *at_discr_list;
7339 dw_attr_node *at_discr_value;
7340 dw_attr_node *at_encoding;
7341 dw_attr_node *at_endianity;
7342 dw_attr_node *at_explicit;
7343 dw_attr_node *at_is_optional;
7344 dw_attr_node *at_location;
7345 dw_attr_node *at_lower_bound;
7346 dw_attr_node *at_mutable;
7347 dw_attr_node *at_ordering;
7348 dw_attr_node *at_picture_string;
7349 dw_attr_node *at_prototyped;
7350 dw_attr_node *at_small;
7351 dw_attr_node *at_segment;
7352 dw_attr_node *at_string_length;
7353 dw_attr_node *at_string_length_bit_size;
7354 dw_attr_node *at_string_length_byte_size;
7355 dw_attr_node *at_threads_scaled;
7356 dw_attr_node *at_upper_bound;
7357 dw_attr_node *at_use_location;
7358 dw_attr_node *at_use_UTF8;
7359 dw_attr_node *at_variable_parameter;
7360 dw_attr_node *at_virtuality;
7361 dw_attr_node *at_visibility;
7362 dw_attr_node *at_vtable_elem_location;
7363 };
7364
7365 /* Collect the attributes that we will want to use for the checksum. */
7366
7367 static void
7368 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7369 {
7370 dw_attr_node *a;
7371 unsigned ix;
7372
7373 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7374 {
7375 switch (a->dw_attr)
7376 {
7377 case DW_AT_name:
7378 attrs->at_name = a;
7379 break;
7380 case DW_AT_type:
7381 attrs->at_type = a;
7382 break;
7383 case DW_AT_friend:
7384 attrs->at_friend = a;
7385 break;
7386 case DW_AT_accessibility:
7387 attrs->at_accessibility = a;
7388 break;
7389 case DW_AT_address_class:
7390 attrs->at_address_class = a;
7391 break;
7392 case DW_AT_alignment:
7393 attrs->at_alignment = a;
7394 break;
7395 case DW_AT_allocated:
7396 attrs->at_allocated = a;
7397 break;
7398 case DW_AT_artificial:
7399 attrs->at_artificial = a;
7400 break;
7401 case DW_AT_associated:
7402 attrs->at_associated = a;
7403 break;
7404 case DW_AT_binary_scale:
7405 attrs->at_binary_scale = a;
7406 break;
7407 case DW_AT_bit_offset:
7408 attrs->at_bit_offset = a;
7409 break;
7410 case DW_AT_bit_size:
7411 attrs->at_bit_size = a;
7412 break;
7413 case DW_AT_bit_stride:
7414 attrs->at_bit_stride = a;
7415 break;
7416 case DW_AT_byte_size:
7417 attrs->at_byte_size = a;
7418 break;
7419 case DW_AT_byte_stride:
7420 attrs->at_byte_stride = a;
7421 break;
7422 case DW_AT_const_value:
7423 attrs->at_const_value = a;
7424 break;
7425 case DW_AT_containing_type:
7426 attrs->at_containing_type = a;
7427 break;
7428 case DW_AT_count:
7429 attrs->at_count = a;
7430 break;
7431 case DW_AT_data_location:
7432 attrs->at_data_location = a;
7433 break;
7434 case DW_AT_data_member_location:
7435 attrs->at_data_member_location = a;
7436 break;
7437 case DW_AT_decimal_scale:
7438 attrs->at_decimal_scale = a;
7439 break;
7440 case DW_AT_decimal_sign:
7441 attrs->at_decimal_sign = a;
7442 break;
7443 case DW_AT_default_value:
7444 attrs->at_default_value = a;
7445 break;
7446 case DW_AT_digit_count:
7447 attrs->at_digit_count = a;
7448 break;
7449 case DW_AT_discr:
7450 attrs->at_discr = a;
7451 break;
7452 case DW_AT_discr_list:
7453 attrs->at_discr_list = a;
7454 break;
7455 case DW_AT_discr_value:
7456 attrs->at_discr_value = a;
7457 break;
7458 case DW_AT_encoding:
7459 attrs->at_encoding = a;
7460 break;
7461 case DW_AT_endianity:
7462 attrs->at_endianity = a;
7463 break;
7464 case DW_AT_explicit:
7465 attrs->at_explicit = a;
7466 break;
7467 case DW_AT_is_optional:
7468 attrs->at_is_optional = a;
7469 break;
7470 case DW_AT_location:
7471 attrs->at_location = a;
7472 break;
7473 case DW_AT_lower_bound:
7474 attrs->at_lower_bound = a;
7475 break;
7476 case DW_AT_mutable:
7477 attrs->at_mutable = a;
7478 break;
7479 case DW_AT_ordering:
7480 attrs->at_ordering = a;
7481 break;
7482 case DW_AT_picture_string:
7483 attrs->at_picture_string = a;
7484 break;
7485 case DW_AT_prototyped:
7486 attrs->at_prototyped = a;
7487 break;
7488 case DW_AT_small:
7489 attrs->at_small = a;
7490 break;
7491 case DW_AT_segment:
7492 attrs->at_segment = a;
7493 break;
7494 case DW_AT_string_length:
7495 attrs->at_string_length = a;
7496 break;
7497 case DW_AT_string_length_bit_size:
7498 attrs->at_string_length_bit_size = a;
7499 break;
7500 case DW_AT_string_length_byte_size:
7501 attrs->at_string_length_byte_size = a;
7502 break;
7503 case DW_AT_threads_scaled:
7504 attrs->at_threads_scaled = a;
7505 break;
7506 case DW_AT_upper_bound:
7507 attrs->at_upper_bound = a;
7508 break;
7509 case DW_AT_use_location:
7510 attrs->at_use_location = a;
7511 break;
7512 case DW_AT_use_UTF8:
7513 attrs->at_use_UTF8 = a;
7514 break;
7515 case DW_AT_variable_parameter:
7516 attrs->at_variable_parameter = a;
7517 break;
7518 case DW_AT_virtuality:
7519 attrs->at_virtuality = a;
7520 break;
7521 case DW_AT_visibility:
7522 attrs->at_visibility = a;
7523 break;
7524 case DW_AT_vtable_elem_location:
7525 attrs->at_vtable_elem_location = a;
7526 break;
7527 default:
7528 break;
7529 }
7530 }
7531 }
7532
7533 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7534
7535 static void
7536 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7537 {
7538 dw_die_ref c;
7539 dw_die_ref decl;
7540 struct checksum_attributes attrs;
7541
7542 CHECKSUM_ULEB128 ('D');
7543 CHECKSUM_ULEB128 (die->die_tag);
7544
7545 memset (&attrs, 0, sizeof (attrs));
7546
7547 decl = get_AT_ref (die, DW_AT_specification);
7548 if (decl != NULL)
7549 collect_checksum_attributes (&attrs, decl);
7550 collect_checksum_attributes (&attrs, die);
7551
7552 CHECKSUM_ATTR (attrs.at_name);
7553 CHECKSUM_ATTR (attrs.at_accessibility);
7554 CHECKSUM_ATTR (attrs.at_address_class);
7555 CHECKSUM_ATTR (attrs.at_allocated);
7556 CHECKSUM_ATTR (attrs.at_artificial);
7557 CHECKSUM_ATTR (attrs.at_associated);
7558 CHECKSUM_ATTR (attrs.at_binary_scale);
7559 CHECKSUM_ATTR (attrs.at_bit_offset);
7560 CHECKSUM_ATTR (attrs.at_bit_size);
7561 CHECKSUM_ATTR (attrs.at_bit_stride);
7562 CHECKSUM_ATTR (attrs.at_byte_size);
7563 CHECKSUM_ATTR (attrs.at_byte_stride);
7564 CHECKSUM_ATTR (attrs.at_const_value);
7565 CHECKSUM_ATTR (attrs.at_containing_type);
7566 CHECKSUM_ATTR (attrs.at_count);
7567 CHECKSUM_ATTR (attrs.at_data_location);
7568 CHECKSUM_ATTR (attrs.at_data_member_location);
7569 CHECKSUM_ATTR (attrs.at_decimal_scale);
7570 CHECKSUM_ATTR (attrs.at_decimal_sign);
7571 CHECKSUM_ATTR (attrs.at_default_value);
7572 CHECKSUM_ATTR (attrs.at_digit_count);
7573 CHECKSUM_ATTR (attrs.at_discr);
7574 CHECKSUM_ATTR (attrs.at_discr_list);
7575 CHECKSUM_ATTR (attrs.at_discr_value);
7576 CHECKSUM_ATTR (attrs.at_encoding);
7577 CHECKSUM_ATTR (attrs.at_endianity);
7578 CHECKSUM_ATTR (attrs.at_explicit);
7579 CHECKSUM_ATTR (attrs.at_is_optional);
7580 CHECKSUM_ATTR (attrs.at_location);
7581 CHECKSUM_ATTR (attrs.at_lower_bound);
7582 CHECKSUM_ATTR (attrs.at_mutable);
7583 CHECKSUM_ATTR (attrs.at_ordering);
7584 CHECKSUM_ATTR (attrs.at_picture_string);
7585 CHECKSUM_ATTR (attrs.at_prototyped);
7586 CHECKSUM_ATTR (attrs.at_small);
7587 CHECKSUM_ATTR (attrs.at_segment);
7588 CHECKSUM_ATTR (attrs.at_string_length);
7589 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7590 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7591 CHECKSUM_ATTR (attrs.at_threads_scaled);
7592 CHECKSUM_ATTR (attrs.at_upper_bound);
7593 CHECKSUM_ATTR (attrs.at_use_location);
7594 CHECKSUM_ATTR (attrs.at_use_UTF8);
7595 CHECKSUM_ATTR (attrs.at_variable_parameter);
7596 CHECKSUM_ATTR (attrs.at_virtuality);
7597 CHECKSUM_ATTR (attrs.at_visibility);
7598 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7599 CHECKSUM_ATTR (attrs.at_type);
7600 CHECKSUM_ATTR (attrs.at_friend);
7601 CHECKSUM_ATTR (attrs.at_alignment);
7602
7603 /* Checksum the child DIEs. */
7604 c = die->die_child;
7605 if (c) do {
7606 dw_attr_node *name_attr;
7607
7608 c = c->die_sib;
7609 name_attr = get_AT (c, DW_AT_name);
7610 if (is_template_instantiation (c))
7611 {
7612 /* Ignore instantiations of member type and function templates. */
7613 }
7614 else if (name_attr != NULL
7615 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7616 {
7617 /* Use a shallow checksum for named nested types and member
7618 functions. */
7619 CHECKSUM_ULEB128 ('S');
7620 CHECKSUM_ULEB128 (c->die_tag);
7621 CHECKSUM_STRING (AT_string (name_attr));
7622 }
7623 else
7624 {
7625 /* Use a deep checksum for other children. */
7626 /* Mark this DIE so it gets processed when unmarking. */
7627 if (c->die_mark == 0)
7628 c->die_mark = -1;
7629 die_checksum_ordered (c, ctx, mark);
7630 }
7631 } while (c != die->die_child);
7632
7633 CHECKSUM_ULEB128 (0);
7634 }
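
/* Summary of the single-letter markers used in the ordered checksum stream
   above: 'D' starts a DIE (followed by its tag), 'A' an attribute, 'C' a
   piece of surrounding context, 'E' ends a context prefix, 'N' a by-name
   reference from a pointer/reference/friend, 'R' a backward reference to an
   already-visited DIE, 'T' a reference that is checksummed in depth, 'S' a
   shallow (name-only) child, and a final 0 terminates the child list.  */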
7635
7636 /* Add a type name and tag to a hash. */
7637 static void
7638 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7639 {
7640 CHECKSUM_ULEB128 (tag);
7641 CHECKSUM_STRING (name);
7642 }
7643
7644 #undef CHECKSUM
7645 #undef CHECKSUM_STRING
7646 #undef CHECKSUM_ATTR
7647 #undef CHECKSUM_SLEB128
7648 #undef CHECKSUM_ULEB128
7649
7650 /* Generate the type signature for DIE. This is computed by generating an
7651 MD5 checksum over the DIE's tag, its relevant attributes, and its
7652 children. Attributes that are references to other DIEs are processed
7653 by recursion, using the MARK field to prevent infinite recursion.
7654 If the DIE is nested inside a namespace or another type, we also
7655 need to include that context in the signature. The lower 64 bits
7656 of the resulting MD5 checksum comprise the signature. */
7657
7658 static void
7659 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7660 {
7661 int mark;
7662 const char *name;
7663 unsigned char checksum[16];
7664 struct md5_ctx ctx;
7665 dw_die_ref decl;
7666 dw_die_ref parent;
7667
7668 name = get_AT_string (die, DW_AT_name);
7669 decl = get_AT_ref (die, DW_AT_specification);
7670 parent = get_die_parent (die);
7671
7672 /* First, compute a signature for just the type name (and its surrounding
7673 context, if any). This is stored in the type unit DIE for link-time
7674 ODR (one-definition rule) checking. */
7675
7676 if (is_cxx () && name != NULL)
7677 {
7678 md5_init_ctx (&ctx);
7679
7680 /* Checksum the names of surrounding namespaces and structures. */
7681 if (parent != NULL)
7682 checksum_die_context (parent, &ctx);
7683
7684 /* Checksum the current DIE. */
7685 die_odr_checksum (die->die_tag, name, &ctx);
7686 md5_finish_ctx (&ctx, checksum);
7687
7688 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7689 }
7690
7691 /* Next, compute the complete type signature. */
7692
7693 md5_init_ctx (&ctx);
7694 mark = 1;
7695 die->die_mark = mark;
7696
7697 /* Checksum the names of surrounding namespaces and structures. */
7698 if (parent != NULL)
7699 checksum_die_context (parent, &ctx);
7700
7701 /* Checksum the DIE and its children. */
7702 die_checksum_ordered (die, &ctx, &mark);
7703 unmark_all_dies (die);
7704 md5_finish_ctx (&ctx, checksum);
7705
7706 /* Store the signature in the type node and link the type DIE and the
7707 type node together. */
7708 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7709 DWARF_TYPE_SIGNATURE_SIZE);
7710 die->comdat_type_p = true;
7711 die->die_id.die_type_node = type_node;
7712 type_node->type_die = die;
7713
7714 /* If the DIE is a specification, link its declaration to the type node
7715 as well. */
7716 if (decl != NULL)
7717 {
7718 decl->comdat_type_p = true;
7719 decl->die_id.die_type_node = type_node;
7720 }
7721 }
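
/* For example, two translation units that both see the full definition of
   namespace N { struct S { int i; }; } compute identical 8-byte signatures
   here, so the linker can keep a single copy of the type unit; the separate
   DW_AT_GNU_odr_signature, which hashes only the qualified name, is meant
   to let tools flag cases where the names match but the full signatures do
   not (a one-definition-rule violation).  */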
7722
7723 /* Do the location expressions look the same? */
7724 static inline int
7725 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7726 {
7727 return loc1->dw_loc_opc == loc2->dw_loc_opc
7728 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7729 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7730 }
7731
7732 /* Do the values look the same? */
7733 static int
7734 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7735 {
7736 dw_loc_descr_ref loc1, loc2;
7737 rtx r1, r2;
7738
7739 if (v1->val_class != v2->val_class)
7740 return 0;
7741
7742 switch (v1->val_class)
7743 {
7744 case dw_val_class_const:
7745 case dw_val_class_const_implicit:
7746 return v1->v.val_int == v2->v.val_int;
7747 case dw_val_class_unsigned_const:
7748 case dw_val_class_unsigned_const_implicit:
7749 return v1->v.val_unsigned == v2->v.val_unsigned;
7750 case dw_val_class_const_double:
7751 return v1->v.val_double.high == v2->v.val_double.high
7752 && v1->v.val_double.low == v2->v.val_double.low;
7753 case dw_val_class_wide_int:
7754 return *v1->v.val_wide == *v2->v.val_wide;
7755 case dw_val_class_vec:
7756 if (v1->v.val_vec.length != v2->v.val_vec.length
7757 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7758 return 0;
7759 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7760 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7761 return 0;
7762 return 1;
7763 case dw_val_class_flag:
7764 return v1->v.val_flag == v2->v.val_flag;
7765 case dw_val_class_str:
7766 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7767
7768 case dw_val_class_addr:
7769 r1 = v1->v.val_addr;
7770 r2 = v2->v.val_addr;
7771 if (GET_CODE (r1) != GET_CODE (r2))
7772 return 0;
7773 return !rtx_equal_p (r1, r2);
7774
7775 case dw_val_class_offset:
7776 return v1->v.val_offset == v2->v.val_offset;
7777
7778 case dw_val_class_loc:
7779 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7780 loc1 && loc2;
7781 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7782 if (!same_loc_p (loc1, loc2, mark))
7783 return 0;
7784 return !loc1 && !loc2;
7785
7786 case dw_val_class_die_ref:
7787 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7788
7789 case dw_val_class_symview:
7790 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7791
7792 case dw_val_class_fde_ref:
7793 case dw_val_class_vms_delta:
7794 case dw_val_class_lbl_id:
7795 case dw_val_class_lineptr:
7796 case dw_val_class_macptr:
7797 case dw_val_class_loclistsptr:
7798 case dw_val_class_high_pc:
7799 return 1;
7800
7801 case dw_val_class_file:
7802 case dw_val_class_file_implicit:
7803 return v1->v.val_file == v2->v.val_file;
7804
7805 case dw_val_class_data8:
7806 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7807
7808 default:
7809 return 1;
7810 }
7811 }
7812
7813 /* Do the attributes look the same? */
7814
7815 static int
7816 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7817 {
7818 if (at1->dw_attr != at2->dw_attr)
7819 return 0;
7820
7821 /* We don't care that this was compiled with a different compiler
7822 snapshot; if the output is the same, that's what matters. */
7823 if (at1->dw_attr == DW_AT_producer)
7824 return 1;
7825
7826 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7827 }
7828
7829 /* Do the DIEs look the same? */
7830
7831 static int
7832 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7833 {
7834 dw_die_ref c1, c2;
7835 dw_attr_node *a1;
7836 unsigned ix;
7837
7838 /* To avoid infinite recursion. */
7839 if (die1->die_mark)
7840 return die1->die_mark == die2->die_mark;
7841 die1->die_mark = die2->die_mark = ++(*mark);
7842
7843 if (die1->die_tag != die2->die_tag)
7844 return 0;
7845
7846 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7847 return 0;
7848
7849 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7850 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7851 return 0;
7852
7853 c1 = die1->die_child;
7854 c2 = die2->die_child;
7855 if (! c1)
7856 {
7857 if (c2)
7858 return 0;
7859 }
7860 else
7861 for (;;)
7862 {
7863 if (!same_die_p (c1, c2, mark))
7864 return 0;
7865 c1 = c1->die_sib;
7866 c2 = c2->die_sib;
7867 if (c1 == die1->die_child)
7868 {
7869 if (c2 == die2->die_child)
7870 break;
7871 else
7872 return 0;
7873 }
7874 }
7875
7876 return 1;
7877 }
7878
7879 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7880 children, and set die_symbol. */
7881
7882 static void
7883 compute_comp_unit_symbol (dw_die_ref unit_die)
7884 {
7885 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7886 const char *base = die_name ? lbasename (die_name) : "anonymous";
7887 char *name = XALLOCAVEC (char, strlen (base) + 64);
7888 char *p;
7889 int i, mark;
7890 unsigned char checksum[16];
7891 struct md5_ctx ctx;
7892
7893 /* Compute the checksum of the DIE, then append part of it as hex digits to
7894 the filename of the unit. */
7895
7896 md5_init_ctx (&ctx);
7897 mark = 0;
7898 die_checksum (unit_die, &ctx, &mark);
7899 unmark_all_dies (unit_die);
7900 md5_finish_ctx (&ctx, checksum);
7901
7902 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7903 not start with a letter but with anything valid for filenames and
7904 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7905 character is not a letter. */
7906 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7907 clean_symbol_name (name);
7908
7909 p = name + strlen (name);
7910 for (i = 0; i < 4; i++)
7911 {
7912 sprintf (p, "%.2x", checksum[i]);
7913 p += 2;
7914 }
7915
7916 unit_die->die_id.die_symbol = xstrdup (name);
7917 }
7918
7919 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7920
7921 static int
7922 is_type_die (dw_die_ref die)
7923 {
7924 switch (die->die_tag)
7925 {
7926 case DW_TAG_array_type:
7927 case DW_TAG_class_type:
7928 case DW_TAG_interface_type:
7929 case DW_TAG_enumeration_type:
7930 case DW_TAG_pointer_type:
7931 case DW_TAG_reference_type:
7932 case DW_TAG_rvalue_reference_type:
7933 case DW_TAG_string_type:
7934 case DW_TAG_structure_type:
7935 case DW_TAG_subroutine_type:
7936 case DW_TAG_union_type:
7937 case DW_TAG_ptr_to_member_type:
7938 case DW_TAG_set_type:
7939 case DW_TAG_subrange_type:
7940 case DW_TAG_base_type:
7941 case DW_TAG_const_type:
7942 case DW_TAG_file_type:
7943 case DW_TAG_packed_type:
7944 case DW_TAG_volatile_type:
7945 case DW_TAG_typedef:
7946 return 1;
7947 default:
7948 return 0;
7949 }
7950 }
7951
7952 /* Returns true iff C is a compile-unit or skeleton-unit DIE. */
7953
7954 static inline bool
7955 is_cu_die (dw_die_ref c)
7956 {
7957 return c && (c->die_tag == DW_TAG_compile_unit
7958 || c->die_tag == DW_TAG_skeleton_unit);
7959 }
7960
7961 /* Returns true iff C is a unit DIE of some sort. */
7962
7963 static inline bool
7964 is_unit_die (dw_die_ref c)
7965 {
7966 return c && (c->die_tag == DW_TAG_compile_unit
7967 || c->die_tag == DW_TAG_partial_unit
7968 || c->die_tag == DW_TAG_type_unit
7969 || c->die_tag == DW_TAG_skeleton_unit);
7970 }
7971
7972 /* Returns true iff C is a namespace DIE. */
7973
7974 static inline bool
7975 is_namespace_die (dw_die_ref c)
7976 {
7977 return c && c->die_tag == DW_TAG_namespace;
7978 }
7979
7980 /* Return non-zero if this DIE is a template parameter. */
7981
7982 static inline bool
7983 is_template_parameter (dw_die_ref die)
7984 {
7985 switch (die->die_tag)
7986 {
7987 case DW_TAG_template_type_param:
7988 case DW_TAG_template_value_param:
7989 case DW_TAG_GNU_template_template_param:
7990 case DW_TAG_GNU_template_parameter_pack:
7991 return true;
7992 default:
7993 return false;
7994 }
7995 }
7996
7997 /* Return non-zero if this DIE represents a template instantiation. */
7998
7999 static inline bool
8000 is_template_instantiation (dw_die_ref die)
8001 {
8002 dw_die_ref c;
8003
8004 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
8005 return false;
8006 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
8007 return false;
8008 }
8009
8010 static char *
8011 gen_internal_sym (const char *prefix)
8012 {
8013 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
8014
8015 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
8016 return xstrdup (buf);
8017 }
8018
8019 /* Return non-zero if this DIE is a declaration. */
8020
8021 static int
8022 is_declaration_die (dw_die_ref die)
8023 {
8024 dw_attr_node *a;
8025 unsigned ix;
8026
8027 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8028 if (a->dw_attr == DW_AT_declaration)
8029 return 1;
8030
8031 return 0;
8032 }
8033
8034 /* Return non-zero if this DIE is nested inside a subprogram. */
8035
8036 static int
8037 is_nested_in_subprogram (dw_die_ref die)
8038 {
8039 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8040
8041 if (decl == NULL)
8042 decl = die;
8043 return local_scope_p (decl);
8044 }
8045
8046 /* Return non-zero if this DIE contains a defining declaration of a
8047 subprogram. */
8048
8049 static int
8050 contains_subprogram_definition (dw_die_ref die)
8051 {
8052 dw_die_ref c;
8053
8054 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8055 return 1;
8056 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8057 return 0;
8058 }
8059
8060 /* Return non-zero if this is a type DIE that should be moved to a
8061 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8062 unit type. */
8063
8064 static int
8065 should_move_die_to_comdat (dw_die_ref die)
8066 {
8067 switch (die->die_tag)
8068 {
8069 case DW_TAG_class_type:
8070 case DW_TAG_structure_type:
8071 case DW_TAG_enumeration_type:
8072 case DW_TAG_union_type:
8073 /* Don't move declarations, inlined instances, types nested in a
8074 subprogram, or types that contain subprogram definitions. */
8075 if (is_declaration_die (die)
8076 || get_AT (die, DW_AT_abstract_origin)
8077 || is_nested_in_subprogram (die)
8078 || contains_subprogram_definition (die))
8079 return 0;
8080 return 1;
8081 case DW_TAG_array_type:
8082 case DW_TAG_interface_type:
8083 case DW_TAG_pointer_type:
8084 case DW_TAG_reference_type:
8085 case DW_TAG_rvalue_reference_type:
8086 case DW_TAG_string_type:
8087 case DW_TAG_subroutine_type:
8088 case DW_TAG_ptr_to_member_type:
8089 case DW_TAG_set_type:
8090 case DW_TAG_subrange_type:
8091 case DW_TAG_base_type:
8092 case DW_TAG_const_type:
8093 case DW_TAG_file_type:
8094 case DW_TAG_packed_type:
8095 case DW_TAG_volatile_type:
8096 case DW_TAG_typedef:
8097 default:
8098 return 0;
8099 }
8100 }
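
/* For example, a complete class or enum defined at namespace scope is
   broken out into its own comdat unit, while a bare forward declaration,
   an inlined instance, or a class defined locally inside a function body
   stays in the main compile unit, as does anything in the second group of
   tags above (pointers, typedefs, base types, and so on).  */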
8101
8102 /* Make a clone of DIE. */
8103
8104 static dw_die_ref
8105 clone_die (dw_die_ref die)
8106 {
8107 dw_die_ref clone = new_die_raw (die->die_tag);
8108 dw_attr_node *a;
8109 unsigned ix;
8110
8111 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8112 add_dwarf_attr (clone, a);
8113
8114 return clone;
8115 }
8116
8117 /* Make a clone of the tree rooted at DIE. */
8118
8119 static dw_die_ref
8120 clone_tree (dw_die_ref die)
8121 {
8122 dw_die_ref c;
8123 dw_die_ref clone = clone_die (die);
8124
8125 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8126
8127 return clone;
8128 }
8129
8130 /* Make a clone of DIE as a declaration. */
8131
8132 static dw_die_ref
8133 clone_as_declaration (dw_die_ref die)
8134 {
8135 dw_die_ref clone;
8136 dw_die_ref decl;
8137 dw_attr_node *a;
8138 unsigned ix;
8139
8140 /* If the DIE is already a declaration, just clone it. */
8141 if (is_declaration_die (die))
8142 return clone_die (die);
8143
8144 /* If the DIE is a specification, just clone its declaration DIE. */
8145 decl = get_AT_ref (die, DW_AT_specification);
8146 if (decl != NULL)
8147 {
8148 clone = clone_die (decl);
8149 if (die->comdat_type_p)
8150 add_AT_die_ref (clone, DW_AT_signature, die);
8151 return clone;
8152 }
8153
8154 clone = new_die_raw (die->die_tag);
8155
8156 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8157 {
8158 /* We don't want to copy over all attributes.
8159 For example we don't want DW_AT_byte_size because otherwise we will no
8160 longer have a declaration and GDB will treat it as a definition. */
8161
8162 switch (a->dw_attr)
8163 {
8164 case DW_AT_abstract_origin:
8165 case DW_AT_artificial:
8166 case DW_AT_containing_type:
8167 case DW_AT_external:
8168 case DW_AT_name:
8169 case DW_AT_type:
8170 case DW_AT_virtuality:
8171 case DW_AT_linkage_name:
8172 case DW_AT_MIPS_linkage_name:
8173 add_dwarf_attr (clone, a);
8174 break;
8175 case DW_AT_byte_size:
8176 case DW_AT_alignment:
8177 default:
8178 break;
8179 }
8180 }
8181
8182 if (die->comdat_type_p)
8183 add_AT_die_ref (clone, DW_AT_signature, die);
8184
8185 add_AT_flag (clone, DW_AT_declaration, 1);
8186 return clone;
8187 }
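
/* For instance, cloning a complete structure type this way keeps
   DW_AT_name (and the other attributes kept by the switch above) but
   deliberately omits DW_AT_byte_size and adds DW_AT_declaration, so
   debuggers see an incomplete type that is resolved through
   DW_AT_signature to the comdat definition when one exists.  */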
8188
8189
8190 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8191
8192 struct decl_table_entry
8193 {
8194 dw_die_ref orig;
8195 dw_die_ref copy;
8196 };
8197
8198 /* Helpers to manipulate hash table of copied declarations. */
8199
8200 /* Hashtable helpers. */
8201
8202 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8203 {
8204 typedef die_struct *compare_type;
8205 static inline hashval_t hash (const decl_table_entry *);
8206 static inline bool equal (const decl_table_entry *, const die_struct *);
8207 };
8208
8209 inline hashval_t
8210 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8211 {
8212 return htab_hash_pointer (entry->orig);
8213 }
8214
8215 inline bool
8216 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8217 const die_struct *entry2)
8218 {
8219 return entry1->orig == entry2;
8220 }
8221
8222 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8223
8224 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8225 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8226 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8227 to check if the ancestor has already been copied into UNIT. */
8228
8229 static dw_die_ref
8230 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8231 decl_hash_type *decl_table)
8232 {
8233 dw_die_ref parent = die->die_parent;
8234 dw_die_ref new_parent = unit;
8235 dw_die_ref copy;
8236 decl_table_entry **slot = NULL;
8237 struct decl_table_entry *entry = NULL;
8238
8239 /* If DIE refers to a stub, unfold it so that we get the appropriate
8240 DIE registered as orig in decl_table. */
8241 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8242 die = c;
8243
8244 if (decl_table)
8245 {
8246 /* Check if the entry has already been copied to UNIT. */
8247 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8248 INSERT);
8249 if (*slot != HTAB_EMPTY_ENTRY)
8250 {
8251 entry = *slot;
8252 return entry->copy;
8253 }
8254
8255 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8256 entry = XCNEW (struct decl_table_entry);
8257 entry->orig = die;
8258 entry->copy = NULL;
8259 *slot = entry;
8260 }
8261
8262 if (parent != NULL)
8263 {
8264 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8265 if (spec != NULL)
8266 parent = spec;
8267 if (!is_unit_die (parent))
8268 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8269 }
8270
8271 copy = clone_as_declaration (die);
8272 add_child_die (new_parent, copy);
8273
8274 if (decl_table)
8275 {
8276 /* Record the pointer to the copy. */
8277 entry->copy = copy;
8278 }
8279
8280 return copy;
8281 }

8282 /* Copy the declaration context to the new type unit DIE. This includes
8283 any surrounding namespace or type declarations. If the DIE has an
8284 AT_specification attribute, it also includes attributes and children
8285 attached to the specification, and returns a pointer to the original
8286 parent of the declaration DIE. Returns NULL otherwise. */
8287
8288 static dw_die_ref
8289 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8290 {
8291 dw_die_ref decl;
8292 dw_die_ref new_decl;
8293 dw_die_ref orig_parent = NULL;
8294
8295 decl = get_AT_ref (die, DW_AT_specification);
8296 if (decl == NULL)
8297 decl = die;
8298 else
8299 {
8300 unsigned ix;
8301 dw_die_ref c;
8302 dw_attr_node *a;
8303
8304 /* The original DIE will be changed to a declaration, and must
8305 be moved to be a child of the original declaration DIE. */
8306 orig_parent = decl->die_parent;
8307
8308 /* Copy the type node pointer from the new DIE to the original
8309 declaration DIE so we can forward references later. */
8310 decl->comdat_type_p = true;
8311 decl->die_id.die_type_node = die->die_id.die_type_node;
8312
8313 remove_AT (die, DW_AT_specification);
8314
8315 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8316 {
8317 if (a->dw_attr != DW_AT_name
8318 && a->dw_attr != DW_AT_declaration
8319 && a->dw_attr != DW_AT_external)
8320 add_dwarf_attr (die, a);
8321 }
8322
8323 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8324 }
8325
8326 if (decl->die_parent != NULL
8327 && !is_unit_die (decl->die_parent))
8328 {
8329 new_decl = copy_ancestor_tree (unit, decl, NULL);
8330 if (new_decl != NULL)
8331 {
8332 remove_AT (new_decl, DW_AT_signature);
8333 add_AT_specification (die, new_decl);
8334 }
8335 }
8336
8337 return orig_parent;
8338 }
8339
8340 /* Generate the skeleton ancestor tree for the given NODE, then clone
8341 the DIE and add the clone into the tree. */
8342
8343 static void
8344 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8345 {
8346 if (node->new_die != NULL)
8347 return;
8348
8349 node->new_die = clone_as_declaration (node->old_die);
8350
8351 if (node->parent != NULL)
8352 {
8353 generate_skeleton_ancestor_tree (node->parent);
8354 add_child_die (node->parent->new_die, node->new_die);
8355 }
8356 }
8357
8358 /* Generate a skeleton tree of DIEs containing any declarations that are
8359 found in the original tree. We traverse the tree looking for declaration
8360 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8361
8362 static void
8363 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8364 {
8365 skeleton_chain_node node;
8366 dw_die_ref c;
8367 dw_die_ref first;
8368 dw_die_ref prev = NULL;
8369 dw_die_ref next = NULL;
8370
8371 node.parent = parent;
8372
8373 first = c = parent->old_die->die_child;
8374 if (c)
8375 next = c->die_sib;
8376 if (c) do {
8377 if (prev == NULL || prev->die_sib == c)
8378 prev = c;
8379 c = next;
8380 next = (c == first ? NULL : c->die_sib);
8381 node.old_die = c;
8382 node.new_die = NULL;
8383 if (is_declaration_die (c))
8384 {
8385 if (is_template_instantiation (c))
8386 {
8387 /* Instantiated templates do not need to be cloned into the
8388 type unit. Just move the DIE and its children back to
8389 the skeleton tree (in the main CU). */
8390 remove_child_with_prev (c, prev);
8391 add_child_die (parent->new_die, c);
8392 c = prev;
8393 }
8394 else if (c->comdat_type_p)
8395 {
8396 /* This is the skeleton of a type broken out by an earlier
8397 break_out_comdat_types call. Clone the existing DIE, but keep
8398 the children under the original (which is in the main CU). */
8399 dw_die_ref clone = clone_die (c);
8400
8401 replace_child (c, clone, prev);
8402 generate_skeleton_ancestor_tree (parent);
8403 add_child_die (parent->new_die, c);
8404 c = clone;
8405 continue;
8406 }
8407 else
8408 {
8409 /* Clone the existing DIE, move the original to the skeleton
8410 tree (which is in the main CU), and put the clone, with
8411 all the original's children, where the original came from
8412 (which is about to be moved to the type unit). */
8413 dw_die_ref clone = clone_die (c);
8414 move_all_children (c, clone);
8415
8416 /* If the original has a DW_AT_object_pointer attribute,
8417 it would now point to a child DIE just moved to the
8418 cloned tree, so we need to remove that attribute from
8419 the original. */
8420 remove_AT (c, DW_AT_object_pointer);
8421
8422 replace_child (c, clone, prev);
8423 generate_skeleton_ancestor_tree (parent);
8424 add_child_die (parent->new_die, c);
8425 node.old_die = clone;
8426 node.new_die = c;
8427 c = clone;
8428 }
8429 }
8430 generate_skeleton_bottom_up (&node);
8431 } while (next != NULL);
8432 }
8433
8434 /* Wrapper function for generate_skeleton_bottom_up. */
8435
8436 static dw_die_ref
8437 generate_skeleton (dw_die_ref die)
8438 {
8439 skeleton_chain_node node;
8440
8441 node.old_die = die;
8442 node.new_die = NULL;
8443 node.parent = NULL;
8444
8445 /* If this type definition is nested inside another type,
8446 and is not an instantiation of a template, always leave
8447 at least a declaration in its place. */
8448 if (die->die_parent != NULL
8449 && is_type_die (die->die_parent)
8450 && !is_template_instantiation (die))
8451 node.new_die = clone_as_declaration (die);
8452
8453 generate_skeleton_bottom_up (&node);
8454 return node.new_die;
8455 }
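
/* For example, when a class being moved to a type unit contains member
   function declarations whose out-of-line definitions in the main CU point
   back at them via DW_AT_specification, those original declaration DIEs are
   kept in a skeleton copy of the class that remains in the main CU, so the
   existing specification links stay within the compile unit; clones of the
   declarations go to the type unit instead.  */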
8456
8457 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8458 declaration. The original DIE is moved to a new compile unit so that
8459 existing references to it follow it to the new location. If any of the
8460 original DIE's descendants is a declaration, we need to replace the
8461 original DIE with a skeleton tree and move the declarations back into the
8462 skeleton tree. */
8463
8464 static dw_die_ref
8465 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8466 dw_die_ref prev)
8467 {
8468 dw_die_ref skeleton, orig_parent;
8469
8470 /* Copy the declaration context to the type unit DIE. If the returned
8471 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8472 that DIE. */
8473 orig_parent = copy_declaration_context (unit, child);
8474
8475 skeleton = generate_skeleton (child);
8476 if (skeleton == NULL)
8477 remove_child_with_prev (child, prev);
8478 else
8479 {
8480 skeleton->comdat_type_p = true;
8481 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8482
8483 /* If the original DIE was a specification, we need to put
8484 the skeleton under the parent DIE of the declaration.
8485 This leaves the original declaration in the tree, but
8486 it will be pruned later since there are no longer any
8487 references to it. */
8488 if (orig_parent != NULL)
8489 {
8490 remove_child_with_prev (child, prev);
8491 add_child_die (orig_parent, skeleton);
8492 }
8493 else
8494 replace_child (child, skeleton, prev);
8495 }
8496
8497 return skeleton;
8498 }
8499
8500 static void
8501 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8502 comdat_type_node *type_node,
8503 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8504
8505 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DWARF
8506 procedure DIE, put it under TYPE_NODE and return the copy. Continue looking for
8507 DWARF procedure references in the DW_AT_location attribute. */
8508
8509 static dw_die_ref
8510 copy_dwarf_procedure (dw_die_ref die,
8511 comdat_type_node *type_node,
8512 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8513 {
8514 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8515
8516 /* DWARF procedures are not supposed to have children... */
8517 gcc_assert (die->die_child == NULL);
8518
8519 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8520 gcc_assert (vec_safe_length (die->die_attr) == 1
8521 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8522
8523 /* Do not copy DWARF procedures more than once. */
8524 bool existed;
8525 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8526 if (existed)
8527 return die_copy;
8528
8529 die_copy = clone_die (die);
8530 add_child_die (type_node->root_die, die_copy);
8531 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8532 return die_copy;
8533 }
8534
8535 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8536 procedures in DIE's attributes. */
8537
8538 static void
8539 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8540 comdat_type_node *type_node,
8541 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8542 {
8543 dw_attr_node *a;
8544 unsigned i;
8545
8546 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8547 {
8548 dw_loc_descr_ref loc;
8549
8550 if (a->dw_attr_val.val_class != dw_val_class_loc)
8551 continue;
8552
8553 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8554 {
8555 switch (loc->dw_loc_opc)
8556 {
8557 case DW_OP_call2:
8558 case DW_OP_call4:
8559 case DW_OP_call_ref:
8560 gcc_assert (loc->dw_loc_oprnd1.val_class
8561 == dw_val_class_die_ref);
8562 loc->dw_loc_oprnd1.v.val_die_ref.die
8563 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8564 type_node,
8565 copied_dwarf_procs);
8566
8567 default:
8568 break;
8569 }
8570 }
8571 }
8572 }
8573
8574 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8575 rewrite references to point to the copies.
8576
8577 References are looked for in DIE's attributes and recursively in all its
8578 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8579 mapping from old DWARF procedures to their copies. It is used to avoid
8580 copying the same DWARF procedure twice under TYPE_NODE. */
8581
8582 static void
8583 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8584 comdat_type_node *type_node,
8585 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8586 {
8587 dw_die_ref c;
8588
8589 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8590 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8591 type_node,
8592 copied_dwarf_procs));
8593 }
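
/* An illustrative scenario (typical for Ada or Fortran dynamic types): an
   array type whose DW_AT_byte_size or bound is a location expression that
   invokes a DW_TAG_dwarf_procedure through DW_OP_call4.  When the array
   type is broken out into a type unit, the procedure DIE is cloned under
   the unit's root and the call operand is redirected to the clone, because
   a DWARF expression call cannot reach a procedure in another section.  */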
8594
8595 /* Traverse the DIE and set up additional .debug_types or .debug_info
8596 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8597 section. */
8598
8599 static void
8600 break_out_comdat_types (dw_die_ref die)
8601 {
8602 dw_die_ref c;
8603 dw_die_ref first;
8604 dw_die_ref prev = NULL;
8605 dw_die_ref next = NULL;
8606 dw_die_ref unit = NULL;
8607
8608 first = c = die->die_child;
8609 if (c)
8610 next = c->die_sib;
8611 if (c) do {
8612 if (prev == NULL || prev->die_sib == c)
8613 prev = c;
8614 c = next;
8615 next = (c == first ? NULL : c->die_sib);
8616 if (should_move_die_to_comdat (c))
8617 {
8618 dw_die_ref replacement;
8619 comdat_type_node *type_node;
8620
8621 /* Break out nested types into their own type units. */
8622 break_out_comdat_types (c);
8623
8624 /* Create a new type unit DIE as the root for the new tree. */
8625 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8626 add_AT_unsigned (unit, DW_AT_language,
8627 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8628
8629 /* Add the new unit's type DIE into the comdat type list. */
8630 type_node = ggc_cleared_alloc<comdat_type_node> ();
8631 type_node->root_die = unit;
8632 type_node->next = comdat_type_list;
8633 comdat_type_list = type_node;
8634
8635 /* Generate the type signature. */
8636 generate_type_signature (c, type_node);
8637
8638 /* Copy the declaration context, attributes, and children of the
8639 declaration into the new type unit DIE, then remove this DIE
8640 from the main CU (or replace it with a skeleton if necessary). */
8641 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8642 type_node->skeleton_die = replacement;
8643
8644 /* Add the DIE to the new compunit. */
8645 add_child_die (unit, c);
8646
8647 /* Types can reference DWARF procedures for type size or data location
8648 expressions. Calls in DWARF expressions cannot target procedures
8649 that are not in the same section. So we must copy DWARF procedures
8650 along with this type and then rewrite references to them. */
8651 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8652 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8653
8654 if (replacement != NULL)
8655 c = replacement;
8656 }
8657 else if (c->die_tag == DW_TAG_namespace
8658 || c->die_tag == DW_TAG_class_type
8659 || c->die_tag == DW_TAG_structure_type
8660 || c->die_tag == DW_TAG_union_type)
8661 {
8662 /* Look for nested types that can be broken out. */
8663 break_out_comdat_types (c);
8664 }
8665 } while (next != NULL);
8666 }
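
/* For a nested definition such as

       struct Outer { struct Inner { int i; }; };

   the recursive call above first breaks Inner out into its own type unit
   (leaving a declaration in its place inside Outer), and then Outer itself
   is moved; each new unit inherits DW_AT_language from the compile unit
   and gets its signature from generate_type_signature.  */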
8667
8668 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8669 Enter all the cloned children into the hash table decl_table. */
8670
8671 static dw_die_ref
8672 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8673 {
8674 dw_die_ref c;
8675 dw_die_ref clone;
8676 struct decl_table_entry *entry;
8677 decl_table_entry **slot;
8678
8679 if (die->die_tag == DW_TAG_subprogram)
8680 clone = clone_as_declaration (die);
8681 else
8682 clone = clone_die (die);
8683
8684 slot = decl_table->find_slot_with_hash (die,
8685 htab_hash_pointer (die), INSERT);
8686
8687 /* Assert that DIE isn't in the hash table yet. If it were already
8688 there, its ancestors would necessarily be there as well, and
8689 clone_tree_partial wouldn't have been called. */
8690 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8691
8692 entry = XCNEW (struct decl_table_entry);
8693 entry->orig = die;
8694 entry->copy = clone;
8695 *slot = entry;
8696
8697 if (die->die_tag != DW_TAG_subprogram)
8698 FOR_EACH_CHILD (die, c,
8699 add_child_die (clone, clone_tree_partial (c, decl_table)));
8700
8701 return clone;
8702 }
8703
8704 /* Walk the DIE and its children, looking for references to incomplete
8705 or trivial types that are unmarked (i.e., that are not in the current
8706 type_unit). */
8707
8708 static void
8709 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8710 {
8711 dw_die_ref c;
8712 dw_attr_node *a;
8713 unsigned ix;
8714
8715 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8716 {
8717 if (AT_class (a) == dw_val_class_die_ref)
8718 {
8719 dw_die_ref targ = AT_ref (a);
8720 decl_table_entry **slot;
8721 struct decl_table_entry *entry;
8722
8723 if (targ->die_mark != 0 || targ->comdat_type_p)
8724 continue;
8725
8726 slot = decl_table->find_slot_with_hash (targ,
8727 htab_hash_pointer (targ),
8728 INSERT);
8729
8730 if (*slot != HTAB_EMPTY_ENTRY)
8731 {
8732 /* TARG has already been copied, so we just need to
8733 modify the reference to point to the copy. */
8734 entry = *slot;
8735 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8736 }
8737 else
8738 {
8739 dw_die_ref parent = unit;
8740 dw_die_ref copy = clone_die (targ);
8741
8742 /* Record in DECL_TABLE that TARG has been copied.
8743 Need to do this now, before the recursive call,
8744 because DECL_TABLE may be expanded and SLOT
8745 would no longer be a valid pointer. */
8746 entry = XCNEW (struct decl_table_entry);
8747 entry->orig = targ;
8748 entry->copy = copy;
8749 *slot = entry;
8750
8751 /* If TARG is not a declaration DIE, we need to copy its
8752 children. */
8753 if (!is_declaration_die (targ))
8754 {
8755 FOR_EACH_CHILD (
8756 targ, c,
8757 add_child_die (copy,
8758 clone_tree_partial (c, decl_table)));
8759 }
8760
8761 /* Make sure the cloned tree is marked as part of the
8762 type unit. */
8763 mark_dies (copy);
8764
8765 /* If TARG has surrounding context, copy its ancestor tree
8766 into the new type unit. */
8767 if (targ->die_parent != NULL
8768 && !is_unit_die (targ->die_parent))
8769 parent = copy_ancestor_tree (unit, targ->die_parent,
8770 decl_table);
8771
8772 add_child_die (parent, copy);
8773 a->dw_attr_val.v.val_die_ref.die = copy;
8774
8775 /* Make sure the newly-copied DIE is walked. If it was
8776 installed in a previously-added context, it won't
8777 get visited otherwise. */
8778 if (parent != unit)
8779 {
8780 /* Find the highest point of the newly-added tree,
8781 mark each node along the way, and walk from there. */
8782 parent->die_mark = 1;
8783 while (parent->die_parent
8784 && parent->die_parent->die_mark == 0)
8785 {
8786 parent = parent->die_parent;
8787 parent->die_mark = 1;
8788 }
8789 copy_decls_walk (unit, parent, decl_table);
8790 }
8791 }
8792 }
8793 }
8794
8795 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8796 }
8797
8798 /* Collect skeleton dies in DIE created by break_out_comdat_types already
8799 and record them in DECL_TABLE. */
8800
8801 static void
8802 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8803 {
8804 dw_die_ref c;
8805
8806 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8807 {
8808 dw_die_ref targ = AT_ref (a);
8809 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8810 decl_table_entry **slot
8811 = decl_table->find_slot_with_hash (targ,
8812 htab_hash_pointer (targ),
8813 INSERT);
8814 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8815 /* Record in DECL_TABLE that TARG has been already copied
8816 by remove_child_or_replace_with_skeleton. */
8817 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8818 entry->orig = targ;
8819 entry->copy = die;
8820 *slot = entry;
8821 }
8822 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8823 }
8824
8825 /* Copy declarations for "unworthy" types into the new comdat section.
8826 Incomplete types, modified types, and certain other types aren't broken
8827 out into comdat sections of their own, so they don't have a signature,
8828 and we need to copy the declaration into the same section so that we
8829 don't have an external reference. */
8830
8831 static void
8832 copy_decls_for_unworthy_types (dw_die_ref unit)
8833 {
8834 mark_dies (unit);
8835 decl_hash_type decl_table (10);
8836 collect_skeleton_dies (unit, &decl_table);
8837 copy_decls_walk (unit, unit, &decl_table);
8838 unmark_dies (unit);
8839 }
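
/* For example, if a type in this unit has a member of type
   "const struct incomplete *", neither the pointer type, the const
   variant, nor the incomplete structure has a signature of its own;
   copy_decls_walk clones declaration stubs for them into this unit so
   that the reference does not have to leave the section.  */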
8840
8841 /* Traverse the DIE and add a sibling attribute if it may have the
8842 effect of speeding up access to siblings. To save some space,
8843 avoid generating sibling attributes for DIEs without children. */
8844
8845 static void
8846 add_sibling_attributes (dw_die_ref die)
8847 {
8848 dw_die_ref c;
8849
8850 if (! die->die_child)
8851 return;
8852
8853 if (die->die_parent && die != die->die_parent->die_child)
8854 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8855
8856 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8857 }
8858
8859 /* Output all location lists for the DIE and its children. */
8860
8861 static void
8862 output_location_lists (dw_die_ref die)
8863 {
8864 dw_die_ref c;
8865 dw_attr_node *a;
8866 unsigned ix;
8867
8868 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8869 if (AT_class (a) == dw_val_class_loc_list)
8870 output_loc_list (AT_loc_list (a));
8871
8872 FOR_EACH_CHILD (die, c, output_location_lists (c));
8873 }
8874
8875 /* During assign_location_list_indexes and output_loclists_offsets this is
8876 the current index; after those passes it is the number of assigned indexes
8877 (i.e. how large the .debug_loclists* offset table should be). */
8878 static unsigned int loc_list_idx;
8879
8880 /* Output all location list offsets for the DIE and its children. */
8881
8882 static void
8883 output_loclists_offsets (dw_die_ref die)
8884 {
8885 dw_die_ref c;
8886 dw_attr_node *a;
8887 unsigned ix;
8888
8889 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8890 if (AT_class (a) == dw_val_class_loc_list)
8891 {
8892 dw_loc_list_ref l = AT_loc_list (a);
8893 if (l->offset_emitted)
8894 continue;
8895 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8896 loc_section_label, NULL);
8897 gcc_assert (l->hash == loc_list_idx);
8898 loc_list_idx++;
8899 l->offset_emitted = true;
8900 }
8901
8902 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8903 }
8904
8905 /* Recursively set indexes of location lists. */
8906
8907 static void
8908 assign_location_list_indexes (dw_die_ref die)
8909 {
8910 dw_die_ref c;
8911 dw_attr_node *a;
8912 unsigned ix;
8913
8914 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8915 if (AT_class (a) == dw_val_class_loc_list)
8916 {
8917 dw_loc_list_ref list = AT_loc_list (a);
8918 if (!list->num_assigned)
8919 {
8920 list->num_assigned = true;
8921 list->hash = loc_list_idx++;
8922 }
8923 }
8924
8925 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8926 }
8927
8928 /* We want to limit the number of external references, because they are
8929 larger than local references: a relocation takes multiple words, and
8930 even a sig8 reference is always eight bytes, whereas a local reference
8931 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8932 So if we encounter multiple external references to the same type DIE, we
8933 make a local typedef stub for it and redirect all references there.
8934
8935 This is the element of the hash table for keeping track of these
8936 references. */
8937
8938 struct external_ref
8939 {
8940 dw_die_ref type;
8941 dw_die_ref stub;
8942 unsigned n_refs;
8943 };
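/* Illustrative note (not part of the original sources): with 32-bit DWARF,
   each DW_FORM_ref_addr reference to an external type costs an offset-sized
   (or, for DWARF 2, address-sized) relocated value, and each
   DW_FORM_ref_sig8 reference costs eight bytes.  Redirecting, say, ten such
   references through one local stub DIE - as dwarf2_build_local_stub below
   does when it sees more than one reference - leaves ten offset-sized local
   references plus a single stub that carries at most one external reference
   itself, so this table only needs to count references and remember the
   stub, if any.  */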
8944
8945 /* Hashtable helpers. */
8946
8947 struct external_ref_hasher : free_ptr_hash <external_ref>
8948 {
8949 static inline hashval_t hash (const external_ref *);
8950 static inline bool equal (const external_ref *, const external_ref *);
8951 };
8952
8953 inline hashval_t
8954 external_ref_hasher::hash (const external_ref *r)
8955 {
8956 dw_die_ref die = r->type;
8957 hashval_t h = 0;
8958
8959 /* We can't use the address of the DIE for hashing, because
8960 that will make the order of the stub DIEs non-deterministic. */
8961 if (! die->comdat_type_p)
8962 /* We have a symbol; use it to compute a hash. */
8963 h = htab_hash_string (die->die_id.die_symbol);
8964 else
8965 {
8966 /* We have a type signature; use a subset of the bits as the hash.
8967 The 8-byte signature is at least as large as hashval_t. */
8968 comdat_type_node *type_node = die->die_id.die_type_node;
8969 memcpy (&h, type_node->signature, sizeof (h));
8970 }
8971 return h;
8972 }
8973
8974 inline bool
8975 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8976 {
8977 return r1->type == r2->type;
8978 }
8979
8980 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8981
8982 /* Return a pointer to the external_ref for references to DIE. */
8983
8984 static struct external_ref *
8985 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8986 {
8987 struct external_ref ref, *ref_p;
8988 external_ref **slot;
8989
8990 ref.type = die;
8991 slot = map->find_slot (&ref, INSERT);
8992 if (*slot != HTAB_EMPTY_ENTRY)
8993 return *slot;
8994
8995 ref_p = XCNEW (struct external_ref);
8996 ref_p->type = die;
8997 *slot = ref_p;
8998 return ref_p;
8999 }
9000
9001 /* Subroutine of optimize_external_refs, below.
9002
9003 If we see a type skeleton, record it as our stub. If we see external
9004 references, remember how many we've seen. */
9005
9006 static void
9007 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
9008 {
9009 dw_die_ref c;
9010 dw_attr_node *a;
9011 unsigned ix;
9012 struct external_ref *ref_p;
9013
9014 if (is_type_die (die)
9015 && (c = get_AT_ref (die, DW_AT_signature)))
9016 {
9017 /* This is a local skeleton; use it for local references. */
9018 ref_p = lookup_external_ref (map, c);
9019 ref_p->stub = die;
9020 }
9021
9022 /* Scan the DIE references, and remember any that refer to DIEs from
9023 other CUs (i.e. those which are not marked). */
9024 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9025 if (AT_class (a) == dw_val_class_die_ref
9026 && (c = AT_ref (a))->die_mark == 0
9027 && is_type_die (c))
9028 {
9029 ref_p = lookup_external_ref (map, c);
9030 ref_p->n_refs++;
9031 }
9032
9033 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
9034 }
9035
9036 /* htab_traverse callback function for optimize_external_refs, below. SLOT
9037 points to an external_ref, DATA is the CU we're processing. If we don't
9038 already have a local stub, and we have multiple refs, build a stub. */
9039
9040 int
9041 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
9042 {
9043 struct external_ref *ref_p = *slot;
9044
9045 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9046 {
9047 /* We have multiple references to this type, so build a small stub.
9048 Both of these forms are a bit dodgy from the perspective of the
9049 DWARF standard, since technically they should have names. */
9050 dw_die_ref cu = data;
9051 dw_die_ref type = ref_p->type;
9052 dw_die_ref stub = NULL;
9053
9054 if (type->comdat_type_p)
9055 {
9056 /* If we refer to this type via sig8, use AT_signature. */
9057 stub = new_die (type->die_tag, cu, NULL_TREE);
9058 add_AT_die_ref (stub, DW_AT_signature, type);
9059 }
9060 else
9061 {
9062 /* Otherwise, use a typedef with no name. */
9063 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9064 add_AT_die_ref (stub, DW_AT_type, type);
9065 }
9066
9067 stub->die_mark++;
9068 ref_p->stub = stub;
9069 }
9070 return 1;
9071 }
9072
9073 /* DIE is a unit; look through all the DIE references to see if there are
9074 any external references to types, and if so, create local stubs for
9075 them which will be applied in build_abbrev_table. This is useful because
9076 references to local DIEs are smaller. */
9077
9078 static external_ref_hash_type *
9079 optimize_external_refs (dw_die_ref die)
9080 {
9081 external_ref_hash_type *map = new external_ref_hash_type (10);
9082 optimize_external_refs_1 (die, map);
9083 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9084 return map;
9085 }
9086
9087 /* The following 4 variables are temporaries that are computed only during the
9088 build_abbrev_table call and used and released during the following
9089 optimize_abbrev_table call. */
9090
9091 /* First abbrev_id that can be optimized based on usage. */
9092 static unsigned int abbrev_opt_start;
9093
9094 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9095 abbrev_id smaller than this, because they must be already sized
9096 during build_abbrev_table). */
9097 static unsigned int abbrev_opt_base_type_end;
9098
9099 /* Vector of usage counts during build_abbrev_table. Indexed by
9100 abbrev_id - abbrev_opt_start. */
9101 static vec<unsigned int> abbrev_usage_count;
9102
9103 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9104 static vec<dw_die_ref> sorted_abbrev_dies;
9105
9106 /* The format of each DIE (and its attribute value pairs) is encoded in an
9107 abbreviation table. This routine builds the abbreviation table and assigns
9108 a unique abbreviation id for each abbreviation entry. The children of each
9109 die are visited recursively. */
9110
9111 static void
9112 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9113 {
9114 unsigned int abbrev_id = 0;
9115 dw_die_ref c;
9116 dw_attr_node *a;
9117 unsigned ix;
9118 dw_die_ref abbrev;
9119
9120 /* Scan the DIE references, and replace any that refer to
9121 DIEs from other CUs (i.e. those which are not marked) with
9122 the local stubs we built in optimize_external_refs. */
9123 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9124 if (AT_class (a) == dw_val_class_die_ref
9125 && (c = AT_ref (a))->die_mark == 0)
9126 {
9127 struct external_ref *ref_p;
9128 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9129
9130 if (is_type_die (c)
9131 && (ref_p = lookup_external_ref (extern_map, c))
9132 && ref_p->stub && ref_p->stub != die)
9133 {
9134 gcc_assert (a->dw_attr != DW_AT_signature);
9135 change_AT_die_ref (a, ref_p->stub);
9136 }
9137 else
9138 /* We aren't changing this reference, so mark it external. */
9139 set_AT_ref_external (a, 1);
9140 }
9141
9142 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9143 {
9144 dw_attr_node *die_a, *abbrev_a;
9145 unsigned ix;
9146 bool ok = true;
9147
9148 if (abbrev_id == 0)
9149 continue;
9150 if (abbrev->die_tag != die->die_tag)
9151 continue;
9152 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9153 continue;
9154
9155 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9156 continue;
9157
9158 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9159 {
9160 abbrev_a = &(*abbrev->die_attr)[ix];
9161 if ((abbrev_a->dw_attr != die_a->dw_attr)
9162 || (value_format (abbrev_a) != value_format (die_a)))
9163 {
9164 ok = false;
9165 break;
9166 }
9167 }
9168 if (ok)
9169 break;
9170 }
9171
9172 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9173 {
9174 vec_safe_push (abbrev_die_table, die);
9175 if (abbrev_opt_start)
9176 abbrev_usage_count.safe_push (0);
9177 }
9178 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9179 {
9180 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9181 sorted_abbrev_dies.safe_push (die);
9182 }
9183
9184 die->die_abbrev = abbrev_id;
9185 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9186 }
9187
9188 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9189 by die_abbrev's usage count, from the most commonly used
9190 abbreviation to the least. */
9191
9192 static int
9193 die_abbrev_cmp (const void *p1, const void *p2)
9194 {
9195 dw_die_ref die1 = *(const dw_die_ref *) p1;
9196 dw_die_ref die2 = *(const dw_die_ref *) p2;
9197
9198 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9199 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9200
9201 if (die1->die_abbrev >= abbrev_opt_base_type_end
9202 && die2->die_abbrev >= abbrev_opt_base_type_end)
9203 {
9204 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9205 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9206 return -1;
9207 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9208 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9209 return 1;
9210 }
9211
9212 /* Stabilize the sort. */
9213 if (die1->die_abbrev < die2->die_abbrev)
9214 return -1;
9215 if (die1->die_abbrev > die2->die_abbrev)
9216 return 1;
9217
9218 return 0;
9219 }
9220
9221 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9222 of DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9223 into dw_val_class_const_implicit or
9224 dw_val_class_unsigned_const_implicit. */
9225
9226 static void
9227 optimize_implicit_const (unsigned int first_id, unsigned int end,
9228 vec<bool> &implicit_consts)
9229 {
9230 /* It never makes sense if there is just one DIE using the abbreviation. */
9231 if (end < first_id + 2)
9232 return;
9233
9234 dw_attr_node *a;
9235 unsigned ix, i;
9236 dw_die_ref die = sorted_abbrev_dies[first_id];
9237 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9238 if (implicit_consts[ix])
9239 {
9240 enum dw_val_class new_class = dw_val_class_none;
9241 switch (AT_class (a))
9242 {
9243 case dw_val_class_unsigned_const:
9244 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9245 continue;
9246
9247 /* The .debug_abbrev section will grow by
9248 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9249 in all the DIEs using that abbreviation. */
9250 if (constant_size (AT_unsigned (a)) * (end - first_id)
9251 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9252 continue;
9253
9254 new_class = dw_val_class_unsigned_const_implicit;
9255 break;
9256
9257 case dw_val_class_const:
9258 new_class = dw_val_class_const_implicit;
9259 break;
9260
9261 case dw_val_class_file:
9262 new_class = dw_val_class_file_implicit;
9263 break;
9264
9265 default:
9266 continue;
9267 }
9268 for (i = first_id; i < end; i++)
9269 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9270 = new_class;
9271 }
9272 }
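/* Illustrative example (hypothetical numbers): if an abbreviation is shared
   by 50 DIEs that all carry DW_AT_decl_file with the same small file number,
   turning that attribute into DW_FORM_implicit_const removes one byte from
   each of the 50 DIEs in .debug_info and adds a single extra sleb128 (here
   one byte) to the abbreviation in .debug_abbrev, a net saving of about 49
   bytes.  For unsigned constants, the constant_size/size_of_sleb128
   comparison above is what rejects the conversion when it would not pay
   off.  */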
9273
9274 /* Attempt to optimize the abbreviation table, starting from the
9275 abbrev_opt_start abbreviation onwards. */
9276
9277 static void
9278 optimize_abbrev_table (void)
9279 {
9280 if (abbrev_opt_start
9281 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9282 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9283 {
9284 auto_vec<bool, 32> implicit_consts;
9285 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9286
9287 unsigned int abbrev_id = abbrev_opt_start - 1;
9288 unsigned int first_id = ~0U;
9289 unsigned int last_abbrev_id = 0;
9290 unsigned int i;
9291 dw_die_ref die;
9292 if (abbrev_opt_base_type_end > abbrev_opt_start)
9293 abbrev_id = abbrev_opt_base_type_end - 1;
9294 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9295 the most commonly used abbreviations come first. */
9296 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9297 {
9298 dw_attr_node *a;
9299 unsigned ix;
9300
9301 /* If calc_base_type_die_sizes has been called, the CU and
9302 base types after it can't be optimized, because we've already
9303 calculated their DIE offsets. We've sorted them first. */
9304 if (die->die_abbrev < abbrev_opt_base_type_end)
9305 continue;
9306 if (die->die_abbrev != last_abbrev_id)
9307 {
9308 last_abbrev_id = die->die_abbrev;
9309 if (dwarf_version >= 5 && first_id != ~0U)
9310 optimize_implicit_const (first_id, i, implicit_consts);
9311 abbrev_id++;
9312 (*abbrev_die_table)[abbrev_id] = die;
9313 if (dwarf_version >= 5)
9314 {
9315 first_id = i;
9316 implicit_consts.truncate (0);
9317
9318 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9319 switch (AT_class (a))
9320 {
9321 case dw_val_class_const:
9322 case dw_val_class_unsigned_const:
9323 case dw_val_class_file:
9324 implicit_consts.safe_push (true);
9325 break;
9326 default:
9327 implicit_consts.safe_push (false);
9328 break;
9329 }
9330 }
9331 }
9332 else if (dwarf_version >= 5)
9333 {
9334 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9335 if (!implicit_consts[ix])
9336 continue;
9337 else
9338 {
9339 dw_attr_node *other_a
9340 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9341 if (!dw_val_equal_p (&a->dw_attr_val,
9342 &other_a->dw_attr_val))
9343 implicit_consts[ix] = false;
9344 }
9345 }
9346 die->die_abbrev = abbrev_id;
9347 }
9348 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9349 if (dwarf_version >= 5 && first_id != ~0U)
9350 optimize_implicit_const (first_id, i, implicit_consts);
9351 }
9352
9353 abbrev_opt_start = 0;
9354 abbrev_opt_base_type_end = 0;
9355 abbrev_usage_count.release ();
9356 sorted_abbrev_dies.release ();
9357 }
9358 \f
9359 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9360
9361 static int
9362 constant_size (unsigned HOST_WIDE_INT value)
9363 {
9364 int log;
9365
9366 if (value == 0)
9367 log = 0;
9368 else
9369 log = floor_log2 (value);
9370
9371 log = log / 8;
9372 log = 1 << (floor_log2 (log) + 1);
9373
9374 return log;
9375 }
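/* Worked examples (derived from the code above, shown only for
   illustration): constant_size (0) and constant_size (0xff) are 1,
   constant_size (0x100) and constant_size (0xffff) are 2, values up to
   0xffffffff yield 4, and anything larger yields 8.  Note that
   floor_log2 (0) is -1, which is why a value that fits in one byte still
   comes out as size 1 rather than 0.  */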
9376
9377 /* Return the size of a DIE as it is represented in the
9378 .debug_info section. */
9379
9380 static unsigned long
9381 size_of_die (dw_die_ref die)
9382 {
9383 unsigned long size = 0;
9384 dw_attr_node *a;
9385 unsigned ix;
9386 enum dwarf_form form;
9387
9388 size += size_of_uleb128 (die->die_abbrev);
9389 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9390 {
9391 switch (AT_class (a))
9392 {
9393 case dw_val_class_addr:
9394 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9395 {
9396 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9397 size += size_of_uleb128 (AT_index (a));
9398 }
9399 else
9400 size += DWARF2_ADDR_SIZE;
9401 break;
9402 case dw_val_class_offset:
9403 size += DWARF_OFFSET_SIZE;
9404 break;
9405 case dw_val_class_loc:
9406 {
9407 unsigned long lsize = size_of_locs (AT_loc (a));
9408
9409 /* Block length. */
9410 if (dwarf_version >= 4)
9411 size += size_of_uleb128 (lsize);
9412 else
9413 size += constant_size (lsize);
9414 size += lsize;
9415 }
9416 break;
9417 case dw_val_class_loc_list:
9418 if (dwarf_split_debug_info && dwarf_version >= 5)
9419 {
9420 gcc_assert (AT_loc_list (a)->num_assigned);
9421 size += size_of_uleb128 (AT_loc_list (a)->hash);
9422 }
9423 else
9424 size += DWARF_OFFSET_SIZE;
9425 break;
9426 case dw_val_class_view_list:
9427 size += DWARF_OFFSET_SIZE;
9428 break;
9429 case dw_val_class_range_list:
9430 if (value_format (a) == DW_FORM_rnglistx)
9431 {
9432 gcc_assert (rnglist_idx);
9433 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9434 size += size_of_uleb128 (r->idx);
9435 }
9436 else
9437 size += DWARF_OFFSET_SIZE;
9438 break;
9439 case dw_val_class_const:
9440 size += size_of_sleb128 (AT_int (a));
9441 break;
9442 case dw_val_class_unsigned_const:
9443 {
9444 int csize = constant_size (AT_unsigned (a));
9445 if (dwarf_version == 3
9446 && a->dw_attr == DW_AT_data_member_location
9447 && csize >= 4)
9448 size += size_of_uleb128 (AT_unsigned (a));
9449 else
9450 size += csize;
9451 }
9452 break;
9453 case dw_val_class_symview:
9454 if (symview_upper_bound <= 0xff)
9455 size += 1;
9456 else if (symview_upper_bound <= 0xffff)
9457 size += 2;
9458 else if (symview_upper_bound <= 0xffffffff)
9459 size += 4;
9460 else
9461 size += 8;
9462 break;
9463 case dw_val_class_const_implicit:
9464 case dw_val_class_unsigned_const_implicit:
9465 case dw_val_class_file_implicit:
9466 /* These occupy no size in the DIE, just an extra sleb128 in
9467 .debug_abbrev. */
9468 break;
9469 case dw_val_class_const_double:
9470 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9471 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9472 size++; /* block */
9473 break;
9474 case dw_val_class_wide_int:
9475 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9476 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9477 if (get_full_len (*a->dw_attr_val.v.val_wide)
9478 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9479 size++; /* block */
9480 break;
9481 case dw_val_class_vec:
9482 size += constant_size (a->dw_attr_val.v.val_vec.length
9483 * a->dw_attr_val.v.val_vec.elt_size)
9484 + a->dw_attr_val.v.val_vec.length
9485 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9486 break;
9487 case dw_val_class_flag:
9488 if (dwarf_version >= 4)
9489 /* Currently all add_AT_flag calls pass in 1 as last argument,
9490 so DW_FORM_flag_present can be used. If that ever changes,
9491 we'll need to use DW_FORM_flag and have some optimization
9492 in build_abbrev_table that will change those to
9493 DW_FORM_flag_present if it is set to 1 in all DIEs using
9494 the same abbrev entry. */
9495 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9496 else
9497 size += 1;
9498 break;
9499 case dw_val_class_die_ref:
9500 if (AT_ref_external (a))
9501 {
9502 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9503 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9504 is sized by target address length, whereas in DWARF3
9505 it's always sized as an offset. */
9506 if (AT_ref (a)->comdat_type_p)
9507 size += DWARF_TYPE_SIGNATURE_SIZE;
9508 else if (dwarf_version == 2)
9509 size += DWARF2_ADDR_SIZE;
9510 else
9511 size += DWARF_OFFSET_SIZE;
9512 }
9513 else
9514 size += DWARF_OFFSET_SIZE;
9515 break;
9516 case dw_val_class_fde_ref:
9517 size += DWARF_OFFSET_SIZE;
9518 break;
9519 case dw_val_class_lbl_id:
9520 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9521 {
9522 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9523 size += size_of_uleb128 (AT_index (a));
9524 }
9525 else
9526 size += DWARF2_ADDR_SIZE;
9527 break;
9528 case dw_val_class_lineptr:
9529 case dw_val_class_macptr:
9530 case dw_val_class_loclistsptr:
9531 size += DWARF_OFFSET_SIZE;
9532 break;
9533 case dw_val_class_str:
9534 form = AT_string_form (a);
9535 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9536 size += DWARF_OFFSET_SIZE;
9537 else if (form == dwarf_FORM (DW_FORM_strx))
9538 size += size_of_uleb128 (AT_index (a));
9539 else
9540 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9541 break;
9542 case dw_val_class_file:
9543 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9544 break;
9545 case dw_val_class_data8:
9546 size += 8;
9547 break;
9548 case dw_val_class_vms_delta:
9549 size += DWARF_OFFSET_SIZE;
9550 break;
9551 case dw_val_class_high_pc:
9552 size += DWARF2_ADDR_SIZE;
9553 break;
9554 case dw_val_class_discr_value:
9555 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9556 break;
9557 case dw_val_class_discr_list:
9558 {
9559 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9560
9561 /* This is a block, so we have the block length and then its
9562 data. */
9563 size += constant_size (block_size) + block_size;
9564 }
9565 break;
9566 default:
9567 gcc_unreachable ();
9568 }
9569 }
9570
9571 return size;
9572 }
9573
9574 /* Size the debugging information associated with a given DIE. Visits the
9575 DIE's children recursively. Updates the global variable next_die_offset
9576 each time through. Uses the current value of next_die_offset to update the
9577 die_offset field in each DIE. */
9578
9579 static void
9580 calc_die_sizes (dw_die_ref die)
9581 {
9582 dw_die_ref c;
9583
9584 gcc_assert (die->die_offset == 0
9585 || (unsigned long int) die->die_offset == next_die_offset);
9586 die->die_offset = next_die_offset;
9587 next_die_offset += size_of_die (die);
9588
9589 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9590
9591 if (die->die_child != NULL)
9592 /* Count the null byte used to terminate sibling lists. */
9593 next_die_offset += 1;
9594 }
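/* A minimal sketch of the bookkeeping above (illustrative only): if
   next_die_offset is H when a parent DIE of size S1 with a single child of
   size S2 is visited, the parent gets die_offset H, the child gets H + S1,
   and next_die_offset ends up as H + S1 + S2 + 1, the final +1 being the
   null byte used to terminate sibling lists.  */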
9595
9596 /* Size just the base type children at the start of the CU.
9597 This is needed because build_abbrev_table needs to size locs,
9598 and sizing of type-based stack ops needs to know die_offset
9599 values for the base types. */
9600
9601 static void
9602 calc_base_type_die_sizes (void)
9603 {
9604 unsigned long die_offset = (dwarf_split_debug_info
9605 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9606 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9607 unsigned int i;
9608 dw_die_ref base_type;
9609 #if ENABLE_ASSERT_CHECKING
9610 dw_die_ref prev = comp_unit_die ()->die_child;
9611 #endif
9612
9613 die_offset += size_of_die (comp_unit_die ());
9614 for (i = 0; base_types.iterate (i, &base_type); i++)
9615 {
9616 #if ENABLE_ASSERT_CHECKING
9617 gcc_assert (base_type->die_offset == 0
9618 && prev->die_sib == base_type
9619 && base_type->die_child == NULL
9620 && base_type->die_abbrev);
9621 prev = base_type;
9622 #endif
9623 if (abbrev_opt_start
9624 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9625 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9626 base_type->die_offset = die_offset;
9627 die_offset += size_of_die (base_type);
9628 }
9629 }
9630
9631 /* Set the marks for a die and its children. We do this so
9632 that we know whether or not a reference needs to use FORM_ref_addr; only
9633 DIEs in the same CU will be marked. We used to clear out the offset
9634 and use that as the flag, but ran into ordering problems. */
9635
9636 static void
9637 mark_dies (dw_die_ref die)
9638 {
9639 dw_die_ref c;
9640
9641 gcc_assert (!die->die_mark);
9642
9643 die->die_mark = 1;
9644 FOR_EACH_CHILD (die, c, mark_dies (c));
9645 }
9646
9647 /* Clear the marks for a die and its children. */
9648
9649 static void
9650 unmark_dies (dw_die_ref die)
9651 {
9652 dw_die_ref c;
9653
9654 if (! use_debug_types)
9655 gcc_assert (die->die_mark);
9656
9657 die->die_mark = 0;
9658 FOR_EACH_CHILD (die, c, unmark_dies (c));
9659 }
9660
9661 /* Clear the marks for a die, its children and referred dies. */
9662
9663 static void
9664 unmark_all_dies (dw_die_ref die)
9665 {
9666 dw_die_ref c;
9667 dw_attr_node *a;
9668 unsigned ix;
9669
9670 if (!die->die_mark)
9671 return;
9672 die->die_mark = 0;
9673
9674 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9675
9676 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9677 if (AT_class (a) == dw_val_class_die_ref)
9678 unmark_all_dies (AT_ref (a));
9679 }
9680
9681 /* Calculate whether the entry should appear in the final output file. It
9682 may be from a pruned type. */
9683
9684 static bool
9685 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9686 {
9687 /* By limiting gnu pubnames to definitions only, gold can generate a
9688 gdb index without entries for declarations, which don't include
9689 enough information to be useful. */
9690 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9691 return false;
9692
9693 if (table == pubname_table)
9694 {
9695 /* Enumerator names are part of the pubname table, but the
9696 parent DW_TAG_enumeration_type die may have been pruned.
9697 Don't output them if that is the case. */
9698 if (p->die->die_tag == DW_TAG_enumerator &&
9699 (p->die->die_parent == NULL
9700 || !p->die->die_parent->die_perennial_p))
9701 return false;
9702
9703 /* Everything else in the pubname table is included. */
9704 return true;
9705 }
9706
9707 /* The pubtypes table shouldn't include types that have been
9708 pruned. */
9709 return (p->die->die_offset != 0
9710 || !flag_eliminate_unused_debug_types);
9711 }
9712
9713 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9714 generated for the compilation unit. */
9715
9716 static unsigned long
9717 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9718 {
9719 unsigned long size;
9720 unsigned i;
9721 pubname_entry *p;
9722 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9723
9724 size = DWARF_PUBNAMES_HEADER_SIZE;
9725 FOR_EACH_VEC_ELT (*names, i, p)
9726 if (include_pubname_in_output (names, p))
9727 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9728
9729 size += DWARF_OFFSET_SIZE;
9730 return size;
9731 }
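/* Illustrative arithmetic (not from the sources): with 32-bit DWARF
   (DWARF_OFFSET_SIZE of 4), a pubname entry for "foo" contributes
   3 + 4 + 1 = 8 bytes - the name with its terminating NUL plus the DIE
   offset - or 9 bytes when GNU-style pubnames
   (debug_generate_pub_sections == 2) add the one-byte flags field, and the
   table is closed by one more DWARF_OFFSET_SIZE-sized zero word.  */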
9732
9733 /* Return the size of the information in the .debug_aranges section. */
9734
9735 static unsigned long
9736 size_of_aranges (void)
9737 {
9738 unsigned long size;
9739
9740 size = DWARF_ARANGES_HEADER_SIZE;
9741
9742 /* Count the address/length pair for this compilation unit. */
9743 if (text_section_used)
9744 size += 2 * DWARF2_ADDR_SIZE;
9745 if (cold_text_section_used)
9746 size += 2 * DWARF2_ADDR_SIZE;
9747 if (have_multiple_function_sections)
9748 {
9749 unsigned fde_idx;
9750 dw_fde_ref fde;
9751
9752 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9753 {
9754 if (DECL_IGNORED_P (fde->decl))
9755 continue;
9756 if (!fde->in_std_section)
9757 size += 2 * DWARF2_ADDR_SIZE;
9758 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9759 size += 2 * DWARF2_ADDR_SIZE;
9760 }
9761 }
9762
9763 /* Count the two zero words used to terminate the address range table. */
9764 size += 2 * DWARF2_ADDR_SIZE;
9765 return size;
9766 }
9767 \f
9768 /* Select the encoding of an attribute value. */
9769
9770 static enum dwarf_form
9771 value_format (dw_attr_node *a)
9772 {
9773 switch (AT_class (a))
9774 {
9775 case dw_val_class_addr:
9776 /* Only very few attributes allow DW_FORM_addr. */
9777 switch (a->dw_attr)
9778 {
9779 case DW_AT_low_pc:
9780 case DW_AT_high_pc:
9781 case DW_AT_entry_pc:
9782 case DW_AT_trampoline:
9783 return (AT_index (a) == NOT_INDEXED
9784 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9785 default:
9786 break;
9787 }
9788 switch (DWARF2_ADDR_SIZE)
9789 {
9790 case 1:
9791 return DW_FORM_data1;
9792 case 2:
9793 return DW_FORM_data2;
9794 case 4:
9795 return DW_FORM_data4;
9796 case 8:
9797 return DW_FORM_data8;
9798 default:
9799 gcc_unreachable ();
9800 }
9801 case dw_val_class_loc_list:
9802 if (dwarf_split_debug_info
9803 && dwarf_version >= 5
9804 && AT_loc_list (a)->num_assigned)
9805 return DW_FORM_loclistx;
9806 /* FALLTHRU */
9807 case dw_val_class_view_list:
9808 case dw_val_class_range_list:
9809 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo,
9810 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9811 care about the sizes of .debug* sections in shared libraries and
9812 executables and don't take into account relocations that affect just
9813 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9814 table in the .debug_rnglists section. */
9815 if (dwarf_split_debug_info
9816 && dwarf_version >= 5
9817 && AT_class (a) == dw_val_class_range_list
9818 && rnglist_idx
9819 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9820 return DW_FORM_rnglistx;
9821 if (dwarf_version >= 4)
9822 return DW_FORM_sec_offset;
9823 /* FALLTHRU */
9824 case dw_val_class_vms_delta:
9825 case dw_val_class_offset:
9826 switch (DWARF_OFFSET_SIZE)
9827 {
9828 case 4:
9829 return DW_FORM_data4;
9830 case 8:
9831 return DW_FORM_data8;
9832 default:
9833 gcc_unreachable ();
9834 }
9835 case dw_val_class_loc:
9836 if (dwarf_version >= 4)
9837 return DW_FORM_exprloc;
9838 switch (constant_size (size_of_locs (AT_loc (a))))
9839 {
9840 case 1:
9841 return DW_FORM_block1;
9842 case 2:
9843 return DW_FORM_block2;
9844 case 4:
9845 return DW_FORM_block4;
9846 default:
9847 gcc_unreachable ();
9848 }
9849 case dw_val_class_const:
9850 return DW_FORM_sdata;
9851 case dw_val_class_unsigned_const:
9852 switch (constant_size (AT_unsigned (a)))
9853 {
9854 case 1:
9855 return DW_FORM_data1;
9856 case 2:
9857 return DW_FORM_data2;
9858 case 4:
9859 /* In DWARF3 DW_AT_data_member_location with
9860 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9861 constant, so we need to use DW_FORM_udata if we need
9862 a large constant. */
9863 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9864 return DW_FORM_udata;
9865 return DW_FORM_data4;
9866 case 8:
9867 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9868 return DW_FORM_udata;
9869 return DW_FORM_data8;
9870 default:
9871 gcc_unreachable ();
9872 }
9873 case dw_val_class_const_implicit:
9874 case dw_val_class_unsigned_const_implicit:
9875 case dw_val_class_file_implicit:
9876 return DW_FORM_implicit_const;
9877 case dw_val_class_const_double:
9878 switch (HOST_BITS_PER_WIDE_INT)
9879 {
9880 case 8:
9881 return DW_FORM_data2;
9882 case 16:
9883 return DW_FORM_data4;
9884 case 32:
9885 return DW_FORM_data8;
9886 case 64:
9887 if (dwarf_version >= 5)
9888 return DW_FORM_data16;
9889 /* FALLTHRU */
9890 default:
9891 return DW_FORM_block1;
9892 }
9893 case dw_val_class_wide_int:
9894 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9895 {
9896 case 8:
9897 return DW_FORM_data1;
9898 case 16:
9899 return DW_FORM_data2;
9900 case 32:
9901 return DW_FORM_data4;
9902 case 64:
9903 return DW_FORM_data8;
9904 case 128:
9905 if (dwarf_version >= 5)
9906 return DW_FORM_data16;
9907 /* FALLTHRU */
9908 default:
9909 return DW_FORM_block1;
9910 }
9911 case dw_val_class_symview:
9912 /* ??? We might use uleb128, but then we'd have to compute
9913 .debug_info offsets in the assembler. */
9914 if (symview_upper_bound <= 0xff)
9915 return DW_FORM_data1;
9916 else if (symview_upper_bound <= 0xffff)
9917 return DW_FORM_data2;
9918 else if (symview_upper_bound <= 0xffffffff)
9919 return DW_FORM_data4;
9920 else
9921 return DW_FORM_data8;
9922 case dw_val_class_vec:
9923 switch (constant_size (a->dw_attr_val.v.val_vec.length
9924 * a->dw_attr_val.v.val_vec.elt_size))
9925 {
9926 case 1:
9927 return DW_FORM_block1;
9928 case 2:
9929 return DW_FORM_block2;
9930 case 4:
9931 return DW_FORM_block4;
9932 default:
9933 gcc_unreachable ();
9934 }
9935 case dw_val_class_flag:
9936 if (dwarf_version >= 4)
9937 {
9938 /* Currently all add_AT_flag calls pass in 1 as last argument,
9939 so DW_FORM_flag_present can be used. If that ever changes,
9940 we'll need to use DW_FORM_flag and have some optimization
9941 in build_abbrev_table that will change those to
9942 DW_FORM_flag_present if it is set to 1 in all DIEs using
9943 the same abbrev entry. */
9944 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9945 return DW_FORM_flag_present;
9946 }
9947 return DW_FORM_flag;
9948 case dw_val_class_die_ref:
9949 if (AT_ref_external (a))
9950 {
9951 if (AT_ref (a)->comdat_type_p)
9952 return DW_FORM_ref_sig8;
9953 else
9954 return DW_FORM_ref_addr;
9955 }
9956 else
9957 return DW_FORM_ref;
9958 case dw_val_class_fde_ref:
9959 return DW_FORM_data;
9960 case dw_val_class_lbl_id:
9961 return (AT_index (a) == NOT_INDEXED
9962 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9963 case dw_val_class_lineptr:
9964 case dw_val_class_macptr:
9965 case dw_val_class_loclistsptr:
9966 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9967 case dw_val_class_str:
9968 return AT_string_form (a);
9969 case dw_val_class_file:
9970 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9971 {
9972 case 1:
9973 return DW_FORM_data1;
9974 case 2:
9975 return DW_FORM_data2;
9976 case 4:
9977 return DW_FORM_data4;
9978 default:
9979 gcc_unreachable ();
9980 }
9981
9982 case dw_val_class_data8:
9983 return DW_FORM_data8;
9984
9985 case dw_val_class_high_pc:
9986 switch (DWARF2_ADDR_SIZE)
9987 {
9988 case 1:
9989 return DW_FORM_data1;
9990 case 2:
9991 return DW_FORM_data2;
9992 case 4:
9993 return DW_FORM_data4;
9994 case 8:
9995 return DW_FORM_data8;
9996 default:
9997 gcc_unreachable ();
9998 }
9999
10000 case dw_val_class_discr_value:
10001 return (a->dw_attr_val.v.val_discr_value.pos
10002 ? DW_FORM_udata
10003 : DW_FORM_sdata);
10004 case dw_val_class_discr_list:
10005 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
10006 {
10007 case 1:
10008 return DW_FORM_block1;
10009 case 2:
10010 return DW_FORM_block2;
10011 case 4:
10012 return DW_FORM_block4;
10013 default:
10014 gcc_unreachable ();
10015 }
10016
10017 default:
10018 gcc_unreachable ();
10019 }
10020 }
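/* Illustrative example of the DWARF 3 special case above (hypothetical
   attribute value): a DW_AT_data_member_location of 70000 needs a
   constant_size of 4, so with -gdwarf-3 it is emitted as DW_FORM_udata to
   avoid being mistaken for a loclistptr, while with -gdwarf-4 and later a
   plain DW_FORM_data4 is used.  */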
10021
10022 /* Output the encoding of an attribute value. */
10023
10024 static void
10025 output_value_format (dw_attr_node *a)
10026 {
10027 enum dwarf_form form = value_format (a);
10028
10029 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
10030 }
10031
10032 /* Given a die and id, produce the appropriate abbreviations. */
10033
10034 static void
10035 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
10036 {
10037 unsigned ix;
10038 dw_attr_node *a_attr;
10039
10040 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
10041 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
10042 dwarf_tag_name (abbrev->die_tag));
10043
10044 if (abbrev->die_child != NULL)
10045 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10046 else
10047 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10048
10049 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10050 {
10051 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10052 dwarf_attr_name (a_attr->dw_attr));
10053 output_value_format (a_attr);
10054 if (value_format (a_attr) == DW_FORM_implicit_const)
10055 {
10056 if (AT_class (a_attr) == dw_val_class_file_implicit)
10057 {
10058 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10059 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10060 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10061 }
10062 else
10063 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10064 }
10065 }
10066
10067 dw2_asm_output_data (1, 0, NULL);
10068 dw2_asm_output_data (1, 0, NULL);
10069 }
10070
10071
10072 /* Output the .debug_abbrev section which defines the DIE abbreviation
10073 table. */
10074
10075 static void
10076 output_abbrev_section (void)
10077 {
10078 unsigned int abbrev_id;
10079 dw_die_ref abbrev;
10080
10081 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10082 if (abbrev_id != 0)
10083 output_die_abbrevs (abbrev_id, abbrev);
10084
10085 /* Terminate the table. */
10086 dw2_asm_output_data (1, 0, NULL);
10087 }
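/* For illustration only (the exact directives depend on the target assembler
   and HAVE_AS_LEB128, and the abbrev code is hypothetical): one abbreviation
   emitted by output_die_abbrevs might look roughly like

	.uleb128 0x1	# (abbrev code)
	.uleb128 0x11	# (TAG: DW_TAG_compile_unit)
	.byte	0x1	# DW_children_yes
	.uleb128 0x25	# (DW_AT_producer)
	.uleb128 0xe	# (DW_FORM_strp)
	.byte	0	# end of the attribute list
	.byte	0

   with one final zero byte after the last abbreviation terminating the
   whole .debug_abbrev table.  */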
10088
10089 /* Return a new location list, given the begin and end range, and the
10090 expression. */
10091
10092 static inline dw_loc_list_ref
10093 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10094 const char *end, var_loc_view vend,
10095 const char *section)
10096 {
10097 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10098
10099 retlist->begin = begin;
10100 retlist->begin_entry = NULL;
10101 retlist->end = end;
10102 retlist->expr = expr;
10103 retlist->section = section;
10104 retlist->vbegin = vbegin;
10105 retlist->vend = vend;
10106
10107 return retlist;
10108 }
10109
10110 /* Return true iff there's any nonzero view number in the loc list.
10111
10112 ??? When views are not enabled, we'll often extend a single range
10113 to the entire function, so that we emit a single location
10114 expression rather than a location list. With views, even with a
10115 single range, we'll output a list if start or end have a nonzero
10116 view. If we change this, we may want to stop splitting a single
10117 range in dw_loc_list just because of a nonzero view, even if it
10118 straddles across hot/cold partitions. */
10119
10120 static bool
10121 loc_list_has_views (dw_loc_list_ref list)
10122 {
10123 if (!debug_variable_location_views)
10124 return false;
10125
10126 for (dw_loc_list_ref loc = list;
10127 loc != NULL; loc = loc->dw_loc_next)
10128 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10129 return true;
10130
10131 return false;
10132 }
10133
10134 /* Generate a new internal symbol for this location list node, if it
10135 hasn't got one yet. */
10136
10137 static inline void
10138 gen_llsym (dw_loc_list_ref list)
10139 {
10140 gcc_assert (!list->ll_symbol);
10141 list->ll_symbol = gen_internal_sym ("LLST");
10142
10143 if (!loc_list_has_views (list))
10144 return;
10145
10146 if (dwarf2out_locviews_in_attribute ())
10147 {
10148 /* Use the same label_num for the view list. */
10149 label_num--;
10150 list->vl_symbol = gen_internal_sym ("LVUS");
10151 }
10152 else
10153 list->vl_symbol = list->ll_symbol;
10154 }
10155
10156 /* Generate a symbol for the list, but only if we really want to emit
10157 it as a list. */
10158
10159 static inline void
10160 maybe_gen_llsym (dw_loc_list_ref list)
10161 {
10162 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10163 return;
10164
10165 gen_llsym (list);
10166 }
10167
10168 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10169 NULL, don't consider size of the location expression. If we're not
10170 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10171 representation in *SIZEP. */
10172
10173 static bool
10174 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10175 {
10176 /* Don't output an entry that starts and ends at the same address. */
10177 if (strcmp (curr->begin, curr->end) == 0
10178 && curr->vbegin == curr->vend && !curr->force)
10179 return true;
10180
10181 if (!sizep)
10182 return false;
10183
10184 unsigned long size = size_of_locs (curr->expr);
10185
10186 /* If the expression is too large, drop it on the floor. We could
10187 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10188 in the expression, but >= 64KB expressions for a single value
10189 in a single range are unlikely to be very useful. */
10190 if (dwarf_version < 5 && size > 0xffff)
10191 return true;
10192
10193 *sizep = size;
10194
10195 return false;
10196 }
10197
10198 /* Output a view pair loclist entry for CURR, if it requires one. */
10199
10200 static void
10201 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10202 {
10203 if (!dwarf2out_locviews_in_loclist ())
10204 return;
10205
10206 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10207 return;
10208
10209 #ifdef DW_LLE_view_pair
10210 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10211
10212 if (dwarf2out_as_locview_support)
10213 {
10214 if (ZERO_VIEW_P (curr->vbegin))
10215 dw2_asm_output_data_uleb128 (0, "Location view begin");
10216 else
10217 {
10218 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10219 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10220 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10221 }
10222
10223 if (ZERO_VIEW_P (curr->vend))
10224 dw2_asm_output_data_uleb128 (0, "Location view end");
10225 else
10226 {
10227 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10228 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10229 dw2_asm_output_symname_uleb128 (label, "Location view end");
10230 }
10231 }
10232 else
10233 {
10234 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10235 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10236 }
10237 #endif /* DW_LLE_view_pair */
10238
10239 return;
10240 }
10241
10242 /* Output the location list given to us. */
10243
10244 static void
10245 output_loc_list (dw_loc_list_ref list_head)
10246 {
10247 int vcount = 0, lcount = 0;
10248
10249 if (list_head->emitted)
10250 return;
10251 list_head->emitted = true;
10252
10253 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10254 {
10255 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10256
10257 for (dw_loc_list_ref curr = list_head; curr != NULL;
10258 curr = curr->dw_loc_next)
10259 {
10260 unsigned long size;
10261
10262 if (skip_loc_list_entry (curr, &size))
10263 continue;
10264
10265 vcount++;
10266
10267 /* ?? dwarf_split_debug_info? */
10268 if (dwarf2out_as_locview_support)
10269 {
10270 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10271
10272 if (!ZERO_VIEW_P (curr->vbegin))
10273 {
10274 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10275 dw2_asm_output_symname_uleb128 (label,
10276 "View list begin (%s)",
10277 list_head->vl_symbol);
10278 }
10279 else
10280 dw2_asm_output_data_uleb128 (0,
10281 "View list begin (%s)",
10282 list_head->vl_symbol);
10283
10284 if (!ZERO_VIEW_P (curr->vend))
10285 {
10286 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10287 dw2_asm_output_symname_uleb128 (label,
10288 "View list end (%s)",
10289 list_head->vl_symbol);
10290 }
10291 else
10292 dw2_asm_output_data_uleb128 (0,
10293 "View list end (%s)",
10294 list_head->vl_symbol);
10295 }
10296 else
10297 {
10298 dw2_asm_output_data_uleb128 (curr->vbegin,
10299 "View list begin (%s)",
10300 list_head->vl_symbol);
10301 dw2_asm_output_data_uleb128 (curr->vend,
10302 "View list end (%s)",
10303 list_head->vl_symbol);
10304 }
10305 }
10306 }
10307
10308 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10309
10310 const char *last_section = NULL;
10311 const char *base_label = NULL;
10312
10313 /* Walk the location list, and output each range + expression. */
10314 for (dw_loc_list_ref curr = list_head; curr != NULL;
10315 curr = curr->dw_loc_next)
10316 {
10317 unsigned long size;
10318
10319 /* Skip this entry? If we skip it here, we must skip it in the
10320 view list above as well. */
10321 if (skip_loc_list_entry (curr, &size))
10322 continue;
10323
10324 lcount++;
10325
10326 if (dwarf_version >= 5)
10327 {
10328 if (dwarf_split_debug_info)
10329 {
10330 dwarf2out_maybe_output_loclist_view_pair (curr);
10331 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
10332 uleb128 index into .debug_addr and a uleb128 length. */
10333 dw2_asm_output_data (1, DW_LLE_startx_length,
10334 "DW_LLE_startx_length (%s)",
10335 list_head->ll_symbol);
10336 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10337 "Location list range start index "
10338 "(%s)", curr->begin);
10339 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10340 For that case we probably need to emit DW_LLE_startx_endx,
10341 but we'd need 2 .debug_addr entries rather than just one. */
10342 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10343 "Location list length (%s)",
10344 list_head->ll_symbol);
10345 }
10346 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10347 {
10348 dwarf2out_maybe_output_loclist_view_pair (curr);
10349 /* If all code is in .text section, the base address is
10350 already provided by the CU attributes. Use
10351 DW_LLE_offset_pair where both addresses are uleb128 encoded
10352 offsets against that base. */
10353 dw2_asm_output_data (1, DW_LLE_offset_pair,
10354 "DW_LLE_offset_pair (%s)",
10355 list_head->ll_symbol);
10356 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10357 "Location list begin address (%s)",
10358 list_head->ll_symbol);
10359 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10360 "Location list end address (%s)",
10361 list_head->ll_symbol);
10362 }
10363 else if (HAVE_AS_LEB128)
10364 {
10365 /* Otherwise, find out how many consecutive entries could share
10366 the same base entry. If just one, emit DW_LLE_start_length,
10367 otherwise emit DW_LLE_base_address for the base address
10368 followed by a series of DW_LLE_offset_pair. */
10369 if (last_section == NULL || curr->section != last_section)
10370 {
10371 dw_loc_list_ref curr2;
10372 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10373 curr2 = curr2->dw_loc_next)
10374 {
10375 if (strcmp (curr2->begin, curr2->end) == 0
10376 && !curr2->force)
10377 continue;
10378 break;
10379 }
10380 if (curr2 == NULL || curr->section != curr2->section)
10381 last_section = NULL;
10382 else
10383 {
10384 last_section = curr->section;
10385 base_label = curr->begin;
10386 dw2_asm_output_data (1, DW_LLE_base_address,
10387 "DW_LLE_base_address (%s)",
10388 list_head->ll_symbol);
10389 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10390 "Base address (%s)",
10391 list_head->ll_symbol);
10392 }
10393 }
10394 /* Only one entry with the same base address. Use
10395 DW_LLE_start_length with absolute address and uleb128
10396 length. */
10397 if (last_section == NULL)
10398 {
10399 dwarf2out_maybe_output_loclist_view_pair (curr);
10400 dw2_asm_output_data (1, DW_LLE_start_length,
10401 "DW_LLE_start_length (%s)",
10402 list_head->ll_symbol);
10403 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10404 "Location list begin address (%s)",
10405 list_head->ll_symbol);
10406 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10407 "Location list length "
10408 "(%s)", list_head->ll_symbol);
10409 }
10410 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10411 DW_LLE_base_address. */
10412 else
10413 {
10414 dwarf2out_maybe_output_loclist_view_pair (curr);
10415 dw2_asm_output_data (1, DW_LLE_offset_pair,
10416 "DW_LLE_offset_pair (%s)",
10417 list_head->ll_symbol);
10418 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10419 "Location list begin address "
10420 "(%s)", list_head->ll_symbol);
10421 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10422 "Location list end address "
10423 "(%s)", list_head->ll_symbol);
10424 }
10425 }
10426 /* The assembler does not support the .uleb128 directive. Emit
10427 DW_LLE_start_end with a pair of absolute addresses. */
10428 else
10429 {
10430 dwarf2out_maybe_output_loclist_view_pair (curr);
10431 dw2_asm_output_data (1, DW_LLE_start_end,
10432 "DW_LLE_start_end (%s)",
10433 list_head->ll_symbol);
10434 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10435 "Location list begin address (%s)",
10436 list_head->ll_symbol);
10437 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10438 "Location list end address (%s)",
10439 list_head->ll_symbol);
10440 }
10441 }
10442 else if (dwarf_split_debug_info)
10443 {
10444 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10445 and 4 byte length. */
10446 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10447 "Location list start/length entry (%s)",
10448 list_head->ll_symbol);
10449 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10450 "Location list range start index (%s)",
10451 curr->begin);
10452 /* The length field is 4 bytes. If we ever need to support
10453 an 8-byte length, we can add a new DW_LLE code or fall back
10454 to DW_LLE_GNU_start_end_entry. */
10455 dw2_asm_output_delta (4, curr->end, curr->begin,
10456 "Location list range length (%s)",
10457 list_head->ll_symbol);
10458 }
10459 else if (!have_multiple_function_sections)
10460 {
10461 /* Pair of relative addresses against start of text section. */
10462 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10463 "Location list begin address (%s)",
10464 list_head->ll_symbol);
10465 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10466 "Location list end address (%s)",
10467 list_head->ll_symbol);
10468 }
10469 else
10470 {
10471 /* Pair of absolute addresses. */
10472 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10473 "Location list begin address (%s)",
10474 list_head->ll_symbol);
10475 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10476 "Location list end address (%s)",
10477 list_head->ll_symbol);
10478 }
10479
10480 /* Output the block length for this list of location operations. */
10481 if (dwarf_version >= 5)
10482 dw2_asm_output_data_uleb128 (size, "Location expression size");
10483 else
10484 {
10485 gcc_assert (size <= 0xffff);
10486 dw2_asm_output_data (2, size, "Location expression size");
10487 }
10488
10489 output_loc_sequence (curr->expr, -1);
10490 }
10491
10492 /* And finally list termination. */
10493 if (dwarf_version >= 5)
10494 dw2_asm_output_data (1, DW_LLE_end_of_list,
10495 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10496 else if (dwarf_split_debug_info)
10497 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10498 "Location list terminator (%s)",
10499 list_head->ll_symbol);
10500 else
10501 {
10502 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10503 "Location list terminator begin (%s)",
10504 list_head->ll_symbol);
10505 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10506 "Location list terminator end (%s)",
10507 list_head->ll_symbol);
10508 }
10509
10510 gcc_assert (!list_head->vl_symbol
10511 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10512 }
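/* For illustration only (label names and the location expression are
   hypothetical): with DWARF 5, a single .text section and a
   .uleb128-capable assembler, one range of the list above comes out
   roughly as

	.byte	0x4			# DW_LLE_offset_pair
	.uleb128 .LVL1-.Ltext0		# Location list begin address
	.uleb128 .LVL2-.Ltext0		# Location list end address
	.uleb128 0x1			# Location expression size
	.byte	0x55			# DW_OP_reg5
	.byte	0			# DW_LLE_end_of_list

   where 0x4 and 0 are the DW_LLE_offset_pair and DW_LLE_end_of_list codes
   and 0x55 is DW_OP_reg5, a one-byte location expression.  */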
10513
10514 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10515 section. Emit a relocated reference if val_entry is NULL, otherwise,
10516 emit an indirect reference. */
10517
10518 static void
10519 output_range_list_offset (dw_attr_node *a)
10520 {
10521 const char *name = dwarf_attr_name (a->dw_attr);
10522
10523 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10524 {
10525 if (dwarf_version >= 5)
10526 {
10527 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10528 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10529 debug_ranges_section, "%s", name);
10530 }
10531 else
10532 {
10533 char *p = strchr (ranges_section_label, '\0');
10534 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10535 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10536 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10537 debug_ranges_section, "%s", name);
10538 *p = '\0';
10539 }
10540 }
10541 else if (dwarf_version >= 5)
10542 {
10543 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10544 gcc_assert (rnglist_idx);
10545 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10546 }
10547 else
10548 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10549 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10550 "%s (offset from %s)", name, ranges_section_label);
10551 }
10552
10553 /* Output the offset into the debug_loc section. */
10554
10555 static void
10556 output_loc_list_offset (dw_attr_node *a)
10557 {
10558 char *sym = AT_loc_list (a)->ll_symbol;
10559
10560 gcc_assert (sym);
10561 if (!dwarf_split_debug_info)
10562 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10563 "%s", dwarf_attr_name (a->dw_attr));
10564 else if (dwarf_version >= 5)
10565 {
10566 gcc_assert (AT_loc_list (a)->num_assigned);
10567 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10568 dwarf_attr_name (a->dw_attr),
10569 sym);
10570 }
10571 else
10572 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10573 "%s", dwarf_attr_name (a->dw_attr));
10574 }
10575
10576 /* Output the offset of the view list into the debug_loc section. */
10577
10578 static void
10579 output_view_list_offset (dw_attr_node *a)
10580 {
10581 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10582
10583 gcc_assert (sym);
10584 if (dwarf_split_debug_info)
10585 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10586 "%s", dwarf_attr_name (a->dw_attr));
10587 else
10588 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10589 "%s", dwarf_attr_name (a->dw_attr));
10590 }
10591
10592 /* Output an attribute's index or value appropriately. */
10593
10594 static void
10595 output_attr_index_or_value (dw_attr_node *a)
10596 {
10597 const char *name = dwarf_attr_name (a->dw_attr);
10598
10599 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10600 {
10601 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10602 return;
10603 }
10604 switch (AT_class (a))
10605 {
10606 case dw_val_class_addr:
10607 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10608 break;
10609 case dw_val_class_high_pc:
10610 case dw_val_class_lbl_id:
10611 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10612 break;
10613 default:
10614 gcc_unreachable ();
10615 }
10616 }
10617
10618 /* Output a type signature. */
10619
10620 static inline void
10621 output_signature (const char *sig, const char *name)
10622 {
10623 int i;
10624
10625 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10626 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10627 }
10628
10629 /* Output a discriminant value. */
10630
10631 static inline void
10632 output_discr_value (dw_discr_value *discr_value, const char *name)
10633 {
10634 if (discr_value->pos)
10635 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10636 else
10637 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10638 }
10639
10640 /* Output the DIE and its attributes. Called recursively to generate
10641 the definitions of each child DIE. */
10642
10643 static void
10644 output_die (dw_die_ref die)
10645 {
10646 dw_attr_node *a;
10647 dw_die_ref c;
10648 unsigned long size;
10649 unsigned ix;
10650
10651 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10652 (unsigned long)die->die_offset,
10653 dwarf_tag_name (die->die_tag));
10654
10655 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10656 {
10657 const char *name = dwarf_attr_name (a->dw_attr);
10658
10659 switch (AT_class (a))
10660 {
10661 case dw_val_class_addr:
10662 output_attr_index_or_value (a);
10663 break;
10664
10665 case dw_val_class_offset:
10666 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10667 "%s", name);
10668 break;
10669
10670 case dw_val_class_range_list:
10671 output_range_list_offset (a);
10672 break;
10673
10674 case dw_val_class_loc:
10675 size = size_of_locs (AT_loc (a));
10676
10677 /* Output the block length for this list of location operations. */
10678 if (dwarf_version >= 4)
10679 dw2_asm_output_data_uleb128 (size, "%s", name);
10680 else
10681 dw2_asm_output_data (constant_size (size), size, "%s", name);
10682
10683 output_loc_sequence (AT_loc (a), -1);
10684 break;
10685
10686 case dw_val_class_const:
10687 /* ??? It would be slightly more efficient to use a scheme like is
10688 used for unsigned constants below, but gdb 4.x does not sign
10689 extend. Gdb 5.x does sign extend. */
10690 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10691 break;
10692
10693 case dw_val_class_unsigned_const:
10694 {
10695 int csize = constant_size (AT_unsigned (a));
10696 if (dwarf_version == 3
10697 && a->dw_attr == DW_AT_data_member_location
10698 && csize >= 4)
10699 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10700 else
10701 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10702 }
10703 break;
10704
10705 case dw_val_class_symview:
10706 {
10707 int vsize;
10708 if (symview_upper_bound <= 0xff)
10709 vsize = 1;
10710 else if (symview_upper_bound <= 0xffff)
10711 vsize = 2;
10712 else if (symview_upper_bound <= 0xffffffff)
10713 vsize = 4;
10714 else
10715 vsize = 8;
10716 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10717 "%s", name);
10718 }
10719 break;
10720
10721 case dw_val_class_const_implicit:
10722 if (flag_debug_asm)
10723 fprintf (asm_out_file, "\t\t\t%s %s ("
10724 HOST_WIDE_INT_PRINT_DEC ")\n",
10725 ASM_COMMENT_START, name, AT_int (a));
10726 break;
10727
10728 case dw_val_class_unsigned_const_implicit:
10729 if (flag_debug_asm)
10730 fprintf (asm_out_file, "\t\t\t%s %s ("
10731 HOST_WIDE_INT_PRINT_HEX ")\n",
10732 ASM_COMMENT_START, name, AT_unsigned (a));
10733 break;
10734
10735 case dw_val_class_const_double:
10736 {
10737 unsigned HOST_WIDE_INT first, second;
10738
10739 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10740 dw2_asm_output_data (1,
10741 HOST_BITS_PER_DOUBLE_INT
10742 / HOST_BITS_PER_CHAR,
10743 NULL);
10744
10745 if (WORDS_BIG_ENDIAN)
10746 {
10747 first = a->dw_attr_val.v.val_double.high;
10748 second = a->dw_attr_val.v.val_double.low;
10749 }
10750 else
10751 {
10752 first = a->dw_attr_val.v.val_double.low;
10753 second = a->dw_attr_val.v.val_double.high;
10754 }
10755
10756 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10757 first, "%s", name);
10758 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10759 second, NULL);
10760 }
10761 break;
10762
10763 case dw_val_class_wide_int:
10764 {
10765 int i;
10766 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10767 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10768 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10769 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10770 * l, NULL);
10771
10772 if (WORDS_BIG_ENDIAN)
10773 for (i = len - 1; i >= 0; --i)
10774 {
10775 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10776 "%s", name);
10777 name = "";
10778 }
10779 else
10780 for (i = 0; i < len; ++i)
10781 {
10782 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10783 "%s", name);
10784 name = "";
10785 }
10786 }
10787 break;
10788
10789 case dw_val_class_vec:
10790 {
10791 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10792 unsigned int len = a->dw_attr_val.v.val_vec.length;
10793 unsigned int i;
10794 unsigned char *p;
10795
10796 dw2_asm_output_data (constant_size (len * elt_size),
10797 len * elt_size, "%s", name);
10798 if (elt_size > sizeof (HOST_WIDE_INT))
10799 {
10800 elt_size /= 2;
10801 len *= 2;
10802 }
10803 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10804 i < len;
10805 i++, p += elt_size)
10806 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10807 "fp or vector constant word %u", i);
10808 break;
10809 }
10810
10811 case dw_val_class_flag:
10812 if (dwarf_version >= 4)
10813 {
10814 /* Currently all add_AT_flag calls pass in 1 as last argument,
10815 so DW_FORM_flag_present can be used. If that ever changes,
10816 we'll need to use DW_FORM_flag and have some optimization
10817 in build_abbrev_table that will change those to
10818 DW_FORM_flag_present if it is set to 1 in all DIEs using
10819 the same abbrev entry. */
10820 gcc_assert (AT_flag (a) == 1);
10821 if (flag_debug_asm)
10822 fprintf (asm_out_file, "\t\t\t%s %s\n",
10823 ASM_COMMENT_START, name);
10824 break;
10825 }
10826 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10827 break;
10828
10829 case dw_val_class_loc_list:
10830 output_loc_list_offset (a);
10831 break;
10832
10833 case dw_val_class_view_list:
10834 output_view_list_offset (a);
10835 break;
10836
10837 case dw_val_class_die_ref:
10838 if (AT_ref_external (a))
10839 {
10840 if (AT_ref (a)->comdat_type_p)
10841 {
10842 comdat_type_node *type_node
10843 = AT_ref (a)->die_id.die_type_node;
10844
10845 gcc_assert (type_node);
10846 output_signature (type_node->signature, name);
10847 }
10848 else
10849 {
10850 const char *sym = AT_ref (a)->die_id.die_symbol;
10851 int size;
10852
10853 gcc_assert (sym);
10854 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10855 length, whereas in DWARF3 it's always sized as an
10856 offset. */
10857 if (dwarf_version == 2)
10858 size = DWARF2_ADDR_SIZE;
10859 else
10860 size = DWARF_OFFSET_SIZE;
10861 /* ??? We cannot unconditionally output die_offset if it is
10862 non-zero - others might create references to those
10863 DIEs via symbols.
10864 And we do not clear its DIE offset after outputting it
10865 (and the label refers to the actual DIEs, not to the
10866 DWARF CU unit header, which is the case where using label + offset
10867 would be the correct thing to do).
10868 ??? This is the reason for the with_offset flag. */
10869 if (AT_ref (a)->with_offset)
10870 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10871 debug_info_section, "%s", name);
10872 else
10873 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10874 name);
10875 }
10876 }
10877 else
10878 {
10879 gcc_assert (AT_ref (a)->die_offset);
10880 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10881 "%s", name);
10882 }
10883 break;
10884
10885 case dw_val_class_fde_ref:
10886 {
10887 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10888
10889 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10890 a->dw_attr_val.v.val_fde_index * 2);
10891 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10892 "%s", name);
10893 }
10894 break;
10895
10896 case dw_val_class_vms_delta:
10897 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10898 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10899 AT_vms_delta2 (a), AT_vms_delta1 (a),
10900 "%s", name);
10901 #else
10902 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10903 AT_vms_delta2 (a), AT_vms_delta1 (a),
10904 "%s", name);
10905 #endif
10906 break;
10907
10908 case dw_val_class_lbl_id:
10909 output_attr_index_or_value (a);
10910 break;
10911
10912 case dw_val_class_lineptr:
10913 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10914 debug_line_section, "%s", name);
10915 break;
10916
10917 case dw_val_class_macptr:
10918 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10919 debug_macinfo_section, "%s", name);
10920 break;
10921
10922 case dw_val_class_loclistsptr:
10923 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10924 debug_loc_section, "%s", name);
10925 break;
10926
10927 case dw_val_class_str:
10928 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10929 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10930 a->dw_attr_val.v.val_str->label,
10931 debug_str_section,
10932 "%s: \"%s\"", name, AT_string (a));
10933 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10934 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10935 a->dw_attr_val.v.val_str->label,
10936 debug_line_str_section,
10937 "%s: \"%s\"", name, AT_string (a));
10938 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10939 dw2_asm_output_data_uleb128 (AT_index (a),
10940 "%s: \"%s\"", name, AT_string (a));
10941 else
10942 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10943 break;
10944
10945 case dw_val_class_file:
10946 {
10947 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10948
10949 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10950 a->dw_attr_val.v.val_file->filename);
10951 break;
10952 }
10953
10954 case dw_val_class_file_implicit:
10955 if (flag_debug_asm)
10956 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10957 ASM_COMMENT_START, name,
10958 maybe_emit_file (a->dw_attr_val.v.val_file),
10959 a->dw_attr_val.v.val_file->filename);
10960 break;
10961
10962 case dw_val_class_data8:
10963 {
10964 int i;
10965
10966 for (i = 0; i < 8; i++)
10967 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10968 i == 0 ? "%s" : NULL, name);
10969 break;
10970 }
10971
10972 case dw_val_class_high_pc:
10973 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10974 get_AT_low_pc (die), "DW_AT_high_pc");
10975 break;
10976
10977 case dw_val_class_discr_value:
10978 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10979 break;
10980
10981 case dw_val_class_discr_list:
10982 {
10983 dw_discr_list_ref list = AT_discr_list (a);
10984 const int size = size_of_discr_list (list);
10985
10986 /* This is a block, so output its length first. */
10987 dw2_asm_output_data (constant_size (size), size,
10988 "%s: block size", name);
10989
10990 for (; list != NULL; list = list->dw_discr_next)
10991 {
10992 /* One byte for the discriminant value descriptor, and then as
10993 many LEB128 numbers as required. */
10994 if (list->dw_discr_range)
10995 dw2_asm_output_data (1, DW_DSC_range,
10996 "%s: DW_DSC_range", name);
10997 else
10998 dw2_asm_output_data (1, DW_DSC_label,
10999 "%s: DW_DSC_label", name);
11000
11001 output_discr_value (&list->dw_discr_lower_bound, name);
11002 if (list->dw_discr_range)
11003 output_discr_value (&list->dw_discr_upper_bound, name);
11004 }
11005 break;
11006 }
11007
11008 default:
11009 gcc_unreachable ();
11010 }
11011 }
11012
11013 FOR_EACH_CHILD (die, c, output_die (c));
11014
11015 /* Add null byte to terminate sibling list. */
11016 if (die->die_child != NULL)
11017 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
11018 (unsigned long) die->die_offset);
11019 }
11020
11021 /* Output the dwarf version number. */
11022
11023 static void
11024 output_dwarf_version ()
11025 {
11026 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
11027 views in loclist. That will change eventually. */
11028 if (dwarf_version == 6)
11029 {
11030 static bool once;
11031 if (!once)
11032 {
11033 warning (0, "%<-gdwarf-6%> is output as version 5 with "
11034 "incompatibilities");
11035 once = true;
11036 }
11037 dw2_asm_output_data (2, 5, "DWARF version number");
11038 }
11039 else
11040 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
11041 }
11042
11043 /* Output the compilation unit that appears at the beginning of the
11044 .debug_info section, and precedes the DIE descriptions. */
11045
11046 static void
11047 output_compilation_unit_header (enum dwarf_unit_type ut)
11048 {
11049 if (!XCOFF_DEBUGGING_INFO)
11050 {
11051 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11052 dw2_asm_output_data (4, 0xffffffff,
11053 "Initial length escape value indicating 64-bit DWARF extension");
11054 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11055 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11056 "Length of Compilation Unit Info");
11057 }
11058
11059 output_dwarf_version ();
11060 if (dwarf_version >= 5)
11061 {
11062 const char *name;
11063 switch (ut)
11064 {
11065 case DW_UT_compile: name = "DW_UT_compile"; break;
11066 case DW_UT_type: name = "DW_UT_type"; break;
11067 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11068 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11069 default: gcc_unreachable ();
11070 }
11071 dw2_asm_output_data (1, ut, "%s", name);
11072 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11073 }
11074 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11075 debug_abbrev_section,
11076 "Offset Into Abbrev. Section");
11077 if (dwarf_version < 5)
11078 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11079 }
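/* Illustrative sketch, not part of the emitted output: for 32-bit DWARF 5
   the header written by output_compilation_unit_header above is laid out as

       4 bytes  unit length (next_die_offset - DWARF_INITIAL_LENGTH_SIZE)
       2 bytes  DWARF version number
       1 byte   unit type (DW_UT_compile, DW_UT_type, ...)
       1 byte   address size (DWARF2_ADDR_SIZE)
       4 bytes  offset into .debug_abbrev

   while for DWARF 2-4 there is no unit type byte and the address size byte
   follows the .debug_abbrev offset instead; 64-bit DWARF additionally
   prepends the 0xffffffff escape and uses 8-byte offsets.  */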
11080
11081 /* Output the compilation unit DIE and its children. */
11082
11083 static void
11084 output_comp_unit (dw_die_ref die, int output_if_empty,
11085 const unsigned char *dwo_id)
11086 {
11087 const char *secname, *oldsym;
11088 char *tmp;
11089
11090 /* Unless we are outputting the main CU, we may throw away empty ones. */
11091 if (!output_if_empty && die->die_child == NULL)
11092 return;
11093
11094 /* Even if there are no children of this DIE, we must output the information
11095 about the compilation unit. Otherwise, on an empty translation unit, we
11096 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11097 will then complain when examining the file. First mark all the DIEs in
11098 this CU so we know which get local refs. */
11099 mark_dies (die);
11100
11101 external_ref_hash_type *extern_map = optimize_external_refs (die);
11102
11103 /* For now, optimize only the main CU; to optimize the rest
11104 we'd need to see all of them earlier. Leave the rest for post-linking
11105 tools like DWZ. */
11106 if (die == comp_unit_die ())
11107 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11108
11109 build_abbrev_table (die, extern_map);
11110
11111 optimize_abbrev_table ();
11112
11113 delete extern_map;
11114
11115 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11116 next_die_offset = (dwo_id
11117 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11118 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11119 calc_die_sizes (die);
11120
11121 oldsym = die->die_id.die_symbol;
11122 if (oldsym && die->comdat_type_p)
11123 {
11124 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11125
11126 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11127 secname = tmp;
11128 die->die_id.die_symbol = NULL;
11129 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11130 }
11131 else
11132 {
11133 switch_to_section (debug_info_section);
11134 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11135 info_section_emitted = true;
11136 }
11137
11138 /* For LTO cross unit DIE refs we want a symbol on the start of the
11139 debuginfo section, not on the CU DIE. */
11140 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11141 {
11142 /* ??? No way to get visibility assembled without a decl. */
11143 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11144 get_identifier (oldsym), char_type_node);
11145 TREE_PUBLIC (decl) = true;
11146 TREE_STATIC (decl) = true;
11147 DECL_ARTIFICIAL (decl) = true;
11148 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11149 DECL_VISIBILITY_SPECIFIED (decl) = true;
11150 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11151 #ifdef ASM_WEAKEN_LABEL
11152 /* We prefer a .weak because that handles duplicates from duplicate
11153 archive members in a graceful way. */
11154 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11155 #else
11156 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11157 #endif
11158 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11159 }
11160
11161 /* Output debugging information. */
11162 output_compilation_unit_header (dwo_id
11163 ? DW_UT_split_compile : DW_UT_compile);
11164 if (dwarf_version >= 5)
11165 {
11166 if (dwo_id != NULL)
11167 for (int i = 0; i < 8; i++)
11168 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11169 }
11170 output_die (die);
11171
11172 /* Leave the marks on the main CU, so we can check them in
11173 output_pubnames. */
11174 if (oldsym)
11175 {
11176 unmark_dies (die);
11177 die->die_id.die_symbol = oldsym;
11178 }
11179 }
11180
11181 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11182 and .debug_pubtypes. This is configured per-target, but can be
11183 overridden by the -gpubnames or -gno-pubnames options. */
11184
11185 static inline bool
11186 want_pubnames (void)
11187 {
11188 if (debug_info_level <= DINFO_LEVEL_TERSE
11189 /* Names and types go to the early debug part only. */
11190 || in_lto_p)
11191 return false;
11192 if (debug_generate_pub_sections != -1)
11193 return debug_generate_pub_sections;
11194 return targetm.want_debug_pub_sections;
11195 }
11196
11197 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11198
11199 static void
11200 add_AT_pubnames (dw_die_ref die)
11201 {
11202 if (want_pubnames ())
11203 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11204 }
11205
11206 /* Add a string attribute value to a skeleton DIE. */
11207
11208 static inline void
11209 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11210 const char *str)
11211 {
11212 dw_attr_node attr;
11213 struct indirect_string_node *node;
11214
11215 if (! skeleton_debug_str_hash)
11216 skeleton_debug_str_hash
11217 = hash_table<indirect_string_hasher>::create_ggc (10);
11218
11219 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11220 find_string_form (node);
11221 if (node->form == dwarf_FORM (DW_FORM_strx))
11222 node->form = DW_FORM_strp;
11223
11224 attr.dw_attr = attr_kind;
11225 attr.dw_attr_val.val_class = dw_val_class_str;
11226 attr.dw_attr_val.val_entry = NULL;
11227 attr.dw_attr_val.v.val_str = node;
11228 add_dwarf_attr (die, &attr);
11229 }
11230
11231 /* Helper function to generate top-level dies for skeleton debug_info and
11232 debug_types. */
11233
11234 static void
11235 add_top_level_skeleton_die_attrs (dw_die_ref die)
11236 {
11237 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11238 const char *comp_dir = comp_dir_string ();
11239
11240 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11241 if (comp_dir != NULL)
11242 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11243 add_AT_pubnames (die);
11244 if (addr_index_table != NULL && addr_index_table->size () > 0)
11245 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11246 }
11247
11248 /* Output skeleton debug sections that point to the dwo file. */
11249
11250 static void
11251 output_skeleton_debug_sections (dw_die_ref comp_unit,
11252 const unsigned char *dwo_id)
11253 {
11254 /* These attributes will be found in the full debug_info section. */
11255 remove_AT (comp_unit, DW_AT_producer);
11256 remove_AT (comp_unit, DW_AT_language);
11257
11258 switch_to_section (debug_skeleton_info_section);
11259 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11260
11261 /* Produce the skeleton compilation-unit header. This one differs enough from
11262 a normal CU header that it's better not to call
11263 output_compilation_unit_header. */
11264 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11265 dw2_asm_output_data (4, 0xffffffff,
11266 "Initial length escape value indicating 64-bit "
11267 "DWARF extension");
11268
11269 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11270 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11271 - DWARF_INITIAL_LENGTH_SIZE
11272 + size_of_die (comp_unit),
11273 "Length of Compilation Unit Info");
11274 output_dwarf_version ();
11275 if (dwarf_version >= 5)
11276 {
11277 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11278 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11279 }
11280 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11281 debug_skeleton_abbrev_section,
11282 "Offset Into Abbrev. Section");
11283 if (dwarf_version < 5)
11284 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11285 else
11286 for (int i = 0; i < 8; i++)
11287 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11288
11289 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11290 output_die (comp_unit);
11291
11292 /* Build the skeleton debug_abbrev section. */
11293 switch_to_section (debug_skeleton_abbrev_section);
11294 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11295
11296 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11297
11298 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11299 }
11300
11301 /* Output a comdat type unit DIE and its children. */
11302
11303 static void
11304 output_comdat_type_unit (comdat_type_node *node,
11305 bool early_lto_debug ATTRIBUTE_UNUSED)
11306 {
11307 const char *secname;
11308 char *tmp;
11309 int i;
11310 #if defined (OBJECT_FORMAT_ELF)
11311 tree comdat_key;
11312 #endif
11313
11314 /* First mark all the DIEs in this CU so we know which get local refs. */
11315 mark_dies (node->root_die);
11316
11317 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11318
11319 build_abbrev_table (node->root_die, extern_map);
11320
11321 delete extern_map;
11322 extern_map = NULL;
11323
11324 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11325 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11326 calc_die_sizes (node->root_die);
11327
11328 #if defined (OBJECT_FORMAT_ELF)
11329 if (dwarf_version >= 5)
11330 {
11331 if (!dwarf_split_debug_info)
11332 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11333 else
11334 secname = (early_lto_debug
11335 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11336 }
11337 else if (!dwarf_split_debug_info)
11338 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11339 else
11340 secname = (early_lto_debug
11341 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11342
11343 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11344 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11345 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11346 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11347 comdat_key = get_identifier (tmp);
11348 targetm.asm_out.named_section (secname,
11349 SECTION_DEBUG | SECTION_LINKONCE,
11350 comdat_key);
11351 #else
11352 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11353 sprintf (tmp, (dwarf_version >= 5
11354 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11355 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11356 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11357 secname = tmp;
11358 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11359 #endif
11360
11361 /* Output debugging information. */
11362 output_compilation_unit_header (dwarf_split_debug_info
11363 ? DW_UT_split_type : DW_UT_type);
11364 output_signature (node->signature, "Type Signature");
11365 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11366 "Offset to Type DIE");
11367 output_die (node->root_die);
11368
11369 unmark_dies (node->root_die);
11370 }
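/* Illustrative example (drawn from the code above, with the usual default
   section names assumed): on ELF with DWARF 5 a type unit whose signature
   starts with the bytes 0xde 0xad is emitted into .debug_info (or
   .debug_info.dwo for split DWARF) in a COMDAT group keyed by "wi.dead...",
   whereas pre-DWARF-5 units go to .debug_types with a "wt." key; non-ELF
   targets fall back to a ".gnu.linkonce.wi."/".gnu.linkonce.wt." named
   section instead of a COMDAT group.  */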
11371
11372 /* Return the DWARF2/3 pubname associated with a decl. */
11373
11374 static const char *
11375 dwarf2_name (tree decl, int scope)
11376 {
11377 if (DECL_NAMELESS (decl))
11378 return NULL;
11379 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11380 }
11381
11382 /* Add a new entry to .debug_pubnames if appropriate. */
11383
11384 static void
11385 add_pubname_string (const char *str, dw_die_ref die)
11386 {
11387 pubname_entry e;
11388
11389 e.die = die;
11390 e.name = xstrdup (str);
11391 vec_safe_push (pubname_table, e);
11392 }
11393
11394 static void
11395 add_pubname (tree decl, dw_die_ref die)
11396 {
11397 if (!want_pubnames ())
11398 return;
11399
11400 /* Don't add items to the table when we expect that the consumer will have
11401 just read the enclosing die. For example, if the consumer is looking at a
11402 class_member, it will either be inside the class already, or will have just
11403 looked up the class to find the member. Either way, searching the class is
11404 faster than searching the index. */
11405 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11406 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11407 {
11408 const char *name = dwarf2_name (decl, 1);
11409
11410 if (name)
11411 add_pubname_string (name, die);
11412 }
11413 }
11414
11415 /* Add an enumerator to the pubnames section. */
11416
11417 static void
11418 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11419 {
11420 pubname_entry e;
11421
11422 gcc_assert (scope_name);
11423 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11424 e.die = die;
11425 vec_safe_push (pubname_table, e);
11426 }
11427
11428 /* Add a new entry to .debug_pubtypes if appropriate. */
11429
11430 static void
11431 add_pubtype (tree decl, dw_die_ref die)
11432 {
11433 pubname_entry e;
11434
11435 if (!want_pubnames ())
11436 return;
11437
11438 if ((TREE_PUBLIC (decl)
11439 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11440 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11441 {
11442 tree scope = NULL;
11443 const char *scope_name = "";
11444 const char *sep = is_cxx () ? "::" : ".";
11445 const char *name;
11446
11447 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11448 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11449 {
11450 scope_name = lang_hooks.dwarf_name (scope, 1);
11451 if (scope_name != NULL && scope_name[0] != '\0')
11452 scope_name = concat (scope_name, sep, NULL);
11453 else
11454 scope_name = "";
11455 }
11456
11457 if (TYPE_P (decl))
11458 name = type_tag (decl);
11459 else
11460 name = lang_hooks.dwarf_name (decl, 1);
11461
11462 /* If we don't have a name for the type, there's no point in adding
11463 it to the table. */
11464 if (name != NULL && name[0] != '\0')
11465 {
11466 e.die = die;
11467 e.name = concat (scope_name, name, NULL);
11468 vec_safe_push (pubtype_table, e);
11469 }
11470
11471 /* Although it might be more consistent to add the pubinfo for the
11472 enumerators as their dies are created, they should only be added if the
11473 enum type meets the criteria above. So rather than re-check the parent
11474 enum type whenever an enumerator die is created, just output them all
11475 here. This isn't protected by the name conditional because anonymous
11476 enums don't have names. */
11477 if (die->die_tag == DW_TAG_enumeration_type)
11478 {
11479 dw_die_ref c;
11480
11481 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11482 }
11483 }
11484 }
11485
11486 /* Output a single entry in the pubnames table. */
11487
11488 static void
11489 output_pubname (dw_offset die_offset, pubname_entry *entry)
11490 {
11491 dw_die_ref die = entry->die;
11492 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11493
11494 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11495
11496 if (debug_generate_pub_sections == 2)
11497 {
11498 /* This logic follows gdb's method for determining the value of the flag
11499 byte. */
11500 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11501 switch (die->die_tag)
11502 {
11503 case DW_TAG_typedef:
11504 case DW_TAG_base_type:
11505 case DW_TAG_subrange_type:
11506 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11507 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11508 break;
11509 case DW_TAG_enumerator:
11510 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11511 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11512 if (!is_cxx ())
11513 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11514 break;
11515 case DW_TAG_subprogram:
11516 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11517 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11518 if (!is_ada ())
11519 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11520 break;
11521 case DW_TAG_constant:
11522 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11523 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11524 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11525 break;
11526 case DW_TAG_variable:
11527 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11528 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11529 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11530 break;
11531 case DW_TAG_namespace:
11532 case DW_TAG_imported_declaration:
11533 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11534 break;
11535 case DW_TAG_class_type:
11536 case DW_TAG_interface_type:
11537 case DW_TAG_structure_type:
11538 case DW_TAG_union_type:
11539 case DW_TAG_enumeration_type:
11540 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11541 if (!is_cxx ())
11542 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11543 break;
11544 default:
11545 /* An unusual tag. Leave the flag-byte empty. */
11546 break;
11547 }
11548 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11549 "GDB-index flags");
11550 }
11551
11552 dw2_asm_output_nstring (entry->name, -1, "external name");
11553 }
11554
11555
11556 /* Output the public names table used to speed up access to externally
11557 visible names; or the public types table used to find type definitions. */
11558
11559 static void
11560 output_pubnames (vec<pubname_entry, va_gc> *names)
11561 {
11562 unsigned i;
11563 unsigned long pubnames_length = size_of_pubnames (names);
11564 pubname_entry *pub;
11565
11566 if (!XCOFF_DEBUGGING_INFO)
11567 {
11568 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11569 dw2_asm_output_data (4, 0xffffffff,
11570 "Initial length escape value indicating 64-bit DWARF extension");
11571 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11572 "Pub Info Length");
11573 }
11574
11575 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11576 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11577
11578 if (dwarf_split_debug_info)
11579 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11580 debug_skeleton_info_section,
11581 "Offset of Compilation Unit Info");
11582 else
11583 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11584 debug_info_section,
11585 "Offset of Compilation Unit Info");
11586 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11587 "Compilation Unit Length");
11588
11589 FOR_EACH_VEC_ELT (*names, i, pub)
11590 {
11591 if (include_pubname_in_output (names, pub))
11592 {
11593 dw_offset die_offset = pub->die->die_offset;
11594
11595 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11596 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11597 gcc_assert (pub->die->die_mark);
11598
11599 /* If we're putting types in their own .debug_types sections,
11600 the .debug_pubtypes table will still point to the compile
11601 unit (not the type unit), so we want to use the offset of
11602 the skeleton DIE (if there is one). */
11603 if (pub->die->comdat_type_p && names == pubtype_table)
11604 {
11605 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11606
11607 if (type_node != NULL)
11608 die_offset = (type_node->skeleton_die != NULL
11609 ? type_node->skeleton_die->die_offset
11610 : comp_unit_die ()->die_offset);
11611 }
11612
11613 output_pubname (die_offset, pub);
11614 }
11615 }
11616
11617 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11618 }
11619
11620 /* Output public names and types tables if necessary. */
11621
11622 static void
11623 output_pubtables (void)
11624 {
11625 if (!want_pubnames () || !info_section_emitted)
11626 return;
11627
11628 switch_to_section (debug_pubnames_section);
11629 output_pubnames (pubname_table);
11630 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11631 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11632 simply won't look for the section. */
11633 switch_to_section (debug_pubtypes_section);
11634 output_pubnames (pubtype_table);
11635 }
11636
11637
11638 /* Output the information that goes into the .debug_aranges table.
11639 Namely, define the beginning and ending address range of the
11640 text section generated for this compilation unit. */
11641
11642 static void
11643 output_aranges (void)
11644 {
11645 unsigned i;
11646 unsigned long aranges_length = size_of_aranges ();
11647
11648 if (!XCOFF_DEBUGGING_INFO)
11649 {
11650 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11651 dw2_asm_output_data (4, 0xffffffff,
11652 "Initial length escape value indicating 64-bit DWARF extension");
11653 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11654 "Length of Address Ranges Info");
11655 }
11656
11657 /* Version number for aranges is still 2, even up to DWARF5. */
11658 dw2_asm_output_data (2, 2, "DWARF aranges version");
11659 if (dwarf_split_debug_info)
11660 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11661 debug_skeleton_info_section,
11662 "Offset of Compilation Unit Info");
11663 else
11664 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11665 debug_info_section,
11666 "Offset of Compilation Unit Info");
11667 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11668 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11669
11670 /* We need to align to twice the pointer size here. */
11671 if (DWARF_ARANGES_PAD_SIZE)
11672 {
11673 /* Pad using 2-byte words so that the padding is correct for any
11674 pointer size. */
11675 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11676 2 * DWARF2_ADDR_SIZE);
11677 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11678 dw2_asm_output_data (2, 0, NULL);
11679 }
11680
11681 /* It is necessary not to output these entries if the sections were
11682 not used; otherwise the length will be 0 and
11683 the address may end up as 0 if the section is discarded by ld
11684 --gc-sections, leaving an invalid (0, 0) entry that can be
11685 confused with the terminator. */
11686 if (text_section_used)
11687 {
11688 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11689 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11690 text_section_label, "Length");
11691 }
11692 if (cold_text_section_used)
11693 {
11694 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11695 "Address");
11696 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11697 cold_text_section_label, "Length");
11698 }
11699
11700 if (have_multiple_function_sections)
11701 {
11702 unsigned fde_idx;
11703 dw_fde_ref fde;
11704
11705 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11706 {
11707 if (DECL_IGNORED_P (fde->decl))
11708 continue;
11709 if (!fde->in_std_section)
11710 {
11711 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11712 "Address");
11713 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11714 fde->dw_fde_begin, "Length");
11715 }
11716 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11717 {
11718 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11719 "Address");
11720 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11721 fde->dw_fde_second_begin, "Length");
11722 }
11723 }
11724 }
11725
11726 /* Output the terminator words. */
11727 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11728 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11729 }
11730
11731 /* Add a new entry to .debug_ranges. Return its index into
11732 ranges_table vector. */
11733
11734 static unsigned int
11735 add_ranges_num (int num, bool maybe_new_sec)
11736 {
11737 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11738 vec_safe_push (ranges_table, r);
11739 return vec_safe_length (ranges_table) - 1;
11740 }
11741
11742 /* Add a new entry to .debug_ranges corresponding to a block, or a
11743 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11744 this entry might be in a different section from the previous range. */
11745
11746 static unsigned int
11747 add_ranges (const_tree block, bool maybe_new_sec)
11748 {
11749 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11750 }
11751
11752 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11753 chain, or a middle entry of a chain that will be directly referred to. */
11754
11755 static void
11756 note_rnglist_head (unsigned int offset)
11757 {
11758 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11759 return;
11760 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11761 }
11762
11763 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11764 When using dwarf_split_debug_info, address attributes in dies destined
11765 for the final executable should be direct references--setting the
11766 parameter force_direct ensures this behavior. */
11767
11768 static void
11769 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11770 bool *added, bool force_direct)
11771 {
11772 unsigned int in_use = vec_safe_length (ranges_by_label);
11773 unsigned int offset;
11774 dw_ranges_by_label rbl = { begin, end };
11775 vec_safe_push (ranges_by_label, rbl);
11776 offset = add_ranges_num (-(int)in_use - 1, true);
11777 if (!*added)
11778 {
11779 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11780 *added = true;
11781 note_rnglist_head (offset);
11782 }
11783 }
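/* Worked example (illustrative only, based on add_ranges_num above and the
   consumers output_ranges/output_rnglists below): a label pair is pushed
   onto ranges_by_label and recorded as num == -(index + 1), so the first
   pair is stored as -1 and later decoded as lab_idx = -num - 1 == 0;
   positive nums are BLOCK_NUMBERs and num == 0 marks the end of a list.  */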
11784
11785 /* Emit .debug_ranges section. */
11786
11787 static void
11788 output_ranges (void)
11789 {
11790 unsigned i;
11791 static const char *const start_fmt = "Offset %#x";
11792 const char *fmt = start_fmt;
11793 dw_ranges *r;
11794
11795 switch_to_section (debug_ranges_section);
11796 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11797 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11798 {
11799 int block_num = r->num;
11800
11801 if (block_num > 0)
11802 {
11803 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11804 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11805
11806 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11807 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11808
11809 /* If all code is in the text section, then the compilation
11810 unit base address defaults to DW_AT_low_pc, which is the
11811 base of the text section. */
11812 if (!have_multiple_function_sections)
11813 {
11814 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11815 text_section_label,
11816 fmt, i * 2 * DWARF2_ADDR_SIZE);
11817 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11818 text_section_label, NULL);
11819 }
11820
11821 /* Otherwise, the compilation unit base address is zero,
11822 which allows us to use absolute addresses, and not worry
11823 about whether the target supports cross-section
11824 arithmetic. */
11825 else
11826 {
11827 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11828 fmt, i * 2 * DWARF2_ADDR_SIZE);
11829 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11830 }
11831
11832 fmt = NULL;
11833 }
11834
11835 /* Negative block_num stands for an index into ranges_by_label. */
11836 else if (block_num < 0)
11837 {
11838 int lab_idx = - block_num - 1;
11839
11840 if (!have_multiple_function_sections)
11841 {
11842 gcc_unreachable ();
11843 #if 0
11844 /* If we ever use add_ranges_by_labels () for a single
11845 function section, all we have to do is to take out
11846 the #if 0 above. */
11847 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11848 (*ranges_by_label)[lab_idx].begin,
11849 text_section_label,
11850 fmt, i * 2 * DWARF2_ADDR_SIZE);
11851 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11852 (*ranges_by_label)[lab_idx].end,
11853 text_section_label, NULL);
11854 #endif
11855 }
11856 else
11857 {
11858 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11859 (*ranges_by_label)[lab_idx].begin,
11860 fmt, i * 2 * DWARF2_ADDR_SIZE);
11861 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11862 (*ranges_by_label)[lab_idx].end,
11863 NULL);
11864 }
11865 }
11866 else
11867 {
11868 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11869 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11870 fmt = start_fmt;
11871 }
11872 }
11873 }
11874
11875 /* Non-zero if .debug_line_str should be used for .debug_line section
11876 strings or strings that are likely shareable with those. */
11877 #define DWARF5_USE_DEBUG_LINE_STR \
11878 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11879 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11880 /* FIXME: there is no .debug_line_str.dwo section, \
11881 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11882 && !dwarf_split_debug_info)
11883
11884
11885 /* Returns TRUE if we are outputting DWARF5 and either the assembler
11886 supports DWARF5 .debug_line tables using .debug_line_str or we generate
11887 them ourselves, except for split-dwarf, which doesn't have a
11888 .debug_line_str section. */
11889 static bool
11890 asm_outputs_debug_line_str (void)
11891 {
11892 if (dwarf_version >= 5
11893 && ! output_asm_line_debug_info ()
11894 && DWARF5_USE_DEBUG_LINE_STR)
11895 return true;
11896 else
11897 {
11898 #if defined(HAVE_AS_GDWARF_5_DEBUG_FLAG) && defined(HAVE_AS_WORKING_DWARF_N_FLAG)
11899 return !dwarf_split_debug_info && dwarf_version >= 5;
11900 #else
11901 return false;
11902 #endif
11903 }
11904 }
11905
11906
11907 /* Assign .debug_rnglists indexes. */
11908
11909 static void
11910 index_rnglists (void)
11911 {
11912 unsigned i;
11913 dw_ranges *r;
11914
11915 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11916 if (r->label)
11917 r->idx = rnglist_idx++;
11918 }
11919
11920 /* Emit .debug_rnglists section. */
11921
11922 static void
11923 output_rnglists (unsigned generation)
11924 {
11925 unsigned i;
11926 dw_ranges *r;
11927 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11928 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11929 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11930
11931 switch_to_section (debug_ranges_section);
11932 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11933 /* There are up to 4 unique ranges labels per generation.
11934 See also init_sections_and_labels. */
11935 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11936 2 + generation * 4);
11937 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11938 3 + generation * 4);
11939 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11940 dw2_asm_output_data (4, 0xffffffff,
11941 "Initial length escape value indicating "
11942 "64-bit DWARF extension");
11943 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11944 "Length of Range Lists");
11945 ASM_OUTPUT_LABEL (asm_out_file, l1);
11946 output_dwarf_version ();
11947 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11948 dw2_asm_output_data (1, 0, "Segment Size");
11949 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11950 about relocation sizes and primarily care about the size of .debug*
11951 sections in linked shared libraries and executables, then
11952 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11953 into it are usually larger than just DW_FORM_sec_offset offsets
11954 into the .debug_rnglists section. */
11955 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11956 "Offset Entry Count");
11957 if (dwarf_split_debug_info)
11958 {
11959 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11960 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11961 if (r->label)
11962 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11963 ranges_base_label, NULL);
11964 }
11965
11966 const char *lab = "";
11967 unsigned int len = vec_safe_length (ranges_table);
11968 const char *base = NULL;
11969 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11970 {
11971 int block_num = r->num;
11972
11973 if (r->label)
11974 {
11975 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11976 lab = r->label;
11977 }
11978 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11979 base = NULL;
11980 if (block_num > 0)
11981 {
11982 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11983 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11984
11985 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11986 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11987
11988 if (HAVE_AS_LEB128)
11989 {
11990 /* If all code is in the text section, then the compilation
11991 unit base address defaults to DW_AT_low_pc, which is the
11992 base of the text section. */
11993 if (!have_multiple_function_sections)
11994 {
11995 dw2_asm_output_data (1, DW_RLE_offset_pair,
11996 "DW_RLE_offset_pair (%s)", lab);
11997 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11998 "Range begin address (%s)", lab);
11999 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
12000 "Range end address (%s)", lab);
12001 continue;
12002 }
12003 if (base == NULL)
12004 {
12005 dw_ranges *r2 = NULL;
12006 if (i < len - 1)
12007 r2 = &(*ranges_table)[i + 1];
12008 if (r2
12009 && r2->num != 0
12010 && r2->label == NULL
12011 && !r2->maybe_new_sec)
12012 {
12013 dw2_asm_output_data (1, DW_RLE_base_address,
12014 "DW_RLE_base_address (%s)", lab);
12015 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12016 "Base address (%s)", lab);
12017 strcpy (basebuf, blabel);
12018 base = basebuf;
12019 }
12020 }
12021 if (base)
12022 {
12023 dw2_asm_output_data (1, DW_RLE_offset_pair,
12024 "DW_RLE_offset_pair (%s)", lab);
12025 dw2_asm_output_delta_uleb128 (blabel, base,
12026 "Range begin address (%s)", lab);
12027 dw2_asm_output_delta_uleb128 (elabel, base,
12028 "Range end address (%s)", lab);
12029 continue;
12030 }
12031 dw2_asm_output_data (1, DW_RLE_start_length,
12032 "DW_RLE_start_length (%s)", lab);
12033 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12034 "Range begin address (%s)", lab);
12035 dw2_asm_output_delta_uleb128 (elabel, blabel,
12036 "Range length (%s)", lab);
12037 }
12038 else
12039 {
12040 dw2_asm_output_data (1, DW_RLE_start_end,
12041 "DW_RLE_start_end (%s)", lab);
12042 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12043 "Range begin address (%s)", lab);
12044 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12045 "Range end address (%s)", lab);
12046 }
12047 }
12048
12049 /* Negative block_num stands for an index into ranges_by_label. */
12050 else if (block_num < 0)
12051 {
12052 int lab_idx = - block_num - 1;
12053 const char *blabel = (*ranges_by_label)[lab_idx].begin;
12054 const char *elabel = (*ranges_by_label)[lab_idx].end;
12055
12056 if (!have_multiple_function_sections)
12057 gcc_unreachable ();
12058 if (HAVE_AS_LEB128)
12059 {
12060 dw2_asm_output_data (1, DW_RLE_start_length,
12061 "DW_RLE_start_length (%s)", lab);
12062 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12063 "Range begin address (%s)", lab);
12064 dw2_asm_output_delta_uleb128 (elabel, blabel,
12065 "Range length (%s)", lab);
12066 }
12067 else
12068 {
12069 dw2_asm_output_data (1, DW_RLE_start_end,
12070 "DW_RLE_start_end (%s)", lab);
12071 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12072 "Range begin address (%s)", lab);
12073 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12074 "Range end address (%s)", lab);
12075 }
12076 }
12077 else
12078 dw2_asm_output_data (1, DW_RLE_end_of_list,
12079 "DW_RLE_end_of_list (%s)", lab);
12080 }
12081 ASM_OUTPUT_LABEL (asm_out_file, l2);
12082 }
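/* Illustrative sketch of the output (not literal assembler text): with
   .uleb128 support (HAVE_AS_LEB128) and multiple function sections, a
   typical range list emitted above is

       DW_RLE_base_address  <address of the first block's begin label>
       DW_RLE_offset_pair   <uleb128 begin delta> <uleb128 end delta>
       ...
       DW_RLE_end_of_list

   whereas without .uleb128 support each block instead gets a
   DW_RLE_start_end entry carrying two absolute addresses.  */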
12083
12084 /* Data structure containing information about input files. */
12085 struct file_info
12086 {
12087 const char *path; /* Complete file name. */
12088 const char *fname; /* File name part. */
12089 int length; /* Length of entire string. */
12090 struct dwarf_file_data * file_idx; /* Index in input file table. */
12091 int dir_idx; /* Index in directory table. */
12092 };
12093
12094 /* Data structure containing information about directories with source
12095 files. */
12096 struct dir_info
12097 {
12098 const char *path; /* Path including directory name. */
12099 int length; /* Path length. */
12100 int prefix; /* Index of directory entry which is a prefix. */
12101 int count; /* Number of files in this directory. */
12102 int dir_idx; /* Index of directory used as base. */
12103 };
12104
12105 /* Callback function for file_info comparison. We sort by looking at
12106 the directories in the path. */
12107
12108 static int
12109 file_info_cmp (const void *p1, const void *p2)
12110 {
12111 const struct file_info *const s1 = (const struct file_info *) p1;
12112 const struct file_info *const s2 = (const struct file_info *) p2;
12113 const unsigned char *cp1;
12114 const unsigned char *cp2;
12115
12116 /* Take care of file names without directories. We need to make sure that
12117 we return consistent values to qsort, since some implementations will get
12118 confused if we return the same value when identical operands are passed in
12119 opposite orders. So if neither has a directory, return 0, and otherwise
12120 return 1 or -1 depending on which one has the directory. We want the one
12121 with the directory to sort after the one without, so all files with no
12122 directory are at the start (normally only the compilation unit file). */
12123 if ((s1->path == s1->fname || s2->path == s2->fname))
12124 return (s2->path == s2->fname) - (s1->path == s1->fname);
12125
12126 cp1 = (const unsigned char *) s1->path;
12127 cp2 = (const unsigned char *) s2->path;
12128
12129 while (1)
12130 {
12131 ++cp1;
12132 ++cp2;
12133 /* Reached the end of either directory prefix? If so, handle as above,
12134 but now we want longer directory prefixes to sort before shorter ones. */
12135 if ((cp1 == (const unsigned char *) s1->fname)
12136 || (cp2 == (const unsigned char *) s2->fname))
12137 return ((cp1 == (const unsigned char *) s1->fname)
12138 - (cp2 == (const unsigned char *) s2->fname));
12139
12140 /* Do the characters of the current path component differ? */
12141 else if (*cp1 != *cp2)
12142 return *cp1 - *cp2;
12143 }
12144 }
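/* Illustrative example of the resulting order (file names invented): given
   "t.c", "lib/a/x.c" and "lib/b/x.c", the entry with no directory ("t.c")
   sorts first and the rest sort by their directory prefixes, so files that
   share a directory end up adjacent - which is what the directory-table
   construction in output_file_names relies on.  */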
12145
12146 struct file_name_acquire_data
12147 {
12148 struct file_info *files;
12149 int used_files;
12150 int max_files;
12151 };
12152
12153 /* Traversal function for the hash table. */
12154
12155 int
12156 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12157 {
12158 struct dwarf_file_data *d = *slot;
12159 struct file_info *fi;
12160 const char *f;
12161
12162 gcc_assert (fnad->max_files >= d->emitted_number);
12163
12164 if (! d->emitted_number)
12165 return 1;
12166
12167 gcc_assert (fnad->max_files != fnad->used_files);
12168
12169 fi = fnad->files + fnad->used_files++;
12170
12171 f = remap_debug_filename (d->filename);
12172
12173 /* Skip all leading "./". */
12174 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12175 f += 2;
12176
12177 /* Create a new array entry. */
12178 fi->path = f;
12179 fi->length = strlen (f);
12180 fi->file_idx = d;
12181
12182 /* Search for the file name part. */
12183 f = strrchr (f, DIR_SEPARATOR);
12184 #if defined (DIR_SEPARATOR_2)
12185 {
12186 const char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12187
12188 if (g != NULL)
12189 {
12190 if (f == NULL || f < g)
12191 f = g;
12192 }
12193 }
12194 #endif
12195
12196 fi->fname = f == NULL ? fi->path : f + 1;
12197 return 1;
12198 }
12199
12200 /* Helper function for output_file_names. Emit a FORM-encoded
12201 string STR, with assembly comment start ENTRY_KIND and
12202 index IDX. */
12203
12204 static void
12205 output_line_string (enum dwarf_form form, const char *str,
12206 const char *entry_kind, unsigned int idx)
12207 {
12208 switch (form)
12209 {
12210 case DW_FORM_string:
12211 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12212 break;
12213 case DW_FORM_line_strp:
12214 if (!debug_line_str_hash)
12215 debug_line_str_hash
12216 = hash_table<indirect_string_hasher>::create_ggc (10);
12217
12218 struct indirect_string_node *node;
12219 node = find_AT_string_in_table (str, debug_line_str_hash);
12220 set_indirect_string (node);
12221 node->form = form;
12222 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12223 debug_line_str_section, "%s: %#x: \"%s\"",
12224 entry_kind, 0, node->str);
12225 break;
12226 default:
12227 gcc_unreachable ();
12228 }
12229 }
12230
12231 /* Output the directory table and the file name table. We try to minimize
12232 the total amount of memory needed. A heuristic is used to avoid large
12233 slowdowns with many input files. */
12234
12235 static void
12236 output_file_names (void)
12237 {
12238 struct file_name_acquire_data fnad;
12239 int numfiles;
12240 struct file_info *files;
12241 struct dir_info *dirs;
12242 int *saved;
12243 int *savehere;
12244 int *backmap;
12245 int ndirs;
12246 int idx_offset;
12247 int i;
12248
12249 if (!last_emitted_file)
12250 {
12251 if (dwarf_version >= 5)
12252 {
12253 dw2_asm_output_data (1, 0, "Directory entry format count");
12254 dw2_asm_output_data_uleb128 (0, "Directories count");
12255 dw2_asm_output_data (1, 0, "File name entry format count");
12256 dw2_asm_output_data_uleb128 (0, "File names count");
12257 }
12258 else
12259 {
12260 dw2_asm_output_data (1, 0, "End directory table");
12261 dw2_asm_output_data (1, 0, "End file name table");
12262 }
12263 return;
12264 }
12265
12266 numfiles = last_emitted_file->emitted_number;
12267
12268 /* Allocate the various arrays we need. */
12269 files = XALLOCAVEC (struct file_info, numfiles);
12270 dirs = XALLOCAVEC (struct dir_info, numfiles);
12271
12272 fnad.files = files;
12273 fnad.used_files = 0;
12274 fnad.max_files = numfiles;
12275 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12276 gcc_assert (fnad.used_files == fnad.max_files);
12277
12278 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12279
12280 /* Find all the different directories used. */
12281 dirs[0].path = files[0].path;
12282 dirs[0].length = files[0].fname - files[0].path;
12283 dirs[0].prefix = -1;
12284 dirs[0].count = 1;
12285 dirs[0].dir_idx = 0;
12286 files[0].dir_idx = 0;
12287 ndirs = 1;
12288
12289 for (i = 1; i < numfiles; i++)
12290 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12291 && memcmp (dirs[ndirs - 1].path, files[i].path,
12292 dirs[ndirs - 1].length) == 0)
12293 {
12294 /* Same directory as last entry. */
12295 files[i].dir_idx = ndirs - 1;
12296 ++dirs[ndirs - 1].count;
12297 }
12298 else
12299 {
12300 int j;
12301
12302 /* This is a new directory. */
12303 dirs[ndirs].path = files[i].path;
12304 dirs[ndirs].length = files[i].fname - files[i].path;
12305 dirs[ndirs].count = 1;
12306 dirs[ndirs].dir_idx = ndirs;
12307 files[i].dir_idx = ndirs;
12308
12309 /* Search for a prefix. */
12310 dirs[ndirs].prefix = -1;
12311 for (j = 0; j < ndirs; j++)
12312 if (dirs[j].length < dirs[ndirs].length
12313 && dirs[j].length > 1
12314 && (dirs[ndirs].prefix == -1
12315 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12316 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12317 dirs[ndirs].prefix = j;
12318
12319 ++ndirs;
12320 }
12321
12322 /* Now to the actual work. We have to find a subset of the directories which
12323 allows the file names to be expressed, via references to the directory
12324 table, with the fewest characters. We do not do an exhaustive search
12325 where we would have to check every combination of every single
12326 possible prefix. Instead we use a heuristic which provides nearly optimal
12327 results in most cases and is never far off. */
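/* Hedged, invented example of the heuristic: suppose the sorted directories
   are "/usr/include/" (length 13, 3 files) and "/usr/include/sys/" (2 files).
   Taking i = "/usr/include/" saves 13 characters for each of its own 3 files
   and, because it prefixes "/usr/include/sys/", another 13 for each of that
   directory's 2 files: total 65 > 13 + 1, so emitting the directory string
   pays off and both directories record it as their base via dir_idx.  */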
12328 saved = XALLOCAVEC (int, ndirs);
12329 savehere = XALLOCAVEC (int, ndirs);
12330
12331 memset (saved, '\0', ndirs * sizeof (saved[0]));
12332 for (i = 0; i < ndirs; i++)
12333 {
12334 int j;
12335 int total;
12336
12337 /* We can always save some space for the current directory. But this
12338 does not mean it will be enough to justify adding the directory. */
12339 savehere[i] = dirs[i].length;
12340 total = (savehere[i] - saved[i]) * dirs[i].count;
12341
12342 for (j = i + 1; j < ndirs; j++)
12343 {
12344 savehere[j] = 0;
12345 if (saved[j] < dirs[i].length)
12346 {
12347 /* Determine whether the dirs[i] path is a prefix of the
12348 dirs[j] path. */
12349 int k;
12350
12351 k = dirs[j].prefix;
12352 while (k != -1 && k != (int) i)
12353 k = dirs[k].prefix;
12354
12355 if (k == (int) i)
12356 {
12357 /* Yes it is. We can possibly save some memory by
12358 writing the filenames in dirs[j] relative to
12359 dirs[i]. */
12360 savehere[j] = dirs[i].length;
12361 total += (savehere[j] - saved[j]) * dirs[j].count;
12362 }
12363 }
12364 }
12365
12366 /* Check whether we can save enough to justify adding the dirs[i]
12367 directory. */
12368 if (total > dirs[i].length + 1)
12369 {
12370 /* It's worthwhile adding. */
12371 for (j = i; j < ndirs; j++)
12372 if (savehere[j] > 0)
12373 {
12374 /* Remember how much we saved for this directory so far. */
12375 saved[j] = savehere[j];
12376
12377 /* Remember the prefix directory. */
12378 dirs[j].dir_idx = i;
12379 }
12380 }
12381 }
12382
12383 /* Emit the directory name table. */
12384 idx_offset = dirs[0].length > 0 ? 1 : 0;
12385 enum dwarf_form str_form = DW_FORM_string;
12386 enum dwarf_form idx_form = DW_FORM_udata;
12387 if (dwarf_version >= 5)
12388 {
12389 const char *comp_dir = comp_dir_string ();
12390 if (comp_dir == NULL)
12391 comp_dir = "";
12392 dw2_asm_output_data (1, 1, "Directory entry format count");
12393 if (DWARF5_USE_DEBUG_LINE_STR)
12394 str_form = DW_FORM_line_strp;
12395 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12396 dw2_asm_output_data_uleb128 (str_form, "%s",
12397 get_DW_FORM_name (str_form));
12398 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12399 if (str_form == DW_FORM_string)
12400 {
12401 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12402 for (i = 1 - idx_offset; i < ndirs; i++)
12403 dw2_asm_output_nstring (dirs[i].path,
12404 dirs[i].length
12405 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12406 "Directory Entry: %#x", i + idx_offset);
12407 }
12408 else
12409 {
12410 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12411 for (i = 1 - idx_offset; i < ndirs; i++)
12412 {
12413 const char *str
12414 = ggc_alloc_string (dirs[i].path,
12415 dirs[i].length
12416 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12417 output_line_string (str_form, str, "Directory Entry",
12418 (unsigned) i + idx_offset);
12419 }
12420 }
12421 }
12422 else
12423 {
12424 for (i = 1 - idx_offset; i < ndirs; i++)
12425 dw2_asm_output_nstring (dirs[i].path,
12426 dirs[i].length
12427 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12428 "Directory Entry: %#x", i + idx_offset);
12429
12430 dw2_asm_output_data (1, 0, "End directory table");
12431 }
12432
12433 /* We have to emit them in the order of emitted_number since that's
12434 used in the debug info generation. To do this efficiently we
12435 generate a back-mapping of the indices first. */
12436 backmap = XALLOCAVEC (int, numfiles);
12437 for (i = 0; i < numfiles; i++)
12438 backmap[files[i].file_idx->emitted_number - 1] = i;
12439
12440 if (dwarf_version >= 5)
12441 {
12442 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12443 if (filename0 == NULL)
12444 filename0 = "";
12445 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12446 DW_FORM_data2. Choose one based on the number of directories
12447 and how much space they would occupy in each encoding.
12448 If we have at most 256 directories, all indexes fit into
12449 a single byte, so DW_FORM_data1 is most compact (with at most
12450 128 directories, DW_FORM_udata would be equally compact, but
12451 no shorter and slower to decode). */
12452 if (ndirs + idx_offset <= 256)
12453 idx_form = DW_FORM_data1;
12454 /* If there are more than 65536 directories, we have to use
12455 DW_FORM_udata; DW_FORM_data2 can't refer to them.
12456 Otherwise, compute how much space the indexes would occupy if
12457 they all used DW_FORM_udata (sum), compare that with the size
12458 of the DW_FORM_data2 encoding, and pick the more efficient one. */
12459 else if (ndirs + idx_offset <= 65536)
12460 {
12461 unsigned HOST_WIDE_INT sum = 1;
12462 for (i = 0; i < numfiles; i++)
12463 {
12464 int file_idx = backmap[i];
12465 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12466 sum += size_of_uleb128 (dir_idx);
12467 }
12468 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12469 idx_form = DW_FORM_data2;
12470 }
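/* Hedged arithmetic example for the form choice above (figures invented):
   with 300 directories and 1000 files DW_FORM_data1 is ruled out, so
   DW_FORM_data2 would cost 2 * (1000 + 1) = 2002 bytes, while the uleb128
   sum charges one byte per index below 128 and two bytes per larger index;
   if most files reference low-numbered directories the sum stays well under
   2002 and DW_FORM_udata is kept, otherwise DW_FORM_data2 wins.  */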
12471 #ifdef VMS_DEBUGGING_INFO
12472 dw2_asm_output_data (1, 4, "File name entry format count");
12473 #else
12474 dw2_asm_output_data (1, 2, "File name entry format count");
12475 #endif
12476 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12477 dw2_asm_output_data_uleb128 (str_form, "%s",
12478 get_DW_FORM_name (str_form));
12479 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12480 "DW_LNCT_directory_index");
12481 dw2_asm_output_data_uleb128 (idx_form, "%s",
12482 get_DW_FORM_name (idx_form));
12483 #ifdef VMS_DEBUGGING_INFO
12484 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12485 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12486 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12487 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12488 #endif
12489 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12490
12491 output_line_string (str_form, filename0, "File Entry", 0);
12492
12493 /* Include directory index. */
12494 if (idx_form != DW_FORM_udata)
12495 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12496 0, NULL);
12497 else
12498 dw2_asm_output_data_uleb128 (0, NULL);
12499
12500 #ifdef VMS_DEBUGGING_INFO
12501 dw2_asm_output_data_uleb128 (0, NULL);
12502 dw2_asm_output_data_uleb128 (0, NULL);
12503 #endif
12504 }
12505
12506 /* Now write all the file names. */
12507 for (i = 0; i < numfiles; i++)
12508 {
12509 int file_idx = backmap[i];
12510 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12511
12512 #ifdef VMS_DEBUGGING_INFO
12513 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12514
12515 /* Setting these fields can lead to debugger miscomparisons,
12516 but VMS Debug requires them to be set correctly. */
12517
12518 int ver;
12519 long long cdt;
12520 long siz;
12521 int maxfilelen = (strlen (files[file_idx].path)
12522 + dirs[dir_idx].length
12523 + MAX_VMS_VERSION_LEN + 1);
12524 char *filebuf = XALLOCAVEC (char, maxfilelen);
12525
12526 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12527 snprintf (filebuf, maxfilelen, "%s;%d",
12528 files[file_idx].path + dirs[dir_idx].length, ver);
12529
12530 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12531
12532 /* Include directory index. */
12533 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12534 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12535 dir_idx + idx_offset, NULL);
12536 else
12537 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12538
12539 /* Modification time. */
12540 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12541 &cdt, 0, 0, 0) == 0)
12542 ? cdt : 0, NULL);
12543
12544 /* File length in bytes. */
12545 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12546 0, &siz, 0, 0) == 0)
12547 ? siz : 0, NULL);
12548 #else
12549 output_line_string (str_form,
12550 files[file_idx].path + dirs[dir_idx].length,
12551 "File Entry", (unsigned) i + 1);
12552
12553 /* Include directory index. */
12554 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12555 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12556 dir_idx + idx_offset, NULL);
12557 else
12558 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12559
12560 if (dwarf_version >= 5)
12561 continue;
12562
12563 /* Modification time. */
12564 dw2_asm_output_data_uleb128 (0, NULL);
12565
12566 /* File length in bytes. */
12567 dw2_asm_output_data_uleb128 (0, NULL);
12568 #endif /* VMS_DEBUGGING_INFO */
12569 }
12570
12571 if (dwarf_version < 5)
12572 dw2_asm_output_data (1, 0, "End file name table");
12573 }
12574
12575
12576 /* Output one line number table into the .debug_line section. */
12577
12578 static void
12579 output_one_line_info_table (dw_line_info_table *table)
12580 {
12581 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12582 unsigned int current_line = 1;
12583 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12584 dw_line_info_entry *ent, *prev_addr;
12585 size_t i;
12586 unsigned int view;
12587
12588 view = 0;
12589
12590 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12591 {
12592 switch (ent->opcode)
12593 {
12594 case LI_set_address:
12595 /* ??? Unfortunately, we have little choice here currently, and
12596 must always use the most general form. GCC does not know the
12597 address delta itself, so we can't use DW_LNS_advance_pc. Many
12598 ports do have length attributes which will give an upper bound
12599 on the address range. We could perhaps use length attributes
12600 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12601 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12602
12603 view = 0;
12604
12605 /* This can handle any delta. This takes
12606 4+DWARF2_ADDR_SIZE bytes. */
12607 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12608 debug_variable_location_views
12609 ? ", reset view to 0" : "");
12610 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12611 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12612 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12613
12614 prev_addr = ent;
12615 break;
12616
12617 case LI_adv_address:
12618 {
12619 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12620 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12621 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12622
12623 view++;
12624
12625 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12626 dw2_asm_output_delta (2, line_label, prev_label,
12627 "from %s to %s", prev_label, line_label);
12628
12629 prev_addr = ent;
12630 break;
12631 }
12632
12633 case LI_set_line:
12634 if (ent->val == current_line)
12635 {
12636 /* We still need to start a new row, so output a copy insn. */
12637 dw2_asm_output_data (1, DW_LNS_copy,
12638 "copy line %u", current_line);
12639 }
12640 else
12641 {
12642 int line_offset = ent->val - current_line;
12643 int line_delta = line_offset - DWARF_LINE_BASE;
12644
12645 current_line = ent->val;
12646 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12647 {
12648 /* This can handle deltas from -10 to 234, using the current
12649 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12650 This takes 1 byte. */
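		  /* For example, with DWARF_LINE_BASE == -10 as noted above,
		     advancing the source line by 3 gives line_delta == 13 and
		     the single byte DWARF_LINE_OPCODE_BASE + 13 is emitted.  */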
12651 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12652 "line %u", current_line);
12653 }
12654 else
12655 {
12656 /* This can handle any delta. This takes at least 4 bytes,
12657 depending on the value being encoded. */
12658 dw2_asm_output_data (1, DW_LNS_advance_line,
12659 "advance to line %u", current_line);
12660 dw2_asm_output_data_sleb128 (line_offset, NULL);
12661 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12662 }
12663 }
12664 break;
12665
12666 case LI_set_file:
12667 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12668 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12669 break;
12670
12671 case LI_set_column:
12672 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12673 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12674 break;
12675
12676 case LI_negate_stmt:
12677 current_is_stmt = !current_is_stmt;
12678 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12679 "is_stmt %d", current_is_stmt);
12680 break;
12681
12682 case LI_set_prologue_end:
12683 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12684 "set prologue end");
12685 break;
12686
12687 case LI_set_epilogue_begin:
12688 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12689 "set epilogue begin");
12690 break;
12691
12692 case LI_set_discriminator:
12693 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12694 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12695 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12696 dw2_asm_output_data_uleb128 (ent->val, NULL);
12697 break;
12698 }
12699 }
12700
12701 /* Emit debug info for the address of the end of the table. */
12702 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12703 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12704 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12705 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12706
12707 dw2_asm_output_data (1, 0, "end sequence");
12708 dw2_asm_output_data_uleb128 (1, NULL);
12709 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12710 }
12711
12712 /* Output the source line number correspondence information. This
12713 information goes into the .debug_line section. */
12714
12715 static void
12716 output_line_info (bool prologue_only)
12717 {
12718 static unsigned int generation;
12719 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12720 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12721 bool saw_one = false;
12722 int opc;
12723
12724 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12725 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12726 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12727 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12728
12729 if (!XCOFF_DEBUGGING_INFO)
12730 {
12731 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12732 dw2_asm_output_data (4, 0xffffffff,
12733 "Initial length escape value indicating 64-bit DWARF extension");
12734 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12735 "Length of Source Line Info");
12736 }
12737
12738 ASM_OUTPUT_LABEL (asm_out_file, l1);
12739
12740 output_dwarf_version ();
12741 if (dwarf_version >= 5)
12742 {
12743 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12744 dw2_asm_output_data (1, 0, "Segment Size");
12745 }
12746 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12747 ASM_OUTPUT_LABEL (asm_out_file, p1);
12748
12749 /* Define the architecture-dependent minimum instruction length (in bytes).
12750 In this implementation of DWARF, this field is used for information
12751 purposes only. Since GCC generates assembly language, we have no
12752 a priori knowledge of how many instruction bytes are generated for each
12753 source line, and therefore can use only the DW_LNE_set_address and
12754 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12755 this as '1', which is "correct enough" for all architectures,
12756 and don't let the target override. */
12757 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12758
12759 if (dwarf_version >= 4)
12760 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12761 "Maximum Operations Per Instruction");
12762 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12763 "Default is_stmt_start flag");
12764 dw2_asm_output_data (1, DWARF_LINE_BASE,
12765 "Line Base Value (Special Opcodes)");
12766 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12767 "Line Range Value (Special Opcodes)");
12768 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12769 "Special Opcode Base");
12770
12771 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12772 {
12773 int n_op_args;
12774 switch (opc)
12775 {
12776 case DW_LNS_advance_pc:
12777 case DW_LNS_advance_line:
12778 case DW_LNS_set_file:
12779 case DW_LNS_set_column:
12780 case DW_LNS_fixed_advance_pc:
12781 case DW_LNS_set_isa:
12782 n_op_args = 1;
12783 break;
12784 default:
12785 n_op_args = 0;
12786 break;
12787 }
12788
12789 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12790 opc, n_op_args);
12791 }
12792
12793 /* Write out the information about the files we use. */
12794 output_file_names ();
12795 ASM_OUTPUT_LABEL (asm_out_file, p2);
12796 if (prologue_only)
12797 {
12798 /* Output the marker for the end of the line number info. */
12799 ASM_OUTPUT_LABEL (asm_out_file, l2);
12800 return;
12801 }
12802
12803 if (separate_line_info)
12804 {
12805 dw_line_info_table *table;
12806 size_t i;
12807
12808 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12809 if (table->in_use)
12810 {
12811 output_one_line_info_table (table);
12812 saw_one = true;
12813 }
12814 }
12815 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12816 {
12817 output_one_line_info_table (cold_text_section_line_info);
12818 saw_one = true;
12819 }
12820
12821 /* ??? Some Darwin linkers crash on a .debug_line section with no
12822 sequences. Further, merely a DW_LNE_end_sequence entry is not
12823 sufficient -- the address column must also be initialized.
12824 Make sure to output at least one set_address/end_sequence pair,
12825 choosing .text since that section is always present. */
12826 if (text_section_line_info->in_use || !saw_one)
12827 output_one_line_info_table (text_section_line_info);
12828
12829 /* Output the marker for the end of the line number info. */
12830 ASM_OUTPUT_LABEL (asm_out_file, l2);
12831 }
12832 \f
12833 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12834
12835 static inline bool
12836 need_endianity_attribute_p (bool reverse)
12837 {
12838 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12839 }
12840
12841 /* Given a pointer to a tree node for some base type, return a pointer to
12842 a DIE that describes the given type. REVERSE is true if the type is
12843 to be interpreted in the reverse storage order wrt the target order.
12844
12845 This routine must only be called for GCC type nodes that correspond to
12846 Dwarf base (fundamental) types. */
12847
12848 static dw_die_ref
12849 base_type_die (tree type, bool reverse)
12850 {
12851 dw_die_ref base_type_result;
12852 enum dwarf_type encoding;
12853 bool fpt_used = false;
12854 struct fixed_point_type_info fpt_info;
12855 tree type_bias = NULL_TREE;
12856
12857 /* If this is a subtype that should not be emitted as a subrange type,
12858 use the base type. See subrange_type_for_debug_p. */
12859 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12860 type = TREE_TYPE (type);
12861
12862 switch (TREE_CODE (type))
12863 {
12864 case INTEGER_TYPE:
12865 if ((dwarf_version >= 4 || !dwarf_strict)
12866 && TYPE_NAME (type)
12867 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12868 && DECL_IS_UNDECLARED_BUILTIN (TYPE_NAME (type))
12869 && DECL_NAME (TYPE_NAME (type)))
12870 {
12871 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12872 if (strcmp (name, "char16_t") == 0
12873 || strcmp (name, "char32_t") == 0)
12874 {
12875 encoding = DW_ATE_UTF;
12876 break;
12877 }
12878 }
12879 if ((dwarf_version >= 3 || !dwarf_strict)
12880 && lang_hooks.types.get_fixed_point_type_info)
12881 {
12882 memset (&fpt_info, 0, sizeof (fpt_info));
12883 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12884 {
12885 fpt_used = true;
12886 encoding = ((TYPE_UNSIGNED (type))
12887 ? DW_ATE_unsigned_fixed
12888 : DW_ATE_signed_fixed);
12889 break;
12890 }
12891 }
12892 if (TYPE_STRING_FLAG (type))
12893 {
12894 if (TYPE_UNSIGNED (type))
12895 encoding = DW_ATE_unsigned_char;
12896 else
12897 encoding = DW_ATE_signed_char;
12898 }
12899 else if (TYPE_UNSIGNED (type))
12900 encoding = DW_ATE_unsigned;
12901 else
12902 encoding = DW_ATE_signed;
12903
12904 if (!dwarf_strict
12905 && lang_hooks.types.get_type_bias)
12906 type_bias = lang_hooks.types.get_type_bias (type);
12907 break;
12908
12909 case REAL_TYPE:
12910 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12911 {
12912 if (dwarf_version >= 3 || !dwarf_strict)
12913 encoding = DW_ATE_decimal_float;
12914 else
12915 encoding = DW_ATE_lo_user;
12916 }
12917 else
12918 encoding = DW_ATE_float;
12919 break;
12920
12921 case FIXED_POINT_TYPE:
12922 if (!(dwarf_version >= 3 || !dwarf_strict))
12923 encoding = DW_ATE_lo_user;
12924 else if (TYPE_UNSIGNED (type))
12925 encoding = DW_ATE_unsigned_fixed;
12926 else
12927 encoding = DW_ATE_signed_fixed;
12928 break;
12929
12930 /* Dwarf2 doesn't know anything about complex ints, so use
12931 a user defined type for it. */
12932 case COMPLEX_TYPE:
12933 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12934 encoding = DW_ATE_complex_float;
12935 else
12936 encoding = DW_ATE_lo_user;
12937 break;
12938
12939 case BOOLEAN_TYPE:
12940 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12941 encoding = DW_ATE_boolean;
12942 break;
12943
12944 default:
12945 /* No other TREE_CODEs are Dwarf fundamental types. */
12946 gcc_unreachable ();
12947 }
12948
12949 base_type_result = new_die_raw (DW_TAG_base_type);
12950
12951 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12952 int_size_in_bytes (type));
12953 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12954
12955 if (need_endianity_attribute_p (reverse))
12956 add_AT_unsigned (base_type_result, DW_AT_endianity,
12957 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12958
12959 add_alignment_attribute (base_type_result, type);
12960
12961 if (fpt_used)
12962 {
12963 switch (fpt_info.scale_factor_kind)
12964 {
12965 case fixed_point_scale_factor_binary:
12966 add_AT_int (base_type_result, DW_AT_binary_scale,
12967 fpt_info.scale_factor.binary);
12968 break;
12969
12970 case fixed_point_scale_factor_decimal:
12971 add_AT_int (base_type_result, DW_AT_decimal_scale,
12972 fpt_info.scale_factor.decimal);
12973 break;
12974
12975 case fixed_point_scale_factor_arbitrary:
12976 /* Arbitrary scale factors cannot be described in standard DWARF,
12977 yet. */
12978 if (!dwarf_strict)
12979 {
12980 /* Describe the scale factor as a rational constant. */
12981 const dw_die_ref scale_factor
12982 = new_die (DW_TAG_constant, comp_unit_die (), type);
12983
12984 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12985 fpt_info.scale_factor.arbitrary.numerator);
12986 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12987 fpt_info.scale_factor.arbitrary.denominator);
12988
12989 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12990 }
12991 break;
12992
12993 default:
12994 gcc_unreachable ();
12995 }
12996 }
12997
12998 if (type_bias)
12999 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
13000 dw_scalar_form_constant
13001 | dw_scalar_form_exprloc
13002 | dw_scalar_form_reference,
13003 NULL);
13004
13005 return base_type_result;
13006 }
13007
13008 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
13009 named 'auto' in its type: return true for it, false otherwise. */
13010
13011 static inline bool
13012 is_cxx_auto (tree type)
13013 {
13014 if (is_cxx ())
13015 {
13016 tree name = TYPE_IDENTIFIER (type);
13017 if (name == get_identifier ("auto")
13018 || name == get_identifier ("decltype(auto)"))
13019 return true;
13020 }
13021 return false;
13022 }
13023
13024 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
13025 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
13026
13027 static inline int
13028 is_base_type (tree type)
13029 {
13030 switch (TREE_CODE (type))
13031 {
13032 case INTEGER_TYPE:
13033 case REAL_TYPE:
13034 case FIXED_POINT_TYPE:
13035 case COMPLEX_TYPE:
13036 case BOOLEAN_TYPE:
13037 return 1;
13038
13039 case VOID_TYPE:
13040 case ARRAY_TYPE:
13041 case RECORD_TYPE:
13042 case UNION_TYPE:
13043 case QUAL_UNION_TYPE:
13044 case ENUMERAL_TYPE:
13045 case FUNCTION_TYPE:
13046 case METHOD_TYPE:
13047 case POINTER_TYPE:
13048 case REFERENCE_TYPE:
13049 case NULLPTR_TYPE:
13050 case OFFSET_TYPE:
13051 case LANG_TYPE:
13052 case VECTOR_TYPE:
13053 return 0;
13054
13055 default:
13056 if (is_cxx_auto (type))
13057 return 0;
13058 gcc_unreachable ();
13059 }
13060
13061 return 0;
13062 }
13063
13064 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
13065 node, return the size in bits for the type if it is a constant, or else
13066 return the alignment for the type if the type's size is not constant, or
13067 else return BITS_PER_WORD if the type actually turns out to be an
13068 ERROR_MARK node. */
13069
13070 static inline unsigned HOST_WIDE_INT
13071 simple_type_size_in_bits (const_tree type)
13072 {
13073 if (TREE_CODE (type) == ERROR_MARK)
13074 return BITS_PER_WORD;
13075 else if (TYPE_SIZE (type) == NULL_TREE)
13076 return 0;
13077 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13078 return tree_to_uhwi (TYPE_SIZE (type));
13079 else
13080 return TYPE_ALIGN (type);
13081 }
13082
13083 /* Similarly, but return an offset_int instead of UHWI. */
13084
13085 static inline offset_int
13086 offset_int_type_size_in_bits (const_tree type)
13087 {
13088 if (TREE_CODE (type) == ERROR_MARK)
13089 return BITS_PER_WORD;
13090 else if (TYPE_SIZE (type) == NULL_TREE)
13091 return 0;
13092 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13093 return wi::to_offset (TYPE_SIZE (type));
13094 else
13095 return TYPE_ALIGN (type);
13096 }
13097
13098 /* Given a pointer to a tree node for a subrange type, return a pointer
13099 to a DIE that describes the given type. */
13100
13101 static dw_die_ref
13102 subrange_type_die (tree type, tree low, tree high, tree bias,
13103 dw_die_ref context_die)
13104 {
13105 dw_die_ref subrange_die;
13106 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13107
13108 if (context_die == NULL)
13109 context_die = comp_unit_die ();
13110
13111 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13112
13113 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13114 {
13115 /* The size of the subrange type and its base type do not match,
13116 so we need to generate a size attribute for the subrange type. */
13117 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13118 }
13119
13120 add_alignment_attribute (subrange_die, type);
13121
13122 if (low)
13123 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13124 if (high)
13125 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13126 if (bias && !dwarf_strict)
13127 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13128 dw_scalar_form_constant
13129 | dw_scalar_form_exprloc
13130 | dw_scalar_form_reference,
13131 NULL);
13132
13133 return subrange_die;
13134 }
13135
13136 /* Returns the (const and/or volatile) cv_qualifiers associated with
13137 the decl node. This will normally be augmented with the
13138 cv_qualifiers of the underlying type in add_type_attribute. */
13139
13140 static int
13141 decl_quals (const_tree decl)
13142 {
13143 return ((TREE_READONLY (decl)
13144 /* The C++ front-end correctly marks reference-typed
13145 variables as readonly, but from a language (and debug
13146 info) standpoint they are not const-qualified. */
13147 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13148 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13149 | (TREE_THIS_VOLATILE (decl)
13150 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13151 }
13152
13153 /* Determine the TYPE whose qualifiers match the largest strict subset
13154 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13155 qualifiers outside QUAL_MASK. */
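/* For example (purely illustrative): for a 'const volatile' qualified TYPE
   whose 'const'-only variant exists, TYPE_QUAL_CONST is returned, so the
   caller only has to add the missing DW_TAG_volatile_type wrapper.  */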
13156
13157 static int
13158 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13159 {
13160 tree t;
13161 int best_rank = 0, best_qual = 0, max_rank;
13162
13163 type_quals &= qual_mask;
13164 max_rank = popcount_hwi (type_quals) - 1;
13165
13166 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13167 t = TYPE_NEXT_VARIANT (t))
13168 {
13169 int q = TYPE_QUALS (t) & qual_mask;
13170
13171 if ((q & type_quals) == q && q != type_quals
13172 && check_base_type (t, type))
13173 {
13174 int rank = popcount_hwi (q);
13175
13176 if (rank > best_rank)
13177 {
13178 best_rank = rank;
13179 best_qual = q;
13180 }
13181 }
13182 }
13183
13184 return best_qual;
13185 }
13186
13187 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13188 static const dwarf_qual_info_t dwarf_qual_info[] =
13189 {
13190 { TYPE_QUAL_CONST, DW_TAG_const_type },
13191 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13192 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13193 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13194 };
13195 static const unsigned int dwarf_qual_info_size
13196 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13197
13198 /* If DIE is a qualified DIE of some base DIE with the same parent,
13199 return the base DIE, otherwise return NULL. Set MASK to the
13200 qualifiers added compared to the returned DIE. */
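/* For instance, a DW_TAG_const_type DIE whose only attribute is a DW_AT_type
   reference to a DIE with the same parent yields that referenced DIE, with
   TYPE_QUAL_CONST OR-ed into *MASK (an illustrative reading of the code
   below).  */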
13201
13202 static dw_die_ref
13203 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13204 {
13205 unsigned int i;
13206 for (i = 0; i < dwarf_qual_info_size; i++)
13207 if (die->die_tag == dwarf_qual_info[i].t)
13208 break;
13209 if (i == dwarf_qual_info_size)
13210 return NULL;
13211 if (vec_safe_length (die->die_attr) != 1)
13212 return NULL;
13213 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13214 if (type == NULL || type->die_parent != die->die_parent)
13215 return NULL;
13216 *mask |= dwarf_qual_info[i].q;
13217 if (depth)
13218 {
13219 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13220 if (ret)
13221 return ret;
13222 }
13223 return type;
13224 }
13225
13226 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13227 entry that chains the modifiers specified by CV_QUALS in front of the
13228 given type. REVERSE is true if the type is to be interpreted in the
13229 reverse storage order wrt the target order. */
13230
13231 static dw_die_ref
13232 modified_type_die (tree type, int cv_quals, bool reverse,
13233 dw_die_ref context_die)
13234 {
13235 enum tree_code code = TREE_CODE (type);
13236 dw_die_ref mod_type_die;
13237 dw_die_ref sub_die = NULL;
13238 tree item_type = NULL;
13239 tree qualified_type;
13240 tree name, low, high;
13241 dw_die_ref mod_scope;
13242 /* Only these cv-qualifiers are currently handled. */
13243 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13244 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13245 ENCODE_QUAL_ADDR_SPACE(~0U));
13246 const bool reverse_base_type
13247 = need_endianity_attribute_p (reverse) && is_base_type (type);
13248
13249 if (code == ERROR_MARK)
13250 return NULL;
13251
13252 if (lang_hooks.types.get_debug_type)
13253 {
13254 tree debug_type = lang_hooks.types.get_debug_type (type);
13255
13256 if (debug_type != NULL_TREE && debug_type != type)
13257 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13258 }
13259
13260 cv_quals &= cv_qual_mask;
13261
13262 /* Don't emit DW_TAG_restrict_type for DWARFv2: since it is a type
13263 tag modifier (and not an attribute), old consumers won't be able
13264 to handle it. */
13265 if (dwarf_version < 3)
13266 cv_quals &= ~TYPE_QUAL_RESTRICT;
13267
13268 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13269 if (dwarf_version < 5)
13270 cv_quals &= ~TYPE_QUAL_ATOMIC;
13271
13272 /* See if we already have the appropriately qualified variant of
13273 this type. */
13274 qualified_type = get_qualified_type (type, cv_quals);
13275
13276 if (qualified_type == sizetype)
13277 {
13278 /* Try not to expose the internal sizetype type's name. */
13279 if (TYPE_NAME (qualified_type)
13280 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13281 {
13282 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13283
13284 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13285 && (TYPE_PRECISION (t)
13286 == TYPE_PRECISION (qualified_type))
13287 && (TYPE_UNSIGNED (t)
13288 == TYPE_UNSIGNED (qualified_type)));
13289 qualified_type = t;
13290 }
13291 else if (qualified_type == sizetype
13292 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13293 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13294 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13295 qualified_type = size_type_node;
13296 if (type == sizetype)
13297 type = qualified_type;
13298 }
13299
13300 /* If we do, then we can just use its DIE, if it exists. */
13301 if (qualified_type)
13302 {
13303 mod_type_die = lookup_type_die (qualified_type);
13304
13305 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13306 dealt with specially: the DIE with the attribute, if it exists, is
13307 placed immediately after the regular DIE for the same base type. */
13308 if (mod_type_die
13309 && (!reverse_base_type
13310 || ((mod_type_die = mod_type_die->die_sib) != NULL
13311 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13312 return mod_type_die;
13313 }
13314
13315 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13316
13317 /* Handle C typedef types. */
13318 if (name
13319 && TREE_CODE (name) == TYPE_DECL
13320 && DECL_ORIGINAL_TYPE (name)
13321 && !DECL_ARTIFICIAL (name))
13322 {
13323 tree dtype = TREE_TYPE (name);
13324
13325 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13326 if (qualified_type == dtype && !reverse_base_type)
13327 {
13328 tree origin = decl_ultimate_origin (name);
13329
13330 /* Typedef variants that have an abstract origin don't get their own
13331 type DIE (see gen_typedef_die), so fall back on the ultimate
13332 abstract origin instead. */
13333 if (origin != NULL && origin != name)
13334 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13335 context_die);
13336
13337 /* For a named type, use the typedef. */
13338 gen_type_die (qualified_type, context_die);
13339 return lookup_type_die (qualified_type);
13340 }
13341 else
13342 {
13343 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13344 dquals &= cv_qual_mask;
13345 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13346 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13347 /* cv-unqualified version of named type. Just use
13348 the unnamed type to which it refers. */
13349 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13350 reverse, context_die);
13351 /* Else cv-qualified version of named type; fall through. */
13352 }
13353 }
13354
13355 mod_scope = scope_die_for (type, context_die);
13356
13357 if (cv_quals)
13358 {
13359 int sub_quals = 0, first_quals = 0;
13360 unsigned i;
13361 dw_die_ref first = NULL, last = NULL;
13362
13363 /* Determine a lesser qualified type that most closely matches
13364 this one. Then generate DW_TAG_* entries for the remaining
13365 qualifiers. */
13366 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13367 cv_qual_mask);
13368 if (sub_quals && use_debug_types)
13369 {
13370 bool needed = false;
13371 /* If emitting type units, make sure the order of qualifiers
13372 is canonical. Thus, start from unqualified type if
13373 an earlier qualifier is missing in sub_quals, but some later
13374 one is present there. */
13375 for (i = 0; i < dwarf_qual_info_size; i++)
13376 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13377 needed = true;
13378 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13379 {
13380 sub_quals = 0;
13381 break;
13382 }
13383 }
13384 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13385 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13386 {
13387 /* As not all intermediate qualified DIEs have corresponding
13388 tree types, ensure that qualified DIEs in the same scope
13389 as their DW_AT_type are emitted after their DW_AT_type,
13390 only with other qualified DIEs for the same type possibly
13391 in between them. Determine the range of such qualified
13392 DIEs now (first being the base type, last being the last
13393 qualified DIE corresponding to it). */
13394 unsigned int count = 0;
13395 first = qualified_die_p (mod_type_die, &first_quals,
13396 dwarf_qual_info_size);
13397 if (first == NULL)
13398 first = mod_type_die;
13399 gcc_assert ((first_quals & ~sub_quals) == 0);
13400 for (count = 0, last = first;
13401 count < (1U << dwarf_qual_info_size);
13402 count++, last = last->die_sib)
13403 {
13404 int quals = 0;
13405 if (last == mod_scope->die_child)
13406 break;
13407 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13408 != first)
13409 break;
13410 }
13411 }
13412
13413 for (i = 0; i < dwarf_qual_info_size; i++)
13414 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13415 {
13416 dw_die_ref d;
13417 if (first && first != last)
13418 {
13419 for (d = first->die_sib; ; d = d->die_sib)
13420 {
13421 int quals = 0;
13422 qualified_die_p (d, &quals, dwarf_qual_info_size);
13423 if (quals == (first_quals | dwarf_qual_info[i].q))
13424 break;
13425 if (d == last)
13426 {
13427 d = NULL;
13428 break;
13429 }
13430 }
13431 if (d)
13432 {
13433 mod_type_die = d;
13434 continue;
13435 }
13436 }
13437 if (first)
13438 {
13439 d = new_die_raw (dwarf_qual_info[i].t);
13440 add_child_die_after (mod_scope, d, last);
13441 last = d;
13442 }
13443 else
13444 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13445 if (mod_type_die)
13446 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13447 mod_type_die = d;
13448 first_quals |= dwarf_qual_info[i].q;
13449 }
13450 }
13451 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13452 {
13453 dwarf_tag tag = DW_TAG_pointer_type;
13454 if (code == REFERENCE_TYPE)
13455 {
13456 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13457 tag = DW_TAG_rvalue_reference_type;
13458 else
13459 tag = DW_TAG_reference_type;
13460 }
13461 mod_type_die = new_die (tag, mod_scope, type);
13462
13463 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13464 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13465 add_alignment_attribute (mod_type_die, type);
13466 item_type = TREE_TYPE (type);
13467
13468 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13469 if (!ADDR_SPACE_GENERIC_P (as))
13470 {
13471 int action = targetm.addr_space.debug (as);
13472 if (action >= 0)
13473 {
13474 /* Positive values indicate an address_class. */
13475 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13476 }
13477 else
13478 {
13479 /* Negative values indicate an (inverted) segment base reg. */
13480 dw_loc_descr_ref d
13481 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13482 add_AT_loc (mod_type_die, DW_AT_segment, d);
13483 }
13484 }
13485 }
13486 else if (code == INTEGER_TYPE
13487 && TREE_TYPE (type) != NULL_TREE
13488 && subrange_type_for_debug_p (type, &low, &high))
13489 {
13490 tree bias = NULL_TREE;
13491 if (lang_hooks.types.get_type_bias)
13492 bias = lang_hooks.types.get_type_bias (type);
13493 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13494 item_type = TREE_TYPE (type);
13495 }
13496 else if (is_base_type (type))
13497 {
13498 mod_type_die = base_type_die (type, reverse);
13499
13500 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13501 if (reverse_base_type)
13502 {
13503 dw_die_ref after_die
13504 = modified_type_die (type, cv_quals, false, context_die);
13505 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13506 }
13507 else
13508 add_child_die (comp_unit_die (), mod_type_die);
13509
13510 add_pubtype (type, mod_type_die);
13511 }
13512 else
13513 {
13514 gen_type_die (type, context_die);
13515
13516 /* We have to get the type_main_variant here (and pass that to the
13517 `lookup_type_die' routine) because the ..._TYPE node we have
13518 might simply be a *copy* of some original type node (where the
13519 copy was created to help us keep track of typedef names) and
13520 that copy might have a different TYPE_UID from the original
13521 ..._TYPE node. */
13522 if (TREE_CODE (type) == FUNCTION_TYPE
13523 || TREE_CODE (type) == METHOD_TYPE)
13524 {
13525 /* For function/method types, can't just use type_main_variant here,
13526 because that can have different ref-qualifiers for C++,
13527 but try to canonicalize. */
13528 tree main = TYPE_MAIN_VARIANT (type);
13529 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13530 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13531 && check_base_type (t, main)
13532 && check_lang_type (t, type))
13533 return lookup_type_die (t);
13534 return lookup_type_die (type);
13535 }
13536 else if (TREE_CODE (type) != VECTOR_TYPE
13537 && TREE_CODE (type) != ARRAY_TYPE)
13538 return lookup_type_die (type_main_variant (type));
13539 else
13540 /* Vectors have the debugging information in the type,
13541 not the main variant. */
13542 return lookup_type_die (type);
13543 }
13544
13545 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13546 don't output a DW_TAG_typedef, since there isn't one in the
13547 user's program; just attach a DW_AT_name to the type.
13548 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13549 if the base type already has the same name. */
13550 if (name
13551 && ((TREE_CODE (name) != TYPE_DECL
13552 && (qualified_type == TYPE_MAIN_VARIANT (type)
13553 || (cv_quals == TYPE_UNQUALIFIED)))
13554 || (TREE_CODE (name) == TYPE_DECL
13555 && TREE_TYPE (name) == qualified_type
13556 && DECL_NAME (name))))
13557 {
13558 if (TREE_CODE (name) == TYPE_DECL)
13559 /* Could just call add_name_and_src_coords_attributes here,
13560 but since this is a builtin type it doesn't have any
13561 useful source coordinates anyway. */
13562 name = DECL_NAME (name);
13563 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13564 }
13565 /* This probably indicates a bug. */
13566 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13567 {
13568 name = TYPE_IDENTIFIER (type);
13569 add_name_attribute (mod_type_die,
13570 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13571 }
13572
13573 if (qualified_type && !reverse_base_type)
13574 equate_type_number_to_die (qualified_type, mod_type_die);
13575
13576 if (item_type)
13577 /* We must do this after the equate_type_number_to_die call, in case
13578 this is a recursive type. This ensures that the modified_type_die
13579 recursion will terminate even if the type is recursive. Recursive
13580 types are possible in Ada. */
13581 sub_die = modified_type_die (item_type,
13582 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13583 reverse,
13584 context_die);
13585
13586 if (sub_die != NULL)
13587 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13588
13589 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13590 if (TYPE_ARTIFICIAL (type))
13591 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13592
13593 return mod_type_die;
13594 }
13595
13596 /* Generate DIEs for the generic parameters of T.
13597 T must be either a generic type or a generic function.
13598 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13599
13600 static void
13601 gen_generic_params_dies (tree t)
13602 {
13603 tree parms, args;
13604 int parms_num, i;
13605 dw_die_ref die = NULL;
13606 int non_default;
13607
13608 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13609 return;
13610
13611 if (TYPE_P (t))
13612 die = lookup_type_die (t);
13613 else if (DECL_P (t))
13614 die = lookup_decl_die (t);
13615
13616 gcc_assert (die);
13617
13618 parms = lang_hooks.get_innermost_generic_parms (t);
13619 if (!parms)
13620 /* T has no generic parameters. It means T is neither a generic type
13621 nor a generic function. End of story. */
13622 return;
13623
13624 parms_num = TREE_VEC_LENGTH (parms);
13625 args = lang_hooks.get_innermost_generic_args (t);
13626 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13627 non_default = int_cst_value (TREE_CHAIN (args));
13628 else
13629 non_default = TREE_VEC_LENGTH (args);
13630 for (i = 0; i < parms_num; i++)
13631 {
13632 tree parm, arg, arg_pack_elems;
13633 dw_die_ref parm_die;
13634
13635 parm = TREE_VEC_ELT (parms, i);
13636 arg = TREE_VEC_ELT (args, i);
13637 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13638 gcc_assert (parm && TREE_VALUE (parm) && arg);
13639
13640 if (parm && TREE_VALUE (parm) && arg)
13641 {
13642 /* If PARM represents a template parameter pack,
13643 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13644 by DW_TAG_template_*_parameter DIEs for the argument
13645 pack elements of ARG. Note that ARG would then be
13646 an argument pack. */
13647 if (arg_pack_elems)
13648 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13649 arg_pack_elems,
13650 die);
13651 else
13652 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13653 true /* emit name */, die);
13654 if (i >= non_default)
13655 add_AT_flag (parm_die, DW_AT_default_value, 1);
13656 }
13657 }
13658 }
13659
13660 /* Create and return a DIE for PARM which should be
13661 the representation of a generic type parameter.
13662 For instance, in the C++ front end, PARM would be a template parameter.
13663 ARG is the argument to PARM.
13664 EMIT_NAME_P: if true, the DIE will have its DW_AT_name attribute set to
13665 the name of PARM.
13666 PARENT_DIE is the parent DIE to which the newly created DIE should be
13667 added as a child node. */
13668
13669 static dw_die_ref
13670 generic_parameter_die (tree parm, tree arg,
13671 bool emit_name_p,
13672 dw_die_ref parent_die)
13673 {
13674 dw_die_ref tmpl_die = NULL;
13675 const char *name = NULL;
13676
13677 /* C++20 accepts class literals as template parameters, and var
13678 decls with initializers represent them. The VAR_DECLs would be
13679 rejected, but we can take the DECL_INITIAL constructor and
13680 attempt to expand it. */
13681 if (arg && VAR_P (arg))
13682 arg = DECL_INITIAL (arg);
13683
13684 if (!parm || !DECL_NAME (parm) || !arg)
13685 return NULL;
13686
13687 /* We support non-type generic parameters and arguments,
13688 type generic parameters and arguments, as well as
13689 generic generic parameters (a.k.a. template template parameters in C++)
13690 and arguments. */
13691 if (TREE_CODE (parm) == PARM_DECL)
13692 /* PARM is a nontype generic parameter */
13693 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13694 else if (TREE_CODE (parm) == TYPE_DECL)
13695 /* PARM is a type generic parameter. */
13696 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13697 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13698 /* PARM is a generic generic parameter.
13699 Its DIE is a GNU extension. It shall have a
13700 DW_AT_name attribute to represent the name of the template template
13701 parameter, and a DW_AT_GNU_template_name attribute to represent the
13702 name of the template template argument. */
13703 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13704 parent_die, parm);
13705 else
13706 gcc_unreachable ();
13707
13708 if (tmpl_die)
13709 {
13710 tree tmpl_type;
13711
13712 /* If PARM is a generic parameter pack, it means we are
13713 emitting debug info for a template argument pack element.
13714 In other terms, ARG is a template argument pack element.
13715 In that case, we don't emit any DW_AT_name attribute for
13716 the die. */
13717 if (emit_name_p)
13718 {
13719 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13720 gcc_assert (name);
13721 add_AT_string (tmpl_die, DW_AT_name, name);
13722 }
13723
13724 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13725 {
13726 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13727 TMPL_DIE should have a child DW_AT_type attribute that is set
13728 to the type of the argument to PARM, which is ARG.
13729 If PARM is a type generic parameter, TMPL_DIE should have a
13730 child DW_AT_type that is set to ARG. */
13731 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13732 add_type_attribute (tmpl_die, tmpl_type,
13733 (TREE_THIS_VOLATILE (tmpl_type)
13734 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13735 false, parent_die);
13736 }
13737 else
13738 {
13739 /* So TMPL_DIE is a DIE representing a
13740 generic generic parameter, a.k.a. a template template
13741 parameter in C++, and ARG is a template. */
13742
13743 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13744 to the name of the argument. */
13745 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13746 if (name)
13747 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13748 }
13749
13750 if (TREE_CODE (parm) == PARM_DECL)
13751 /* So PARM is a non-type generic parameter.
13752 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13753 attribute of TMPL_DIE whose value represents the value
13754 of ARG.
13755 We must be careful here:
13756 the value of ARG might reference some function decls.
13757 We might currently be emitting debug info for a generic
13758 type, and since types are emitted before function decls, we
13759 don't know whether the function decls referenced by ARG will
13760 actually be emitted once the cgraph computations are done.
13761 So we must defer the generation of the DW_AT_const_value
13762 until cgraph is ready. */
13763 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13764 }
13765
13766 return tmpl_die;
13767 }
13768
13769 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13770 PARM_PACK, which must be a template parameter pack. The returned DIE
13771 will be a child DIE of PARENT_DIE. */
13772
13773 static dw_die_ref
13774 template_parameter_pack_die (tree parm_pack,
13775 tree parm_pack_args,
13776 dw_die_ref parent_die)
13777 {
13778 dw_die_ref die;
13779 int j;
13780
13781 gcc_assert (parent_die && parm_pack);
13782
13783 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13784 add_name_and_src_coords_attributes (die, parm_pack);
13785 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13786 generic_parameter_die (parm_pack,
13787 TREE_VEC_ELT (parm_pack_args, j),
13788 false /* Don't emit DW_AT_name */,
13789 die);
13790 return die;
13791 }
13792
13793 /* Return the DBX register number described by a given RTL node. */
13794
13795 static unsigned int
13796 dbx_reg_number (const_rtx rtl)
13797 {
13798 unsigned regno = REGNO (rtl);
13799
13800 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13801
13802 #ifdef LEAF_REG_REMAP
13803 if (crtl->uses_only_leaf_regs)
13804 {
13805 int leaf_reg = LEAF_REG_REMAP (regno);
13806 if (leaf_reg != -1)
13807 regno = (unsigned) leaf_reg;
13808 }
13809 #endif
13810
13811 regno = DBX_REGISTER_NUMBER (regno);
13812 gcc_assert (regno != INVALID_REGNUM);
13813 return regno;
13814 }
13815
13816 /* Optionally add a DW_OP_piece term to a location description expression.
13817 DW_OP_piece is only added if the location description expression does
13818 not already end with DW_OP_piece. */
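/* For example, the callers below describe a value split across two 4-byte
   registers as DW_OP_reg0; DW_OP_piece 4; DW_OP_reg1; DW_OP_piece 4
   (register numbers purely illustrative).  */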
13819
13820 static void
13821 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13822 {
13823 dw_loc_descr_ref loc;
13824
13825 if (*list_head != NULL)
13826 {
13827 /* Find the end of the chain. */
13828 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13829 ;
13830
13831 if (loc->dw_loc_opc != DW_OP_piece)
13832 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13833 }
13834 }
13835
13836 /* Return a location descriptor that designates a machine register or
13837 zero if there is none. */
13838
13839 static dw_loc_descr_ref
13840 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13841 {
13842 rtx regs;
13843
13844 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13845 return 0;
13846
13847 /* We only use "frame base" when we're sure we're talking about the
13848 post-prologue local stack frame. We do this by *not* running
13849 register elimination until this point, and recognizing the special
13850 argument pointer and soft frame pointer rtx's.
13851 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13852 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13853 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13854 {
13855 dw_loc_descr_ref result = NULL;
13856
13857 if (dwarf_version >= 4 || !dwarf_strict)
13858 {
13859 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13860 initialized);
13861 if (result)
13862 add_loc_descr (&result,
13863 new_loc_descr (DW_OP_stack_value, 0, 0));
13864 }
13865 return result;
13866 }
13867
13868 regs = targetm.dwarf_register_span (rtl);
13869
13870 if (REG_NREGS (rtl) > 1 || regs)
13871 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13872 else
13873 {
13874 unsigned int dbx_regnum = dbx_reg_number (rtl);
13875 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13876 return 0;
13877 return one_reg_loc_descriptor (dbx_regnum, initialized);
13878 }
13879 }
13880
13881 /* Return a location descriptor that designates a machine register for
13882 a given hard register number. */
13883
13884 static dw_loc_descr_ref
13885 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13886 {
13887 dw_loc_descr_ref reg_loc_descr;
13888
13889 if (regno <= 31)
13890 reg_loc_descr
13891 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13892 else
13893 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13894
13895 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13896 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13897
13898 return reg_loc_descr;
13899 }
13900
13901 /* Given an RTL of a register, return a location descriptor that
13902 designates a value that spans more than one register. */
13903
13904 static dw_loc_descr_ref
13905 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13906 enum var_init_status initialized)
13907 {
13908 int size, i;
13909 dw_loc_descr_ref loc_result = NULL;
13910
13911 /* Simple, contiguous registers. */
13912 if (regs == NULL_RTX)
13913 {
13914 unsigned reg = REGNO (rtl);
13915 int nregs;
13916
13917 #ifdef LEAF_REG_REMAP
13918 if (crtl->uses_only_leaf_regs)
13919 {
13920 int leaf_reg = LEAF_REG_REMAP (reg);
13921 if (leaf_reg != -1)
13922 reg = (unsigned) leaf_reg;
13923 }
13924 #endif
13925
13926 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13927 nregs = REG_NREGS (rtl);
13928
13929 /* At present we only track constant-sized pieces. */
13930 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13931 return NULL;
13932 size /= nregs;
13933
13934 loc_result = NULL;
13935 while (nregs--)
13936 {
13937 dw_loc_descr_ref t;
13938
13939 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13940 VAR_INIT_STATUS_INITIALIZED);
13941 add_loc_descr (&loc_result, t);
13942 add_loc_descr_op_piece (&loc_result, size);
13943 ++reg;
13944 }
13945 return loc_result;
13946 }
13947
13948 /* Now onto stupid register sets in non-contiguous locations. */
13949
13950 gcc_assert (GET_CODE (regs) == PARALLEL);
13951
13952 /* At present we only track constant-sized pieces. */
13953 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13954 return NULL;
13955 loc_result = NULL;
13956
13957 for (i = 0; i < XVECLEN (regs, 0); ++i)
13958 {
13959 dw_loc_descr_ref t;
13960
13961 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13962 VAR_INIT_STATUS_INITIALIZED);
13963 add_loc_descr (&loc_result, t);
13964 add_loc_descr_op_piece (&loc_result, size);
13965 }
13966
13967 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13968 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13969 return loc_result;
13970 }
13971
13972 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13973
13974 /* Return a location descriptor that designates a constant i,
13975 as a compound operation from constant (i >> shift), constant shift
13976 and DW_OP_shl. */
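/* For example, 0x12000000 (== 18 << 24) comes out as DW_OP_lit18;
   DW_OP_lit24; DW_OP_shl, i.e. 3 bytes, where DW_OP_const4u would need 5.  */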
13977
13978 static dw_loc_descr_ref
13979 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13980 {
13981 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13982 add_loc_descr (&ret, int_loc_descriptor (shift));
13983 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13984 return ret;
13985 }
13986
13987 /* Return a location descriptor that designates constant POLY_I. */
13988
13989 static dw_loc_descr_ref
13990 int_loc_descriptor (poly_int64 poly_i)
13991 {
13992 enum dwarf_location_atom op;
13993
13994 HOST_WIDE_INT i;
13995 if (!poly_i.is_constant (&i))
13996 {
13997 /* Create location descriptions for the non-constant part and
13998 add any constant offset at the end. */
13999 dw_loc_descr_ref ret = NULL;
14000 HOST_WIDE_INT constant = poly_i.coeffs[0];
14001 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
14002 {
14003 HOST_WIDE_INT coeff = poly_i.coeffs[j];
14004 if (coeff != 0)
14005 {
14006 dw_loc_descr_ref start = ret;
14007 unsigned int factor;
14008 int bias;
14009 unsigned int regno = targetm.dwarf_poly_indeterminate_value
14010 (j, &factor, &bias);
14011
14012 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
14013 add COEFF * (REGNO / FACTOR) now and subtract
14014 COEFF * BIAS from the final constant part. */
14015 constant -= coeff * bias;
14016 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
14017 if (coeff % factor == 0)
14018 coeff /= factor;
14019 else
14020 {
14021 int amount = exact_log2 (factor);
14022 gcc_assert (amount >= 0);
14023 add_loc_descr (&ret, int_loc_descriptor (amount));
14024 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14025 }
14026 if (coeff != 1)
14027 {
14028 add_loc_descr (&ret, int_loc_descriptor (coeff));
14029 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14030 }
14031 if (start)
14032 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
14033 }
14034 }
14035 loc_descr_plus_const (&ret, constant);
14036 return ret;
14037 }
14038
14039 /* Pick the smallest representation of a constant, rather than just
14040 defaulting to the LEB encoding. */
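  /* As concrete instances of the cases below: 25 becomes DW_OP_lit25
     (1 byte), 200 becomes DW_OP_const1u 200 (2 bytes) and 70000 becomes
     DW_OP_const4u 70000 (5 bytes).  */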
14041 if (i >= 0)
14042 {
14043 int clz = clz_hwi (i);
14044 int ctz = ctz_hwi (i);
14045 if (i <= 31)
14046 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
14047 else if (i <= 0xff)
14048 op = DW_OP_const1u;
14049 else if (i <= 0xffff)
14050 op = DW_OP_const2u;
14051 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14052 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14053 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
14054 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
14055 while DW_OP_const4u is 5 bytes. */
14056 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
14057 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14058 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14059 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
14060 while DW_OP_const4u is 5 bytes. */
14061 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14062
14063 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14064 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14065 <= 4)
14066 {
14067 /* As i >= 2**31, the double cast above will yield a negative number.
14068 Since wrapping is defined in DWARF expressions we can output big
14069 positive integers as small negative ones, regardless of the size
14070 of host wide ints.
14071
14072 Here, since the evaluator will handle 32-bit values and since i >=
14073 2**31, we know it's going to be interpreted as a negative literal:
14074 store it this way if doing so takes fewer than 5 bytes. */
14075 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14076 }
14077 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14078 op = DW_OP_const4u;
14079
14080 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14081 least 6 bytes: see if we can do better before falling back to it. */
14082 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14083 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14084 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14085 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14086 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14087 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14088 >= HOST_BITS_PER_WIDE_INT)
14089 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14090 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14091 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14092 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14093 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14094 && size_of_uleb128 (i) > 6)
14095 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14096 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14097 else
14098 op = DW_OP_constu;
14099 }
14100 else
14101 {
14102 if (i >= -0x80)
14103 op = DW_OP_const1s;
14104 else if (i >= -0x8000)
14105 op = DW_OP_const2s;
14106 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14107 {
14108 if (size_of_int_loc_descriptor (i) < 5)
14109 {
14110 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14111 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14112 return ret;
14113 }
14114 op = DW_OP_const4s;
14115 }
14116 else
14117 {
14118 if (size_of_int_loc_descriptor (i)
14119 < (unsigned long) 1 + size_of_sleb128 (i))
14120 {
14121 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14122 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14123 return ret;
14124 }
14125 op = DW_OP_consts;
14126 }
14127 }
14128
14129 return new_loc_descr (op, i, 0);
14130 }
14131
14132 /* Likewise, for unsigned constants. */
14133
14134 static dw_loc_descr_ref
14135 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14136 {
14137 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14138 const unsigned HOST_WIDE_INT max_uint
14139 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14140
14141 /* If possible, use the clever signed constants handling. */
14142 if (i <= max_int)
14143 return int_loc_descriptor ((HOST_WIDE_INT) i);
14144
14145 /* Here, we are left with positive numbers that cannot be represented as
14146 HOST_WIDE_INT, i.e.:
14147 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14148
14149    Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes a lot of bytes
14150    whereas it may be better to output a negative integer: thanks to integer
14151    wrapping, we know that:
14152        x = x - 2 ** (DWARF2_ADDR_SIZE * 8)
14153 = x - 2 * (max (HOST_WIDE_INT) + 1)
14154 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14155 small negative integers. Let's try that in cases it will clearly improve
14156 the encoding: there is no gain turning DW_OP_const4u into
14157 DW_OP_const4s. */
14158 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14159 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14160 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14161 {
14162 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14163
14164 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14165 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14166 const HOST_WIDE_INT second_shift
14167 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14168
14169 /* So we finally have:
14170 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14171 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
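      /* For illustration, assuming DWARF2_ADDR_SIZE == 8 and a 64-bit
         HOST_WIDE_INT: for i == 0xffffffffffffff00, second_shift works out
         to -0x100, so the value is emitted as DW_OP_const2s -256 (3 bytes)
         rather than DW_OP_const8u followed by 8 data bytes.  */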
14172 return int_loc_descriptor (second_shift);
14173 }
14174
14175 /* Last chance: fallback to a simple constant operation. */
14176 return new_loc_descr
14177 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14178 ? DW_OP_const4u
14179 : DW_OP_const8u,
14180 i, 0);
14181 }
14182
14183 /* Generate and return a location description that computes the unsigned
14184 comparison of the two stack top entries (a OP b where b is the top-most
14185 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14186 LE_EXPR, GT_EXPR or GE_EXPR. */
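/* A rough sketch of the sequence built below, with L1/L2 standing for the
   branch targets wired up through DW_OP_bra and DW_OP_skip:

     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
     <signed comparison op> DW_OP_skip <L2>
   L1: <flipped comparison op>
   L2: DW_OP_nop  */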
14187
14188 static dw_loc_descr_ref
14189 uint_comparison_loc_list (enum tree_code kind)
14190 {
14191 enum dwarf_location_atom op, flip_op;
14192 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14193
14194 switch (kind)
14195 {
14196 case LT_EXPR:
14197 op = DW_OP_lt;
14198 break;
14199 case LE_EXPR:
14200 op = DW_OP_le;
14201 break;
14202 case GT_EXPR:
14203 op = DW_OP_gt;
14204 break;
14205 case GE_EXPR:
14206 op = DW_OP_ge;
14207 break;
14208 default:
14209 gcc_unreachable ();
14210 }
14211
14212 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14213 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14214
14215 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14216 possible to perform unsigned comparisons: we just have to distinguish
14217    two cases:
14218
14219 1. when a and b have the same sign (as signed integers); then we should
14220 return: a OP(signed) b;
14221
14222 2. when a is a negative signed integer while b is a positive one, then a
14223 is a greater unsigned integer than b; likewise when a and b's roles
14224 are flipped.
14225
14226 So first, compare the sign of the two operands. */
14227 ret = new_loc_descr (DW_OP_over, 0, 0);
14228 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14229 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14230 /* If they have different signs (i.e. they have different sign bits), then
14231 the stack top value has now the sign bit set and thus it's smaller than
14232 zero. */
14233 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14234 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14235 add_loc_descr (&ret, bra_node);
14236
14237 /* We are in case 1. At this point, we know both operands have the same
14238    sign, so it's safe to use the built-in signed comparison.  */
14239 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14240 add_loc_descr (&ret, jmp_node);
14241
14242 /* We are in case 2. Here, we know both operands do not have the same sign,
14243 so we have to flip the signed comparison. */
14244 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14245 tmp = new_loc_descr (flip_op, 0, 0);
14246 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14247 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14248 add_loc_descr (&ret, tmp);
14249
14250 /* This dummy operation is necessary to make the two branches join. */
14251 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14252 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14253 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14254 add_loc_descr (&ret, tmp);
14255
14256 return ret;
14257 }
14258
14259 /* Likewise, but takes the location description lists (might be destructive on
14260 them). Return NULL if either is NULL or if concatenation fails. */
14261
14262 static dw_loc_list_ref
14263 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14264 enum tree_code kind)
14265 {
14266 if (left == NULL || right == NULL)
14267 return NULL;
14268
14269 add_loc_list (&left, right);
14270 if (left == NULL)
14271 return NULL;
14272
14273 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14274 return left;
14275 }
14276
14277 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14278 without actually allocating it. */
14279
14280 static unsigned long
14281 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14282 {
14283 return size_of_int_loc_descriptor (i >> shift)
14284 + size_of_int_loc_descriptor (shift)
14285 + 1;
14286 }
14287
14288 /* Return size_of_locs (int_loc_descriptor (i)) without
14289 actually allocating it. */
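/* For illustration, assuming a 64-bit HOST_WIDE_INT: for i == 0x1f00000000
   the shift form DW_OP_lit31 DW_OP_const1u 32 DW_OP_shl applies, so this
   returns 1 + 2 + 1 == 4 rather than the 7 bytes DW_OP_constu would need.  */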
14290
14291 static unsigned long
14292 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14293 {
14294 unsigned long s;
14295
14296 if (i >= 0)
14297 {
14298 int clz, ctz;
14299 if (i <= 31)
14300 return 1;
14301 else if (i <= 0xff)
14302 return 2;
14303 else if (i <= 0xffff)
14304 return 3;
14305 clz = clz_hwi (i);
14306 ctz = ctz_hwi (i);
14307 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14308 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14309 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14310 - clz - 5);
14311 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14312 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14313 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14314 - clz - 8);
14315 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14316 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14317 <= 4)
14318 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14319 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14320 return 5;
14321 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14322 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14323 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14324 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14325 - clz - 8);
14326 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14327 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14328 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14329 - clz - 16);
14330 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14331 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14332 && s > 6)
14333 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14334 - clz - 32);
14335 else
14336 return 1 + s;
14337 }
14338 else
14339 {
14340 if (i >= -0x80)
14341 return 2;
14342 else if (i >= -0x8000)
14343 return 3;
14344 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14345 {
14346 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14347 {
14348 s = size_of_int_loc_descriptor (-i) + 1;
14349 if (s < 5)
14350 return s;
14351 }
14352 return 5;
14353 }
14354 else
14355 {
14356 unsigned long r = 1 + size_of_sleb128 (i);
14357 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14358 {
14359 s = size_of_int_loc_descriptor (-i) + 1;
14360 if (s < r)
14361 return s;
14362 }
14363 return r;
14364 }
14365 }
14366 }
14367
14368 /* Return loc description representing "address" of integer value.
14369    This can appear only as a toplevel expression.  */
14370
14371 static dw_loc_descr_ref
14372 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14373 {
14374 int litsize;
14375 dw_loc_descr_ref loc_result = NULL;
14376
14377 if (!(dwarf_version >= 4 || !dwarf_strict))
14378 return NULL;
14379
14380 litsize = size_of_int_loc_descriptor (i);
14381 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14382 is more compact. For DW_OP_stack_value we need:
14383 litsize + 1 (DW_OP_stack_value)
14384 and for DW_OP_implicit_value:
14385 1 (DW_OP_implicit_value) + 1 (length) + size. */
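  /* For illustration: a 4-byte value of 5 on a target with
     DWARF2_ADDR_SIZE >= 4 has litsize == 1 (DW_OP_lit5), so the 2-byte
     DW_OP_lit5 DW_OP_stack_value form is preferred over the 6-byte
     DW_OP_implicit_value 4 <4 data bytes>.  */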
14386 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14387 {
14388 loc_result = int_loc_descriptor (i);
14389 add_loc_descr (&loc_result,
14390 new_loc_descr (DW_OP_stack_value, 0, 0));
14391 return loc_result;
14392 }
14393
14394 loc_result = new_loc_descr (DW_OP_implicit_value,
14395 size, 0);
14396 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14397 loc_result->dw_loc_oprnd2.v.val_int = i;
14398 return loc_result;
14399 }
14400
14401 /* Return a location descriptor that designates a base+offset location. */
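/* For illustration (the exact encoding is target-dependent): a local
   16 bytes below the frame base typically becomes DW_OP_fbreg -16, while
   other bases come out as DW_OP_breg<n> <offset> via new_reg_loc_descr.  */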
14402
14403 static dw_loc_descr_ref
14404 based_loc_descr (rtx reg, poly_int64 offset,
14405 enum var_init_status initialized)
14406 {
14407 unsigned int regno;
14408 dw_loc_descr_ref result;
14409 dw_fde_ref fde = cfun->fde;
14410
14411 /* We only use "frame base" when we're sure we're talking about the
14412 post-prologue local stack frame. We do this by *not* running
14413 register elimination until this point, and recognizing the special
14414 argument pointer and soft frame pointer rtx's. */
14415 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14416 {
14417 rtx elim = (ira_use_lra_p
14418 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14419 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14420
14421 if (elim != reg)
14422 {
14423 /* Allow hard frame pointer here even if frame pointer
14424 isn't used since hard frame pointer is encoded with
14425 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14426 not hard frame pointer directly. */
14427 elim = strip_offset_and_add (elim, &offset);
14428 gcc_assert (elim == hard_frame_pointer_rtx
14429 || elim == stack_pointer_rtx);
14430
14431 /* If drap register is used to align stack, use frame
14432 pointer + offset to access stack variables. If stack
14433 is aligned without drap, use stack pointer + offset to
14434 access stack variables. */
14435 if (crtl->stack_realign_tried
14436 && reg == frame_pointer_rtx)
14437 {
14438 int base_reg
14439 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14440 ? HARD_FRAME_POINTER_REGNUM
14441 : REGNO (elim));
14442 return new_reg_loc_descr (base_reg, offset);
14443 }
14444
14445 gcc_assert (frame_pointer_fb_offset_valid);
14446 offset += frame_pointer_fb_offset;
14447 HOST_WIDE_INT const_offset;
14448 if (offset.is_constant (&const_offset))
14449 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14450 else
14451 {
14452 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14453 loc_descr_plus_const (&ret, offset);
14454 return ret;
14455 }
14456 }
14457 }
14458
14459 regno = REGNO (reg);
14460 #ifdef LEAF_REG_REMAP
14461 if (crtl->uses_only_leaf_regs)
14462 {
14463 int leaf_reg = LEAF_REG_REMAP (regno);
14464 if (leaf_reg != -1)
14465 regno = (unsigned) leaf_reg;
14466 }
14467 #endif
14468 regno = DWARF_FRAME_REGNUM (regno);
14469
14470 HOST_WIDE_INT const_offset;
14471 if (!optimize && fde
14472 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14473 && offset.is_constant (&const_offset))
14474 {
14475 /* Use cfa+offset to represent the location of arguments passed
14476 on the stack when drap is used to align stack.
14477 Only do this when not optimizing, for optimized code var-tracking
14478 is supposed to track where the arguments live and the register
14479 used as vdrap or drap in some spot might be used for something
14480 else in other part of the routine. */
14481 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14482 }
14483
14484 result = new_reg_loc_descr (regno, offset);
14485
14486 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14487 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14488
14489 return result;
14490 }
14491
14492 /* Return true if this RTL expression describes a base+offset calculation. */
14493
14494 static inline int
14495 is_based_loc (const_rtx rtl)
14496 {
14497 return (GET_CODE (rtl) == PLUS
14498 && ((REG_P (XEXP (rtl, 0))
14499 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14500 && CONST_INT_P (XEXP (rtl, 1)))));
14501 }
14502
14503 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14504 failed. */
14505
14506 static dw_loc_descr_ref
14507 tls_mem_loc_descriptor (rtx mem)
14508 {
14509 tree base;
14510 dw_loc_descr_ref loc_result;
14511
14512 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14513 return NULL;
14514
14515 base = get_base_address (MEM_EXPR (mem));
14516 if (base == NULL
14517 || !VAR_P (base)
14518 || !DECL_THREAD_LOCAL_P (base))
14519 return NULL;
14520
14521 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14522 if (loc_result == NULL)
14523 return NULL;
14524
14525 if (maybe_ne (MEM_OFFSET (mem), 0))
14526 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14527
14528 return loc_result;
14529 }
14530
14531 /* Output debug info about the reason why we failed to expand an expression
14532    as a dwarf expression.  */
14533
14534 static void
14535 expansion_failed (tree expr, rtx rtl, char const *reason)
14536 {
14537 if (dump_file && (dump_flags & TDF_DETAILS))
14538 {
14539 fprintf (dump_file, "Failed to expand as dwarf: ");
14540 if (expr)
14541 print_generic_expr (dump_file, expr, dump_flags);
14542 if (rtl)
14543 {
14544 fprintf (dump_file, "\n");
14545 print_rtl (dump_file, rtl);
14546 }
14547 fprintf (dump_file, "\nReason: %s\n", reason);
14548 }
14549 }
14550
14551 /* Helper function for const_ok_for_output. */
14552
14553 static bool
14554 const_ok_for_output_1 (rtx rtl)
14555 {
14556 if (targetm.const_not_ok_for_debug_p (rtl))
14557 {
14558 if (GET_CODE (rtl) != UNSPEC)
14559 {
14560 expansion_failed (NULL_TREE, rtl,
14561 "Expression rejected for debug by the backend.\n");
14562 return false;
14563 }
14564
14565 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14566 the target hook doesn't explicitly allow it in debug info, assume
14567 we can't express it in the debug info. */
14568 /* Don't complain about TLS UNSPECs, those are just too hard to
14569 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14570 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14571 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14572 if (flag_checking
14573 && (XVECLEN (rtl, 0) == 0
14574 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14575 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14576 inform (current_function_decl
14577 ? DECL_SOURCE_LOCATION (current_function_decl)
14578 : UNKNOWN_LOCATION,
14579 #if NUM_UNSPEC_VALUES > 0
14580 "non-delegitimized UNSPEC %s (%d) found in variable location",
14581 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14582 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14583 #else
14584 "non-delegitimized UNSPEC %d found in variable location",
14585 #endif
14586 XINT (rtl, 1));
14587 expansion_failed (NULL_TREE, rtl,
14588 "UNSPEC hasn't been delegitimized.\n");
14589 return false;
14590 }
14591
14592 if (CONST_POLY_INT_P (rtl))
14593 return false;
14594
14595 /* FIXME: Refer to PR60655. It is possible for simplification
14596 of rtl expressions in var tracking to produce such expressions.
14597 We should really identify / validate expressions
14598 enclosed in CONST that can be handled by assemblers on various
14599 targets and only handle legitimate cases here. */
14600 switch (GET_CODE (rtl))
14601 {
14602 case SYMBOL_REF:
14603 break;
14604 case NOT:
14605 case NEG:
14606 return false;
14607 case PLUS:
14608 {
14609 /* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
14610 operands. */
14611 subrtx_var_iterator::array_type array;
14612 bool first = false;
14613 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14614 if (SYMBOL_REF_P (*iter)
14615 || LABEL_P (*iter)
14616 || GET_CODE (*iter) == UNSPEC)
14617 {
14618 first = true;
14619 break;
14620 }
14621 if (!first)
14622 return true;
14623 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14624 if (SYMBOL_REF_P (*iter)
14625 || LABEL_P (*iter)
14626 || GET_CODE (*iter) == UNSPEC)
14627 return false;
14628 return true;
14629 }
14630 case MINUS:
14631 {
14632 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14633 appear in the second operand of MINUS. */
14634 subrtx_var_iterator::array_type array;
14635 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14636 if (SYMBOL_REF_P (*iter)
14637 || LABEL_P (*iter)
14638 || GET_CODE (*iter) == UNSPEC)
14639 return false;
14640 return true;
14641 }
14642 default:
14643 return true;
14644 }
14645
14646 if (CONSTANT_POOL_ADDRESS_P (rtl))
14647 {
14648 bool marked;
14649 get_pool_constant_mark (rtl, &marked);
14650 /* If all references to this pool constant were optimized away,
14651 it was not output and thus we can't represent it. */
14652 if (!marked)
14653 {
14654 expansion_failed (NULL_TREE, rtl,
14655 "Constant was removed from constant pool.\n");
14656 return false;
14657 }
14658 }
14659
14660 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14661 return false;
14662
14663 /* Avoid references to external symbols in debug info, on several targets
14664 the linker might even refuse to link when linking a shared library,
14665 and in many other cases the relocations for .debug_info/.debug_loc are
14666 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14667 to be defined within the same shared library or executable are fine. */
14668 if (SYMBOL_REF_EXTERNAL_P (rtl))
14669 {
14670 tree decl = SYMBOL_REF_DECL (rtl);
14671
14672 if (decl == NULL || !targetm.binds_local_p (decl))
14673 {
14674 expansion_failed (NULL_TREE, rtl,
14675 "Symbol not defined in current TU.\n");
14676 return false;
14677 }
14678 }
14679
14680 return true;
14681 }
14682
14683 /* Return true if constant RTL can be emitted in DW_OP_addr or
14684 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14685 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
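/* For illustration: (const (plus (symbol_ref x) (const_int 8))) can pass,
   provided x itself is acceptable, whereas a PLUS with SYMBOL_REFs or
   UNSPECs in both operands, or a MINUS with one in its second operand,
   is rejected by const_ok_for_output_1 above.  */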
14686
14687 static bool
14688 const_ok_for_output (rtx rtl)
14689 {
14690 if (GET_CODE (rtl) == SYMBOL_REF)
14691 return const_ok_for_output_1 (rtl);
14692
14693 if (GET_CODE (rtl) == CONST)
14694 {
14695 subrtx_var_iterator::array_type array;
14696 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14697 if (!const_ok_for_output_1 (*iter))
14698 return false;
14699 return true;
14700 }
14701
14702 return true;
14703 }
14704
14705 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14706 if possible, NULL otherwise. */
14707
14708 static dw_die_ref
14709 base_type_for_mode (machine_mode mode, bool unsignedp)
14710 {
14711 dw_die_ref type_die;
14712 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14713
14714 if (type == NULL)
14715 return NULL;
14716 switch (TREE_CODE (type))
14717 {
14718 case INTEGER_TYPE:
14719 case REAL_TYPE:
14720 break;
14721 default:
14722 return NULL;
14723 }
14724 type_die = lookup_type_die (type);
14725 if (!type_die)
14726 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14727 comp_unit_die ());
14728 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14729 return NULL;
14730 return type_die;
14731 }
14732
14733 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14734 type matching MODE, or, if MODE is narrower than or as wide as
14735 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14736 possible. */
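/* Sketch of the result: for MODE no wider than DWARF2_ADDR_SIZE this just
   appends an untyped DW_OP_convert (zero operand); for a wider MODE it
   appends DW_OP_convert referencing the unsigned base type DIE for MODE.  */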
14737
14738 static dw_loc_descr_ref
14739 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14740 {
14741 machine_mode outer_mode = mode;
14742 dw_die_ref type_die;
14743 dw_loc_descr_ref cvt;
14744
14745 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14746 {
14747 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14748 return op;
14749 }
14750 type_die = base_type_for_mode (outer_mode, 1);
14751 if (type_die == NULL)
14752 return NULL;
14753 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14754 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14755 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14756 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14757 add_loc_descr (&op, cvt);
14758 return op;
14759 }
14760
14761 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14762
14763 static dw_loc_descr_ref
14764 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14765 dw_loc_descr_ref op1)
14766 {
14767 dw_loc_descr_ref ret = op0;
14768 add_loc_descr (&ret, op1);
14769 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14770 if (STORE_FLAG_VALUE != 1)
14771 {
14772 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14773 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14774 }
14775 return ret;
14776 }
14777
14778 /* Subroutine of scompare_loc_descriptor for the case in which we're
14779 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14780 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14781
14782 static dw_loc_descr_ref
14783 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14784 scalar_int_mode op_mode,
14785 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14786 {
14787 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14788 dw_loc_descr_ref cvt;
14789
14790 if (type_die == NULL)
14791 return NULL;
14792 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14793 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14794 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14795 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14796 add_loc_descr (&op0, cvt);
14797 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14798 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14799 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14800 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14801 add_loc_descr (&op1, cvt);
14802 return compare_loc_descriptor (op, op0, op1);
14803 }
14804
14805 /* Subroutine of scompare_loc_descriptor for the case in which we're
14806 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14807 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14808
14809 static dw_loc_descr_ref
14810 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14811 scalar_int_mode op_mode,
14812 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14813 {
14814 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14815 /* For eq/ne, if the operands are known to be zero-extended,
14816 there is no need to do the fancy shifting up. */
14817 if (op == DW_OP_eq || op == DW_OP_ne)
14818 {
14819 dw_loc_descr_ref last0, last1;
14820 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14821 ;
14822 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14823 ;
14824 /* deref_size zero extends, and for constants we can check
14825 whether they are zero extended or not. */
14826 if (((last0->dw_loc_opc == DW_OP_deref_size
14827 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14828 || (CONST_INT_P (XEXP (rtl, 0))
14829 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14830 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14831 && ((last1->dw_loc_opc == DW_OP_deref_size
14832 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14833 || (CONST_INT_P (XEXP (rtl, 1))
14834 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14835 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14836 return compare_loc_descriptor (op, op0, op1);
14837
14838 /* EQ/NE comparison against constant in narrower type than
14839 DWARF2_ADDR_SIZE can be performed either as
14840 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14841 DW_OP_{eq,ne}
14842 or
14843 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14844 DW_OP_{eq,ne}. Pick whatever is shorter. */
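          /* For illustration, a QImode equality test against 0x12 with
             DWARF2_ADDR_SIZE == 4: the masking form costs DW_OP_const1u 0xff
             DW_OP_and ... DW_OP_const1u 0x12 (5 bytes before DW_OP_eq),
             which beats shifting both sides left by 24 bits, so masking is
             chosen.  */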
14845 if (CONST_INT_P (XEXP (rtl, 1))
14846 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14847 && (size_of_int_loc_descriptor (shift) + 1
14848 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14849 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14850 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14851 & GET_MODE_MASK (op_mode))))
14852 {
14853 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14854 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14855 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14856 & GET_MODE_MASK (op_mode));
14857 return compare_loc_descriptor (op, op0, op1);
14858 }
14859 }
14860 add_loc_descr (&op0, int_loc_descriptor (shift));
14861 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14862 if (CONST_INT_P (XEXP (rtl, 1)))
14863 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14864 else
14865 {
14866 add_loc_descr (&op1, int_loc_descriptor (shift));
14867 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14868 }
14869 return compare_loc_descriptor (op, op0, op1);
14870 }
14871
14872 /* Return location descriptor for signed comparison OP RTL.  */
14873
14874 static dw_loc_descr_ref
14875 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14876 machine_mode mem_mode)
14877 {
14878 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14879 dw_loc_descr_ref op0, op1;
14880
14881 if (op_mode == VOIDmode)
14882 op_mode = GET_MODE (XEXP (rtl, 1));
14883 if (op_mode == VOIDmode)
14884 return NULL;
14885
14886 scalar_int_mode int_op_mode;
14887 if (dwarf_strict
14888 && dwarf_version < 5
14889 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14890 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14891 return NULL;
14892
14893 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14894 VAR_INIT_STATUS_INITIALIZED);
14895 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14896 VAR_INIT_STATUS_INITIALIZED);
14897
14898 if (op0 == NULL || op1 == NULL)
14899 return NULL;
14900
14901 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14902 {
14903 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14904 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14905
14906 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14907 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14908 }
14909 return compare_loc_descriptor (op, op0, op1);
14910 }
14911
14912 /* Return location descriptor for unsigned comparison OP RTL. */
14913
14914 static dw_loc_descr_ref
14915 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14916 machine_mode mem_mode)
14917 {
14918 dw_loc_descr_ref op0, op1;
14919
14920 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14921 if (test_op_mode == VOIDmode)
14922 test_op_mode = GET_MODE (XEXP (rtl, 1));
14923
14924 scalar_int_mode op_mode;
14925 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14926 return NULL;
14927
14928 if (dwarf_strict
14929 && dwarf_version < 5
14930 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14931 return NULL;
14932
14933 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14934 VAR_INIT_STATUS_INITIALIZED);
14935 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14936 VAR_INIT_STATUS_INITIALIZED);
14937
14938 if (op0 == NULL || op1 == NULL)
14939 return NULL;
14940
14941 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14942 {
14943 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14944 dw_loc_descr_ref last0, last1;
14945 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14946 ;
14947 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14948 ;
14949 if (CONST_INT_P (XEXP (rtl, 0)))
14950 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14951 /* deref_size zero extends, so no need to mask it again. */
14952 else if (last0->dw_loc_opc != DW_OP_deref_size
14953 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14954 {
14955 add_loc_descr (&op0, int_loc_descriptor (mask));
14956 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14957 }
14958 if (CONST_INT_P (XEXP (rtl, 1)))
14959 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14960 /* deref_size zero extends, so no need to mask it again. */
14961 else if (last1->dw_loc_opc != DW_OP_deref_size
14962 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14963 {
14964 add_loc_descr (&op1, int_loc_descriptor (mask));
14965 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14966 }
14967 }
14968 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14969 {
14970 HOST_WIDE_INT bias = 1;
14971 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
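          /* Adding 2**(N-1) flips the sign bit of both operands, so the
             signed comparison then orders them the way an unsigned one
             would; for illustration with a 32-bit address size, comparing
             0xffffffff against 1 becomes comparing 0x7fffffff against
             0x80000001.  */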
14972 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14973 if (CONST_INT_P (XEXP (rtl, 1)))
14974 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14975 + INTVAL (XEXP (rtl, 1)));
14976 else
14977 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14978 bias, 0));
14979 }
14980 return compare_loc_descriptor (op, op0, op1);
14981 }
14982
14983 /* Return location descriptor for {U,S}{MIN,MAX}. */
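/* A rough sketch of the descriptor list built below, where <adjust> stands
   for the optional mask/bias/shift/convert fixups applied to the copies:
     <a> DW_OP_dup <adjust> <b> DW_OP_swap DW_OP_over <adjust>
     DW_OP_lt|DW_OP_gt DW_OP_bra <L1> DW_OP_swap
   L1: DW_OP_drop  */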
14984
14985 static dw_loc_descr_ref
14986 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14987 machine_mode mem_mode)
14988 {
14989 enum dwarf_location_atom op;
14990 dw_loc_descr_ref op0, op1, ret;
14991 dw_loc_descr_ref bra_node, drop_node;
14992
14993 scalar_int_mode int_mode;
14994 if (dwarf_strict
14995 && dwarf_version < 5
14996 && (!is_a <scalar_int_mode> (mode, &int_mode)
14997 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14998 return NULL;
14999
15000 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15001 VAR_INIT_STATUS_INITIALIZED);
15002 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15003 VAR_INIT_STATUS_INITIALIZED);
15004
15005 if (op0 == NULL || op1 == NULL)
15006 return NULL;
15007
15008 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
15009 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
15010 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
15011 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
15012 {
15013 /* Checked by the caller. */
15014 int_mode = as_a <scalar_int_mode> (mode);
15015 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
15016 {
15017 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
15018 add_loc_descr (&op0, int_loc_descriptor (mask));
15019 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
15020 add_loc_descr (&op1, int_loc_descriptor (mask));
15021 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
15022 }
15023 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15024 {
15025 HOST_WIDE_INT bias = 1;
15026 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
15027 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
15028 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
15029 }
15030 }
15031 else if (is_a <scalar_int_mode> (mode, &int_mode)
15032 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
15033 {
15034 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
15035 add_loc_descr (&op0, int_loc_descriptor (shift));
15036 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
15037 add_loc_descr (&op1, int_loc_descriptor (shift));
15038 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
15039 }
15040 else if (is_a <scalar_int_mode> (mode, &int_mode)
15041 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15042 {
15043 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
15044 dw_loc_descr_ref cvt;
15045 if (type_die == NULL)
15046 return NULL;
15047 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15048 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15049 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15050 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15051 add_loc_descr (&op0, cvt);
15052 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15053 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15054 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15055 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15056 add_loc_descr (&op1, cvt);
15057 }
15058
15059 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
15060 op = DW_OP_lt;
15061 else
15062 op = DW_OP_gt;
15063 ret = op0;
15064 add_loc_descr (&ret, op1);
15065 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
15066 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15067 add_loc_descr (&ret, bra_node);
15068 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15069 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15070 add_loc_descr (&ret, drop_node);
15071 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15072 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15073 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15074 && is_a <scalar_int_mode> (mode, &int_mode)
15075 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15076 ret = convert_descriptor_to_mode (int_mode, ret);
15077 return ret;
15078 }
15079
15080 /* Helper function for mem_loc_descriptor.  Perform the binary operation OP
15081    after converting both arguments to TYPE_DIE, then convert the result back
15082    to unsigned (or untyped if MODE is no wider than DWARF2_ADDR_SIZE).  */
15083
15084 static dw_loc_descr_ref
15085 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15086 scalar_int_mode mode, machine_mode mem_mode)
15087 {
15088 dw_loc_descr_ref cvt, op0, op1;
15089
15090 if (type_die == NULL)
15091 return NULL;
15092 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15093 VAR_INIT_STATUS_INITIALIZED);
15094 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15095 VAR_INIT_STATUS_INITIALIZED);
15096 if (op0 == NULL || op1 == NULL)
15097 return NULL;
15098 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15099 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15100 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15101 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15102 add_loc_descr (&op0, cvt);
15103 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15104 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15105 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15106 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15107 add_loc_descr (&op1, cvt);
15108 add_loc_descr (&op0, op1);
15109 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15110 return convert_descriptor_to_mode (mode, op0);
15111 }
15112
15113 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15114 const0 is DW_OP_lit0 or corresponding typed constant,
15115 const1 is DW_OP_lit1 or corresponding typed constant
15116 and constMSB is constant with just the MSB bit set
15117 for the mode):
15118 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15119 L1: const0 DW_OP_swap
15120 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15121 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15122 L3: DW_OP_drop
15123 L4: DW_OP_nop
15124
15125 CTZ is similar:
15126 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15127 L1: const0 DW_OP_swap
15128 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15129 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15130 L3: DW_OP_drop
15131 L4: DW_OP_nop
15132
15133 FFS is similar:
15134 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15135 L1: const1 DW_OP_swap
15136 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15137 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15138 L3: DW_OP_drop
15139 L4: DW_OP_nop */
15140
15141 static dw_loc_descr_ref
15142 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15143 machine_mode mem_mode)
15144 {
15145 dw_loc_descr_ref op0, ret, tmp;
15146 HOST_WIDE_INT valv;
15147 dw_loc_descr_ref l1jump, l1label;
15148 dw_loc_descr_ref l2jump, l2label;
15149 dw_loc_descr_ref l3jump, l3label;
15150 dw_loc_descr_ref l4jump, l4label;
15151 rtx msb;
15152
15153 if (GET_MODE (XEXP (rtl, 0)) != mode)
15154 return NULL;
15155
15156 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15157 VAR_INIT_STATUS_INITIALIZED);
15158 if (op0 == NULL)
15159 return NULL;
15160 ret = op0;
15161 if (GET_CODE (rtl) == CLZ)
15162 {
15163 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15164 valv = GET_MODE_BITSIZE (mode);
15165 }
15166 else if (GET_CODE (rtl) == FFS)
15167 valv = 0;
15168 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15169 valv = GET_MODE_BITSIZE (mode);
15170 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15171 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15172 add_loc_descr (&ret, l1jump);
15173 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15174 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15175 VAR_INIT_STATUS_INITIALIZED);
15176 if (tmp == NULL)
15177 return NULL;
15178 add_loc_descr (&ret, tmp);
15179 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15180 add_loc_descr (&ret, l4jump);
15181 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15182 ? const1_rtx : const0_rtx,
15183 mode, mem_mode,
15184 VAR_INIT_STATUS_INITIALIZED);
15185 if (l1label == NULL)
15186 return NULL;
15187 add_loc_descr (&ret, l1label);
15188 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15189 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15190 add_loc_descr (&ret, l2label);
15191 if (GET_CODE (rtl) != CLZ)
15192 msb = const1_rtx;
15193 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15194 msb = GEN_INT (HOST_WIDE_INT_1U
15195 << (GET_MODE_BITSIZE (mode) - 1));
15196 else
15197 msb = immed_wide_int_const
15198 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15199 GET_MODE_PRECISION (mode)), mode);
15200 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15201 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15202 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15203 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15204 else
15205 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15206 VAR_INIT_STATUS_INITIALIZED);
15207 if (tmp == NULL)
15208 return NULL;
15209 add_loc_descr (&ret, tmp);
15210 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15211 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15212 add_loc_descr (&ret, l3jump);
15213 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15214 VAR_INIT_STATUS_INITIALIZED);
15215 if (tmp == NULL)
15216 return NULL;
15217 add_loc_descr (&ret, tmp);
15218 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15219 ? DW_OP_shl : DW_OP_shr, 0, 0));
15220 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15221 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15222 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15223 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15224 add_loc_descr (&ret, l2jump);
15225 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15226 add_loc_descr (&ret, l3label);
15227 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15228 add_loc_descr (&ret, l4label);
15229 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15230 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15231 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15232 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15233 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15234 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15235 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15236 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15237 return ret;
15238 }
15239
15240 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15241 const1 is DW_OP_lit1 or corresponding typed constant):
15242 const0 DW_OP_swap
15243 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15244 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15245 L2: DW_OP_drop
15246
15247 PARITY is similar:
15248 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15249 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15250 L2: DW_OP_drop */
15251
15252 static dw_loc_descr_ref
15253 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15254 machine_mode mem_mode)
15255 {
15256 dw_loc_descr_ref op0, ret, tmp;
15257 dw_loc_descr_ref l1jump, l1label;
15258 dw_loc_descr_ref l2jump, l2label;
15259
15260 if (GET_MODE (XEXP (rtl, 0)) != mode)
15261 return NULL;
15262
15263 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15264 VAR_INIT_STATUS_INITIALIZED);
15265 if (op0 == NULL)
15266 return NULL;
15267 ret = op0;
15268 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15269 VAR_INIT_STATUS_INITIALIZED);
15270 if (tmp == NULL)
15271 return NULL;
15272 add_loc_descr (&ret, tmp);
15273 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15274 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15275 add_loc_descr (&ret, l1label);
15276 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15277 add_loc_descr (&ret, l2jump);
15278 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15279 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15280 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15281 VAR_INIT_STATUS_INITIALIZED);
15282 if (tmp == NULL)
15283 return NULL;
15284 add_loc_descr (&ret, tmp);
15285 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15286 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15287 ? DW_OP_plus : DW_OP_xor, 0, 0));
15288 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15289 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15290 VAR_INIT_STATUS_INITIALIZED);
15291 add_loc_descr (&ret, tmp);
15292 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15293 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15294 add_loc_descr (&ret, l1jump);
15295 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15296 add_loc_descr (&ret, l2label);
15297 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15298 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15299 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15300 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15301 return ret;
15302 }
15303
15304 /* BSWAP (constS is initial shift count, either 56 or 24):
15305 constS const0
15306 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15307 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15308 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15309 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15310 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15311
15312 static dw_loc_descr_ref
15313 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15314 machine_mode mem_mode)
15315 {
15316 dw_loc_descr_ref op0, ret, tmp;
15317 dw_loc_descr_ref l1jump, l1label;
15318 dw_loc_descr_ref l2jump, l2label;
15319
15320 if (BITS_PER_UNIT != 8
15321 || (GET_MODE_BITSIZE (mode) != 32
15322 && GET_MODE_BITSIZE (mode) != 64))
15323 return NULL;
15324
15325 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15326 VAR_INIT_STATUS_INITIALIZED);
15327 if (op0 == NULL)
15328 return NULL;
15329
15330 ret = op0;
15331 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15332 mode, mem_mode,
15333 VAR_INIT_STATUS_INITIALIZED);
15334 if (tmp == NULL)
15335 return NULL;
15336 add_loc_descr (&ret, tmp);
15337 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15338 VAR_INIT_STATUS_INITIALIZED);
15339 if (tmp == NULL)
15340 return NULL;
15341 add_loc_descr (&ret, tmp);
15342 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15343 add_loc_descr (&ret, l1label);
15344 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15345 mode, mem_mode,
15346 VAR_INIT_STATUS_INITIALIZED);
15347 add_loc_descr (&ret, tmp);
15348 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15349 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15350 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15351 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15352 VAR_INIT_STATUS_INITIALIZED);
15353 if (tmp == NULL)
15354 return NULL;
15355 add_loc_descr (&ret, tmp);
15356 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15357 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15358 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15359 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15360 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15361 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15362 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15363 VAR_INIT_STATUS_INITIALIZED);
15364 add_loc_descr (&ret, tmp);
15365 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15366 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15367 add_loc_descr (&ret, l2jump);
15368 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15369 VAR_INIT_STATUS_INITIALIZED);
15370 add_loc_descr (&ret, tmp);
15371 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15372 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15373 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15374 add_loc_descr (&ret, l1jump);
15375 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15376 add_loc_descr (&ret, l2label);
15377 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15378 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15379 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15380 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15381 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15382 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15383 return ret;
15384 }
15385
15386 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15387 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15388 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15389 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15390
15391 ROTATERT is similar:
15392 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15393 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15394 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15395
15396 static dw_loc_descr_ref
15397 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15398 machine_mode mem_mode)
15399 {
15400 rtx rtlop1 = XEXP (rtl, 1);
15401 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15402 int i;
15403
15404 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15405 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15406 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15407 VAR_INIT_STATUS_INITIALIZED);
15408 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15409 VAR_INIT_STATUS_INITIALIZED);
15410 if (op0 == NULL || op1 == NULL)
15411 return NULL;
15412 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15413 for (i = 0; i < 2; i++)
15414 {
15415 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15416 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15417 mode, mem_mode,
15418 VAR_INIT_STATUS_INITIALIZED);
15419 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15420 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15421 ? DW_OP_const4u
15422 : HOST_BITS_PER_WIDE_INT == 64
15423 ? DW_OP_const8u : DW_OP_constu,
15424 GET_MODE_MASK (mode), 0);
15425 else
15426 mask[i] = NULL;
15427 if (mask[i] == NULL)
15428 return NULL;
15429 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15430 }
15431 ret = op0;
15432 add_loc_descr (&ret, op1);
15433 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15434 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15435 if (GET_CODE (rtl) == ROTATERT)
15436 {
15437 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15438 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15439 GET_MODE_BITSIZE (mode), 0));
15440 }
15441 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15442 if (mask[0] != NULL)
15443 add_loc_descr (&ret, mask[0]);
15444 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15445 if (mask[1] != NULL)
15446 {
15447 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15448 add_loc_descr (&ret, mask[1]);
15449 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15450 }
15451 if (GET_CODE (rtl) == ROTATE)
15452 {
15453 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15454 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15455 GET_MODE_BITSIZE (mode), 0));
15456 }
15457 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15458 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15459 return ret;
15460 }
15461
15462 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15463 for DEBUG_PARAMETER_REF RTL. */
15464
15465 static dw_loc_descr_ref
15466 parameter_ref_descriptor (rtx rtl)
15467 {
15468 dw_loc_descr_ref ret;
15469 dw_die_ref ref;
15470
15471 if (dwarf_strict)
15472 return NULL;
15473 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15474 /* With LTO during LTRANS we get the late DIE that refers to the early
15475 DIE, thus we add another indirection here. This seems to confuse
15476 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15477 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15478 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15479 if (ref)
15480 {
15481 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15482 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15483 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15484 }
15485 else
15486 {
15487 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15488 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15489 }
15490 return ret;
15491 }
15492
15493 /* The following routine converts the RTL for a variable or parameter
15494 (resident in memory) into an equivalent Dwarf representation of a
15495 mechanism for getting the address of that same variable onto the top of a
15496 hypothetical "address evaluation" stack.
15497
15498 When creating memory location descriptors, we are effectively transforming
15499 the RTL for a memory-resident object into its Dwarf postfix expression
15500 equivalent. This routine recursively descends an RTL tree, turning
15501 it into Dwarf postfix code as it goes.
15502
15503 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15504
15505 MEM_MODE is the mode of the memory reference, needed to handle some
15506 autoincrement addressing modes.
15507
15508 Return 0 if we can't represent the location. */
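/* For an informal example: on a typical target, (mem:SI (plus (reg sp)
   (const_int 16))) comes out roughly as DW_OP_breg<sp> 16 followed by
   DW_OP_deref or DW_OP_deref_size, and a hard register operand becomes a
   DW_OP_breg<n> or DW_OP_regval_type reference depending on its mode.  */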
15509
15510 dw_loc_descr_ref
15511 mem_loc_descriptor (rtx rtl, machine_mode mode,
15512 machine_mode mem_mode,
15513 enum var_init_status initialized)
15514 {
15515 dw_loc_descr_ref mem_loc_result = NULL;
15516 enum dwarf_location_atom op;
15517 dw_loc_descr_ref op0, op1;
15518 rtx inner = NULL_RTX;
15519 poly_int64 offset;
15520
15521 if (mode == VOIDmode)
15522 mode = GET_MODE (rtl);
15523
15524 /* Note that for a dynamically sized array, the location we will generate a
15525 description of here will be the lowest numbered location which is
15526 actually within the array. That's *not* necessarily the same as the
15527 zeroth element of the array. */
15528
15529 rtl = targetm.delegitimize_address (rtl);
15530
15531 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15532 return NULL;
15533
15534 scalar_int_mode int_mode = BImode, inner_mode, op1_mode;
15535 switch (GET_CODE (rtl))
15536 {
15537 case POST_INC:
15538 case POST_DEC:
15539 case POST_MODIFY:
15540 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15541
15542 case SUBREG:
15543 /* The case of a subreg may arise when we have a local (register)
15544 variable or a formal (register) parameter which doesn't quite fill
15545 up an entire register. For now, just assume that it is
15546 legitimate to make the Dwarf info refer to the whole register which
15547 contains the given subreg. */
15548 if (!subreg_lowpart_p (rtl))
15549 break;
15550 inner = SUBREG_REG (rtl);
15551 /* FALLTHRU */
15552 case TRUNCATE:
15553 if (inner == NULL_RTX)
15554 inner = XEXP (rtl, 0);
15555 if (is_a <scalar_int_mode> (mode, &int_mode)
15556 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15557 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15558 #ifdef POINTERS_EXTEND_UNSIGNED
15559 || (int_mode == Pmode && mem_mode != VOIDmode)
15560 #endif
15561 )
15562 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15563 {
15564 mem_loc_result = mem_loc_descriptor (inner,
15565 inner_mode,
15566 mem_mode, initialized);
15567 break;
15568 }
15569 if (dwarf_strict && dwarf_version < 5)
15570 break;
15571 if (is_a <scalar_int_mode> (mode, &int_mode)
15572 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15573 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15574 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15575 {
15576 dw_die_ref type_die;
15577 dw_loc_descr_ref cvt;
15578
15579 mem_loc_result = mem_loc_descriptor (inner,
15580 GET_MODE (inner),
15581 mem_mode, initialized);
15582 if (mem_loc_result == NULL)
15583 break;
15584 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15585 if (type_die == NULL)
15586 {
15587 mem_loc_result = NULL;
15588 break;
15589 }
15590 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15591 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15592 else
15593 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15594 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15595 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15596 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15597 add_loc_descr (&mem_loc_result, cvt);
15598 if (is_a <scalar_int_mode> (mode, &int_mode)
15599 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15600 {
15601 /* Convert it to untyped afterwards. */
15602 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15603 add_loc_descr (&mem_loc_result, cvt);
15604 }
15605 }
15606 break;
15607
15608 case REG:
15609 if (!is_a <scalar_int_mode> (mode, &int_mode)
15610 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15611 && rtl != arg_pointer_rtx
15612 && rtl != frame_pointer_rtx
15613 #ifdef POINTERS_EXTEND_UNSIGNED
15614 && (int_mode != Pmode || mem_mode == VOIDmode)
15615 #endif
15616 ))
15617 {
15618 dw_die_ref type_die;
15619 unsigned int dbx_regnum;
15620
15621 if (dwarf_strict && dwarf_version < 5)
15622 break;
15623 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15624 break;
15625 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15626 if (type_die == NULL)
15627 break;
15628
15629 dbx_regnum = dbx_reg_number (rtl);
15630 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15631 break;
15632 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15633 dbx_regnum, 0);
15634 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15635 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15636 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15637 break;
15638 }
15639 /* Whenever a register number forms a part of the description of the
15640 method for calculating the (dynamic) address of a memory resident
15641 object, DWARF rules require the register number be referred to as
15642 a "base register". This distinction is not based in any way upon
15643 what category of register the hardware believes the given register
15644 belongs to. This is strictly DWARF terminology we're dealing with
15645 here. Note that in cases where the location of a memory-resident
15646 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15647 OP_CONST (0)) the actual DWARF location descriptor that we generate
15648 may just be OP_BASEREG (basereg). This may look deceptively like
15649 the object in question was allocated to a register (rather than in
15650 memory) so DWARF consumers need to be aware of the subtle
15651 distinction between OP_REG and OP_BASEREG. */
15652 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15653 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15654 else if (stack_realign_drap
15655 && crtl->drap_reg
15656 && crtl->args.internal_arg_pointer == rtl
15657 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15658 {
15659 /* If RTL is internal_arg_pointer, which has been optimized
15660 out, use DRAP instead. */
15661 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15662 VAR_INIT_STATUS_INITIALIZED);
15663 }
15664 break;
15665
15666 case SIGN_EXTEND:
15667 case ZERO_EXTEND:
15668 if (!is_a <scalar_int_mode> (mode, &int_mode)
15669 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15670 break;
15671 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15672 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15673 if (op0 == 0)
15674 break;
15675 else if (GET_CODE (rtl) == ZERO_EXTEND
15676 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15677 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15678 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15679 to expand zero extend as two shifts instead of
15680 masking. */
15681 && GET_MODE_SIZE (inner_mode) <= 4)
15682 {
15683 mem_loc_result = op0;
15684 add_loc_descr (&mem_loc_result,
15685 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15686 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15687 }
15688 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15689 {
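/* Extend by shifting the value to the top of the address-sized
   stack slot and back down: DW_OP_shra copies the sign bit for
   SIGN_EXTEND, DW_OP_shr fills with zeros for ZERO_EXTEND.  */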
15690 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15691 shift *= BITS_PER_UNIT;
15692 if (GET_CODE (rtl) == SIGN_EXTEND)
15693 op = DW_OP_shra;
15694 else
15695 op = DW_OP_shr;
15696 mem_loc_result = op0;
15697 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15698 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15699 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15700 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15701 }
15702 else if (!dwarf_strict || dwarf_version >= 5)
15703 {
15704 dw_die_ref type_die1, type_die2;
15705 dw_loc_descr_ref cvt;
15706
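/* For results wider than the address size, use typed DWARF:
   first convert the value to the inner type (unsigned for
   ZERO_EXTEND, signed for SIGN_EXTEND), then to the unsigned
   result type, so the conversions perform the extension.  */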
15707 type_die1 = base_type_for_mode (inner_mode,
15708 GET_CODE (rtl) == ZERO_EXTEND);
15709 if (type_die1 == NULL)
15710 break;
15711 type_die2 = base_type_for_mode (int_mode, 1);
15712 if (type_die2 == NULL)
15713 break;
15714 mem_loc_result = op0;
15715 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15716 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15717 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15718 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15719 add_loc_descr (&mem_loc_result, cvt);
15720 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15721 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15722 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15723 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15724 add_loc_descr (&mem_loc_result, cvt);
15725 }
15726 break;
15727
15728 case MEM:
15729 {
15730 rtx new_rtl = avoid_constant_pool_reference (rtl);
15731 if (new_rtl != rtl)
15732 {
15733 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15734 initialized);
15735 if (mem_loc_result != NULL)
15736 return mem_loc_result;
15737 }
15738 }
15739 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15740 get_address_mode (rtl), mode,
15741 VAR_INIT_STATUS_INITIALIZED);
15742 if (mem_loc_result == NULL)
15743 mem_loc_result = tls_mem_loc_descriptor (rtl);
15744 if (mem_loc_result != NULL)
15745 {
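/* The address is now on the stack; dereference it with an operator
   matching the object: DW_OP_deref_type for values wider than an
   address or with a non-integral mode, DW_OP_deref for exactly
   address-sized values, and DW_OP_deref_size for anything narrower.  */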
15746 if (!is_a <scalar_int_mode> (mode, &int_mode)
15747 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15748 {
15749 dw_die_ref type_die;
15750 dw_loc_descr_ref deref;
15751 HOST_WIDE_INT size;
15752
15753 if (dwarf_strict && dwarf_version < 5)
15754 return NULL;
15755 if (!GET_MODE_SIZE (mode).is_constant (&size))
15756 return NULL;
15757 type_die
15758 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15759 if (type_die == NULL)
15760 return NULL;
15761 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15762 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15763 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15764 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15765 add_loc_descr (&mem_loc_result, deref);
15766 }
15767 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15768 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15769 else
15770 add_loc_descr (&mem_loc_result,
15771 new_loc_descr (DW_OP_deref_size,
15772 GET_MODE_SIZE (int_mode), 0));
15773 }
15774 break;
15775
15776 case LO_SUM:
15777 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15778
15779 case LABEL_REF:
15780 /* Some ports can transform a symbol ref into a label ref, because
15781 the symbol ref is too far away and has to be dumped into a constant
15782 pool. */
15783 case CONST:
15784 case SYMBOL_REF:
15785 case UNSPEC:
15786 if (!is_a <scalar_int_mode> (mode, &int_mode)
15787 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15788 #ifdef POINTERS_EXTEND_UNSIGNED
15789 && (int_mode != Pmode || mem_mode == VOIDmode)
15790 #endif
15791 ))
15792 break;
15793
15794 if (GET_CODE (rtl) == UNSPEC)
15795 {
15796 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15797 can't express it in the debug info. This can happen e.g. with some
15798 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15799 approves. */
15800 bool not_ok = false;
15801 subrtx_var_iterator::array_type array;
15802 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15803 if (*iter != rtl && !CONSTANT_P (*iter))
15804 {
15805 not_ok = true;
15806 break;
15807 }
15808
15809 if (not_ok)
15810 break;
15811
15812 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15813 if (!const_ok_for_output_1 (*iter))
15814 {
15815 not_ok = true;
15816 break;
15817 }
15818
15819 if (not_ok)
15820 break;
15821
15822 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15823 goto symref;
15824 }
15825
15826 if (GET_CODE (rtl) == SYMBOL_REF
15827 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15828 {
15829 dw_loc_descr_ref temp;
15830
15831 /* If this is not defined, we have no way to emit the data. */
15832 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15833 break;
15834
15835 temp = new_addr_loc_descr (rtl, dtprel_true);
15836
15837 /* We check for DWARF 5 here because gdb did not implement
15838 DW_OP_form_tls_address until after 7.12. */
15839 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15840 ? DW_OP_form_tls_address
15841 : DW_OP_GNU_push_tls_address),
15842 0, 0);
15843 add_loc_descr (&mem_loc_result, temp);
15844
15845 break;
15846 }
15847
15848 if (!const_ok_for_output (rtl))
15849 {
15850 if (GET_CODE (rtl) == CONST)
15851 switch (GET_CODE (XEXP (rtl, 0)))
15852 {
15853 case NOT:
15854 op = DW_OP_not;
15855 goto try_const_unop;
15856 case NEG:
15857 op = DW_OP_neg;
15858 goto try_const_unop;
15859 try_const_unop:
15860 rtx arg;
15861 arg = XEXP (XEXP (rtl, 0), 0);
15862 if (!CONSTANT_P (arg))
15863 arg = gen_rtx_CONST (int_mode, arg);
15864 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15865 initialized);
15866 if (op0)
15867 {
15868 mem_loc_result = op0;
15869 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15870 }
15871 break;
15872 default:
15873 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15874 mem_mode, initialized);
15875 break;
15876 }
15877 break;
15878 }
15879
15880 symref:
15881 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15882 vec_safe_push (used_rtx_array, rtl);
15883 break;
15884
15885 case CONCAT:
15886 case CONCATN:
15887 case VAR_LOCATION:
15888 case DEBUG_IMPLICIT_PTR:
15889 expansion_failed (NULL_TREE, rtl,
15890 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15891 return 0;
15892
15893 case ENTRY_VALUE:
15894 if (dwarf_strict && dwarf_version < 5)
15895 return NULL;
15896 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15897 {
15898 if (!is_a <scalar_int_mode> (mode, &int_mode)
15899 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15900 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15901 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15902 else
15903 {
15904 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15905 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15906 return NULL;
15907 op0 = one_reg_loc_descriptor (dbx_regnum,
15908 VAR_INIT_STATUS_INITIALIZED);
15909 }
15910 }
15911 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15912 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15913 {
15914 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15915 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15916 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15917 return NULL;
15918 }
15919 else
15920 gcc_unreachable ();
15921 if (op0 == NULL)
15922 return NULL;
15923 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15924 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15925 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15926 break;
15927
15928 case DEBUG_PARAMETER_REF:
15929 mem_loc_result = parameter_ref_descriptor (rtl);
15930 break;
15931
15932 case PRE_MODIFY:
15933 /* Extract the PLUS expression nested inside and fall into
15934 PLUS code below. */
15935 rtl = XEXP (rtl, 1);
15936 goto plus;
15937
15938 case PRE_INC:
15939 case PRE_DEC:
15940 /* Turn these into a PLUS expression and fall into the PLUS code
15941 below. */
15942 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15943 gen_int_mode (GET_CODE (rtl) == PRE_INC
15944 ? GET_MODE_UNIT_SIZE (mem_mode)
15945 : -GET_MODE_UNIT_SIZE (mem_mode),
15946 mode));
15947
15948 /* fall through */
15949
15950 case PLUS:
15951 plus:
15952 if (is_based_loc (rtl)
15953 && is_a <scalar_int_mode> (mode, &int_mode)
15954 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15955 || XEXP (rtl, 0) == arg_pointer_rtx
15956 || XEXP (rtl, 0) == frame_pointer_rtx))
15957 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15958 INTVAL (XEXP (rtl, 1)),
15959 VAR_INIT_STATUS_INITIALIZED);
15960 else
15961 {
15962 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15963 VAR_INIT_STATUS_INITIALIZED);
15964 if (mem_loc_result == 0)
15965 break;
15966
15967 if (CONST_INT_P (XEXP (rtl, 1))
15968 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15969 <= DWARF2_ADDR_SIZE))
15970 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15971 else
15972 {
15973 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15974 VAR_INIT_STATUS_INITIALIZED);
15975 if (op1 == 0)
15976 return NULL;
15977 add_loc_descr (&mem_loc_result, op1);
15978 add_loc_descr (&mem_loc_result,
15979 new_loc_descr (DW_OP_plus, 0, 0));
15980 }
15981 }
15982 break;
15983
15984 /* If a pseudo-reg is optimized away, it is possible for it to
15985 be replaced with a MEM containing a multiply or shift. */
15986 case MINUS:
15987 op = DW_OP_minus;
15988 goto do_binop;
15989
15990 case MULT:
15991 op = DW_OP_mul;
15992 goto do_binop;
15993
15994 case DIV:
15995 if ((!dwarf_strict || dwarf_version >= 5)
15996 && is_a <scalar_int_mode> (mode, &int_mode)
15997 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15998 {
15999 mem_loc_result = typed_binop (DW_OP_div, rtl,
16000 base_type_for_mode (mode, 0),
16001 int_mode, mem_mode);
16002 break;
16003 }
16004 op = DW_OP_div;
16005 goto do_binop;
16006
16007 case UMOD:
16008 op = DW_OP_mod;
16009 goto do_binop;
16010
16011 case ASHIFT:
16012 op = DW_OP_shl;
16013 goto do_shift;
16014
16015 case ASHIFTRT:
16016 op = DW_OP_shra;
16017 goto do_shift;
16018
16019 case LSHIFTRT:
16020 op = DW_OP_shr;
16021 goto do_shift;
16022
16023 do_shift:
16024 if (!is_a <scalar_int_mode> (mode, &int_mode))
16025 break;
16026 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
16027 VAR_INIT_STATUS_INITIALIZED);
16028 {
16029 rtx rtlop1 = XEXP (rtl, 1);
16030 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
16031 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
16032 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
16033 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
16034 VAR_INIT_STATUS_INITIALIZED);
16035 }
16036
16037 if (op0 == 0 || op1 == 0)
16038 break;
16039
16040 mem_loc_result = op0;
16041 add_loc_descr (&mem_loc_result, op1);
16042 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16043 break;
16044
16045 case AND:
16046 op = DW_OP_and;
16047 goto do_binop;
16048
16049 case IOR:
16050 op = DW_OP_or;
16051 goto do_binop;
16052
16053 case XOR:
16054 op = DW_OP_xor;
16055 goto do_binop;
16056
16057 do_binop:
16058 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16059 VAR_INIT_STATUS_INITIALIZED);
16060 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16061 VAR_INIT_STATUS_INITIALIZED);
16062
16063 if (op0 == 0 || op1 == 0)
16064 break;
16065
16066 mem_loc_result = op0;
16067 add_loc_descr (&mem_loc_result, op1);
16068 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16069 break;
16070
16071 case MOD:
16072 if ((!dwarf_strict || dwarf_version >= 5)
16073 && is_a <scalar_int_mode> (mode, &int_mode)
16074 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16075 {
16076 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16077 base_type_for_mode (mode, 0),
16078 int_mode, mem_mode);
16079 break;
16080 }
16081
16082 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16083 VAR_INIT_STATUS_INITIALIZED);
16084 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16085 VAR_INIT_STATUS_INITIALIZED);
16086
16087 if (op0 == 0 || op1 == 0)
16088 break;
16089
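/* DW_OP_mod is used for the unsigned UMOD case above, so compute the
   signed modulus here explicitly as op0 - (op0 / op1) * op1 using the
   signed DW_OP_div:
   op0 op1 DW_OP_over DW_OP_over DW_OP_div DW_OP_mul DW_OP_minus.  */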
16090 mem_loc_result = op0;
16091 add_loc_descr (&mem_loc_result, op1);
16092 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16093 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16094 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16095 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16096 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
16097 break;
16098
16099 case UDIV:
16100 if ((!dwarf_strict || dwarf_version >= 5)
16101 && is_a <scalar_int_mode> (mode, &int_mode))
16102 {
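/* DW_OP_div divides untyped (address-sized) stack values as signed
   integers, so for narrow operands perform the division via
   typed_binop with an unsigned base type.  Wider operands are
   emitted as typed values already and can use DW_OP_div directly.  */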
16103 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16104 {
16105 op = DW_OP_div;
16106 goto do_binop;
16107 }
16108 mem_loc_result = typed_binop (DW_OP_div, rtl,
16109 base_type_for_mode (int_mode, 1),
16110 int_mode, mem_mode);
16111 }
16112 break;
16113
16114 case NOT:
16115 op = DW_OP_not;
16116 goto do_unop;
16117
16118 case ABS:
16119 op = DW_OP_abs;
16120 goto do_unop;
16121
16122 case NEG:
16123 op = DW_OP_neg;
16124 goto do_unop;
16125
16126 do_unop:
16127 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16128 VAR_INIT_STATUS_INITIALIZED);
16129
16130 if (op0 == 0)
16131 break;
16132
16133 mem_loc_result = op0;
16134 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16135 break;
16136
16137 case CONST_INT:
16138 if (!is_a <scalar_int_mode> (mode, &int_mode)
16139 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16140 #ifdef POINTERS_EXTEND_UNSIGNED
16141 || (int_mode == Pmode
16142 && mem_mode != VOIDmode
16143 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16144 #endif
16145 )
16146 {
16147 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16148 break;
16149 }
16150 if ((!dwarf_strict || dwarf_version >= 5)
16151 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16152 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16153 {
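/* Emit a typed constant.  If the value is non-negative, fits in an
   address-sized constant and that form plus a DW_OP_convert to the
   wide type is shorter than DW_OP_const_type, prefer the untyped
   constant followed by a conversion.  */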
16154 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16155 scalar_int_mode amode;
16156 if (type_die == NULL)
16157 return NULL;
16158 if (INTVAL (rtl) >= 0
16159 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16160 .exists (&amode))
16161 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16162 /* const DW_OP_convert <XXX> vs.
16163 DW_OP_const_type <XXX, 1, const>. */
16164 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16165 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16166 {
16167 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16168 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16169 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16170 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16171 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16172 add_loc_descr (&mem_loc_result, op0);
16173 return mem_loc_result;
16174 }
16175 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16176 INTVAL (rtl));
16177 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16178 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16179 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16180 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16181 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16182 else
16183 {
16184 mem_loc_result->dw_loc_oprnd2.val_class
16185 = dw_val_class_const_double;
16186 mem_loc_result->dw_loc_oprnd2.v.val_double
16187 = double_int::from_shwi (INTVAL (rtl));
16188 }
16189 }
16190 break;
16191
16192 case CONST_DOUBLE:
16193 if (!dwarf_strict || dwarf_version >= 5)
16194 {
16195 dw_die_ref type_die;
16196
16197 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16198 CONST_DOUBLE rtx could represent either a large integer
16199 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16200 the value is always a floating point constant.
16201
16202 When it is an integer, a CONST_DOUBLE is used whenever
16203 the constant requires 2 HWIs to be adequately represented.
16204 We output CONST_DOUBLEs as blocks. */
16205 if (mode == VOIDmode
16206 || (GET_MODE (rtl) == VOIDmode
16207 && maybe_ne (GET_MODE_BITSIZE (mode),
16208 HOST_BITS_PER_DOUBLE_INT)))
16209 break;
16210 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16211 if (type_die == NULL)
16212 return NULL;
16213 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16214 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16215 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16216 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16217 #if TARGET_SUPPORTS_WIDE_INT == 0
16218 if (!SCALAR_FLOAT_MODE_P (mode))
16219 {
16220 mem_loc_result->dw_loc_oprnd2.val_class
16221 = dw_val_class_const_double;
16222 mem_loc_result->dw_loc_oprnd2.v.val_double
16223 = rtx_to_double_int (rtl);
16224 }
16225 else
16226 #endif
16227 {
16228 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16229 unsigned int length = GET_MODE_SIZE (float_mode);
16230 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16231
16232 insert_float (rtl, array);
16233 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16234 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16235 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16236 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16237 }
16238 }
16239 break;
16240
16241 case CONST_WIDE_INT:
16242 if (!dwarf_strict || dwarf_version >= 5)
16243 {
16244 dw_die_ref type_die;
16245
16246 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16247 if (type_die == NULL)
16248 return NULL;
16249 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16250 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16251 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16252 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16253 mem_loc_result->dw_loc_oprnd2.val_class
16254 = dw_val_class_wide_int;
16255 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16256 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16257 }
16258 break;
16259
16260 case CONST_POLY_INT:
16261 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16262 break;
16263
16264 case EQ:
16265 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16266 break;
16267
16268 case GE:
16269 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16270 break;
16271
16272 case GT:
16273 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16274 break;
16275
16276 case LE:
16277 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16278 break;
16279
16280 case LT:
16281 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16282 break;
16283
16284 case NE:
16285 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16286 break;
16287
16288 case GEU:
16289 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16290 break;
16291
16292 case GTU:
16293 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16294 break;
16295
16296 case LEU:
16297 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16298 break;
16299
16300 case LTU:
16301 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16302 break;
16303
16304 case UMIN:
16305 case UMAX:
16306 if (!SCALAR_INT_MODE_P (mode))
16307 break;
16308 /* FALLTHRU */
16309 case SMIN:
16310 case SMAX:
16311 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16312 break;
16313
16314 case ZERO_EXTRACT:
16315 case SIGN_EXTRACT:
16316 if (CONST_INT_P (XEXP (rtl, 1))
16317 && CONST_INT_P (XEXP (rtl, 2))
16318 && is_a <scalar_int_mode> (mode, &int_mode)
16319 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16320 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16321 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16322 && ((unsigned) INTVAL (XEXP (rtl, 1))
16323 + (unsigned) INTVAL (XEXP (rtl, 2))
16324 <= GET_MODE_BITSIZE (int_mode)))
16325 {
16326 int shift, size;
16327 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16328 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16329 if (op0 == 0)
16330 break;
16331 if (GET_CODE (rtl) == SIGN_EXTRACT)
16332 op = DW_OP_shra;
16333 else
16334 op = DW_OP_shr;
16335 mem_loc_result = op0;
16336 size = INTVAL (XEXP (rtl, 1));
16337 shift = INTVAL (XEXP (rtl, 2));
16338 if (BITS_BIG_ENDIAN)
16339 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16340 if (shift + size != (int) DWARF2_ADDR_SIZE)
16341 {
16342 add_loc_descr (&mem_loc_result,
16343 int_loc_descriptor (DWARF2_ADDR_SIZE
16344 - shift - size));
16345 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16346 }
16347 if (size != (int) DWARF2_ADDR_SIZE)
16348 {
16349 add_loc_descr (&mem_loc_result,
16350 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16351 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16352 }
16353 }
16354 break;
16355
16356 case IF_THEN_ELSE:
16357 {
16358 dw_loc_descr_ref op2, bra_node, drop_node;
16359 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16360 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16361 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16362 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16363 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16364 VAR_INIT_STATUS_INITIALIZED);
16365 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16366 VAR_INIT_STATUS_INITIALIZED);
16367 if (op0 == NULL || op1 == NULL || op2 == NULL)
16368 break;
16369
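/* Push the "then" value, the "else" value and the condition;
   DW_OP_bra skips the DW_OP_swap when the condition is nonzero, so
   the final DW_OP_drop discards the "else" value, otherwise the
   swap makes it discard the "then" value instead.  */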
16370 mem_loc_result = op1;
16371 add_loc_descr (&mem_loc_result, op2);
16372 add_loc_descr (&mem_loc_result, op0);
16373 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16374 add_loc_descr (&mem_loc_result, bra_node);
16375 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16376 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16377 add_loc_descr (&mem_loc_result, drop_node);
16378 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16379 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16380 }
16381 break;
16382
16383 case FLOAT_EXTEND:
16384 case FLOAT_TRUNCATE:
16385 case FLOAT:
16386 case UNSIGNED_FLOAT:
16387 case FIX:
16388 case UNSIGNED_FIX:
16389 if (!dwarf_strict || dwarf_version >= 5)
16390 {
16391 dw_die_ref type_die;
16392 dw_loc_descr_ref cvt;
16393
16394 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16395 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16396 if (op0 == NULL)
16397 break;
16398 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16399 && (GET_CODE (rtl) == FLOAT
16400 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16401 {
16402 type_die = base_type_for_mode (int_mode,
16403 GET_CODE (rtl) == UNSIGNED_FLOAT);
16404 if (type_die == NULL)
16405 break;
16406 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16407 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16408 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16409 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16410 add_loc_descr (&op0, cvt);
16411 }
16412 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16413 if (type_die == NULL)
16414 break;
16415 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16416 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16417 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16418 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16419 add_loc_descr (&op0, cvt);
16420 if (is_a <scalar_int_mode> (mode, &int_mode)
16421 && (GET_CODE (rtl) == FIX
16422 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16423 {
16424 op0 = convert_descriptor_to_mode (int_mode, op0);
16425 if (op0 == NULL)
16426 break;
16427 }
16428 mem_loc_result = op0;
16429 }
16430 break;
16431
16432 case CLZ:
16433 case CTZ:
16434 case FFS:
16435 if (is_a <scalar_int_mode> (mode, &int_mode))
16436 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16437 break;
16438
16439 case POPCOUNT:
16440 case PARITY:
16441 if (is_a <scalar_int_mode> (mode, &int_mode))
16442 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16443 break;
16444
16445 case BSWAP:
16446 if (is_a <scalar_int_mode> (mode, &int_mode))
16447 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16448 break;
16449
16450 case ROTATE:
16451 case ROTATERT:
16452 if (is_a <scalar_int_mode> (mode, &int_mode))
16453 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16454 break;
16455
16456 case COMPARE:
16457 /* In theory, we could implement the above. */
16458 /* DWARF cannot represent the unsigned compare operations
16459 natively. */
16460 case SS_MULT:
16461 case US_MULT:
16462 case SS_DIV:
16463 case US_DIV:
16464 case SS_PLUS:
16465 case US_PLUS:
16466 case SS_MINUS:
16467 case US_MINUS:
16468 case SS_NEG:
16469 case US_NEG:
16470 case SS_ABS:
16471 case SS_ASHIFT:
16472 case US_ASHIFT:
16473 case SS_TRUNCATE:
16474 case US_TRUNCATE:
16475 case UNORDERED:
16476 case ORDERED:
16477 case UNEQ:
16478 case UNGE:
16479 case UNGT:
16480 case UNLE:
16481 case UNLT:
16482 case LTGT:
16483 case FRACT_CONVERT:
16484 case UNSIGNED_FRACT_CONVERT:
16485 case SAT_FRACT:
16486 case UNSIGNED_SAT_FRACT:
16487 case SQRT:
16488 case ASM_OPERANDS:
16489 case VEC_MERGE:
16490 case VEC_SELECT:
16491 case VEC_CONCAT:
16492 case VEC_DUPLICATE:
16493 case VEC_SERIES:
16494 case HIGH:
16495 case FMA:
16496 case STRICT_LOW_PART:
16497 case CONST_VECTOR:
16498 case CONST_FIXED:
16499 case CLRSB:
16500 case CLOBBER:
16501 break;
16502
16503 case CONST_STRING:
16504 resolve_one_addr (&rtl);
16505 goto symref;
16506
16507 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16508 the expression. An UNSPEC rtx represents a raw DWARF operation;
16509 new_loc_descr is called for it to build the operation directly.
16510 Otherwise mem_loc_descriptor is called recursively. */
16511 case PARALLEL:
16512 {
16513 int index = 0;
16514 dw_loc_descr_ref exp_result = NULL;
16515
16516 for (; index < XVECLEN (rtl, 0); index++)
16517 {
16518 rtx elem = XVECEXP (rtl, 0, index);
16519 if (GET_CODE (elem) == UNSPEC)
16520 {
16521 /* Each DWARF operation UNSPEC contains two operands; if
16522 an operand is not used for the operation, const0_rtx is
16523 passed. */
16524 gcc_assert (XVECLEN (elem, 0) == 2);
16525
16526 HOST_WIDE_INT dw_op = XINT (elem, 1);
16527 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16528 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16529 exp_result
16530 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16531 oprnd2);
16532 }
16533 else
16534 exp_result
16535 = mem_loc_descriptor (elem, mode, mem_mode,
16536 VAR_INIT_STATUS_INITIALIZED);
16537
16538 if (!mem_loc_result)
16539 mem_loc_result = exp_result;
16540 else
16541 add_loc_descr (&mem_loc_result, exp_result);
16542 }
16543
16544 break;
16545 }
16546
16547 default:
16548 if (flag_checking)
16549 {
16550 print_rtl (stderr, rtl);
16551 gcc_unreachable ();
16552 }
16553 break;
16554 }
16555
16556 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16557 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16558
16559 return mem_loc_result;
16560 }
16561
16562 /* Return a descriptor that describes the concatenation of two locations.
16563 This is typically a complex variable. */
16564
16565 static dw_loc_descr_ref
16566 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16567 {
16568 /* At present we only track constant-sized pieces. */
16569 unsigned int size0, size1;
16570 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16571 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16572 return 0;
16573
16574 dw_loc_descr_ref cc_loc_result = NULL;
16575 dw_loc_descr_ref x0_ref
16576 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16577 dw_loc_descr_ref x1_ref
16578 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16579
16580 if (x0_ref == 0 || x1_ref == 0)
16581 return 0;
16582
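/* Describe each half with its own location expression and compose
   them with DW_OP_piece operations sized to the respective halves.  */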
16583 cc_loc_result = x0_ref;
16584 add_loc_descr_op_piece (&cc_loc_result, size0);
16585
16586 add_loc_descr (&cc_loc_result, x1_ref);
16587 add_loc_descr_op_piece (&cc_loc_result, size1);
16588
16589 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16590 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16591
16592 return cc_loc_result;
16593 }
16594
16595 /* Return a descriptor that describes the concatenation of N
16596 locations. */
16597
16598 static dw_loc_descr_ref
16599 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16600 {
16601 unsigned int i;
16602 dw_loc_descr_ref cc_loc_result = NULL;
16603 unsigned int n = XVECLEN (concatn, 0);
16604 unsigned int size;
16605
16606 for (i = 0; i < n; ++i)
16607 {
16608 dw_loc_descr_ref ref;
16609 rtx x = XVECEXP (concatn, 0, i);
16610
16611 /* At present we only track constant-sized pieces. */
16612 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16613 return NULL;
16614
16615 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16616 if (ref == NULL)
16617 return NULL;
16618
16619 add_loc_descr (&cc_loc_result, ref);
16620 add_loc_descr_op_piece (&cc_loc_result, size);
16621 }
16622
16623 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16624 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16625
16626 return cc_loc_result;
16627 }
16628
16629 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16630 for DEBUG_IMPLICIT_PTR RTL. */
16631
16632 static dw_loc_descr_ref
16633 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16634 {
16635 dw_loc_descr_ref ret;
16636 dw_die_ref ref;
16637
16638 if (dwarf_strict && dwarf_version < 5)
16639 return NULL;
16640 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16641 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16642 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16643 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16644 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16645 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16646 if (ref)
16647 {
16648 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16649 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16650 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16651 }
16652 else
16653 {
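/* No DIE has been created for the decl yet; record the decl itself
   so the operand can be resolved to its DIE later.  */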
16654 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16655 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16656 }
16657 return ret;
16658 }
16659
16660 /* Output a proper Dwarf location descriptor for a variable or parameter
16661 which is either allocated in a register or in a memory location. For a
16662 register, we just generate an OP_REG and the register number. For a
16663 memory location we provide a Dwarf postfix expression describing how to
16664 generate the (dynamic) address of the object onto the address stack.
16665
16666 MODE is the mode of the decl if this loc_descriptor is going to be used in
16667 the .debug_loc section, where DW_OP_stack_value and DW_OP_implicit_value are
16668 allowed, VOIDmode otherwise.
16669
16670 If we don't know how to describe it, return 0. */
16671
16672 static dw_loc_descr_ref
16673 loc_descriptor (rtx rtl, machine_mode mode,
16674 enum var_init_status initialized)
16675 {
16676 dw_loc_descr_ref loc_result = NULL;
16677 scalar_int_mode int_mode;
16678
16679 switch (GET_CODE (rtl))
16680 {
16681 case SUBREG:
16682 /* The case of a subreg may arise when we have a local (register)
16683 variable or a formal (register) parameter which doesn't quite fill
16684 up an entire register. For now, just assume that it is
16685 legitimate to make the Dwarf info refer to the whole register which
16686 contains the given subreg. */
16687 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16688 loc_result = loc_descriptor (SUBREG_REG (rtl),
16689 GET_MODE (SUBREG_REG (rtl)), initialized);
16690 else
16691 goto do_default;
16692 break;
16693
16694 case REG:
16695 loc_result = reg_loc_descriptor (rtl, initialized);
16696 break;
16697
16698 case MEM:
16699 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16700 GET_MODE (rtl), initialized);
16701 if (loc_result == NULL)
16702 loc_result = tls_mem_loc_descriptor (rtl);
16703 if (loc_result == NULL)
16704 {
16705 rtx new_rtl = avoid_constant_pool_reference (rtl);
16706 if (new_rtl != rtl)
16707 loc_result = loc_descriptor (new_rtl, mode, initialized);
16708 }
16709 break;
16710
16711 case CONCAT:
16712 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16713 initialized);
16714 break;
16715
16716 case CONCATN:
16717 loc_result = concatn_loc_descriptor (rtl, initialized);
16718 break;
16719
16720 case VAR_LOCATION:
16721 /* Single part. */
16722 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16723 {
16724 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16725 if (GET_CODE (loc) == EXPR_LIST)
16726 loc = XEXP (loc, 0);
16727 loc_result = loc_descriptor (loc, mode, initialized);
16728 break;
16729 }
16730
16731 rtl = XEXP (rtl, 1);
16732 /* FALLTHRU */
16733
16734 case PARALLEL:
16735 {
16736 rtvec par_elems = XVEC (rtl, 0);
16737 int num_elem = GET_NUM_ELEM (par_elems);
16738 machine_mode mode;
16739 int i, size;
16740
16741 /* Create the first one, so we have something to add to. */
16742 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16743 VOIDmode, initialized);
16744 if (loc_result == NULL)
16745 return NULL;
16746 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16747 /* At present we only track constant-sized pieces. */
16748 if (!GET_MODE_SIZE (mode).is_constant (&size))
16749 return NULL;
16750 add_loc_descr_op_piece (&loc_result, size);
16751 for (i = 1; i < num_elem; i++)
16752 {
16753 dw_loc_descr_ref temp;
16754
16755 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16756 VOIDmode, initialized);
16757 if (temp == NULL)
16758 return NULL;
16759 add_loc_descr (&loc_result, temp);
16760 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16761 /* At present we only track constant-sized pieces. */
16762 if (!GET_MODE_SIZE (mode).is_constant (&size))
16763 return NULL;
16764 add_loc_descr_op_piece (&loc_result, size);
16765 }
16766 }
16767 break;
16768
16769 case CONST_INT:
16770 if (mode != VOIDmode && mode != BLKmode)
16771 {
16772 int_mode = as_a <scalar_int_mode> (mode);
16773 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16774 INTVAL (rtl));
16775 }
16776 break;
16777
16778 case CONST_DOUBLE:
16779 if (mode == VOIDmode)
16780 mode = GET_MODE (rtl);
16781
16782 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16783 {
16784 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16785
16786 /* Note that a CONST_DOUBLE rtx could represent either an integer
16787 or a floating-point constant. A CONST_DOUBLE is used whenever
16788 the constant requires more than one word in order to be
16789 adequately represented. We output CONST_DOUBLEs as blocks. */
16790 scalar_mode smode = as_a <scalar_mode> (mode);
16791 loc_result = new_loc_descr (DW_OP_implicit_value,
16792 GET_MODE_SIZE (smode), 0);
16793 #if TARGET_SUPPORTS_WIDE_INT == 0
16794 if (!SCALAR_FLOAT_MODE_P (smode))
16795 {
16796 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16797 loc_result->dw_loc_oprnd2.v.val_double
16798 = rtx_to_double_int (rtl);
16799 }
16800 else
16801 #endif
16802 {
16803 unsigned int length = GET_MODE_SIZE (smode);
16804 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16805
16806 insert_float (rtl, array);
16807 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16808 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16809 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16810 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16811 }
16812 }
16813 break;
16814
16815 case CONST_WIDE_INT:
16816 if (mode == VOIDmode)
16817 mode = GET_MODE (rtl);
16818
16819 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16820 {
16821 int_mode = as_a <scalar_int_mode> (mode);
16822 loc_result = new_loc_descr (DW_OP_implicit_value,
16823 GET_MODE_SIZE (int_mode), 0);
16824 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16825 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16826 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16827 }
16828 break;
16829
16830 case CONST_VECTOR:
16831 if (mode == VOIDmode)
16832 mode = GET_MODE (rtl);
16833
16834 if (mode != VOIDmode
16835 /* The combination of a length and byte elt_size doesn't extend
16836 naturally to boolean vectors, where several elements are packed
16837 into the same byte. */
16838 && GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL
16839 && (dwarf_version >= 4 || !dwarf_strict))
16840 {
16841 unsigned int length;
16842 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16843 return NULL;
16844
16845 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16846 unsigned char *array
16847 = ggc_vec_alloc<unsigned char> (length * elt_size);
16848 unsigned int i;
16849 unsigned char *p;
16850 machine_mode imode = GET_MODE_INNER (mode);
16851
16852 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16853 switch (GET_MODE_CLASS (mode))
16854 {
16855 case MODE_VECTOR_INT:
16856 for (i = 0, p = array; i < length; i++, p += elt_size)
16857 {
16858 rtx elt = CONST_VECTOR_ELT (rtl, i);
16859 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16860 }
16861 break;
16862
16863 case MODE_VECTOR_FLOAT:
16864 for (i = 0, p = array; i < length; i++, p += elt_size)
16865 {
16866 rtx elt = CONST_VECTOR_ELT (rtl, i);
16867 insert_float (elt, p);
16868 }
16869 break;
16870
16871 default:
16872 gcc_unreachable ();
16873 }
16874
16875 loc_result = new_loc_descr (DW_OP_implicit_value,
16876 length * elt_size, 0);
16877 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16878 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16879 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16880 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16881 }
16882 break;
16883
16884 case CONST:
16885 if (mode == VOIDmode
16886 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16887 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16888 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16889 {
16890 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16891 break;
16892 }
16893 /* FALLTHROUGH */
16894 case SYMBOL_REF:
16895 if (!const_ok_for_output (rtl))
16896 break;
16897 /* FALLTHROUGH */
16898 case LABEL_REF:
16899 if (is_a <scalar_int_mode> (mode, &int_mode)
16900 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16901 && (dwarf_version >= 4 || !dwarf_strict))
16902 {
16903 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16904 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16905 vec_safe_push (used_rtx_array, rtl);
16906 }
16907 break;
16908
16909 case DEBUG_IMPLICIT_PTR:
16910 loc_result = implicit_ptr_descriptor (rtl, 0);
16911 break;
16912
16913 case PLUS:
16914 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16915 && CONST_INT_P (XEXP (rtl, 1)))
16916 {
16917 loc_result
16918 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16919 break;
16920 }
16921 /* FALLTHRU */
16922 do_default:
16923 default:
16924 if ((is_a <scalar_int_mode> (mode, &int_mode)
16925 && GET_MODE (rtl) == int_mode
16926 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16927 && dwarf_version >= 4)
16928 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16929 {
16930 /* Value expression. */
16931 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16932 if (loc_result)
16933 add_loc_descr (&loc_result,
16934 new_loc_descr (DW_OP_stack_value, 0, 0));
16935 }
16936 break;
16937 }
16938
16939 return loc_result;
16940 }
16941
16942 /* We need to figure out what section we should use as the base for the
16943 address ranges where a given location is valid.
16944 1. If this particular DECL has a section associated with it, use that.
16945 2. If this function has a section associated with it, use that.
16946 3. Otherwise, use the text section.
16947 XXX: If you split a variable across multiple sections, we won't notice. */
16948
16949 static const char *
16950 secname_for_decl (const_tree decl)
16951 {
16952 const char *secname;
16953
16954 if (VAR_OR_FUNCTION_DECL_P (decl)
16955 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16956 && DECL_SECTION_NAME (decl))
16957 secname = DECL_SECTION_NAME (decl);
16958 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16959 {
16960 if (in_cold_section_p)
16961 {
16962 section *sec = current_function_section ();
16963 if (sec->common.flags & SECTION_NAMED)
16964 return sec->named.name;
16965 }
16966 secname = DECL_SECTION_NAME (current_function_decl);
16967 }
16968 else if (cfun && in_cold_section_p)
16969 secname = crtl->subsections.cold_section_label;
16970 else
16971 secname = text_section_label;
16972
16973 return secname;
16974 }
16975
16976 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16977
16978 static bool
16979 decl_by_reference_p (tree decl)
16980 {
16981 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16982 || VAR_P (decl))
16983 && DECL_BY_REFERENCE (decl));
16984 }
16985
16986 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16987 for VARLOC. */
16988
16989 static dw_loc_descr_ref
16990 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16991 enum var_init_status initialized)
16992 {
16993 int have_address = 0;
16994 dw_loc_descr_ref descr;
16995 machine_mode mode;
16996
16997 if (want_address != 2)
16998 {
16999 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
17000 /* Single part. */
17001 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17002 {
17003 varloc = PAT_VAR_LOCATION_LOC (varloc);
17004 if (GET_CODE (varloc) == EXPR_LIST)
17005 varloc = XEXP (varloc, 0);
17006 mode = GET_MODE (varloc);
17007 if (MEM_P (varloc))
17008 {
17009 rtx addr = XEXP (varloc, 0);
17010 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
17011 mode, initialized);
17012 if (descr)
17013 have_address = 1;
17014 else
17015 {
17016 rtx x = avoid_constant_pool_reference (varloc);
17017 if (x != varloc)
17018 descr = mem_loc_descriptor (x, mode, VOIDmode,
17019 initialized);
17020 }
17021 }
17022 else
17023 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
17024 }
17025 else
17026 return 0;
17027 }
17028 else
17029 {
17030 if (GET_CODE (varloc) == VAR_LOCATION)
17031 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
17032 else
17033 mode = DECL_MODE (loc);
17034 descr = loc_descriptor (varloc, mode, initialized);
17035 have_address = 1;
17036 }
17037
17038 if (!descr)
17039 return 0;
17040
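/* If the caller wanted an address but we only computed the value,
   wrap the expression in DW_OP_stack_value (where the DWARF version
   allows it) so consumers know the value itself, not its address,
   is on the stack.  */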
17041 if (want_address == 2 && !have_address
17042 && (dwarf_version >= 4 || !dwarf_strict))
17043 {
17044 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
17045 {
17046 expansion_failed (loc, NULL_RTX,
17047 "DWARF address size mismatch");
17048 return 0;
17049 }
17050 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
17051 have_address = 1;
17052 }
17053 /* Report failure if we can't fill the request for an address. */
17054 if (want_address && !have_address)
17055 {
17056 expansion_failed (loc, NULL_RTX,
17057 "Want address and only have value");
17058 return 0;
17059 }
17060
17061 /* If we've got an address and don't want one, dereference. */
17062 if (!want_address && have_address)
17063 {
17064 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
17065 enum dwarf_location_atom op;
17066
17067 if (size > DWARF2_ADDR_SIZE || size == -1)
17068 {
17069 expansion_failed (loc, NULL_RTX,
17070 "DWARF address size mismatch");
17071 return 0;
17072 }
17073 else if (size == DWARF2_ADDR_SIZE)
17074 op = DW_OP_deref;
17075 else
17076 op = DW_OP_deref_size;
17077
17078 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17079 }
17080
17081 return descr;
17082 }
17083
17084 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17085 if it is not possible. */
17086
17087 static dw_loc_descr_ref
17088 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17089 {
17090 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17091 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17092 else if (dwarf_version >= 3 || !dwarf_strict)
17093 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17094 else
17095 return NULL;
17096 }
17097
17098 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17099 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
17100
17101 static dw_loc_descr_ref
17102 dw_sra_loc_expr (tree decl, rtx loc)
17103 {
17104 rtx p;
17105 unsigned HOST_WIDE_INT padsize = 0;
17106 dw_loc_descr_ref descr, *descr_tail;
17107 unsigned HOST_WIDE_INT decl_size;
17108 rtx varloc;
17109 enum var_init_status initialized;
17110
17111 if (DECL_SIZE (decl) == NULL
17112 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17113 return NULL;
17114
17115 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17116 descr = NULL;
17117 descr_tail = &descr;
17118
17119 for (p = loc; p; p = XEXP (p, 1))
17120 {
17121 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17122 rtx loc_note = *decl_piece_varloc_ptr (p);
17123 dw_loc_descr_ref cur_descr;
17124 dw_loc_descr_ref *tail, last = NULL;
17125 unsigned HOST_WIDE_INT opsize = 0;
17126
17127 if (loc_note == NULL_RTX
17128 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17129 {
17130 padsize += bitsize;
17131 continue;
17132 }
17133 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17134 varloc = NOTE_VAR_LOCATION (loc_note);
17135 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17136 if (cur_descr == NULL)
17137 {
17138 padsize += bitsize;
17139 continue;
17140 }
17141
17142 /* Check that cur_descr either doesn't use
17143 DW_OP_*piece operations, or their sum is equal
17144 to bitsize. Otherwise we can't embed it. */
17145 for (tail = &cur_descr; *tail != NULL;
17146 tail = &(*tail)->dw_loc_next)
17147 if ((*tail)->dw_loc_opc == DW_OP_piece)
17148 {
17149 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17150 * BITS_PER_UNIT;
17151 last = *tail;
17152 }
17153 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17154 {
17155 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17156 last = *tail;
17157 }
17158
17159 if (last != NULL && opsize != bitsize)
17160 {
17161 padsize += bitsize;
17162 /* Discard the current piece of the descriptor and release any
17163 addr_table entries it uses. */
17164 remove_loc_list_addr_table_entries (cur_descr);
17165 continue;
17166 }
17167
17168 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17169 expression, which means that those bits are optimized out. */
17170 if (padsize)
17171 {
17172 if (padsize > decl_size)
17173 {
17174 remove_loc_list_addr_table_entries (cur_descr);
17175 goto discard_descr;
17176 }
17177 decl_size -= padsize;
17178 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17179 if (*descr_tail == NULL)
17180 {
17181 remove_loc_list_addr_table_entries (cur_descr);
17182 goto discard_descr;
17183 }
17184 descr_tail = &(*descr_tail)->dw_loc_next;
17185 padsize = 0;
17186 }
17187 *descr_tail = cur_descr;
17188 descr_tail = tail;
17189 if (bitsize > decl_size)
17190 goto discard_descr;
17191 decl_size -= bitsize;
17192 if (last == NULL)
17193 {
17194 HOST_WIDE_INT offset = 0;
17195 if (GET_CODE (varloc) == VAR_LOCATION
17196 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17197 {
17198 varloc = PAT_VAR_LOCATION_LOC (varloc);
17199 if (GET_CODE (varloc) == EXPR_LIST)
17200 varloc = XEXP (varloc, 0);
17201 }
17202 do
17203 {
17204 if (GET_CODE (varloc) == CONST
17205 || GET_CODE (varloc) == SIGN_EXTEND
17206 || GET_CODE (varloc) == ZERO_EXTEND)
17207 varloc = XEXP (varloc, 0);
17208 else if (GET_CODE (varloc) == SUBREG)
17209 varloc = SUBREG_REG (varloc);
17210 else
17211 break;
17212 }
17213 while (1);
17214 /* The DW_OP_bit_piece offset should be zero for register
17215 or implicit location descriptions and for empty location
17216 descriptions, but for memory addresses it needs big-endian
17217 adjustment. */
17218 if (MEM_P (varloc))
17219 {
17220 unsigned HOST_WIDE_INT memsize;
17221 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17222 goto discard_descr;
17223 memsize *= BITS_PER_UNIT;
17224 if (memsize != bitsize)
17225 {
17226 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17227 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17228 goto discard_descr;
17229 if (memsize < bitsize)
17230 goto discard_descr;
17231 if (BITS_BIG_ENDIAN)
17232 offset = memsize - bitsize;
17233 }
17234 }
17235
17236 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17237 if (*descr_tail == NULL)
17238 goto discard_descr;
17239 descr_tail = &(*descr_tail)->dw_loc_next;
17240 }
17241 }
17242
17243 /* If there were any non-empty expressions, add padding till the end of
17244 the decl. */
17245 if (descr != NULL && decl_size != 0)
17246 {
17247 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17248 if (*descr_tail == NULL)
17249 goto discard_descr;
17250 }
17251 return descr;
17252
17253 discard_descr:
17254 /* Discard the descriptor and release any addr_table entries it uses. */
17255 remove_loc_list_addr_table_entries (descr);
17256 return NULL;
17257 }
17258
17259 /* Return the dwarf representation of the location list LOC_LIST of
17260 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17261 function. */
17262
17263 static dw_loc_list_ref
17264 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17265 {
17266 const char *endname, *secname;
17267 var_loc_view endview;
17268 rtx varloc;
17269 enum var_init_status initialized;
17270 struct var_loc_node *node;
17271 dw_loc_descr_ref descr;
17272 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17273 dw_loc_list_ref list = NULL;
17274 dw_loc_list_ref *listp = &list;
17275
17276 /* Now that we know what section we are using for a base,
17277 actually construct the list of locations.
17278 The first location information is what is passed to the
17279 function that creates the location list, and the remaining
17280 locations just get added on to that list.
17281 Note that we only know the start address for a location
17282 (IE location changes), so to build the range, we use
17283 the range [current location start, next location start].
17284 This means we have to special case the last node, and generate
17285 a range of [last location start, end of function label]. */
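/* For example (purely illustrative labels): if a variable's location
   changes at L1, L2 and L3 in a function ending at LEND, the emitted
   ranges are [L1, L2), [L2, L3) and [L3, LEND).  */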
17286
17287 if (cfun && crtl->has_bb_partition)
17288 {
17289 bool save_in_cold_section_p = in_cold_section_p;
17290 in_cold_section_p = first_function_block_is_cold;
17291 if (loc_list->last_before_switch == NULL)
17292 in_cold_section_p = !in_cold_section_p;
17293 secname = secname_for_decl (decl);
17294 in_cold_section_p = save_in_cold_section_p;
17295 }
17296 else
17297 secname = secname_for_decl (decl);
17298
17299 for (node = loc_list->first; node; node = node->next)
17300 {
17301 bool range_across_switch = false;
17302 if (GET_CODE (node->loc) == EXPR_LIST
17303 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17304 {
17305 if (GET_CODE (node->loc) == EXPR_LIST)
17306 {
17307 descr = NULL;
17308 /* This requires DW_OP_{,bit_}piece, which is not usable
17309 inside DWARF expressions. */
17310 if (want_address == 2)
17311 descr = dw_sra_loc_expr (decl, node->loc);
17312 }
17313 else
17314 {
17315 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17316 varloc = NOTE_VAR_LOCATION (node->loc);
17317 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17318 }
17319 if (descr)
17320 {
17321 /* If a section switch happens in between node->label
17322 and node->next->label (or the end of the function) and
17323 we can't emit it as a single entry list,
17324 emit two ranges, the first one ending at the end
17325 of the first partition and the second one starting at the
17326 beginning of the second partition. */
17327 if (node == loc_list->last_before_switch
17328 && (node != loc_list->first || loc_list->first->next
17329 /* If we are to emit a view number, we will emit
17330 a loclist rather than a single location
17331 expression for the entire function (see
17332 loc_list_has_views), so we have to split the
17333 range that straddles across partitions. */
17334 || !ZERO_VIEW_P (node->view))
17335 && current_function_decl)
17336 {
17337 endname = cfun->fde->dw_fde_end;
17338 endview = 0;
17339 range_across_switch = true;
17340 }
17341 /* The variable has a location between NODE->LABEL and
17342 NODE->NEXT->LABEL. */
17343 else if (node->next)
17344 endname = node->next->label, endview = node->next->view;
17345 /* If the variable has a location at the last label
17346 it keeps its location until the end of function. */
17347 else if (!current_function_decl)
17348 endname = text_end_label, endview = 0;
17349 else
17350 {
17351 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17352 current_function_funcdef_no);
17353 endname = ggc_strdup (label_id);
17354 endview = 0;
17355 }
17356
17357 *listp = new_loc_list (descr, node->label, node->view,
17358 endname, endview, secname);
17359 if (TREE_CODE (decl) == PARM_DECL
17360 && node == loc_list->first
17361 && NOTE_P (node->loc)
17362 && strcmp (node->label, endname) == 0)
17363 (*listp)->force = true;
17364 listp = &(*listp)->dw_loc_next;
17365 }
17366 }
17367
17368 if (cfun
17369 && crtl->has_bb_partition
17370 && node == loc_list->last_before_switch)
17371 {
17372 bool save_in_cold_section_p = in_cold_section_p;
17373 in_cold_section_p = !first_function_block_is_cold;
17374 secname = secname_for_decl (decl);
17375 in_cold_section_p = save_in_cold_section_p;
17376 }
17377
17378 if (range_across_switch)
17379 {
17380 if (GET_CODE (node->loc) == EXPR_LIST)
17381 descr = dw_sra_loc_expr (decl, node->loc);
17382 else
17383 {
17384 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17385 varloc = NOTE_VAR_LOCATION (node->loc);
17386 descr = dw_loc_list_1 (decl, varloc, want_address,
17387 initialized);
17388 }
17389 gcc_assert (descr);
17390 /* The variable has a location between NODE->LABEL and
17391 NODE->NEXT->LABEL. */
17392 if (node->next)
17393 endname = node->next->label, endview = node->next->view;
17394 else
17395 endname = cfun->fde->dw_fde_second_end, endview = 0;
17396 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17397 endname, endview, secname);
17398 listp = &(*listp)->dw_loc_next;
17399 }
17400 }
17401
17402 /* Try to avoid the overhead of a location list by emitting a location
17403 expression instead, but only if we didn't have more than one
17404 location entry in the first place. If some entries were not
17405 representable, we don't want to pretend that a single entry that
17406 was representable applies to the entire scope in which the variable
17407 is available. */
17408 if (list && loc_list->first->next)
17409 gen_llsym (list);
17410 else
17411 maybe_gen_llsym (list);
17412
17413 return list;
17414 }
17415
17416 /* Return true if the loc_list has only a single element and thus can be
17417 represented as a location description. */
17418
17419 static bool
17420 single_element_loc_list_p (dw_loc_list_ref list)
17421 {
17422 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17423 return !list->ll_symbol;
17424 }
17425
17426 /* Duplicate a single element of a location list. */
17427
17428 static inline dw_loc_descr_ref
17429 copy_loc_descr (dw_loc_descr_ref ref)
17430 {
17431 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17432 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17433 return copy;
17434 }
17435
17436 /* To each location in list LIST append loc descr REF. */
17437
17438 static void
17439 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17440 {
17441 dw_loc_descr_ref copy;
17442 add_loc_descr (&list->expr, ref);
17443 list = list->dw_loc_next;
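/* The first location takes REF itself; every following location gets
   a fresh deep copy, since each expression needs its own copy of the
   descriptor chain.  */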
17444 while (list)
17445 {
17446 copy = copy_loc_descr (ref);
17447 add_loc_descr (&list->expr, copy);
17448 while (copy->dw_loc_next)
17449 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17450 list = list->dw_loc_next;
17451 }
17452 }
17453
17454 /* To each location in list LIST prepend loc descr REF. */
17455
17456 static void
17457 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17458 {
17459 dw_loc_descr_ref copy;
17460 dw_loc_descr_ref ref_end = list->expr;
17461 add_loc_descr (&ref, list->expr);
17462 list->expr = ref;
17463 list = list->dw_loc_next;
17464 while (list)
17465 {
17466 dw_loc_descr_ref end = list->expr;
17467 list->expr = copy = copy_loc_descr (ref);
17468 while (copy->dw_loc_next != ref_end)
17469 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17470 copy->dw_loc_next = end;
17471 list = list->dw_loc_next;
17472 }
17473 }
17474
17475 /* Given two lists RET and LIST,
17476 produce a location list that is the result of adding the expression in LIST
17477 to the expression in RET at each position in the program.
17478 Might be destructive on both RET and LIST.
17479
17480 TODO: We handle only the simple cases of RET or LIST having at most one
17481 element. The general case would involve sorting the lists in program order
17482 and merging them, which will need some additional work.
17483 Adding that will improve the quality of debug info, especially for SRA-ed
17484 structures. */
17485
17486 static void
17487 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17488 {
17489 if (!list)
17490 return;
17491 if (!*ret)
17492 {
17493 *ret = list;
17494 return;
17495 }
17496 if (!list->dw_loc_next)
17497 {
17498 add_loc_descr_to_each (*ret, list->expr);
17499 return;
17500 }
17501 if (!(*ret)->dw_loc_next)
17502 {
17503 prepend_loc_descr_to_each (list, (*ret)->expr);
17504 *ret = list;
17505 return;
17506 }
17507 expansion_failed (NULL_TREE, NULL_RTX,
17508 "Don't know how to merge two non-trivial"
17509 " location lists.\n");
17510 *ret = NULL;
17511 return;
17512 }
17513
17514 /* LOC is a constant expression.  Try our luck: look it up in the constant
17515 pool and return a loc_descr for its address.  */
17516
17517 static dw_loc_descr_ref
17518 cst_pool_loc_descr (tree loc)
17519 {
17520 /* Get an RTL for this, if something has been emitted. */
17521 rtx rtl = lookup_constant_def (loc);
17522
17523 if (!rtl || !MEM_P (rtl))
17524 {
17525 gcc_assert (!rtl);
17526 return 0;
17527 }
17528 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17529
17530 /* TODO: We might get more coverage if we were actually delaying expansion
17531 of all expressions till the end of compilation, when constant pools are
17532 fully populated.  */
17533 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17534 {
17535 expansion_failed (loc, NULL_RTX,
17536 "CST value in contant pool but not marked.");
17537 return 0;
17538 }
17539 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17540 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17541 }
17542
17543 /* Return a dw_loc_list representing the address of addr_expr LOC
17544 by looking for an inner INDIRECT_REF expression and turning
17545 it into simple arithmetic.
17546
17547 See loc_list_from_tree for the meaning of CONTEXT. */
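/* As an illustrative sketch (not exhaustive): at top level, for an expression
like &((*p).f) where F sits at a constant byte offset, the result is roughly
<location list for p> DW_OP_plus_uconst <byte offset of F> DW_OP_stack_value;
a variable offset is pushed and combined with DW_OP_plus instead.  */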
17548
17549 static dw_loc_list_ref
17550 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17551 loc_descr_context *context)
17552 {
17553 tree obj, offset;
17554 poly_int64 bitsize, bitpos, bytepos;
17555 machine_mode mode;
17556 int unsignedp, reversep, volatilep = 0;
17557 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17558
17559 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17560 &bitsize, &bitpos, &offset, &mode,
17561 &unsignedp, &reversep, &volatilep);
17562 STRIP_NOPS (obj);
17563 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17564 {
17565 expansion_failed (loc, NULL_RTX, "bitfield access");
17566 return 0;
17567 }
17568 if (!INDIRECT_REF_P (obj))
17569 {
17570 expansion_failed (obj,
17571 NULL_RTX, "no indirect ref in inner refrence");
17572 return 0;
17573 }
17574 if (!offset && known_eq (bitpos, 0))
17575 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17576 context);
17577 else if (toplev
17578 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17579 && (dwarf_version >= 4 || !dwarf_strict))
17580 {
17581 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17582 if (!list_ret)
17583 return 0;
17584 if (offset)
17585 {
17586 /* Variable offset. */
17587 list_ret1 = loc_list_from_tree (offset, 0, context);
17588 if (list_ret1 == 0)
17589 return 0;
17590 add_loc_list (&list_ret, list_ret1);
17591 if (!list_ret)
17592 return 0;
17593 add_loc_descr_to_each (list_ret,
17594 new_loc_descr (DW_OP_plus, 0, 0));
17595 }
17596 HOST_WIDE_INT value;
17597 if (bytepos.is_constant (&value) && value > 0)
17598 add_loc_descr_to_each (list_ret,
17599 new_loc_descr (DW_OP_plus_uconst, value, 0));
17600 else if (maybe_ne (bytepos, 0))
17601 loc_list_plus_const (list_ret, bytepos);
17602 add_loc_descr_to_each (list_ret,
17603 new_loc_descr (DW_OP_stack_value, 0, 0));
17604 }
17605 return list_ret;
17606 }
17607
17608 /* Set LOC to the next operation that is not a DW_OP_nop operation.  In case
17609 all operations from LOC are nops, move to the last one.  Insert into NOPS all
17610 operations that are skipped.  */
17611
17612 static void
17613 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17614 hash_set<dw_loc_descr_ref> &nops)
17615 {
17616 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17617 {
17618 nops.add (loc);
17619 loc = loc->dw_loc_next;
17620 }
17621 }
17622
17623 /* Helper for loc_descr_without_nops: free the location description operation
17624 LOC.  */
17625
17626 bool
17627 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17628 {
17629 ggc_free (loc);
17630 return true;
17631 }
17632
17633 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17634 finishes LOC. */
17635
17636 static void
17637 loc_descr_without_nops (dw_loc_descr_ref &loc)
17638 {
17639 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17640 return;
17641
17642 /* Set of all DW_OP_nop operations we remove. */
17643 hash_set<dw_loc_descr_ref> nops;
17644
17645 /* First, strip all prefix NOP operations in order to keep the head of the
17646 operations list. */
17647 loc_descr_to_next_no_nop (loc, nops);
17648
17649 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17650 {
17651 /* For control flow operations: strip "prefix" nops in destination
17652 labels. */
17653 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17654 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17655 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17656 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17657
17658 /* Do the same for the operations that follow, then move to the next
17659 iteration. */
17660 if (cur->dw_loc_next != NULL)
17661 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17662 cur = cur->dw_loc_next;
17663 }
17664
17665 nops.traverse<void *, free_loc_descr> (NULL);
17666 }
17667
17668
17669 struct dwarf_procedure_info;
17670
17671 /* Helper structure for location descriptions generation. */
17672 struct loc_descr_context
17673 {
17674 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17675 NULL_TREE if DW_OP_push_object_address is invalid for this location
17676 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17677 tree context_type;
17678 /* The ..._DECL node that should be translated as a
17679 DW_OP_push_object_address operation. */
17680 tree base_decl;
17681 /* Information about the DWARF procedure we are currently generating. NULL if
17682 we are not generating a DWARF procedure. */
17683 struct dwarf_procedure_info *dpi;
17684 /* True if an integral PLACEHOLDER_EXPR stands for the first argument passed
17685 by the consumer.  Used for DW_TAG_generic_subrange attributes. */
17686 bool placeholder_arg;
17687 /* True if PLACEHOLDER_EXPR has been seen. */
17688 bool placeholder_seen;
17689 };
17690
17691 /* DWARF procedures generation
17692
17693 DWARF expressions (aka. location descriptions) are used to encode values that
17694 can vary, such as sizes or offsets.  Such computations can have redundant parts
17695 that can be factorized in order to reduce the size of the output debug
17696 information. This is the whole point of DWARF procedures.
17697
17698 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17699 already factorized into functions ("size functions") in order to handle very
17700 big and complex types. Such functions are quite simple: they have integral
17701 arguments, they return an integral result and their body contains only a
17702 return statement with arithmetic expressions. This is the only kind of
17703 function we are interested in translating into DWARF procedures here.
17704
17705 DWARF expressions and DWARF procedures are executed using a stack, so we have
17706 to define some calling convention for them to interact. Let's say that:
17707
17708 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17709 all arguments in reverse order (right-to-left) so that when the DWARF
17710 procedure execution starts, the first argument is the top of the stack.
17711
17712 - Then, when returning, the DWARF procedure must have consumed all arguments
17713 on the stack, must have pushed the result and touched nothing else.
17714
17715 - Each integral argument and the result have integral types that can be held
17716 in a single stack slot.
17717
17718 - We call "frame offset" the number of stack slots that are "under DWARF
17719 procedure control": it includes the arguments slots, the temporaries and
17720 the result slot. Thus, it is equal to the number of arguments when the
17721 procedure execution starts and must be equal to one (the result) when it
17722 returns. */
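/* As a small illustrative sketch of this convention (not taken from any
particular size function): for a procedure computing A + B, the caller pushes
B, then A (so A ends up on top of the stack), and performs the call; when the
procedure returns, those two argument slots have been replaced by a single
slot holding A + B.  */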
17723
17724 /* Helper structure used when generating operations for a DWARF procedure. */
17725 struct dwarf_procedure_info
17726 {
17727 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17728 currently translated. */
17729 tree fndecl;
17730 /* The number of arguments FNDECL takes. */
17731 unsigned args_count;
17732 };
17733
17734 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17735 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17736 equate it to this DIE. */
17737
17738 static dw_die_ref
17739 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17740 dw_die_ref parent_die)
17741 {
17742 dw_die_ref dwarf_proc_die;
17743
17744 if ((dwarf_version < 3 && dwarf_strict)
17745 || location == NULL)
17746 return NULL;
17747
17748 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17749 if (fndecl)
17750 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17751 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17752 return dwarf_proc_die;
17753 }
17754
17755 /* Return whether TYPE is a supported type as a DWARF procedure argument
17756 type or return type (we handle only scalar types and pointer types that
17757 aren't wider than the DWARF expression evaluation stack).  */
17758
17759 static bool
17760 is_handled_procedure_type (tree type)
17761 {
17762 return ((INTEGRAL_TYPE_P (type)
17763 || TREE_CODE (type) == OFFSET_TYPE
17764 || TREE_CODE (type) == POINTER_TYPE)
17765 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17766 }
17767
17768 /* Helper for resolve_args_picking: do the same but stop when coming across
17769 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17770 offset *before* evaluating the corresponding operation. */
17771
17772 static bool
17773 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17774 struct dwarf_procedure_info *dpi,
17775 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17776 {
17777 /* The "frame_offset" identifier is already used to name a macro... */
17778 unsigned frame_offset_ = initial_frame_offset;
17779 dw_loc_descr_ref l;
17780
17781 for (l = loc; l != NULL;)
17782 {
17783 bool existed;
17784 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17785
17786 /* If we already met this node, there is nothing to compute anymore. */
17787 if (existed)
17788 {
17789 /* Make sure that the stack size is consistent wherever the execution
17790 flow comes from. */
17791 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17792 break;
17793 }
17794 l_frame_offset = frame_offset_;
17795
17796 /* If needed, relocate the picking offset with respect to the frame
17797 offset. */
17798 if (l->frame_offset_rel)
17799 {
17800 unsigned HOST_WIDE_INT off;
17801 switch (l->dw_loc_opc)
17802 {
17803 case DW_OP_pick:
17804 off = l->dw_loc_oprnd1.v.val_unsigned;
17805 break;
17806 case DW_OP_dup:
17807 off = 0;
17808 break;
17809 case DW_OP_over:
17810 off = 1;
17811 break;
17812 default:
17813 gcc_unreachable ();
17814 }
17815 /* frame_offset_ is the size of the current stack frame, including
17816 incoming arguments. Besides, the arguments are pushed
17817 right-to-left. Thus, in order to access the Nth argument from
17818 this operation node, the picking has to skip temporaries *plus*
17819 one stack slot per argument (0 for the first one, 1 for the second
17820 one, etc.).
17821
17822 The targeted argument number (N) is already set as the operand,
17823 and the number of temporaries can be computed with:
17824 frame_offset_ - dpi->args_count */
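/* A worked example with illustrative numbers: in a procedure with two
arguments, picking argument 0 before any temporary has been pushed
(frame_offset_ == 2) gives OFF = 0 + 2 - 2 = 0 and thus DW_OP_dup, while
picking argument 1 once one temporary sits on the stack (frame_offset_ == 3)
gives OFF = 1 + 3 - 2 = 2 and thus DW_OP_pick 2.  */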
17825 off += frame_offset_ - dpi->args_count;
17826
17827 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17828 if (off > 255)
17829 return false;
17830
17831 if (off == 0)
17832 {
17833 l->dw_loc_opc = DW_OP_dup;
17834 l->dw_loc_oprnd1.v.val_unsigned = 0;
17835 }
17836 else if (off == 1)
17837 {
17838 l->dw_loc_opc = DW_OP_over;
17839 l->dw_loc_oprnd1.v.val_unsigned = 0;
17840 }
17841 else
17842 {
17843 l->dw_loc_opc = DW_OP_pick;
17844 l->dw_loc_oprnd1.v.val_unsigned = off;
17845 }
17846 }
17847
17848 /* Update frame_offset according to the effect the current operation has
17849 on the stack. */
17850 switch (l->dw_loc_opc)
17851 {
17852 case DW_OP_deref:
17853 case DW_OP_swap:
17854 case DW_OP_rot:
17855 case DW_OP_abs:
17856 case DW_OP_neg:
17857 case DW_OP_not:
17858 case DW_OP_plus_uconst:
17859 case DW_OP_skip:
17860 case DW_OP_reg0:
17861 case DW_OP_reg1:
17862 case DW_OP_reg2:
17863 case DW_OP_reg3:
17864 case DW_OP_reg4:
17865 case DW_OP_reg5:
17866 case DW_OP_reg6:
17867 case DW_OP_reg7:
17868 case DW_OP_reg8:
17869 case DW_OP_reg9:
17870 case DW_OP_reg10:
17871 case DW_OP_reg11:
17872 case DW_OP_reg12:
17873 case DW_OP_reg13:
17874 case DW_OP_reg14:
17875 case DW_OP_reg15:
17876 case DW_OP_reg16:
17877 case DW_OP_reg17:
17878 case DW_OP_reg18:
17879 case DW_OP_reg19:
17880 case DW_OP_reg20:
17881 case DW_OP_reg21:
17882 case DW_OP_reg22:
17883 case DW_OP_reg23:
17884 case DW_OP_reg24:
17885 case DW_OP_reg25:
17886 case DW_OP_reg26:
17887 case DW_OP_reg27:
17888 case DW_OP_reg28:
17889 case DW_OP_reg29:
17890 case DW_OP_reg30:
17891 case DW_OP_reg31:
17892 case DW_OP_bregx:
17893 case DW_OP_piece:
17894 case DW_OP_deref_size:
17895 case DW_OP_nop:
17896 case DW_OP_bit_piece:
17897 case DW_OP_implicit_value:
17898 case DW_OP_stack_value:
17899 break;
17900
17901 case DW_OP_addr:
17902 case DW_OP_const1u:
17903 case DW_OP_const1s:
17904 case DW_OP_const2u:
17905 case DW_OP_const2s:
17906 case DW_OP_const4u:
17907 case DW_OP_const4s:
17908 case DW_OP_const8u:
17909 case DW_OP_const8s:
17910 case DW_OP_constu:
17911 case DW_OP_consts:
17912 case DW_OP_dup:
17913 case DW_OP_over:
17914 case DW_OP_pick:
17915 case DW_OP_lit0:
17916 case DW_OP_lit1:
17917 case DW_OP_lit2:
17918 case DW_OP_lit3:
17919 case DW_OP_lit4:
17920 case DW_OP_lit5:
17921 case DW_OP_lit6:
17922 case DW_OP_lit7:
17923 case DW_OP_lit8:
17924 case DW_OP_lit9:
17925 case DW_OP_lit10:
17926 case DW_OP_lit11:
17927 case DW_OP_lit12:
17928 case DW_OP_lit13:
17929 case DW_OP_lit14:
17930 case DW_OP_lit15:
17931 case DW_OP_lit16:
17932 case DW_OP_lit17:
17933 case DW_OP_lit18:
17934 case DW_OP_lit19:
17935 case DW_OP_lit20:
17936 case DW_OP_lit21:
17937 case DW_OP_lit22:
17938 case DW_OP_lit23:
17939 case DW_OP_lit24:
17940 case DW_OP_lit25:
17941 case DW_OP_lit26:
17942 case DW_OP_lit27:
17943 case DW_OP_lit28:
17944 case DW_OP_lit29:
17945 case DW_OP_lit30:
17946 case DW_OP_lit31:
17947 case DW_OP_breg0:
17948 case DW_OP_breg1:
17949 case DW_OP_breg2:
17950 case DW_OP_breg3:
17951 case DW_OP_breg4:
17952 case DW_OP_breg5:
17953 case DW_OP_breg6:
17954 case DW_OP_breg7:
17955 case DW_OP_breg8:
17956 case DW_OP_breg9:
17957 case DW_OP_breg10:
17958 case DW_OP_breg11:
17959 case DW_OP_breg12:
17960 case DW_OP_breg13:
17961 case DW_OP_breg14:
17962 case DW_OP_breg15:
17963 case DW_OP_breg16:
17964 case DW_OP_breg17:
17965 case DW_OP_breg18:
17966 case DW_OP_breg19:
17967 case DW_OP_breg20:
17968 case DW_OP_breg21:
17969 case DW_OP_breg22:
17970 case DW_OP_breg23:
17971 case DW_OP_breg24:
17972 case DW_OP_breg25:
17973 case DW_OP_breg26:
17974 case DW_OP_breg27:
17975 case DW_OP_breg28:
17976 case DW_OP_breg29:
17977 case DW_OP_breg30:
17978 case DW_OP_breg31:
17979 case DW_OP_fbreg:
17980 case DW_OP_push_object_address:
17981 case DW_OP_call_frame_cfa:
17982 case DW_OP_GNU_variable_value:
17983 case DW_OP_GNU_addr_index:
17984 case DW_OP_GNU_const_index:
17985 ++frame_offset_;
17986 break;
17987
17988 case DW_OP_drop:
17989 case DW_OP_xderef:
17990 case DW_OP_and:
17991 case DW_OP_div:
17992 case DW_OP_minus:
17993 case DW_OP_mod:
17994 case DW_OP_mul:
17995 case DW_OP_or:
17996 case DW_OP_plus:
17997 case DW_OP_shl:
17998 case DW_OP_shr:
17999 case DW_OP_shra:
18000 case DW_OP_xor:
18001 case DW_OP_bra:
18002 case DW_OP_eq:
18003 case DW_OP_ge:
18004 case DW_OP_gt:
18005 case DW_OP_le:
18006 case DW_OP_lt:
18007 case DW_OP_ne:
18008 case DW_OP_regx:
18009 case DW_OP_xderef_size:
18010 --frame_offset_;
18011 break;
18012
18013 case DW_OP_call2:
18014 case DW_OP_call4:
18015 case DW_OP_call_ref:
18016 {
18017 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
18018 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
18019
18020 if (stack_usage == NULL)
18021 return false;
18022 frame_offset_ += *stack_usage;
18023 break;
18024 }
18025
18026 case DW_OP_implicit_pointer:
18027 case DW_OP_entry_value:
18028 case DW_OP_const_type:
18029 case DW_OP_regval_type:
18030 case DW_OP_deref_type:
18031 case DW_OP_convert:
18032 case DW_OP_reinterpret:
18033 case DW_OP_form_tls_address:
18034 case DW_OP_GNU_push_tls_address:
18035 case DW_OP_GNU_uninit:
18036 case DW_OP_GNU_encoded_addr:
18037 case DW_OP_GNU_implicit_pointer:
18038 case DW_OP_GNU_entry_value:
18039 case DW_OP_GNU_const_type:
18040 case DW_OP_GNU_regval_type:
18041 case DW_OP_GNU_deref_type:
18042 case DW_OP_GNU_convert:
18043 case DW_OP_GNU_reinterpret:
18044 case DW_OP_GNU_parameter_ref:
18045 /* loc_list_from_tree will probably not output these operations for
18046 size functions, so assume they will not appear here. */
18047 /* Fall through... */
18048
18049 default:
18050 gcc_unreachable ();
18051 }
18052
18053 /* Now, follow the control flow (except subroutine calls). */
18054 switch (l->dw_loc_opc)
18055 {
18056 case DW_OP_bra:
18057 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
18058 frame_offsets))
18059 return false;
18060 /* Fall through. */
18061
18062 case DW_OP_skip:
18063 l = l->dw_loc_oprnd1.v.val_loc;
18064 break;
18065
18066 case DW_OP_stack_value:
18067 return true;
18068
18069 default:
18070 l = l->dw_loc_next;
18071 break;
18072 }
18073 }
18074
18075 return true;
18076 }
18077
18078 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18079 operations) in order to resolve the operand of DW_OP_pick operations that
18080 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18081 offset *before* LOC is executed.  Return true if all relocations were
18082 successful. */
18083
18084 static bool
18085 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18086 struct dwarf_procedure_info *dpi)
18087 {
18088 /* Associate to all visited operations the frame offset *before* evaluating
18089 this operation. */
18090 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18091
18092 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18093 frame_offsets);
18094 }
18095
18096 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18097 Return NULL if it is not possible. */
18098
18099 static dw_die_ref
18100 function_to_dwarf_procedure (tree fndecl)
18101 {
18102 struct loc_descr_context ctx;
18103 struct dwarf_procedure_info dpi;
18104 dw_die_ref dwarf_proc_die;
18105 tree tree_body = DECL_SAVED_TREE (fndecl);
18106 dw_loc_descr_ref loc_body, epilogue;
18107
18108 tree cursor;
18109 unsigned i;
18110
18111 /* Do not generate multiple DWARF procedures for the same function
18112 declaration. */
18113 dwarf_proc_die = lookup_decl_die (fndecl);
18114 if (dwarf_proc_die != NULL)
18115 return dwarf_proc_die;
18116
18117 /* DWARF procedures are available starting with the DWARFv3 standard. */
18118 if (dwarf_version < 3 && dwarf_strict)
18119 return NULL;
18120
18121 /* We handle only functions for which we still have a body, that return a
18122 supported type and that take arguments with supported types.  Note that
18123 there is no point in translating functions that return nothing.  */
18124 if (tree_body == NULL_TREE
18125 || DECL_RESULT (fndecl) == NULL_TREE
18126 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18127 return NULL;
18128
18129 for (cursor = DECL_ARGUMENTS (fndecl);
18130 cursor != NULL_TREE;
18131 cursor = TREE_CHAIN (cursor))
18132 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18133 return NULL;
18134
18135 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18136 if (TREE_CODE (tree_body) != RETURN_EXPR)
18137 return NULL;
18138 tree_body = TREE_OPERAND (tree_body, 0);
18139 if (TREE_CODE (tree_body) != MODIFY_EXPR
18140 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18141 return NULL;
18142 tree_body = TREE_OPERAND (tree_body, 1);
18143
18144 /* Try to translate the body expression itself. Note that this will probably
18145 cause an infinite recursion if its call graph has a cycle. This is very
18146 unlikely for size functions, however, so don't bother with such things at
18147 the moment. */
18148 ctx.context_type = NULL_TREE;
18149 ctx.base_decl = NULL_TREE;
18150 ctx.dpi = &dpi;
18151 ctx.placeholder_arg = false;
18152 ctx.placeholder_seen = false;
18153 dpi.fndecl = fndecl;
18154 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18155 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18156 if (!loc_body)
18157 return NULL;
18158
18159 /* After evaluating all operands in "loc_body", we should still have on the
18160 stack all arguments plus the desired function result (top of the stack).
18161 Generate code in order to keep only the result in our stack frame. */
18162 epilogue = NULL;
18163 for (i = 0; i < dpi.args_count; ++i)
18164 {
18165 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18166 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18167 op_couple->dw_loc_next->dw_loc_next = epilogue;
18168 epilogue = op_couple;
18169 }
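/* For instance, with two arguments the epilogue built above is
DW_OP_swap, DW_OP_drop, DW_OP_swap, DW_OP_drop: each pair removes the
stack slot just below the result, so only the result slot remains.  */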
18170 add_loc_descr (&loc_body, epilogue);
18171 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18172 return NULL;
18173
18174 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
18175 earlier because they were considered useful.  Now that there is an epilogue
18176 after them, they are not anymore, so give it another try.  */
18177 loc_descr_without_nops (loc_body);
18178
18179 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18180 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
18181 though, given that size functions do not come from source, so they should
18182 not have a dedicated DW_TAG_subprogram DIE. */
18183 dwarf_proc_die
18184 = new_dwarf_proc_die (loc_body, fndecl,
18185 get_context_die (DECL_CONTEXT (fndecl)));
18186
18187 /* The called DWARF procedure consumes one stack slot per argument and
18188 returns one stack slot. */
18189 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18190
18191 return dwarf_proc_die;
18192 }
18193
18194
18195 /* Generate a DWARF location list representing LOC.
18196 If WANT_ADDRESS is false, an expression computing the value of LOC will be
18197 returned.  If WANT_ADDRESS is 1, an expression computing the address of LOC
18198 will be returned; if WANT_ADDRESS is 2, an expression computing an address
18199 usable in a location will be returned (i.e. DW_OP_reg can be used
18200 to refer to register values).
18201
18202 CONTEXT provides information to customize the location descriptions
18203 generation. Its context_type field specifies what type is implicitly
18204 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18205 will not be generated.
18206
18207 Its DPI field determines whether we are generating a DWARF expression for a
18208 DWARF procedure, so PARM_DECL references are processed specifically.
18209
18210 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18211 and dpi fields were null. */
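/* A rough sketch of the WANT_ADDRESS distinction: for a variable whose
location is a memory reference, WANT_ADDRESS == 1 yields an expression that
computes the address of that memory, whereas WANT_ADDRESS == 0 appends a
DW_OP_deref (or DW_OP_deref_size) so that the value itself is computed;
see the HAVE_ADDRESS handling at the end of this function.  */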
18212
18213 static dw_loc_list_ref
18214 loc_list_from_tree_1 (tree loc, int want_address,
18215 struct loc_descr_context *context)
18216 {
18217 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18218 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18219 int have_address = 0;
18220 enum dwarf_location_atom op;
18221
18222 /* ??? Most of the time we do not take proper care of sign/zero
18223 extending the values.  Hopefully this won't be a real
18224 problem...  */
18225
18226 if (context != NULL
18227 && context->base_decl == loc
18228 && want_address == 0)
18229 {
18230 if (dwarf_version >= 3 || !dwarf_strict)
18231 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18232 NULL, 0, NULL, 0, NULL);
18233 else
18234 return NULL;
18235 }
18236
18237 switch (TREE_CODE (loc))
18238 {
18239 case ERROR_MARK:
18240 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18241 return 0;
18242
18243 case PLACEHOLDER_EXPR:
18244 /* This case involves extracting fields from an object to determine the
18245 position of other fields. It is supposed to appear only as the first
18246 operand of COMPONENT_REF nodes and to reference precisely the type
18247 that the context allows. */
18248 if (context != NULL
18249 && TREE_TYPE (loc) == context->context_type
18250 && want_address >= 1)
18251 {
18252 if (dwarf_version >= 3 || !dwarf_strict)
18253 {
18254 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18255 have_address = 1;
18256 break;
18257 }
18258 else
18259 return NULL;
18260 }
18261 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18262 the single argument passed by the consumer.  */
18263 else if (context != NULL
18264 && context->placeholder_arg
18265 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18266 && want_address == 0)
18267 {
18268 ret = new_loc_descr (DW_OP_pick, 0, 0);
18269 ret->frame_offset_rel = 1;
18270 context->placeholder_seen = true;
18271 break;
18272 }
18273 else
18274 expansion_failed (loc, NULL_RTX,
18275 "PLACEHOLDER_EXPR for an unexpected type");
18276 break;
18277
18278 case CALL_EXPR:
18279 {
18280 const int nargs = call_expr_nargs (loc);
18281 tree callee = get_callee_fndecl (loc);
18282 int i;
18283 dw_die_ref dwarf_proc;
18284
18285 if (callee == NULL_TREE)
18286 goto call_expansion_failed;
18287
18288 /* We handle only functions that return an integer. */
18289 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18290 goto call_expansion_failed;
18291
18292 dwarf_proc = function_to_dwarf_procedure (callee);
18293 if (dwarf_proc == NULL)
18294 goto call_expansion_failed;
18295
18296 /* Evaluate arguments right-to-left so that the first argument will
18297 be the top-most one on the stack. */
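/* E.g. a call F (A, B) is emitted roughly as:
<push B> <push A> DW_OP_call4 <DIE of F's DWARF procedure>.  */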
18298 for (i = nargs - 1; i >= 0; --i)
18299 {
18300 dw_loc_descr_ref loc_descr
18301 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18302 context);
18303
18304 if (loc_descr == NULL)
18305 goto call_expansion_failed;
18306
18307 add_loc_descr (&ret, loc_descr);
18308 }
18309
18310 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18311 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18312 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18313 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18314 add_loc_descr (&ret, ret1);
18315 break;
18316
18317 call_expansion_failed:
18318 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18319 /* There are no opcodes for these operations. */
18320 return 0;
18321 }
18322
18323 case PREINCREMENT_EXPR:
18324 case PREDECREMENT_EXPR:
18325 case POSTINCREMENT_EXPR:
18326 case POSTDECREMENT_EXPR:
18327 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18328 /* There are no opcodes for these operations. */
18329 return 0;
18330
18331 case ADDR_EXPR:
18332 /* If we already want an address, see if there is INDIRECT_REF inside
18333 e.g. for &this->field. */
18334 if (want_address)
18335 {
18336 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18337 (loc, want_address == 2, context);
18338 if (list_ret)
18339 have_address = 1;
18340 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18341 && (ret = cst_pool_loc_descr (loc)))
18342 have_address = 1;
18343 }
18344 /* Otherwise, process the argument and look for the address. */
18345 if (!list_ret && !ret)
18346 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18347 else
18348 {
18349 if (want_address)
18350 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18351 return NULL;
18352 }
18353 break;
18354
18355 case VAR_DECL:
18356 if (DECL_THREAD_LOCAL_P (loc))
18357 {
18358 rtx rtl;
18359 enum dwarf_location_atom tls_op;
18360 enum dtprel_bool dtprel = dtprel_false;
18361
18362 if (targetm.have_tls)
18363 {
18364 /* If this is not defined, we have no way to emit the
18365 data. */
18366 if (!targetm.asm_out.output_dwarf_dtprel)
18367 return 0;
18368
18369 /* The way DW_OP_GNU_push_tls_address is specified, we
18370 can only look up addresses of objects in the current
18371 module. We used DW_OP_addr as first op, but that's
18372 wrong, because DW_OP_addr is relocated by the debug
18373 info consumer, while DW_OP_GNU_push_tls_address
18374 operand shouldn't be. */
18375 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18376 return 0;
18377 dtprel = dtprel_true;
18378 /* We check for DWARF 5 here because gdb did not implement
18379 DW_OP_form_tls_address until after 7.12. */
18380 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18381 : DW_OP_GNU_push_tls_address);
18382 }
18383 else
18384 {
18385 if (!targetm.emutls.debug_form_tls_address
18386 || !(dwarf_version >= 3 || !dwarf_strict))
18387 return 0;
18388 /* We stuffed the control variable into the DECL_VALUE_EXPR
18389 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18390 no longer appear in gimple code.  We used the control
18391 variable specifically so that we could pick it up here.  */
18392 loc = DECL_VALUE_EXPR (loc);
18393 tls_op = DW_OP_form_tls_address;
18394 }
18395
18396 rtl = rtl_for_decl_location (loc);
18397 if (rtl == NULL_RTX)
18398 return 0;
18399
18400 if (!MEM_P (rtl))
18401 return 0;
18402 rtl = XEXP (rtl, 0);
18403 if (! CONSTANT_P (rtl))
18404 return 0;
18405
18406 ret = new_addr_loc_descr (rtl, dtprel);
18407 ret1 = new_loc_descr (tls_op, 0, 0);
18408 add_loc_descr (&ret, ret1);
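/* In either case the expression built here is a DW_OP_addr of the symbol
(emitted dtp-relative for native TLS) followed by the TLS operation
selected above.  */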
18409
18410 have_address = 1;
18411 break;
18412 }
18413 /* FALLTHRU */
18414
18415 case PARM_DECL:
18416 if (context != NULL && context->dpi != NULL
18417 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18418 {
18419 /* We are generating code for a DWARF procedure and we want to access
18420 one of its arguments: find the appropriate argument offset and let
18421 the resolve_args_picking pass compute the offset that complies
18422 with the stack frame size. */
18423 unsigned i = 0;
18424 tree cursor;
18425
18426 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18427 cursor != NULL_TREE && cursor != loc;
18428 cursor = TREE_CHAIN (cursor), ++i)
18429 ;
18430 /* If we are translating a DWARF procedure, all referenced parameters
18431 must belong to the current function. */
18432 gcc_assert (cursor != NULL_TREE);
18433
18434 ret = new_loc_descr (DW_OP_pick, i, 0);
18435 ret->frame_offset_rel = 1;
18436 break;
18437 }
18438 /* FALLTHRU */
18439
18440 case RESULT_DECL:
18441 if (DECL_HAS_VALUE_EXPR_P (loc))
18442 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18443 want_address, context);
18444 /* FALLTHRU */
18445
18446 case FUNCTION_DECL:
18447 {
18448 rtx rtl;
18449 var_loc_list *loc_list = lookup_decl_loc (loc);
18450
18451 if (loc_list && loc_list->first)
18452 {
18453 list_ret = dw_loc_list (loc_list, loc, want_address);
18454 have_address = want_address != 0;
18455 break;
18456 }
18457 rtl = rtl_for_decl_location (loc);
18458 if (rtl == NULL_RTX)
18459 {
18460 if (TREE_CODE (loc) != FUNCTION_DECL
18461 && early_dwarf
18462 && current_function_decl
18463 && want_address != 1
18464 && ! DECL_IGNORED_P (loc)
18465 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18466 || POINTER_TYPE_P (TREE_TYPE (loc)))
18467 && DECL_CONTEXT (loc) == current_function_decl
18468 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18469 <= DWARF2_ADDR_SIZE))
18470 {
18471 dw_die_ref ref = lookup_decl_die (loc);
18472 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18473 if (ref)
18474 {
18475 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18476 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18477 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18478 }
18479 else
18480 {
18481 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18482 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18483 }
18484 break;
18485 }
18486 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18487 return 0;
18488 }
18489 else if (CONST_INT_P (rtl))
18490 {
18491 HOST_WIDE_INT val = INTVAL (rtl);
18492 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18493 val &= GET_MODE_MASK (DECL_MODE (loc));
18494 ret = int_loc_descriptor (val);
18495 }
18496 else if (GET_CODE (rtl) == CONST_STRING)
18497 {
18498 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18499 return 0;
18500 }
18501 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18502 ret = new_addr_loc_descr (rtl, dtprel_false);
18503 else
18504 {
18505 machine_mode mode, mem_mode;
18506
18507 /* Certain constructs can only be represented at top-level. */
18508 if (want_address == 2)
18509 {
18510 ret = loc_descriptor (rtl, VOIDmode,
18511 VAR_INIT_STATUS_INITIALIZED);
18512 have_address = 1;
18513 }
18514 else
18515 {
18516 mode = GET_MODE (rtl);
18517 mem_mode = VOIDmode;
18518 if (MEM_P (rtl))
18519 {
18520 mem_mode = mode;
18521 mode = get_address_mode (rtl);
18522 rtl = XEXP (rtl, 0);
18523 have_address = 1;
18524 }
18525 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18526 VAR_INIT_STATUS_INITIALIZED);
18527 }
18528 if (!ret)
18529 expansion_failed (loc, rtl,
18530 "failed to produce loc descriptor for rtl");
18531 }
18532 }
18533 break;
18534
18535 case MEM_REF:
18536 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18537 {
18538 have_address = 1;
18539 goto do_plus;
18540 }
18541 /* Fallthru. */
18542 case INDIRECT_REF:
18543 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18544 have_address = 1;
18545 break;
18546
18547 case TARGET_MEM_REF:
18548 case SSA_NAME:
18549 case DEBUG_EXPR_DECL:
18550 return NULL;
18551
18552 case COMPOUND_EXPR:
18553 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18554 context);
18555
18556 CASE_CONVERT:
18557 case VIEW_CONVERT_EXPR:
18558 case SAVE_EXPR:
18559 case MODIFY_EXPR:
18560 case NON_LVALUE_EXPR:
18561 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18562 context);
18563
18564 case COMPONENT_REF:
18565 case BIT_FIELD_REF:
18566 case ARRAY_REF:
18567 case ARRAY_RANGE_REF:
18568 case REALPART_EXPR:
18569 case IMAGPART_EXPR:
18570 {
18571 tree obj, offset;
18572 poly_int64 bitsize, bitpos, bytepos;
18573 machine_mode mode;
18574 int unsignedp, reversep, volatilep = 0;
18575
18576 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18577 &unsignedp, &reversep, &volatilep);
18578
18579 gcc_assert (obj != loc);
18580
18581 list_ret = loc_list_from_tree_1 (obj,
18582 want_address == 2
18583 && known_eq (bitpos, 0)
18584 && !offset ? 2 : 1,
18585 context);
18586 /* TODO: We could extract the value of a small expression via shifting even
18587 for a nonzero bitpos.  */
18588 if (list_ret == 0)
18589 return 0;
18590 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18591 || !multiple_p (bitsize, BITS_PER_UNIT))
18592 {
18593 expansion_failed (loc, NULL_RTX,
18594 "bitfield access");
18595 return 0;
18596 }
18597
18598 if (offset != NULL_TREE)
18599 {
18600 /* Variable offset. */
18601 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18602 if (list_ret1 == 0)
18603 return 0;
18604 add_loc_list (&list_ret, list_ret1);
18605 if (!list_ret)
18606 return 0;
18607 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18608 }
18609
18610 HOST_WIDE_INT value;
18611 if (bytepos.is_constant (&value) && value > 0)
18612 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18613 value, 0));
18614 else if (maybe_ne (bytepos, 0))
18615 loc_list_plus_const (list_ret, bytepos);
18616
18617 have_address = 1;
18618 break;
18619 }
18620
18621 case INTEGER_CST:
18622 if ((want_address || !tree_fits_shwi_p (loc))
18623 && (ret = cst_pool_loc_descr (loc)))
18624 have_address = 1;
18625 else if (want_address == 2
18626 && tree_fits_shwi_p (loc)
18627 && (ret = address_of_int_loc_descriptor
18628 (int_size_in_bytes (TREE_TYPE (loc)),
18629 tree_to_shwi (loc))))
18630 have_address = 1;
18631 else if (tree_fits_shwi_p (loc))
18632 ret = int_loc_descriptor (tree_to_shwi (loc));
18633 else if (tree_fits_uhwi_p (loc))
18634 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18635 else
18636 {
18637 expansion_failed (loc, NULL_RTX,
18638 "Integer operand is not host integer");
18639 return 0;
18640 }
18641 break;
18642
18643 case POLY_INT_CST:
18644 {
18645 if (want_address)
18646 {
18647 expansion_failed (loc, NULL_RTX,
18648 "constant address with a runtime component");
18649 return 0;
18650 }
18651 poly_int64 value;
18652 if (!poly_int_tree_p (loc, &value))
18653 {
18654 expansion_failed (loc, NULL_RTX, "constant too big");
18655 return 0;
18656 }
18657 ret = int_loc_descriptor (value);
18658 }
18659 break;
18660
18661 case CONSTRUCTOR:
18662 case REAL_CST:
18663 case STRING_CST:
18664 case COMPLEX_CST:
18665 if ((ret = cst_pool_loc_descr (loc)))
18666 have_address = 1;
18667 else if (TREE_CODE (loc) == CONSTRUCTOR)
18668 {
18669 tree type = TREE_TYPE (loc);
18670 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18671 unsigned HOST_WIDE_INT offset = 0;
18672 unsigned HOST_WIDE_INT cnt;
18673 constructor_elt *ce;
18674
18675 if (TREE_CODE (type) == RECORD_TYPE)
18676 {
18677 /* This is very limited, but it's enough to output
18678 pointers to member functions, as long as the
18679 referenced function is defined in the current
18680 translation unit. */
18681 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18682 {
18683 tree val = ce->value;
18684
18685 tree field = ce->index;
18686
18687 if (val)
18688 STRIP_NOPS (val);
18689
18690 if (!field || DECL_BIT_FIELD (field))
18691 {
18692 expansion_failed (loc, NULL_RTX,
18693 "bitfield in record type constructor");
18694 size = offset = (unsigned HOST_WIDE_INT)-1;
18695 ret = NULL;
18696 break;
18697 }
18698
18699 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18700 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18701 gcc_assert (pos + fieldsize <= size);
18702 if (pos < offset)
18703 {
18704 expansion_failed (loc, NULL_RTX,
18705 "out-of-order fields in record constructor");
18706 size = offset = (unsigned HOST_WIDE_INT)-1;
18707 ret = NULL;
18708 break;
18709 }
18710 if (pos > offset)
18711 {
18712 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18713 add_loc_descr (&ret, ret1);
18714 offset = pos;
18715 }
18716 if (val && fieldsize != 0)
18717 {
18718 ret1 = loc_descriptor_from_tree (val, want_address, context);
18719 if (!ret1)
18720 {
18721 expansion_failed (loc, NULL_RTX,
18722 "unsupported expression in field");
18723 size = offset = (unsigned HOST_WIDE_INT)-1;
18724 ret = NULL;
18725 break;
18726 }
18727 add_loc_descr (&ret, ret1);
18728 }
18729 if (fieldsize)
18730 {
18731 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18732 add_loc_descr (&ret, ret1);
18733 offset = pos + fieldsize;
18734 }
18735 }
18736
18737 if (offset != size)
18738 {
18739 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18740 add_loc_descr (&ret, ret1);
18741 offset = size;
18742 }
18743
18744 have_address = !!want_address;
18745 }
18746 else
18747 expansion_failed (loc, NULL_RTX,
18748 "constructor of non-record type");
18749 }
18750 else
18751 /* We can construct small constants here using int_loc_descriptor. */
18752 expansion_failed (loc, NULL_RTX,
18753 "constructor or constant not in constant pool");
18754 break;
18755
18756 case TRUTH_AND_EXPR:
18757 case TRUTH_ANDIF_EXPR:
18758 case BIT_AND_EXPR:
18759 op = DW_OP_and;
18760 goto do_binop;
18761
18762 case TRUTH_XOR_EXPR:
18763 case BIT_XOR_EXPR:
18764 op = DW_OP_xor;
18765 goto do_binop;
18766
18767 case TRUTH_OR_EXPR:
18768 case TRUTH_ORIF_EXPR:
18769 case BIT_IOR_EXPR:
18770 op = DW_OP_or;
18771 goto do_binop;
18772
18773 case FLOOR_DIV_EXPR:
18774 case CEIL_DIV_EXPR:
18775 case ROUND_DIV_EXPR:
18776 case TRUNC_DIV_EXPR:
18777 case EXACT_DIV_EXPR:
18778 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18779 return 0;
18780 op = DW_OP_div;
18781 goto do_binop;
18782
18783 case MINUS_EXPR:
18784 op = DW_OP_minus;
18785 goto do_binop;
18786
18787 case FLOOR_MOD_EXPR:
18788 case CEIL_MOD_EXPR:
18789 case ROUND_MOD_EXPR:
18790 case TRUNC_MOD_EXPR:
18791 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18792 {
18793 op = DW_OP_mod;
18794 goto do_binop;
18795 }
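/* For the signed case, DW_OP_mod (used only for unsigned types above) is
avoided: compute A - (A / B) * B instead, using the sequence
DW_OP_over, DW_OP_over, DW_OP_div, DW_OP_mul, DW_OP_minus built below.  */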
18796 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18797 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18798 if (list_ret == 0 || list_ret1 == 0)
18799 return 0;
18800
18801 add_loc_list (&list_ret, list_ret1);
18802 if (list_ret == 0)
18803 return 0;
18804 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18805 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18806 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18807 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18808 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18809 break;
18810
18811 case MULT_EXPR:
18812 op = DW_OP_mul;
18813 goto do_binop;
18814
18815 case LSHIFT_EXPR:
18816 op = DW_OP_shl;
18817 goto do_binop;
18818
18819 case RSHIFT_EXPR:
18820 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18821 goto do_binop;
18822
18823 case POINTER_PLUS_EXPR:
18824 case PLUS_EXPR:
18825 do_plus:
18826 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18827 {
18828 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18829 smarter to encode their opposite. The DW_OP_plus_uconst operation
18830 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18831 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18832 bytes, Y being the size of the operation that pushes the opposite
18833 of the addend. So let's choose the smallest representation. */
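/* E.g. an addend of -1 would need a ten-byte ULEB128 (for a 64-bit
HOST_WIDE_INT) with DW_OP_plus_uconst, whereas pushing its opposite costs
only "DW_OP_lit1; DW_OP_minus", i.e. two bytes.  */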
18834 const tree tree_addend = TREE_OPERAND (loc, 1);
18835 offset_int wi_addend;
18836 HOST_WIDE_INT shwi_addend;
18837 dw_loc_descr_ref loc_naddend;
18838
18839 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18840 if (list_ret == 0)
18841 return 0;
18842
18843 /* Try to get the literal to push. It is the opposite of the addend,
18844 so as we rely on wrapping during DWARF evaluation, first decode
18845 the literal as a "DWARF-sized" signed number. */
18846 wi_addend = wi::to_offset (tree_addend);
18847 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18848 shwi_addend = wi_addend.to_shwi ();
18849 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18850 ? int_loc_descriptor (-shwi_addend)
18851 : NULL;
18852
18853 if (loc_naddend != NULL
18854 && ((unsigned) size_of_uleb128 (shwi_addend)
18855 > size_of_loc_descr (loc_naddend)))
18856 {
18857 add_loc_descr_to_each (list_ret, loc_naddend);
18858 add_loc_descr_to_each (list_ret,
18859 new_loc_descr (DW_OP_minus, 0, 0));
18860 }
18861 else
18862 {
18863 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18864 {
18865 loc_naddend = loc_cur;
18866 loc_cur = loc_cur->dw_loc_next;
18867 ggc_free (loc_naddend);
18868 }
18869 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18870 }
18871 break;
18872 }
18873
18874 op = DW_OP_plus;
18875 goto do_binop;
18876
18877 case LE_EXPR:
18878 op = DW_OP_le;
18879 goto do_comp_binop;
18880
18881 case GE_EXPR:
18882 op = DW_OP_ge;
18883 goto do_comp_binop;
18884
18885 case LT_EXPR:
18886 op = DW_OP_lt;
18887 goto do_comp_binop;
18888
18889 case GT_EXPR:
18890 op = DW_OP_gt;
18891 goto do_comp_binop;
18892
18893 do_comp_binop:
18894 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18895 {
18896 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18897 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18898 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18899 TREE_CODE (loc));
18900 break;
18901 }
18902 else
18903 goto do_binop;
18904
18905 case EQ_EXPR:
18906 op = DW_OP_eq;
18907 goto do_binop;
18908
18909 case NE_EXPR:
18910 op = DW_OP_ne;
18911 goto do_binop;
18912
18913 do_binop:
18914 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18915 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18916 if (list_ret == 0 || list_ret1 == 0)
18917 return 0;
18918
18919 add_loc_list (&list_ret, list_ret1);
18920 if (list_ret == 0)
18921 return 0;
18922 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18923 break;
18924
18925 case TRUTH_NOT_EXPR:
18926 case BIT_NOT_EXPR:
18927 op = DW_OP_not;
18928 goto do_unop;
18929
18930 case ABS_EXPR:
18931 op = DW_OP_abs;
18932 goto do_unop;
18933
18934 case NEGATE_EXPR:
18935 op = DW_OP_neg;
18936 goto do_unop;
18937
18938 do_unop:
18939 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18940 if (list_ret == 0)
18941 return 0;
18942
18943 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18944 break;
18945
18946 case MIN_EXPR:
18947 case MAX_EXPR:
18948 {
18949 const enum tree_code code =
18950 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18951
18952 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18953 build2 (code, integer_type_node,
18954 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18955 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18956 }
18957
18958 /* fall through */
18959
18960 case COND_EXPR:
18961 {
18962 dw_loc_descr_ref lhs
18963 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18964 dw_loc_list_ref rhs
18965 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18966 dw_loc_descr_ref bra_node, jump_node, tmp;
18967
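/* A sketch of the expression layout generated below:
<condition> DW_OP_bra -> L1; <else-value> DW_OP_skip -> L2;
L1: <then-value>; L2: DW_OP_nop.  */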
18968 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18969 if (list_ret == 0 || lhs == 0 || rhs == 0)
18970 return 0;
18971
18972 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18973 add_loc_descr_to_each (list_ret, bra_node);
18974
18975 add_loc_list (&list_ret, rhs);
18976 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18977 add_loc_descr_to_each (list_ret, jump_node);
18978
18979 add_loc_descr_to_each (list_ret, lhs);
18980 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18981 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18982
18983 /* ??? Need a node to point the skip at. Use a nop. */
18984 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18985 add_loc_descr_to_each (list_ret, tmp);
18986 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18987 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18988 }
18989 break;
18990
18991 case FIX_TRUNC_EXPR:
18992 return 0;
18993
18994 default:
18995 /* Leave front-end specific codes as simply unknown. This comes
18996 up, for instance, with the C STMT_EXPR. */
18997 if ((unsigned int) TREE_CODE (loc)
18998 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18999 {
19000 expansion_failed (loc, NULL_RTX,
19001 "language specific tree node");
19002 return 0;
19003 }
19004
19005 /* Otherwise this is a generic code; we should just list all of
19006 these explicitly.  We forgot one.  */
19007 if (flag_checking)
19008 gcc_unreachable ();
19009
19010 /* In a release build, we want to degrade gracefully: better to
19011 generate incomplete debugging information than to crash. */
19012 return NULL;
19013 }
19014
19015 if (!ret && !list_ret)
19016 return 0;
19017
19018 if (want_address == 2 && !have_address
19019 && (dwarf_version >= 4 || !dwarf_strict))
19020 {
19021 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
19022 {
19023 expansion_failed (loc, NULL_RTX,
19024 "DWARF address size mismatch");
19025 return 0;
19026 }
19027 if (ret)
19028 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
19029 else
19030 add_loc_descr_to_each (list_ret,
19031 new_loc_descr (DW_OP_stack_value, 0, 0));
19032 have_address = 1;
19033 }
19034 /* Report a failure if we can't fill the request for an address.  */
19035 if (want_address && !have_address)
19036 {
19037 expansion_failed (loc, NULL_RTX,
19038 "Want address and only have value");
19039 return 0;
19040 }
19041
19042 gcc_assert (!ret || !list_ret);
19043
19044 /* If we've got an address and don't want one, dereference. */
19045 if (!want_address && have_address)
19046 {
19047 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
19048
19049 if (size > DWARF2_ADDR_SIZE || size == -1)
19050 {
19051 expansion_failed (loc, NULL_RTX,
19052 "DWARF address size mismatch");
19053 return 0;
19054 }
19055 else if (size == DWARF2_ADDR_SIZE)
19056 op = DW_OP_deref;
19057 else
19058 op = DW_OP_deref_size;
19059
19060 if (ret)
19061 add_loc_descr (&ret, new_loc_descr (op, size, 0));
19062 else
19063 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
19064 }
19065 if (ret)
19066 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
19067
19068 return list_ret;
19069 }
19070
19071 /* Likewise, but strip useless DW_OP_nop operations in the resulting
19072 expressions. */
19073
19074 static dw_loc_list_ref
19075 loc_list_from_tree (tree loc, int want_address,
19076 struct loc_descr_context *context)
19077 {
19078 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
19079
19080 for (dw_loc_list_ref loc_cur = result;
19081 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
19082 loc_descr_without_nops (loc_cur->expr);
19083 return result;
19084 }
19085
19086 /* Same as above, but return only a single location expression.  */
19087 static dw_loc_descr_ref
19088 loc_descriptor_from_tree (tree loc, int want_address,
19089 struct loc_descr_context *context)
19090 {
19091 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
19092 if (!ret)
19093 return NULL;
19094 if (ret->dw_loc_next)
19095 {
19096 expansion_failed (loc, NULL_RTX,
19097 "Location list where only loc descriptor needed");
19098 return NULL;
19099 }
19100 return ret->expr;
19101 }
19102
19103 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19104 pointer to the declared type for the relevant field variable, or return
19105 `integer_type_node' if the given node turns out to be an
19106 ERROR_MARK node. */
19107
19108 static inline tree
19109 field_type (const_tree decl)
19110 {
19111 tree type;
19112
19113 if (TREE_CODE (decl) == ERROR_MARK)
19114 return integer_type_node;
19115
19116 type = DECL_BIT_FIELD_TYPE (decl);
19117 if (type == NULL_TREE)
19118 type = TREE_TYPE (decl);
19119
19120 return type;
19121 }
19122
19123 /* Given a pointer to a tree node, return the alignment in bits for
19124 it, or else return BITS_PER_WORD if the node actually turns out to
19125 be an ERROR_MARK node. */
19126
19127 static inline unsigned
19128 simple_type_align_in_bits (const_tree type)
19129 {
19130 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19131 }
19132
19133 static inline unsigned
19134 simple_decl_align_in_bits (const_tree decl)
19135 {
19136 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19137 }
19138
19139 /* Return the result of rounding T up to ALIGN. */
19140
19141 static inline offset_int
19142 round_up_to_align (const offset_int &t, unsigned int align)
19143 {
19144 return wi::udiv_trunc (t + align - 1, align) * align;
19145 }
19146
19147 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19148 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19149 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19150 if we fail to return the size in one of these two forms. */
19151
19152 static dw_loc_descr_ref
19153 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19154 {
19155 tree tree_size;
19156 struct loc_descr_context ctx;
19157
19158 /* Prefer to return a constant integer, if possible.  */
19159 *cst_size = int_size_in_bytes (type);
19160 if (*cst_size != -1)
19161 return NULL;
19162
19163 ctx.context_type = const_cast<tree> (type);
19164 ctx.base_decl = NULL_TREE;
19165 ctx.dpi = NULL;
19166 ctx.placeholder_arg = false;
19167 ctx.placeholder_seen = false;
19168
19169 type = TYPE_MAIN_VARIANT (type);
19170 tree_size = TYPE_SIZE_UNIT (type);
19171 return ((tree_size != NULL_TREE)
19172 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19173 : NULL);
19174 }
19175
19176 /* Helper structure for RECORD_TYPE processing. */
19177 struct vlr_context
19178 {
19179 /* Root RECORD_TYPE. It is needed to generate data member location
19180 descriptions in variable-length records (VLR), but also to cope with
19181 variants, which are composed of nested structures multiplexed with
19182 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19183 function processing a FIELD_DECL, it is required to be non null. */
19184 tree struct_type;
19185
19186 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19187 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19188 this variant part as part of the root record (in storage units). For
19189 regular records, it must be NULL_TREE. */
19190 tree variant_part_offset;
19191 };
19192
19193 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19194 addressed byte of the "containing object" for the given FIELD_DECL. If
19195 possible, return a native constant through CST_OFFSET (in which case NULL is
19196 returned); otherwise return a DWARF expression that computes the offset.
19197
19198 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19199 that offset is, either because the argument turns out to be a pointer to an
19200 ERROR_MARK node, or because the offset expression is too complex for us.
19201
19202 CTX is required: see the comment for VLR_CONTEXT. */
19203
19204 static dw_loc_descr_ref
19205 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19206 HOST_WIDE_INT *cst_offset)
19207 {
19208 tree tree_result;
19209 dw_loc_list_ref loc_result;
19210
19211 *cst_offset = 0;
19212
19213 if (TREE_CODE (decl) == ERROR_MARK)
19214 return NULL;
19215 else
19216 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19217
19218 /* We cannot handle variable bit offsets at the moment, so abort if it's the
19219 case. */
19220 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19221 return NULL;
19222
19223 /* We used to handle only constant offsets in all cases.  Now, we properly
19224 handle dynamic byte offsets only when PCC bitfield type layout doesn't
19225 matter.  */
19226 if (PCC_BITFIELD_TYPE_MATTERS
19227 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19228 {
19229 offset_int object_offset_in_bits;
19230 offset_int object_offset_in_bytes;
19231 offset_int bitpos_int;
19232 tree type;
19233 tree field_size_tree;
19234 offset_int deepest_bitpos;
19235 offset_int field_size_in_bits;
19236 unsigned int type_align_in_bits;
19237 unsigned int decl_align_in_bits;
19238 offset_int type_size_in_bits;
19239
19240 bitpos_int = wi::to_offset (bit_position (decl));
19241 type = field_type (decl);
19242 type_size_in_bits = offset_int_type_size_in_bits (type);
19243 type_align_in_bits = simple_type_align_in_bits (type);
19244
19245 field_size_tree = DECL_SIZE (decl);
19246
19247 /* The size could be unspecified if there was an error, or for
19248 a flexible array member. */
19249 if (!field_size_tree)
19250 field_size_tree = bitsize_zero_node;
19251
19252 /* If the size of the field is not constant, use the type size. */
19253 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19254 field_size_in_bits = wi::to_offset (field_size_tree);
19255 else
19256 field_size_in_bits = type_size_in_bits;
19257
19258 decl_align_in_bits = simple_decl_align_in_bits (decl);
19259
19260 /* The GCC front-end doesn't make any attempt to keep track of the
19261 starting bit offset (relative to the start of the containing
19262 structure type) of the hypothetical "containing object" for a
19263 bit-field. Thus, when computing the byte offset value for the
19264 start of the "containing object" of a bit-field, we must deduce
19265 this information on our own. This can be rather tricky to do in
19266 some cases. For example, handling the following structure type
19267 definition when compiling for an i386/i486 target (which only
19268 aligns long long's to 32-bit boundaries) can be very tricky:
19269
19270 struct S { int field1; long long field2:31; };
19271
19272 Fortunately, there is a simple rule-of-thumb which can be used
19273 in such cases. When compiling for an i386/i486, GCC will
19274 allocate 8 bytes for the structure shown above. It decides to
19275 do this based upon one simple rule for bit-field allocation.
19276 GCC allocates each "containing object" for each bit-field at
19277 the first (i.e. lowest addressed) legitimate alignment boundary
19278 (based upon the required minimum alignment for the declared
19279 type of the field) which it can possibly use, subject to the
19280 condition that there is still enough available space remaining
19281 in the containing object (when allocated at the selected point)
19282 to fully accommodate all of the bits of the bit-field itself.
19283
19284 This simple rule makes it obvious why GCC allocates 8 bytes for
19285 each object of the structure type shown above. When looking
19286 for a place to allocate the "containing object" for `field2',
19287 the compiler simply tries to allocate a 64-bit "containing
19288 object" at each successive 32-bit boundary (starting at zero)
19289 until it finds a place to allocate that 64-bit field such that
19290 at least 31 contiguous (and previously unallocated) bits remain
19291 within that selected 64 bit field. (As it turns out, for the
19292 example above, the compiler finds it is OK to allocate the
19293 "containing object" 64-bit field at bit-offset zero within the
19294 structure type.)
19295
19296 Here we attempt to work backwards from the limited set of facts
19297 we're given, and we try to deduce from those facts, where GCC
19298 must have believed that the containing object started (within
19299 the structure type). The value we deduce is then used (by the
19300 callers of this routine) to generate DW_AT_location and
19301 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19302 the case of DW_AT_location, regular fields as well). */
19303
19304 /* Figure out the bit-distance from the start of the structure to
19305 the "deepest" bit of the bit-field. */
19306 deepest_bitpos = bitpos_int + field_size_in_bits;
19307
19308 /* This is the tricky part. Use some fancy footwork to deduce
19309 where the lowest addressed bit of the containing object must
19310 be. */
19311 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19312
19313 /* Round up to type_align by default. This works best for
19314 bitfields. */
19315 object_offset_in_bits
19316 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19317
19318 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19319 {
19320 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19321
19322 /* Round up to decl_align instead. */
19323 object_offset_in_bits
19324 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19325 }
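/* Worked example (a hedged illustration, using the struct S layout
   described above on an i386-like target where long long is only
   32-bit aligned): for field2 we have bitpos_int = 32,
   field_size_in_bits = 31, type_size_in_bits = 64 and
   type_align_in_bits = 32.  Thus deepest_bitpos = 63, and 63 - 64 = -1
   rounded up to a 32-bit boundary gives 0.  Since 0 is not greater
   than bitpos_int, the decl_align fallback is not taken: the
   containing object starts at bit 0, i.e. at byte offset 0 within S,
   matching the allocation rule quoted above.  */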
19326
19327 object_offset_in_bytes
19328 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19329 if (ctx->variant_part_offset == NULL_TREE)
19330 {
19331 *cst_offset = object_offset_in_bytes.to_shwi ();
19332 return NULL;
19333 }
19334 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19335 }
19336 else
19337 tree_result = byte_position (decl);
19338
19339 if (ctx->variant_part_offset != NULL_TREE)
19340 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19341 ctx->variant_part_offset, tree_result);
19342
19343 /* If the byte offset is a constant, it's simpler to handle a native
19344 constant than a DWARF expression. */
19345 if (TREE_CODE (tree_result) == INTEGER_CST)
19346 {
19347 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19348 return NULL;
19349 }
19350 struct loc_descr_context loc_ctx = {
19351 ctx->struct_type, /* context_type */
19352 NULL_TREE, /* base_decl */
19353 NULL, /* dpi */
19354 false, /* placeholder_arg */
19355 false /* placeholder_seen */
19356 };
19357 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19358
19359 /* We want a DWARF expression: abort if we only have a location list with
19360 multiple elements. */
19361 if (!loc_result || !single_element_loc_list_p (loc_result))
19362 return NULL;
19363 else
19364 return loc_result->expr;
19365 }
19366 \f
19367 /* The following routines define various Dwarf attributes and any data
19368 associated with them. */
19369
19370 /* Add a location description attribute value to a DIE.
19371
19372 This emits location attributes suitable for whole variables and
19373 whole parameters. Note that the location attributes for struct fields are
19374 generated by the routine `data_member_location_attribute' below. */
19375
19376 static inline void
19377 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19378 dw_loc_list_ref descr)
19379 {
19380 bool check_no_locviews = true;
19381 if (descr == 0)
19382 return;
19383 if (single_element_loc_list_p (descr))
19384 add_AT_loc (die, attr_kind, descr->expr);
19385 else
19386 {
19387 add_AT_loc_list (die, attr_kind, descr);
19388 gcc_assert (descr->ll_symbol);
19389 if (attr_kind == DW_AT_location && descr->vl_symbol
19390 && dwarf2out_locviews_in_attribute ())
19391 {
19392 add_AT_view_list (die, DW_AT_GNU_locviews);
19393 check_no_locviews = false;
19394 }
19395 }
19396
19397 if (check_no_locviews)
19398 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19399 }
19400
19401 /* Add DW_AT_accessibility attribute to DIE if needed. */
19402
19403 static void
19404 add_accessibility_attribute (dw_die_ref die, tree decl)
19405 {
19406 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19407 children, otherwise the default is DW_ACCESS_public. In DWARF2
19408 the default has always been DW_ACCESS_public. */
19409 if (TREE_PROTECTED (decl))
19410 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19411 else if (TREE_PRIVATE (decl))
19412 {
19413 if (dwarf_version == 2
19414 || die->die_parent == NULL
19415 || die->die_parent->die_tag != DW_TAG_class_type)
19416 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19417 }
19418 else if (dwarf_version > 2
19419 && die->die_parent
19420 && die->die_parent->die_tag == DW_TAG_class_type)
19421 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19422 }
19423
19424 /* Attach the specialized form of location attribute used for data members of
19425 struct and union types. In the special case of a FIELD_DECL node which
19426 represents a bit-field, the "offset" part of this special location
19427 descriptor must indicate the distance in bytes from the lowest-addressed
19428 byte of the containing struct or union type to the lowest-addressed byte of
19429 the "containing object" for the bit-field. (See the `field_byte_offset'
19430 function above).
19431
19432 For any given bit-field, the "containing object" is a hypothetical object
19433 (of some integral or enum type) within which the given bit-field lives. The
19434 type of this hypothetical "containing object" is always the same as the
19435 declared type of the individual bit-field itself (for GCC anyway... the
19436 DWARF spec doesn't actually mandate this). Note that it is the size (in
19437 bytes) of the hypothetical "containing object" which will be given in the
19438 DW_AT_byte_size attribute for this bit-field. (See the
19439 `byte_size_attribute' function below.) It is also used when calculating the
19440 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19441 function below.)
19442
19443 CTX is required: see the comment for VLR_CONTEXT. */
19444
19445 static void
19446 add_data_member_location_attribute (dw_die_ref die,
19447 tree decl,
19448 struct vlr_context *ctx)
19449 {
19450 HOST_WIDE_INT offset;
19451 dw_loc_descr_ref loc_descr = 0;
19452
19453 if (TREE_CODE (decl) == TREE_BINFO)
19454 {
19455 /* We're working on the TAG_inheritance for a base class. */
19456 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19457 {
19458 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19459 aren't at a fixed offset from all (sub)objects of the same
19460 type. We need to extract the appropriate offset from our
19461 vtable. The following dwarf expression means
19462
19463 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19464
19465 This is specific to the V3 ABI, of course. */
19466
19467 dw_loc_descr_ref tmp;
19468
19469 /* Make a copy of the object address. */
19470 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19471 add_loc_descr (&loc_descr, tmp);
19472
19473 /* Extract the vtable address. */
19474 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19475 add_loc_descr (&loc_descr, tmp);
19476
19477 /* Calculate the address of the offset. */
19478 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19479 gcc_assert (offset < 0);
19480
19481 tmp = int_loc_descriptor (-offset);
19482 add_loc_descr (&loc_descr, tmp);
19483 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19484 add_loc_descr (&loc_descr, tmp);
19485
19486 /* Extract the offset. */
19487 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19488 add_loc_descr (&loc_descr, tmp);
19489
19490 /* Add it to the object address. */
19491 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19492 add_loc_descr (&loc_descr, tmp);
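/* Putting the pieces together, the expression emitted for such a
   virtual base is roughly (the exact constant opcode depends on
   int_loc_descriptor):

   DW_OP_dup; DW_OP_deref; DW_OP_const<n>u <-offset>;
   DW_OP_minus; DW_OP_deref; DW_OP_plus

   With the object address on the stack, this dereferences the vptr,
   steps back -offset bytes to the vbase-offset slot in the vtable,
   loads that offset and adds it to the object address, yielding
   BaseAddr as in the formula above.  */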
19493 }
19494 else
19495 offset = tree_to_shwi (BINFO_OFFSET (decl));
19496 }
19497 else
19498 {
19499 loc_descr = field_byte_offset (decl, ctx, &offset);
19500
19501 /* If loc_descr is available then we know the field offset is dynamic.
19502 However, GDB does not handle dynamic field offsets very well at the
19503 moment. */
19504 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19505 {
19506 loc_descr = NULL;
19507 offset = 0;
19508 }
19509
19510 /* Data member location evaluation starts with the base address on the
19511 stack. Compute the field offset and add it to this base address. */
19512 else if (loc_descr != NULL)
19513 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19514 }
19515
19516 if (! loc_descr)
19517 {
19518 /* While DW_AT_data_bit_offset was already added in DWARF4, consumers were
19519 slow to adopt it; e.g. GDB only added support for it in November 2016.
19520 For DWARF5 we need newer debug info consumers anyway. We might change
19521 this to dwarf_version >= 4 once most consumers have caught up. */
19522 if (dwarf_version >= 5
19523 && TREE_CODE (decl) == FIELD_DECL
19524 && DECL_BIT_FIELD_TYPE (decl)
19525 && (ctx->variant_part_offset == NULL_TREE
19526 || TREE_CODE (ctx->variant_part_offset) == INTEGER_CST))
19527 {
19528 tree off = bit_position (decl);
19529 if (ctx->variant_part_offset)
19530 off = bit_from_pos (ctx->variant_part_offset, off);
19531 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19532 {
19533 remove_AT (die, DW_AT_byte_size);
19534 remove_AT (die, DW_AT_bit_offset);
19535 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19536 return;
19537 }
19538 }
19539 if (dwarf_version > 2)
19540 {
19541 /* Don't need to output a location expression, just the constant. */
19542 if (offset < 0)
19543 add_AT_int (die, DW_AT_data_member_location, offset);
19544 else
19545 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19546 return;
19547 }
19548 else
19549 {
19550 enum dwarf_location_atom op;
19551
19552 /* The DWARF2 standard says that we should assume that the structure
19553 address is already on the stack, so we can specify a structure
19554 field address by using DW_OP_plus_uconst. */
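/* A small illustration: a member at byte offset 8 gets the
   one-operation expression DW_OP_plus_uconst 8, which the consumer
   evaluates with the address of the containing struct already pushed
   on the DWARF expression stack.  */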
19555 op = DW_OP_plus_uconst;
19556 loc_descr = new_loc_descr (op, offset, 0);
19557 }
19558 }
19559
19560 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19561 }
19562
19563 /* Writes integer values to dw_vec_const array. */
19564
19565 static void
19566 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19567 {
19568 while (size != 0)
19569 {
19570 *dest++ = val & 0xff;
19571 val >>= 8;
19572 --size;
19573 }
19574 }
19575
19576 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19577
19578 static HOST_WIDE_INT
19579 extract_int (const unsigned char *src, unsigned int size)
19580 {
19581 HOST_WIDE_INT val = 0;
19582
19583 src += size;
19584 while (size != 0)
19585 {
19586 val <<= 8;
19587 val |= *--src & 0xff;
19588 --size;
19589 }
19590 return val;
19591 }
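/* For instance (host-independent, since the loops above always emit the
   least significant byte first):

   unsigned char buf[2];
   insert_int (0x1234, 2, buf);   => buf[0] == 0x34, buf[1] == 0x12
   extract_int (buf, 2);          => returns 0x1234

   so the two routines round-trip any value that fits in SIZE bytes.  */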
19592
19593 /* Writes wide_int values to dw_vec_const array. */
19594
19595 static void
19596 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19597 {
19598 int i;
19599
19600 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19601 {
19602 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19603 return;
19604 }
19605
19606 /* We'd have to extend this code to support odd sizes. */
19607 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19608
19609 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19610
19611 if (WORDS_BIG_ENDIAN)
19612 for (i = n - 1; i >= 0; i--)
19613 {
19614 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19615 dest += sizeof (HOST_WIDE_INT);
19616 }
19617 else
19618 for (i = 0; i < n; i++)
19619 {
19620 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19621 dest += sizeof (HOST_WIDE_INT);
19622 }
19623 }
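/* For example, on a host with a 64-bit HOST_WIDE_INT a 16-byte element
   is written as two HOST_WIDE_INT pieces: elt (0), the low-order half,
   goes first when !WORDS_BIG_ENDIAN and last when WORDS_BIG_ENDIAN;
   each piece is itself serialized by insert_int.  */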
19624
19625 /* Writes floating point values to dw_vec_const array. */
19626
19627 static void
19628 insert_float (const_rtx rtl, unsigned char *array)
19629 {
19630 long val[4];
19631 int i;
19632 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19633
19634 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19635
19636 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19637 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19638 {
19639 insert_int (val[i], 4, array);
19640 array += 4;
19641 }
19642 }
19643
19644 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19645 does not have a "location" either in memory or in a register. These
19646 things can arise in GNU C when a constant is passed as an actual parameter
19647 to an inlined function. They can also arise in C++ where declared
19648 constants do not necessarily get memory "homes". */
19649
19650 static bool
19651 add_const_value_attribute (dw_die_ref die, rtx rtl)
19652 {
19653 switch (GET_CODE (rtl))
19654 {
19655 case CONST_INT:
19656 {
19657 HOST_WIDE_INT val = INTVAL (rtl);
19658
19659 if (val < 0)
19660 add_AT_int (die, DW_AT_const_value, val);
19661 else
19662 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19663 }
19664 return true;
19665
19666 case CONST_WIDE_INT:
19667 {
19668 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19669 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19670 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19671 wide_int w = wi::zext (w1, prec);
19672 add_AT_wide (die, DW_AT_const_value, w);
19673 }
19674 return true;
19675
19676 case CONST_DOUBLE:
19677 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19678 floating-point constant. A CONST_DOUBLE is used whenever the
19679 constant requires more than one word in order to be adequately
19680 represented. */
19681 if (TARGET_SUPPORTS_WIDE_INT == 0
19682 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19683 add_AT_double (die, DW_AT_const_value,
19684 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19685 else
19686 {
19687 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19688 unsigned int length = GET_MODE_SIZE (mode);
19689 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19690
19691 insert_float (rtl, array);
19692 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19693 }
19694 return true;
19695
19696 case CONST_VECTOR:
19697 {
19698 unsigned int length;
19699 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19700 return false;
19701
19702 machine_mode mode = GET_MODE (rtl);
19703 /* The combination of a length and byte elt_size doesn't extend
19704 naturally to boolean vectors, where several elements are packed
19705 into the same byte. */
19706 if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
19707 return false;
19708
19709 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19710 unsigned char *array
19711 = ggc_vec_alloc<unsigned char> (length * elt_size);
19712 unsigned int i;
19713 unsigned char *p;
19714 machine_mode imode = GET_MODE_INNER (mode);
19715
19716 switch (GET_MODE_CLASS (mode))
19717 {
19718 case MODE_VECTOR_INT:
19719 for (i = 0, p = array; i < length; i++, p += elt_size)
19720 {
19721 rtx elt = CONST_VECTOR_ELT (rtl, i);
19722 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19723 }
19724 break;
19725
19726 case MODE_VECTOR_FLOAT:
19727 for (i = 0, p = array; i < length; i++, p += elt_size)
19728 {
19729 rtx elt = CONST_VECTOR_ELT (rtl, i);
19730 insert_float (elt, p);
19731 }
19732 break;
19733
19734 default:
19735 gcc_unreachable ();
19736 }
19737
19738 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19739 }
19740 return true;
19741
19742 case CONST_STRING:
19743 if (dwarf_version >= 4 || !dwarf_strict)
19744 {
19745 dw_loc_descr_ref loc_result;
19746 resolve_one_addr (&rtl);
19747 rtl_addr:
19748 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19749 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19750 add_AT_loc (die, DW_AT_location, loc_result);
19751 vec_safe_push (used_rtx_array, rtl);
19752 return true;
19753 }
19754 return false;
19755
19756 case CONST:
19757 if (CONSTANT_P (XEXP (rtl, 0)))
19758 return add_const_value_attribute (die, XEXP (rtl, 0));
19759 /* FALLTHROUGH */
19760 case SYMBOL_REF:
19761 if (!const_ok_for_output (rtl))
19762 return false;
19763 /* FALLTHROUGH */
19764 case LABEL_REF:
19765 if (dwarf_version >= 4 || !dwarf_strict)
19766 goto rtl_addr;
19767 return false;
19768
19769 case PLUS:
19770 /* In cases where an inlined instance of an inline function is passed
19771 the address of an `auto' variable (which is local to the caller) we
19772 can get a situation where the DECL_RTL of the artificial local
19773 variable (for the inlining) which acts as a stand-in for the
19774 corresponding formal parameter (of the inline function) will look
19775 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19776 exactly a compile-time constant expression, but it isn't the address
19777 of the (artificial) local variable either. Rather, it represents the
19778 *value* which the artificial local variable always has during its
19779 lifetime. We currently have no way to represent such quasi-constant
19780 values in Dwarf, so for now we just punt and generate nothing. */
19781 return false;
19782
19783 case HIGH:
19784 case CONST_FIXED:
19785 case MINUS:
19786 case SIGN_EXTEND:
19787 case ZERO_EXTEND:
19788 case CONST_POLY_INT:
19789 return false;
19790
19791 case MEM:
19792 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19793 && MEM_READONLY_P (rtl)
19794 && GET_MODE (rtl) == BLKmode)
19795 {
19796 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19797 return true;
19798 }
19799 return false;
19800
19801 default:
19802 /* No other kinds of rtx should be possible here. */
19803 gcc_unreachable ();
19804 }
19805 return false;
19806 }
19807
19808 /* Determine whether the evaluation of EXPR references any variables
19809 or functions which aren't otherwise used (and therefore may not be
19810 output). */
19811 static tree
19812 reference_to_unused (tree * tp, int * walk_subtrees,
19813 void * data ATTRIBUTE_UNUSED)
19814 {
19815 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19816 *walk_subtrees = 0;
19817
19818 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19819 && ! TREE_ASM_WRITTEN (*tp))
19820 return *tp;
19821 /* ??? The C++ FE emits debug information for using decls, so
19822 putting gcc_unreachable here falls over. See PR31899. For now
19823 be conservative. */
19824 else if (!symtab->global_info_ready && VAR_P (*tp))
19825 return *tp;
19826 else if (VAR_P (*tp))
19827 {
19828 varpool_node *node = varpool_node::get (*tp);
19829 if (!node || !node->definition)
19830 return *tp;
19831 }
19832 else if (TREE_CODE (*tp) == FUNCTION_DECL
19833 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19834 {
19835 /* The call graph machinery must have finished analyzing,
19836 optimizing and gimplifying the CU by now.
19837 So if *TP has no call graph node associated
19838 to it, it means *TP will not be emitted. */
19839 if (!symtab->global_info_ready || !cgraph_node::get (*tp))
19840 return *tp;
19841 }
19842 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19843 return *tp;
19844
19845 return NULL_TREE;
19846 }
19847
19848 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19849 for use in a later add_const_value_attribute call. */
19850
19851 static rtx
19852 rtl_for_decl_init (tree init, tree type)
19853 {
19854 rtx rtl = NULL_RTX;
19855
19856 STRIP_NOPS (init);
19857
19858 /* If a variable is initialized with a string constant without embedded
19859 zeros, build CONST_STRING. */
19860 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19861 {
19862 tree enttype = TREE_TYPE (type);
19863 tree domain = TYPE_DOMAIN (type);
19864 scalar_int_mode mode;
19865
19866 if (is_int_mode (TYPE_MODE (enttype), &mode)
19867 && GET_MODE_SIZE (mode) == 1
19868 && domain
19869 && TYPE_MAX_VALUE (domain)
19870 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19871 && integer_zerop (TYPE_MIN_VALUE (domain))
19872 && compare_tree_int (TYPE_MAX_VALUE (domain),
19873 TREE_STRING_LENGTH (init) - 1) == 0
19874 && ((size_t) TREE_STRING_LENGTH (init)
19875 == strlen (TREE_STRING_POINTER (init)) + 1))
19876 {
19877 rtl = gen_rtx_CONST_STRING (VOIDmode,
19878 ggc_strdup (TREE_STRING_POINTER (init)));
19879 rtl = gen_rtx_MEM (BLKmode, rtl);
19880 MEM_READONLY_P (rtl) = 1;
19881 }
19882 }
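/* A hedged illustration: for

   static const char msg[6] = "hello";

   the checks above hold (single-byte element mode, domain 0 .. 5,
   TREE_STRING_LENGTH of 6 == strlen + 1), so the result is a read-only
   BLKmode MEM wrapping a CONST_STRING; add_const_value_attribute later
   turns that MEM into a DW_AT_const_value string.  */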
19883 /* Other aggregates, and complex values, could be represented using
19884 CONCAT: FIXME! */
19885 else if (AGGREGATE_TYPE_P (type)
19886 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19887 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19888 || TREE_CODE (type) == COMPLEX_TYPE)
19889 ;
19890 /* Vectors only work if their mode is supported by the target.
19891 FIXME: generic vectors ought to work too. */
19892 else if (TREE_CODE (type) == VECTOR_TYPE
19893 && !VECTOR_MODE_P (TYPE_MODE (type)))
19894 ;
19895 /* If the initializer is something that we know will expand into an
19896 immediate RTL constant, expand it now. We must be careful not to
19897 reference variables which won't be output. */
19898 else if (initializer_constant_valid_p (init, type)
19899 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19900 {
19901 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19902 possible. */
19903 if (TREE_CODE (type) == VECTOR_TYPE)
19904 switch (TREE_CODE (init))
19905 {
19906 case VECTOR_CST:
19907 break;
19908 case CONSTRUCTOR:
19909 if (TREE_CONSTANT (init))
19910 {
19911 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19912 bool constant_p = true;
19913 tree value;
19914 unsigned HOST_WIDE_INT ix;
19915
19916 /* Even when the ctor is constant, it might contain non-*_CST
19917 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19918 belong in VECTOR_CST nodes. */
19919 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19920 if (!CONSTANT_CLASS_P (value))
19921 {
19922 constant_p = false;
19923 break;
19924 }
19925
19926 if (constant_p)
19927 {
19928 init = build_vector_from_ctor (type, elts);
19929 break;
19930 }
19931 }
19932 /* FALLTHRU */
19933
19934 default:
19935 return NULL;
19936 }
19937
19938 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19939
19940 /* If expand_expr returns a MEM, it wasn't immediate. */
19941 gcc_assert (!rtl || !MEM_P (rtl));
19942 }
19943
19944 return rtl;
19945 }
19946
19947 /* Generate RTL for the variable DECL to represent its location. */
19948
19949 static rtx
19950 rtl_for_decl_location (tree decl)
19951 {
19952 rtx rtl;
19953
19954 /* Here we have to decide where we are going to say the parameter "lives"
19955 (as far as the debugger is concerned). We only have a couple of
19956 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19957
19958 DECL_RTL normally indicates where the parameter lives during most of the
19959 activation of the function. If optimization is enabled however, this
19960 could be either NULL or else a pseudo-reg. Both of those cases indicate
19961 that the parameter doesn't really live anywhere (as far as the code
19962 generation parts of GCC are concerned) during most of the function's
19963 activation. That will happen (for example) if the parameter is never
19964 referenced within the function.
19965
19966 We could just generate a location descriptor here for all non-NULL
19967 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19968 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19969 where DECL_RTL is NULL or is a pseudo-reg.
19970
19971 Note however that we can only get away with using DECL_INCOMING_RTL as
19972 a backup substitute for DECL_RTL in certain limited cases. In cases
19973 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19974 we can be sure that the parameter was passed using the same type as it is
19975 declared to have within the function, and that its DECL_INCOMING_RTL
19976 points us to a place where a value of that type is passed.
19977
19978 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19979 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19980 because in these cases DECL_INCOMING_RTL points us to a value of some
19981 type which is *different* from the type of the parameter itself. Thus,
19982 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19983 such cases, the debugger would end up (for example) trying to fetch a
19984 `float' from a place which actually contains the first part of a
19985 `double'. That would lead to really incorrect and confusing
19986 output at debug-time.
19987
19988 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19989 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19990 are a couple of exceptions however. On little-endian machines we can
19991 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19992 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19993 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19994 when (on a little-endian machine) a non-prototyped function has a
19995 parameter declared to be of type `short' or `char'. In such cases,
19996 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19997 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19998 passed `int' value. If the debugger then uses that address to fetch
19999 a `short' or a `char' (on a little-endian machine) the result will be
20000 the correct data, so we allow for such exceptional cases below.
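
     As a concrete (made-up) illustration, with an old-style definition
     such as

     int f (c) char c; { return c; }

     TREE_TYPE (c) is `char' while DECL_ARG_TYPE (c) is `int'; on a
     little-endian machine DECL_INCOMING_RTL still addresses the byte
     holding the low-order `char' part of the passed `int', so it is a
     safe fallback here.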
20001
20002 Note that our goal here is to describe the place where the given formal
20003 parameter lives during most of the function's activation (i.e. between the
20004 end of the prologue and the start of the epilogue). We'll do that as best
20005 as we can. Note however that if the given formal parameter is modified
20006 sometime during the execution of the function, then a stack backtrace (at
20007 debug-time) will show the function as having been called with the *new*
20008 value rather than the value which was originally passed in. This happens
20009 rarely enough that it is not a major problem, but it *is* a problem, and
20010 I'd like to fix it.
20011
20012 A future version of dwarf2out.c may generate two additional attributes for
20013 any given DW_TAG_formal_parameter DIE which will describe the "passed
20014 type" and the "passed location" for the given formal parameter in addition
20015 to the attributes we now generate to indicate the "declared type" and the
20016 "active location" for each parameter. This additional set of attributes
20017 could be used by debuggers for stack backtraces. Separately, note that
20018 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
20019 This happens (for example) for inlined-instances of inline function formal
20020 parameters which are never referenced. This really shouldn't be
20021 happening. All PARM_DECL nodes should get valid non-NULL
20022 DECL_INCOMING_RTL values. FIXME. */
20023
20024 /* Use DECL_RTL as the "location" unless we find something better. */
20025 rtl = DECL_RTL_IF_SET (decl);
20026
20027 /* When generating abstract instances, ignore everything except
20028 constants, symbols living in memory, and symbols living in
20029 fixed registers. */
20030 if (! reload_completed)
20031 {
20032 if (rtl
20033 && (CONSTANT_P (rtl)
20034 || (MEM_P (rtl)
20035 && CONSTANT_P (XEXP (rtl, 0)))
20036 || (REG_P (rtl)
20037 && VAR_P (decl)
20038 && TREE_STATIC (decl))))
20039 {
20040 rtl = targetm.delegitimize_address (rtl);
20041 return rtl;
20042 }
20043 rtl = NULL_RTX;
20044 }
20045 else if (TREE_CODE (decl) == PARM_DECL)
20046 {
20047 if (rtl == NULL_RTX
20048 || is_pseudo_reg (rtl)
20049 || (MEM_P (rtl)
20050 && is_pseudo_reg (XEXP (rtl, 0))
20051 && DECL_INCOMING_RTL (decl)
20052 && MEM_P (DECL_INCOMING_RTL (decl))
20053 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
20054 {
20055 tree declared_type = TREE_TYPE (decl);
20056 tree passed_type = DECL_ARG_TYPE (decl);
20057 machine_mode dmode = TYPE_MODE (declared_type);
20058 machine_mode pmode = TYPE_MODE (passed_type);
20059
20060 /* This decl represents a formal parameter which was optimized out.
20061 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
20062 all cases where (rtl == NULL_RTX) just below. */
20063 if (dmode == pmode)
20064 rtl = DECL_INCOMING_RTL (decl);
20065 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
20066 && SCALAR_INT_MODE_P (dmode)
20067 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
20068 && DECL_INCOMING_RTL (decl))
20069 {
20070 rtx inc = DECL_INCOMING_RTL (decl);
20071 if (REG_P (inc))
20072 rtl = inc;
20073 else if (MEM_P (inc))
20074 {
20075 if (BYTES_BIG_ENDIAN)
20076 rtl = adjust_address_nv (inc, dmode,
20077 GET_MODE_SIZE (pmode)
20078 - GET_MODE_SIZE (dmode));
20079 else
20080 rtl = inc;
20081 }
20082 }
20083 }
20084
20085 /* If the parm was passed in registers, but lives on the stack, then
20086 make a big endian correction if the mode of the type of the
20087 parameter is not the same as the mode of the rtl. */
20088 /* ??? This is the same series of checks that are made in dbxout.c before
20089 we reach the big endian correction code there. It isn't clear if all
20090 of these checks are necessary here, but keeping them all is the safe
20091 thing to do. */
20092 else if (MEM_P (rtl)
20093 && XEXP (rtl, 0) != const0_rtx
20094 && ! CONSTANT_P (XEXP (rtl, 0))
20095 /* Not passed in memory. */
20096 && !MEM_P (DECL_INCOMING_RTL (decl))
20097 /* Not passed by invisible reference. */
20098 && (!REG_P (XEXP (rtl, 0))
20099 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
20100 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
20101 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
20102 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
20103 #endif
20104 )
20105 /* Big endian correction check. */
20106 && BYTES_BIG_ENDIAN
20107 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20108 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20109 UNITS_PER_WORD))
20110 {
20111 machine_mode addr_mode = get_address_mode (rtl);
20112 poly_int64 offset = (UNITS_PER_WORD
20113 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20114
20115 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20116 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20117 }
20118 }
20119 else if (VAR_P (decl)
20120 && rtl
20121 && MEM_P (rtl)
20122 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20123 {
20124 machine_mode addr_mode = get_address_mode (rtl);
20125 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20126 GET_MODE (rtl));
20127
20128 /* If a variable is declared "register" yet is smaller than
20129 a register, then if we store the variable to memory, it
20130 looks like we're storing a register-sized value, when in
20131 fact we are not. We need to adjust the offset of the
20132 storage location to reflect the actual value's bytes,
20133 else gdb will not be able to display it. */
20134 if (maybe_ne (offset, 0))
20135 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20136 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20137 }
20138
20139 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20140 and will have been substituted directly into all expressions that use it.
20141 C does not have such a concept, but C++ and other languages do. */
20142 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20143 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20144
20145 if (rtl)
20146 rtl = targetm.delegitimize_address (rtl);
20147
20148 /* If we don't look past the constant pool, we risk emitting a
20149 reference to a constant pool entry that isn't referenced from
20150 code, and thus is not emitted. */
20151 if (rtl)
20152 rtl = avoid_constant_pool_reference (rtl);
20153
20154 /* Try harder to get a rtl. If this symbol ends up not being emitted
20155 in the current CU, resolve_addr will remove the expression referencing
20156 it. */
20157 if (rtl == NULL_RTX
20158 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20159 && VAR_P (decl)
20160 && !DECL_EXTERNAL (decl)
20161 && TREE_STATIC (decl)
20162 && DECL_NAME (decl)
20163 && !DECL_HARD_REGISTER (decl)
20164 && DECL_MODE (decl) != VOIDmode)
20165 {
20166 rtl = make_decl_rtl_for_debug (decl);
20167 if (!MEM_P (rtl)
20168 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20169 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20170 rtl = NULL_RTX;
20171 }
20172
20173 return rtl;
20174 }
20175
20176 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20177 returned. If so, the decl for the COMMON block is returned, and the
20178 value is the offset into the common block for the symbol. */
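/* A hedged Fortran illustration (names invented): given

   COMMON /BLK/ I, X
   INTEGER I
   REAL X

   the Fortran front end gives X a DECL_VALUE_EXPR of the form BLK.X,
   i.e. a COMPONENT_REF into the variable that represents the whole
   common block.  For X this function then returns the decl for BLK and
   sets *VALUE to 4 (assuming a 4-byte default INTEGER).  */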
20179
20180 static tree
20181 fortran_common (tree decl, HOST_WIDE_INT *value)
20182 {
20183 tree val_expr, cvar;
20184 machine_mode mode;
20185 poly_int64 bitsize, bitpos;
20186 tree offset;
20187 HOST_WIDE_INT cbitpos;
20188 int unsignedp, reversep, volatilep = 0;
20189
20190 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20191 it does not have a value (the offset into the common area), or if it
20192 is thread local (as opposed to global) then it isn't common, and shouldn't
20193 be handled as such. */
20194 if (!VAR_P (decl)
20195 || !TREE_STATIC (decl)
20196 || !DECL_HAS_VALUE_EXPR_P (decl)
20197 || !is_fortran ())
20198 return NULL_TREE;
20199
20200 val_expr = DECL_VALUE_EXPR (decl);
20201 if (TREE_CODE (val_expr) != COMPONENT_REF)
20202 return NULL_TREE;
20203
20204 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20205 &unsignedp, &reversep, &volatilep);
20206
20207 if (cvar == NULL_TREE
20208 || !VAR_P (cvar)
20209 || DECL_ARTIFICIAL (cvar)
20210 || !TREE_PUBLIC (cvar)
20211 /* We don't expect to have to cope with variable offsets,
20212 since at present all static data must have a constant size. */
20213 || !bitpos.is_constant (&cbitpos))
20214 return NULL_TREE;
20215
20216 *value = 0;
20217 if (offset != NULL)
20218 {
20219 if (!tree_fits_shwi_p (offset))
20220 return NULL_TREE;
20221 *value = tree_to_shwi (offset);
20222 }
20223 if (cbitpos != 0)
20224 *value += cbitpos / BITS_PER_UNIT;
20225
20226 return cvar;
20227 }
20228
20229 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20230 data attribute for a variable or a parameter. We generate the
20231 DW_AT_const_value attribute only in those cases where the given variable
20232 or parameter does not have a true "location" either in memory or in a
20233 register. This can happen (for example) when a constant is passed as an
20234 actual argument in a call to an inline function. (It's possible that
20235 these things can crop up in other ways also.) Note that one type of
20236 constant value which can be passed into an inlined function is a constant
20237 pointer. This can happen for example if an actual argument in an inlined
20238 function call evaluates to a compile-time constant address.
20239
20240 CACHE_P is true if it is worth caching the location list for DECL,
20241 so that future calls can reuse it rather than regenerate it from scratch.
20242 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20243 since we will need to refer to them each time the function is inlined. */
20244
20245 static bool
20246 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20247 {
20248 rtx rtl;
20249 dw_loc_list_ref list;
20250 var_loc_list *loc_list;
20251 cached_dw_loc_list *cache;
20252
20253 if (early_dwarf)
20254 return false;
20255
20256 if (TREE_CODE (decl) == ERROR_MARK)
20257 return false;
20258
20259 if (get_AT (die, DW_AT_location)
20260 || get_AT (die, DW_AT_const_value))
20261 return true;
20262
20263 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20264 || TREE_CODE (decl) == RESULT_DECL);
20265
20266 /* Try to get some constant RTL for this decl, and use that as the value of
20267 the location. */
20268
20269 rtl = rtl_for_decl_location (decl);
20270 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20271 && add_const_value_attribute (die, rtl))
20272 return true;
20273
20274 /* See if we have a single-element location list that is equivalent to a
20275 constant value. In that case it is better to use add_const_value_attribute
20276 than to expand the equivalent constant value. */
20277 loc_list = lookup_decl_loc (decl);
20278 if (loc_list
20279 && loc_list->first
20280 && loc_list->first->next == NULL
20281 && NOTE_P (loc_list->first->loc)
20282 && NOTE_VAR_LOCATION (loc_list->first->loc)
20283 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20284 {
20285 struct var_loc_node *node;
20286
20287 node = loc_list->first;
20288 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20289 if (GET_CODE (rtl) == EXPR_LIST)
20290 rtl = XEXP (rtl, 0);
20291 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20292 && add_const_value_attribute (die, rtl))
20293 return true;
20294 }
20295 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20296 list several times. See if we've already cached the contents. */
20297 list = NULL;
20298 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20299 cache_p = false;
20300 if (cache_p)
20301 {
20302 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20303 if (cache)
20304 list = cache->loc_list;
20305 }
20306 if (list == NULL)
20307 {
20308 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20309 NULL);
20310 /* It is usually worth caching this result if the decl is from
20311 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20312 if (cache_p && list && list->dw_loc_next)
20313 {
20314 cached_dw_loc_list **slot
20315 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20316 DECL_UID (decl),
20317 INSERT);
20318 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20319 cache->decl_id = DECL_UID (decl);
20320 cache->loc_list = list;
20321 *slot = cache;
20322 }
20323 }
20324 if (list)
20325 {
20326 add_AT_location_description (die, DW_AT_location, list);
20327 return true;
20328 }
20329 /* None of that worked, so it must not really have a location;
20330 try adding a constant value attribute from the DECL_INITIAL. */
20331 return tree_add_const_value_attribute_for_decl (die, decl);
20332 }
20333
20334 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20335 attribute is the const value T. */
20336
20337 static bool
20338 tree_add_const_value_attribute (dw_die_ref die, tree t)
20339 {
20340 tree init;
20341 tree type = TREE_TYPE (t);
20342 rtx rtl;
20343
20344 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20345 return false;
20346
20347 init = t;
20348 gcc_assert (!DECL_P (init));
20349
20350 if (TREE_CODE (init) == INTEGER_CST)
20351 {
20352 if (tree_fits_uhwi_p (init))
20353 {
20354 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20355 return true;
20356 }
20357 if (tree_fits_shwi_p (init))
20358 {
20359 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20360 return true;
20361 }
20362 }
20363 /* Generate the RTL even when early_dwarf is set, to force mangling of all
20364 referred-to symbols. */
20365 rtl = rtl_for_decl_init (init, type);
20366 if (rtl && !early_dwarf)
20367 return add_const_value_attribute (die, rtl);
20368 /* If the host and target are sane, try harder. */
20369 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20370 && initializer_constant_valid_p (init, type))
20371 {
20372 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20373 if (size > 0 && (int) size == size)
20374 {
20375 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20376
20377 if (native_encode_initializer (init, array, size) == size)
20378 {
20379 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20380 return true;
20381 }
20382 ggc_free (array);
20383 }
20384 }
20385 return false;
20386 }
20387
20388 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20389 attribute is the const value of T, where T is an integral constant
20390 variable with static storage duration
20391 (so it can't be a PARM_DECL or a RESULT_DECL). */
20392
20393 static bool
20394 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20395 {
20396
20397 if (!decl
20398 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20399 || (VAR_P (decl) && !TREE_STATIC (decl)))
20400 return false;
20401
20402 if (TREE_READONLY (decl)
20403 && ! TREE_THIS_VOLATILE (decl)
20404 && DECL_INITIAL (decl))
20405 /* OK */;
20406 else
20407 return false;
20408
20409 /* Don't add DW_AT_const_value if abstract origin already has one. */
20410 if (get_AT (var_die, DW_AT_const_value))
20411 return false;
20412
20413 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20414 }
20415
20416 /* Convert the CFI instructions for the current function into a
20417 location list. This is used for DW_AT_frame_base when we are targeting
20418 a dwarf2 consumer that does not support the dwarf3
20419 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20420 expressions. */
20421
20422 static dw_loc_list_ref
20423 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20424 {
20425 int ix;
20426 dw_fde_ref fde;
20427 dw_loc_list_ref list, *list_tail;
20428 dw_cfi_ref cfi;
20429 dw_cfa_location last_cfa, next_cfa;
20430 const char *start_label, *last_label, *section;
20431 dw_cfa_location remember;
20432
20433 fde = cfun->fde;
20434 gcc_assert (fde != NULL);
20435
20436 section = secname_for_decl (current_function_decl);
20437 list_tail = &list;
20438 list = NULL;
20439
20440 memset (&next_cfa, 0, sizeof (next_cfa));
20441 next_cfa.reg = INVALID_REGNUM;
20442 remember = next_cfa;
20443
20444 start_label = fde->dw_fde_begin;
20445
20446 /* ??? Bald assumption that the CIE opcode list does not contain
20447 advance opcodes. */
20448 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20449 lookup_cfa_1 (cfi, &next_cfa, &remember);
20450
20451 last_cfa = next_cfa;
20452 last_label = start_label;
20453
20454 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20455 {
20456 /* If the first partition contained no CFI adjustments, the
20457 CIE opcodes apply to the whole first partition. */
20458 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20459 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20460 list_tail = &(*list_tail)->dw_loc_next;
20461 start_label = last_label = fde->dw_fde_second_begin;
20462 }
20463
20464 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20465 {
20466 switch (cfi->dw_cfi_opc)
20467 {
20468 case DW_CFA_set_loc:
20469 case DW_CFA_advance_loc1:
20470 case DW_CFA_advance_loc2:
20471 case DW_CFA_advance_loc4:
20472 if (!cfa_equal_p (&last_cfa, &next_cfa))
20473 {
20474 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20475 start_label, 0, last_label, 0, section);
20476
20477 list_tail = &(*list_tail)->dw_loc_next;
20478 last_cfa = next_cfa;
20479 start_label = last_label;
20480 }
20481 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20482 break;
20483
20484 case DW_CFA_advance_loc:
20485 /* The encoding is complex enough that we should never emit this. */
20486 gcc_unreachable ();
20487
20488 default:
20489 lookup_cfa_1 (cfi, &next_cfa, &remember);
20490 break;
20491 }
20492 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20493 {
20494 if (!cfa_equal_p (&last_cfa, &next_cfa))
20495 {
20496 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20497 start_label, 0, last_label, 0, section);
20498
20499 list_tail = &(*list_tail)->dw_loc_next;
20500 last_cfa = next_cfa;
20501 start_label = last_label;
20502 }
20503 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20504 start_label, 0, fde->dw_fde_end, 0, section);
20505 list_tail = &(*list_tail)->dw_loc_next;
20506 start_label = last_label = fde->dw_fde_second_begin;
20507 }
20508 }
20509
20510 if (!cfa_equal_p (&last_cfa, &next_cfa))
20511 {
20512 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20513 start_label, 0, last_label, 0, section);
20514 list_tail = &(*list_tail)->dw_loc_next;
20515 start_label = last_label;
20516 }
20517
20518 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20519 start_label, 0,
20520 fde->dw_fde_second_begin
20521 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20522 section);
20523
20524 maybe_gen_llsym (list);
20525
20526 return list;
20527 }
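/* A rough sketch of the output: for a conventional prologue where the
   CFA is first sp + 4 (at the call), then sp + 8 (after the old frame
   pointer is pushed) and finally fp + 8 (once the frame pointer is set
   up), the list has three ranges, each describing the frame base as the
   corresponding register plus constant (with OFFSET folded in) and
   delimited by the labels recorded at the DW_CFA advance points.  The
   registers and constants are target-specific; the numbers above mirror
   a typical 32-bit x86 prologue.  */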
20528
20529 /* Compute a displacement from the "steady-state frame pointer" to the
20530 frame base (often the same as the CFA), and store it in
20531 frame_pointer_fb_offset. OFFSET is added to the displacement
20532 before the latter is negated. */
20533
20534 static void
20535 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20536 {
20537 rtx reg, elim;
20538
20539 #ifdef FRAME_POINTER_CFA_OFFSET
20540 reg = frame_pointer_rtx;
20541 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20542 #else
20543 reg = arg_pointer_rtx;
20544 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20545 #endif
20546
20547 elim = (ira_use_lra_p
20548 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20549 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20550 elim = strip_offset_and_add (elim, &offset);
20551
20552 frame_pointer_fb_offset = -offset;
20553
20554 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20555 in which to eliminate. This is because its stack pointer isn't
20556 directly accessible as a register within the ISA. To work around
20557 this, assume that while we cannot provide a proper value for
20558 frame_pointer_fb_offset, we won't need one either. We can use
20559 hard frame pointer in debug info even if frame pointer isn't used
20560 since hard frame pointer in debug info is encoded with DW_OP_fbreg
20561 which uses the DW_AT_frame_base attribute, not hard frame pointer
20562 directly. */
20563 frame_pointer_fb_offset_valid
20564 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20565 }
20566
20567 /* Generate a DW_AT_name attribute given some string value to be included as
20568 the value of the attribute. */
20569
20570 static void
20571 add_name_attribute (dw_die_ref die, const char *name_string)
20572 {
20573 if (name_string != NULL && *name_string != 0)
20574 {
20575 if (demangle_name_func)
20576 name_string = (*demangle_name_func) (name_string);
20577
20578 add_AT_string (die, DW_AT_name, name_string);
20579 }
20580 }
20581
20582 /* Generate a DW_AT_name attribute given some string value representing a
20583 file or filepath to be included as value of the attribute. */
20584 static void
20585 add_filename_attribute (dw_die_ref die, const char *name_string)
20586 {
20587 if (name_string != NULL && *name_string != 0)
20588 add_filepath_AT_string (die, DW_AT_name, name_string);
20589 }
20590
20591 /* Generate a DW_AT_description attribute given some string value to be included
20592 as the value of the attribute. */
20593
20594 static void
20595 add_desc_attribute (dw_die_ref die, const char *name_string)
20596 {
20597 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20598 return;
20599
20600 if (name_string == NULL || *name_string == 0)
20601 return;
20602
20603 if (demangle_name_func)
20604 name_string = (*demangle_name_func) (name_string);
20605
20606 add_AT_string (die, DW_AT_description, name_string);
20607 }
20608
20609 /* Generate a DW_AT_description attribute given some decl to be included
20610 as the value of the attribute. */
20611
20612 static void
20613 add_desc_attribute (dw_die_ref die, tree decl)
20614 {
20615 tree decl_name;
20616
20617 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20618 return;
20619
20620 if (decl == NULL_TREE || !DECL_P (decl))
20621 return;
20622 decl_name = DECL_NAME (decl);
20623
20624 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20625 {
20626 const char *name = dwarf2_name (decl, 0);
20627 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20628 }
20629 else
20630 {
20631 char *desc = print_generic_expr_to_str (decl);
20632 add_desc_attribute (die, desc);
20633 free (desc);
20634 }
20635 }
20636
20637 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20638 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20639 of TYPE accordingly.
20640
20641 ??? This is a temporary measure until after we're able to generate
20642 regular DWARF for the complex Ada type system. */
20643
20644 static void
20645 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20646 dw_die_ref context_die)
20647 {
20648 tree dtype;
20649 dw_die_ref dtype_die;
20650
20651 if (!lang_hooks.types.descriptive_type)
20652 return;
20653
20654 dtype = lang_hooks.types.descriptive_type (type);
20655 if (!dtype)
20656 return;
20657
20658 dtype_die = lookup_type_die (dtype);
20659 if (!dtype_die)
20660 {
20661 gen_type_die (dtype, context_die);
20662 dtype_die = lookup_type_die (dtype);
20663 gcc_assert (dtype_die);
20664 }
20665
20666 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20667 }
20668
20669 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20670
20671 static const char *
20672 comp_dir_string (void)
20673 {
20674 const char *wd;
20675 char *wd_plus_sep = NULL;
20676 static const char *cached_wd = NULL;
20677
20678 if (cached_wd != NULL)
20679 return cached_wd;
20680
20681 wd = get_src_pwd ();
20682 if (wd == NULL)
20683 return NULL;
20684
20685 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20686 {
20687 size_t wdlen = strlen (wd);
20688 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20689 strcpy (wd_plus_sep, wd);
20690 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20691 wd_plus_sep [wdlen + 1] = 0;
20692 wd = wd_plus_sep;
20693 }
20694
20695 cached_wd = remap_debug_filename (wd);
20696
20697 /* remap_debug_filename can just pass through wd or return a new gc string.
20698 These two types can't both be stored in a GTY(())-tagged string, but since
20699 the cached value lives forever just copy it if needed. */
20700 if (cached_wd != wd)
20701 {
20702 cached_wd = xstrdup (cached_wd);
20703 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20704 free (wd_plus_sep);
20705 }
20706
20707 return cached_wd;
20708 }
20709
20710 /* Generate a DW_AT_comp_dir attribute for DIE. */
20711
20712 static void
20713 add_comp_dir_attribute (dw_die_ref die)
20714 {
20715 const char * wd = comp_dir_string ();
20716 if (wd != NULL)
20717 add_filepath_AT_string (die, DW_AT_comp_dir, wd);
20718 }
20719
20720 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20721 pointer computation, ...), output a representation for that bound according
20722 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20723 loc_list_from_tree for the meaning of CONTEXT. */
20724
20725 static void
20726 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20727 int forms, struct loc_descr_context *context)
20728 {
20729 dw_die_ref context_die, decl_die = NULL;
20730 dw_loc_list_ref list;
20731 bool strip_conversions = true;
20732 bool placeholder_seen = false;
20733
20734 while (strip_conversions)
20735 switch (TREE_CODE (value))
20736 {
20737 case ERROR_MARK:
20738 case SAVE_EXPR:
20739 return;
20740
20741 CASE_CONVERT:
20742 case VIEW_CONVERT_EXPR:
20743 value = TREE_OPERAND (value, 0);
20744 break;
20745
20746 default:
20747 strip_conversions = false;
20748 break;
20749 }
20750
20751 /* If possible and permitted, output the attribute as a constant. */
20752 if ((forms & dw_scalar_form_constant) != 0
20753 && TREE_CODE (value) == INTEGER_CST)
20754 {
20755 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20756
20757 /* If HOST_WIDE_INT is big enough then represent the bound as
20758 a constant value. We need to choose a form based on
20759 whether the type is signed or unsigned. We cannot just
20760 call add_AT_unsigned if the value itself is positive
20761 (add_AT_unsigned might add the unsigned value encoded as
20762 DW_FORM_data[1248]). Some DWARF consumers will look up the
20763 bounds type and then sign extend any unsigned values found
20764 for signed types. This is needed only for
20765 DW_AT_{lower,upper}_bound, since for most other attributes,
20766 consumers will treat DW_FORM_data[1248] as unsigned values,
20767 regardless of the underlying type. */
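/* For instance, a bound of 200 whose base type is a signed 16-bit
   integer goes through add_AT_int: add_AT_unsigned could pick the
   one-byte DW_FORM_data1 encoding 0xc8, which a consumer that
   sign-extends data forms according to the (signed) bound type would
   read back as -56.  */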
20768 if (prec <= HOST_BITS_PER_WIDE_INT
20769 || tree_fits_uhwi_p (value))
20770 {
20771 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20772 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20773 else
20774 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20775 }
20776 else
20777 /* Otherwise represent the bound as an unsigned value with
20778 the precision of its type. The precision and signedness
20779 of the type will be necessary to re-interpret it
20780 unambiguously. */
20781 add_AT_wide (die, attr, wi::to_wide (value));
20782 return;
20783 }
20784
20785 /* Otherwise, if it's possible and permitted too, output a reference to
20786 another DIE. */
20787 if ((forms & dw_scalar_form_reference) != 0)
20788 {
20789 tree decl = NULL_TREE;
20790
20791 /* Some type attributes reference an outer type. For instance, the upper
20792 bound of an array may reference an embedding record (this happens in
20793 Ada). */
20794 if (TREE_CODE (value) == COMPONENT_REF
20795 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20796 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20797 decl = TREE_OPERAND (value, 1);
20798
20799 else if (VAR_P (value)
20800 || TREE_CODE (value) == PARM_DECL
20801 || TREE_CODE (value) == RESULT_DECL)
20802 decl = value;
20803
20804 if (decl != NULL_TREE)
20805 {
20806 decl_die = lookup_decl_die (decl);
20807
20808 /* ??? Can this happen, or should the variable have been bound
20809 first? Probably it can, since I imagine that we try to create
20810 the types of parameters in the order in which they exist in
20811 the list, and won't have created a forward reference to a
20812 later parameter. */
20813 if (decl_die != NULL)
20814 {
20815 if (get_AT (decl_die, DW_AT_location)
20816 || get_AT (decl_die, DW_AT_data_member_location)
20817 || get_AT (decl_die, DW_AT_const_value))
20818 {
20819 add_AT_die_ref (die, attr, decl_die);
20820 return;
20821 }
20822 }
20823 }
20824 }
20825
20826 /* Last chance: try to create a stack operation procedure to evaluate the
20827 value. Do nothing if even that is not possible or permitted. */
20828 if ((forms & dw_scalar_form_exprloc) == 0)
20829 return;
20830
20831 list = loc_list_from_tree (value, 2, context);
20832 if (context && context->placeholder_arg)
20833 {
20834 placeholder_seen = context->placeholder_seen;
20835 context->placeholder_seen = false;
20836 }
20837 if (list == NULL || single_element_loc_list_p (list))
20838 {
20839 /* If this attribute is neither a reference nor a constant, it is a DWARF
20840 expression rather than a location description. For that
20841 loc_list_from_tree (value, 0, &context) is needed. */
20842 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20843 if (list2 && single_element_loc_list_p (list2))
20844 {
20845 if (placeholder_seen)
20846 {
20847 struct dwarf_procedure_info dpi;
20848 dpi.fndecl = NULL_TREE;
20849 dpi.args_count = 1;
20850 if (!resolve_args_picking (list2->expr, 1, &dpi))
20851 return;
20852 }
20853 add_AT_loc (die, attr, list2->expr);
20854 return;
20855 }
20856 }
20857
20858 /* If that failed to give a single element location list, fall back to
20859 outputting this as a reference... still if permitted. */
20860 if (list == NULL
20861 || (forms & dw_scalar_form_reference) == 0
20862 || placeholder_seen)
20863 return;
20864
20865 if (!decl_die)
20866 {
20867 if (current_function_decl == 0)
20868 context_die = comp_unit_die ();
20869 else
20870 context_die = lookup_decl_die (current_function_decl);
20871
20872 decl_die = new_die (DW_TAG_variable, context_die, value);
20873 add_AT_flag (decl_die, DW_AT_artificial, 1);
20874 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20875 context_die);
20876 }
20877
20878 add_AT_location_description (decl_die, DW_AT_location, list);
20879 add_AT_die_ref (die, attr, decl_die);
20880 }
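/* Illustrative sketch, not part of the original source: add_scalar_info
   above chooses among three DWARF representations for a scalar value such
   as an array bound, depending on the FORMS mask its callers pass
   (dw_scalar_form_constant / _reference / _exprloc).  With hypothetical
   declarations:

       int buf[16];       constant bound: DW_AT_upper_bound 15, emitted
                          via add_AT_unsigned / add_AT_int / add_AT_wide
       void f (int n)
       { int vla[n]; }    dynamic bound: either a reference to the DIE of
                          a (possibly artificial) variable that holds the
                          bound, or a DWARF expression (exprloc), or, as a
                          last resort, an artificial DW_TAG_variable whose
                          DW_AT_location computes the value

   The exact output depends on the language, flags and optimization level,
   so treat this only as a rough guide. */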
20881
20882 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20883 default. */
20884
20885 static int
20886 lower_bound_default (void)
20887 {
20888 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20889 {
20890 case DW_LANG_C:
20891 case DW_LANG_C89:
20892 case DW_LANG_C99:
20893 case DW_LANG_C11:
20894 case DW_LANG_C_plus_plus:
20895 case DW_LANG_C_plus_plus_11:
20896 case DW_LANG_C_plus_plus_14:
20897 case DW_LANG_ObjC:
20898 case DW_LANG_ObjC_plus_plus:
20899 return 0;
20900 case DW_LANG_Fortran77:
20901 case DW_LANG_Fortran90:
20902 case DW_LANG_Fortran95:
20903 case DW_LANG_Fortran03:
20904 case DW_LANG_Fortran08:
20905 return 1;
20906 case DW_LANG_UPC:
20907 case DW_LANG_D:
20908 case DW_LANG_Python:
20909 return dwarf_version >= 4 ? 0 : -1;
20910 case DW_LANG_Ada95:
20911 case DW_LANG_Ada83:
20912 case DW_LANG_Cobol74:
20913 case DW_LANG_Cobol85:
20914 case DW_LANG_Modula2:
20915 case DW_LANG_PLI:
20916 return dwarf_version >= 4 ? 1 : -1;
20917 default:
20918 return -1;
20919 }
20920 }
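/* Illustrative note, not part of the original source: the defaults above
   follow the DWARF specification's per-language table.  For hypothetical
   declarations:

       int a[10];          C/C++: the default lower bound is 0, so
                           DW_AT_lower_bound is normally omitted and only
                           DW_AT_upper_bound (9) is emitted.
       integer :: b(10)    Fortran: the default lower bound is 1, so the
                           bounds 1..10 likewise need only the upper bound.

   For languages where this function returns -1 there is no default, and
   add_bound_info below always emits the lower bound explicitly. */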
20921
20922 /* Given a tree node describing an array bound (either lower or upper) output
20923 a representation for that bound. */
20924
20925 static void
20926 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20927 tree bound, struct loc_descr_context *context)
20928 {
20929 int dflt;
20930
20931 while (1)
20932 switch (TREE_CODE (bound))
20933 {
20934 /* Strip all conversions. */
20935 CASE_CONVERT:
20936 case VIEW_CONVERT_EXPR:
20937 bound = TREE_OPERAND (bound, 0);
20938 break;
20939
20940 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20941 are even omitted when they are the default. */
20942 case INTEGER_CST:
20943 /* If the value for this bound is the default one, we can even omit the
20944 attribute. */
20945 if (bound_attr == DW_AT_lower_bound
20946 && tree_fits_shwi_p (bound)
20947 && (dflt = lower_bound_default ()) != -1
20948 && tree_to_shwi (bound) == dflt)
20949 return;
20950
20951 /* FALLTHRU */
20952
20953 default:
20954 /* Because of the complex interaction there can be with other GNAT
20955 encodings, GDB is not yet ready to handle a proper DWARF description
20956 for self-referential subrange bounds: let GNAT encodings do the
20957 magic in such a case. */
20958 if (is_ada ()
20959 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20960 && contains_placeholder_p (bound))
20961 return;
20962
20963 add_scalar_info (subrange_die, bound_attr, bound,
20964 dw_scalar_form_constant
20965 | dw_scalar_form_exprloc
20966 | dw_scalar_form_reference,
20967 context);
20968 return;
20969 }
20970 }
20971
20972 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20973 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20974 Note that the block of subscript information for an array type also
20975 includes information about the element type of the given array type.
20976
20977 This function reuses previously set type and bound information if
20978 available. */
20979
20980 static void
20981 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20982 {
20983 unsigned dimension_number;
20984 tree lower, upper;
20985 dw_die_ref child = type_die->die_child;
20986
20987 for (dimension_number = 0;
20988 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20989 type = TREE_TYPE (type), dimension_number++)
20990 {
20991 tree domain = TYPE_DOMAIN (type);
20992
20993 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20994 break;
20995
20996 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20997 and (in GNU C only) variable bounds. Handle all three forms
20998 here. */
20999
21000 /* Find and reuse a previously generated DW_TAG_subrange_type if
21001 available.
21002
21003 For multi-dimensional arrays, as we iterate through the
21004 various dimensions in the enclosing for loop above, we also
21005 iterate through the DIE children and pick at each
21006 DW_TAG_subrange_type previously generated (if available).
21007 Each child DW_TAG_subrange_type DIE describes the range of
21008 the current dimension. At this point we should have as many
21009 DW_TAG_subrange_type's as we have dimensions in the
21010 array. */
21011 dw_die_ref subrange_die = NULL;
21012 if (child)
21013 while (1)
21014 {
21015 child = child->die_sib;
21016 if (child->die_tag == DW_TAG_subrange_type)
21017 subrange_die = child;
21018 if (child == type_die->die_child)
21019 {
21020 /* If we wrapped around, stop looking next time. */
21021 child = NULL;
21022 break;
21023 }
21024 if (child->die_tag == DW_TAG_subrange_type)
21025 break;
21026 }
21027 if (!subrange_die)
21028 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
21029
21030 if (domain)
21031 {
21032 /* We have an array type with specified bounds. */
21033 lower = TYPE_MIN_VALUE (domain);
21034 upper = TYPE_MAX_VALUE (domain);
21035
21036 /* Define the index type. */
21037 if (TREE_TYPE (domain)
21038 && !get_AT (subrange_die, DW_AT_type))
21039 {
21040 /* ??? This is probably an Ada unnamed subrange type. Ignore the
21041 TREE_TYPE field. We can't emit debug info for this
21042 because it is an unnamed integral type. */
21043 if (TREE_CODE (domain) == INTEGER_TYPE
21044 && TYPE_NAME (domain) == NULL_TREE
21045 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
21046 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
21047 ;
21048 else
21049 add_type_attribute (subrange_die, TREE_TYPE (domain),
21050 TYPE_UNQUALIFIED, false, type_die);
21051 }
21052
21053 /* ??? If upper is NULL, the array has unspecified length,
21054 but it does have a lower bound. This happens with Fortran
21055 dimension arr(N:*)
21056 Since the debugger is definitely going to need to know N
21057 to produce useful results, go ahead and output the lower
21058 bound solo, and hope the debugger can cope. */
21059
21060 if (!get_AT (subrange_die, DW_AT_lower_bound))
21061 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21062 if (!get_AT (subrange_die, DW_AT_upper_bound)
21063 && !get_AT (subrange_die, DW_AT_count))
21064 {
21065 if (upper)
21066 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21067 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
21068 /* Zero-length array. */
21069 add_bound_info (subrange_die, DW_AT_count,
21070 build_int_cst (TREE_TYPE (lower), 0), NULL);
21071 }
21072 }
21073
21074 /* Otherwise we have an array type with an unspecified length. The
21075 DWARF-2 spec does not say how to handle this; let's just leave out the
21076 bounds. */
21077 }
21078 }
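/* Illustrative example, not part of the original source: for the C
   declaration

       int m[3][4];

   add_subscript_info with COLLAPSE_P true (as used for C/C++ from
   gen_array_type_die) emits a single DW_TAG_array_type whose children are
   two DW_TAG_subrange_type DIEs with DW_AT_upper_bound 2 and 3; the lower
   bound of 0 is omitted because it is the C default (see
   lower_bound_default).  For Ada, COLLAPSE_P is false and each dimension
   keeps its own array type, matching the front end's representation.
   Details vary with DWARF version and flags. */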
21079
21080 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21081
21082 static void
21083 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21084 {
21085 dw_die_ref decl_die;
21086 HOST_WIDE_INT size;
21087 dw_loc_descr_ref size_expr = NULL;
21088
21089 switch (TREE_CODE (tree_node))
21090 {
21091 case ERROR_MARK:
21092 size = 0;
21093 break;
21094 case ENUMERAL_TYPE:
21095 case RECORD_TYPE:
21096 case UNION_TYPE:
21097 case QUAL_UNION_TYPE:
21098 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21099 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21100 {
21101 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21102 return;
21103 }
21104 size_expr = type_byte_size (tree_node, &size);
21105 break;
21106 case FIELD_DECL:
21107 /* For a data member of a struct or union, the DW_AT_byte_size is
21108 generally given as the number of bytes normally allocated for an
21109 object of the *declared* type of the member itself. This is true
21110 even for bit-fields. */
21111 size = int_size_in_bytes (field_type (tree_node));
21112 break;
21113 default:
21114 gcc_unreachable ();
21115 }
21116
21117 /* Support for dynamically-sized objects was introduced by DWARFv3.
21118 At the moment, GDB does not handle variable byte sizes very well,
21119 though. */
21120 if ((dwarf_version >= 3 || !dwarf_strict)
21121 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21122 && size_expr != NULL)
21123 add_AT_loc (die, DW_AT_byte_size, size_expr);
21124
21125 /* Note that `size' might be -1 when we get to this point. If it is, that
21126 indicates that the byte size of the entity in question is variable and
21127 that we could not generate a DWARF expression that computes it. */
21128 if (size >= 0)
21129 add_AT_unsigned (die, DW_AT_byte_size, size);
21130 }
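/* Illustrative example, not part of the original source; sizes assume a
   typical ABI with 4-byte int and 4-byte struct alignment:

       struct s { char c; int i; };    DW_AT_byte_size 8 (the padding
                                       after 'c' is included)
       enum e { A, B };                DW_AT_byte_size 4

   When the size is only known at run time (e.g. some Ada record types),
   the attribute is instead a DWARF expression or a reference to the
   VAR_DECL that holds the size, as handled by the cases above. */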
21131
21132 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21133 alignment. */
21134
21135 static void
21136 add_alignment_attribute (dw_die_ref die, tree tree_node)
21137 {
21138 if (dwarf_version < 5 && dwarf_strict)
21139 return;
21140
21141 unsigned align;
21142
21143 if (DECL_P (tree_node))
21144 {
21145 if (!DECL_USER_ALIGN (tree_node))
21146 return;
21147
21148 align = DECL_ALIGN_UNIT (tree_node);
21149 }
21150 else if (TYPE_P (tree_node))
21151 {
21152 if (!TYPE_USER_ALIGN (tree_node))
21153 return;
21154
21155 align = TYPE_ALIGN_UNIT (tree_node);
21156 }
21157 else
21158 gcc_unreachable ();
21159
21160 add_AT_unsigned (die, DW_AT_alignment, align);
21161 }
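/* Illustrative example, not part of the original source: only alignment
   explicitly requested by the user is recorded, e.g.

       _Alignas (64) int x;                     DW_AT_alignment 64
       struct __attribute__ ((aligned (16))) s
         { int i; };                            DW_AT_alignment 16
       int y;                                   default alignment, no attribute

   DW_AT_alignment is a DWARF 5 addition, hence the dwarf_version /
   dwarf_strict check above. */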
21162
21163 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21164 which specifies the distance in bits from the highest order bit of the
21165 "containing object" for the bit-field to the highest order bit of the
21166 bit-field itself.
21167
21168 For any given bit-field, the "containing object" is a hypothetical object
21169 (of some integral or enum type) within which the given bit-field lives. The
21170 type of this hypothetical "containing object" is always the same as the
21171 declared type of the individual bit-field itself. The determination of the
21172 exact location of the "containing object" for a bit-field is rather
21173 complicated. It's handled by the `field_byte_offset' function (above).
21174
21175 Note that it is the size (in bytes) of the hypothetical "containing object"
21176 which will be given in the DW_AT_byte_size attribute for this bit-field.
21177 (See `byte_size_attribute' above). */
21178
21179 static inline void
21180 add_bit_offset_attribute (dw_die_ref die, tree decl)
21181 {
21182 HOST_WIDE_INT object_offset_in_bytes;
21183 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21184 HOST_WIDE_INT bitpos_int;
21185 HOST_WIDE_INT highest_order_object_bit_offset;
21186 HOST_WIDE_INT highest_order_field_bit_offset;
21187 HOST_WIDE_INT bit_offset;
21188
21189 /* The containing object is within the DECL_CONTEXT. */
21190 struct vlr_context ctx = { DECL_CONTEXT (decl), NULL_TREE };
21191
21192 field_byte_offset (decl, &ctx, &object_offset_in_bytes);
21193
21194 /* Must be a field and a bit field. */
21195 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21196
21197 /* We can't yet handle bit-fields whose offsets are variable, so if we
21198 encounter such things, just return without generating any attribute
21199 whatsoever. Likewise for variable or too large size. */
21200 if (! tree_fits_shwi_p (bit_position (decl))
21201 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21202 return;
21203
21204 bitpos_int = int_bit_position (decl);
21205
21206 /* Note that the bit offset is always the distance (in bits) from the
21207 highest-order bit of the "containing object" to the highest-order bit of
21208 the bit-field itself. Since the "high-order end" of any object or field
21209 is different on big-endian and little-endian machines, the computation
21210 below must take account of these differences. */
21211 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21212 highest_order_field_bit_offset = bitpos_int;
21213
21214 if (! BYTES_BIG_ENDIAN)
21215 {
21216 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21217 highest_order_object_bit_offset +=
21218 simple_type_size_in_bits (original_type);
21219 }
21220
21221 bit_offset
21222 = (! BYTES_BIG_ENDIAN
21223 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21224 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21225
21226 if (bit_offset < 0)
21227 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21228 else
21229 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21230 }
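/* Illustrative example, not part of the original source: for the
   bit-fields below, on a little-endian target with a 32-bit int, the
   attributes produced by add_byte_size_attribute, add_bit_offset_attribute
   and add_bit_size_attribute come out roughly as

       struct s { int a : 3; int b : 5; };
         'a': DW_AT_byte_size 4, DW_AT_bit_size 3, DW_AT_bit_offset 29
         'b': DW_AT_byte_size 4, DW_AT_bit_size 5, DW_AT_bit_offset 24

   because DW_AT_bit_offset counts from the most significant bit of the
   4-byte containing object.  (DWARF 4 introduced DW_AT_data_bit_offset
   as a simpler alternative.)  Layout is ABI-dependent, so the numbers
   are a sketch only. */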
21231
21232 /* For a FIELD_DECL node which represents a bit field, output an attribute
21233 which specifies the length in bits of the given field. */
21234
21235 static inline void
21236 add_bit_size_attribute (dw_die_ref die, tree decl)
21237 {
21238 /* Must be a field and a bit field. */
21239 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21240 && DECL_BIT_FIELD_TYPE (decl));
21241
21242 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21243 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21244 }
21245
21246 /* If the compiled language is ANSI C, then add a 'prototyped'
21247 attribute if argument types are given for the parameters of a function. */
21248
21249 static inline void
21250 add_prototyped_attribute (dw_die_ref die, tree func_type)
21251 {
21252 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21253 {
21254 case DW_LANG_C:
21255 case DW_LANG_C89:
21256 case DW_LANG_C99:
21257 case DW_LANG_C11:
21258 case DW_LANG_ObjC:
21259 if (prototype_p (func_type))
21260 add_AT_flag (die, DW_AT_prototyped, 1);
21261 break;
21262 default:
21263 break;
21264 }
21265 }
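/* Illustrative example, not part of the original source, for C input:

       int f (void);     prototype_p () is true: DW_AT_prototyped 1
       int g ();         old-style (unprototyped) declaration: no attribute

   C++ is not listed in the switch above, presumably because every C++
   function is prototyped, so the attribute would carry no information. */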
21266
21267 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21268 by looking in the type declaration, the object declaration equate table or
21269 the block mapping. */
21270
21271 static inline void
21272 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21273 {
21274 dw_die_ref origin_die = NULL;
21275
21276 /* For late LTO debug output we want to refer directly to the abstract
21277 DIE in the early debug rather than to the possibly existing concrete
21278 instance and avoid creating that just for this purpose. */
21279 sym_off_pair *desc;
21280 if (in_lto_p
21281 && external_die_map
21282 && (desc = external_die_map->get (origin)))
21283 {
21284 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21285 desc->sym, desc->off);
21286 return;
21287 }
21288
21289 if (DECL_P (origin))
21290 origin_die = lookup_decl_die (origin);
21291 else if (TYPE_P (origin))
21292 origin_die = lookup_type_die (origin);
21293 else if (TREE_CODE (origin) == BLOCK)
21294 origin_die = lookup_block_die (origin);
21295
21296 /* XXX: Functions that are never lowered don't always have correct block
21297 trees (in the case of Java, and in some other languages, they simply
21298 have no block tree). For these functions, there is nothing we can really do to
21299 output correct debug info for inlined functions in all cases. Rather
21300 than die, we'll just produce deficient debug info now, in that we will
21301 have variables without a proper abstract origin. In the future, when all
21302 functions are lowered, we should re-add a gcc_assert (origin_die)
21303 here. */
21304
21305 if (origin_die)
21306 {
21307 dw_attr_node *a;
21308 /* Like above, if we already created a concrete instance DIE
21309 do not use that for the abstract origin but the early DIE
21310 if present. */
21311 if (in_lto_p
21312 && (a = get_AT (origin_die, DW_AT_abstract_origin)))
21313 origin_die = AT_ref (a);
21314 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21315 }
21316 }
21317
21318 /* We do not currently support the pure_virtual attribute. */
21319
21320 static inline void
21321 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21322 {
21323 if (DECL_VINDEX (func_decl))
21324 {
21325 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21326
21327 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21328 add_AT_loc (die, DW_AT_vtable_elem_location,
21329 new_loc_descr (DW_OP_constu,
21330 tree_to_shwi (DECL_VINDEX (func_decl)),
21331 0));
21332
21333 /* GNU extension: Record what type this method came from originally. */
21334 if (debug_info_level > DINFO_LEVEL_TERSE
21335 && DECL_CONTEXT (func_decl))
21336 add_AT_die_ref (die, DW_AT_containing_type,
21337 lookup_type_die (DECL_CONTEXT (func_decl)));
21338 }
21339 }
21340 \f
21341 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21342 given decl. This was a vendor extension until DWARF 4
21343 standardized it. */
21344
21345 static void
21346 add_linkage_attr (dw_die_ref die, tree decl)
21347 {
21348 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21349
21350 /* Mimic what assemble_name_raw does with a leading '*'. */
21351 if (name[0] == '*')
21352 name = &name[1];
21353
21354 if (dwarf_version >= 4)
21355 add_AT_string (die, DW_AT_linkage_name, name);
21356 else
21357 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21358 }
21359
21360 /* Add source coordinate attributes for the given decl. */
21361
21362 static void
21363 add_src_coords_attributes (dw_die_ref die, tree decl)
21364 {
21365 expanded_location s;
21366
21367 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21368 return;
21369 s = expand_location (DECL_SOURCE_LOCATION (decl));
21370 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21371 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21372 if (debug_column_info && s.column)
21373 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21374 }
21375
21376 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21377
21378 static void
21379 add_linkage_name_raw (dw_die_ref die, tree decl)
21380 {
21381 /* Defer until we have an assembler name set. */
21382 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21383 {
21384 limbo_die_node *asm_name;
21385
21386 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21387 asm_name->die = die;
21388 asm_name->created_for = decl;
21389 asm_name->next = deferred_asm_name;
21390 deferred_asm_name = asm_name;
21391 }
21392 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21393 add_linkage_attr (die, decl);
21394 }
21395
21396 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21397
21398 static void
21399 add_linkage_name (dw_die_ref die, tree decl)
21400 {
21401 if (debug_info_level > DINFO_LEVEL_NONE
21402 && VAR_OR_FUNCTION_DECL_P (decl)
21403 && TREE_PUBLIC (decl)
21404 && !(VAR_P (decl) && DECL_REGISTER (decl))
21405 && die->die_tag != DW_TAG_member)
21406 add_linkage_name_raw (die, decl);
21407 }
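/* Illustrative example, not part of the original source: for a C++
   declaration such as

       namespace ns { void f (int); }

   the subprogram DIE typically gets DW_AT_name "f" plus
   DW_AT_linkage_name "_ZN2ns1fEi" (DW_AT_MIPS_linkage_name before
   DWARF 4), because the assembler name differs from DECL_NAME.  A plain
   C 'void f (int);' gets no linkage name since both names are "f".
   The mangled form shown assumes the Itanium C++ ABI. */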
21408
21409 /* Add a DW_AT_name attribute and source coordinate attribute for the
21410 given decl, but only if it actually has a name. */
21411
21412 static void
21413 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21414 bool no_linkage_name)
21415 {
21416 tree decl_name;
21417
21418 decl_name = DECL_NAME (decl);
21419 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21420 {
21421 const char *name = dwarf2_name (decl, 0);
21422 if (name)
21423 add_name_attribute (die, name);
21424 else
21425 add_desc_attribute (die, decl);
21426
21427 if (! DECL_ARTIFICIAL (decl))
21428 add_src_coords_attributes (die, decl);
21429
21430 if (!no_linkage_name)
21431 add_linkage_name (die, decl);
21432 }
21433 else
21434 add_desc_attribute (die, decl);
21435
21436 #ifdef VMS_DEBUGGING_INFO
21437 /* Get the function's name, as described by its RTL. This may be different
21438 from the DECL_NAME name used in the source file. */
21439 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21440 {
21441 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21442 XEXP (DECL_RTL (decl), 0), false);
21443 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21444 }
21445 #endif /* VMS_DEBUGGING_INFO */
21446 }
21447
21448 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21449
21450 static void
21451 add_discr_value (dw_die_ref die, dw_discr_value *value)
21452 {
21453 dw_attr_node attr;
21454
21455 attr.dw_attr = DW_AT_discr_value;
21456 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21457 attr.dw_attr_val.val_entry = NULL;
21458 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21459 if (value->pos)
21460 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21461 else
21462 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21463 add_dwarf_attr (die, &attr);
21464 }
21465
21466 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21467
21468 static void
21469 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21470 {
21471 dw_attr_node attr;
21472
21473 attr.dw_attr = DW_AT_discr_list;
21474 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21475 attr.dw_attr_val.val_entry = NULL;
21476 attr.dw_attr_val.v.val_discr_list = discr_list;
21477 add_dwarf_attr (die, &attr);
21478 }
21479
21480 static inline dw_discr_list_ref
21481 AT_discr_list (dw_attr_node *attr)
21482 {
21483 return attr->dw_attr_val.v.val_discr_list;
21484 }
21485
21486 #ifdef VMS_DEBUGGING_INFO
21487 /* Output the debug main pointer die for VMS */
21488
21489 void
21490 dwarf2out_vms_debug_main_pointer (void)
21491 {
21492 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21493 dw_die_ref die;
21494
21495 /* Allocate the VMS debug main subprogram die. */
21496 die = new_die_raw (DW_TAG_subprogram);
21497 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21498 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21499 current_function_funcdef_no);
21500 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21501
21502 /* Make it the first child of comp_unit_die (). */
21503 die->die_parent = comp_unit_die ();
21504 if (comp_unit_die ()->die_child)
21505 {
21506 die->die_sib = comp_unit_die ()->die_child->die_sib;
21507 comp_unit_die ()->die_child->die_sib = die;
21508 }
21509 else
21510 {
21511 die->die_sib = die;
21512 comp_unit_die ()->die_child = die;
21513 }
21514 }
21515 #endif /* VMS_DEBUGGING_INFO */
21516
21517 /* walk_tree helper function for uses_local_type, below. */
21518
21519 static tree
21520 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21521 {
21522 if (!TYPE_P (*tp))
21523 *walk_subtrees = 0;
21524 else
21525 {
21526 tree name = TYPE_NAME (*tp);
21527 if (name && DECL_P (name) && decl_function_context (name))
21528 return *tp;
21529 }
21530 return NULL_TREE;
21531 }
21532
21533 /* If TYPE involves a function-local type (including a local typedef to a
21534 non-local type), returns that type; otherwise returns NULL_TREE. */
21535
21536 static tree
21537 uses_local_type (tree type)
21538 {
21539 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21540 return used;
21541 }
21542
21543 /* Return the DIE for the scope that immediately contains this type.
21544 Non-named types that do not involve a function-local type get global
21545 scope. Named types nested in namespaces or other types get their
21546 containing scope. All other types (i.e. function-local named types) get
21547 the current active scope. */
21548
21549 static dw_die_ref
21550 scope_die_for (tree t, dw_die_ref context_die)
21551 {
21552 dw_die_ref scope_die = NULL;
21553 tree containing_scope;
21554
21555 /* Non-types always go in the current scope. */
21556 gcc_assert (TYPE_P (t));
21557
21558 /* Use the scope of the typedef, rather than the scope of the type
21559 it refers to. */
21560 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21561 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21562 else
21563 containing_scope = TYPE_CONTEXT (t);
21564
21565 /* Use the containing namespace if there is one. */
21566 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21567 {
21568 if (context_die == lookup_decl_die (containing_scope))
21569 /* OK */;
21570 else if (debug_info_level > DINFO_LEVEL_TERSE)
21571 context_die = get_context_die (containing_scope);
21572 else
21573 containing_scope = NULL_TREE;
21574 }
21575
21576 /* Ignore function type "scopes" from the C frontend. They mean that
21577 a tagged type is local to a parmlist of a function declarator, but
21578 that isn't useful to DWARF. */
21579 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21580 containing_scope = NULL_TREE;
21581
21582 if (SCOPE_FILE_SCOPE_P (containing_scope))
21583 {
21584 /* If T uses a local type, keep it local as well, to avoid references
21585 to function-local DIEs from outside the function. */
21586 if (current_function_decl && uses_local_type (t))
21587 scope_die = context_die;
21588 else
21589 scope_die = comp_unit_die ();
21590 }
21591 else if (TYPE_P (containing_scope))
21592 {
21593 /* For types, we can just look up the appropriate DIE. */
21594 if (debug_info_level > DINFO_LEVEL_TERSE)
21595 scope_die = get_context_die (containing_scope);
21596 else
21597 {
21598 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21599 if (scope_die == NULL)
21600 scope_die = comp_unit_die ();
21601 }
21602 }
21603 else
21604 scope_die = context_die;
21605
21606 return scope_die;
21607 }
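/* Illustrative example, not part of the original source: given the C++
   input

       namespace N { struct S { int i; }; }
       void g (void) { struct local { int j; } l; }

   scope_die_for places the DIE for N::S under the DW_TAG_namespace DIE
   for N, while 'local', a function-local named type, stays under the
   current CONTEXT_DIE inside the DW_TAG_subprogram for g.  File-scope
   types that do not involve any function-local type go directly under
   comp_unit_die (). */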
21608
21609 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21610
21611 static inline int
21612 local_scope_p (dw_die_ref context_die)
21613 {
21614 for (; context_die; context_die = context_die->die_parent)
21615 if (context_die->die_tag == DW_TAG_inlined_subroutine
21616 || context_die->die_tag == DW_TAG_subprogram)
21617 return 1;
21618
21619 return 0;
21620 }
21621
21622 /* Returns nonzero if CONTEXT_DIE is a class. */
21623
21624 static inline int
21625 class_scope_p (dw_die_ref context_die)
21626 {
21627 return (context_die
21628 && (context_die->die_tag == DW_TAG_structure_type
21629 || context_die->die_tag == DW_TAG_class_type
21630 || context_die->die_tag == DW_TAG_interface_type
21631 || context_die->die_tag == DW_TAG_union_type));
21632 }
21633
21634 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21635 whether or not to treat a DIE in this context as a declaration. */
21636
21637 static inline int
21638 class_or_namespace_scope_p (dw_die_ref context_die)
21639 {
21640 return (class_scope_p (context_die)
21641 || (context_die && context_die->die_tag == DW_TAG_namespace));
21642 }
21643
21644 /* Many forms of DIEs require a "type description" attribute. This
21645 routine locates the proper "type descriptor" die for the type given
21646 by 'type' plus any additional qualifiers given by 'cv_quals', and
21647 adds a DW_AT_type attribute below the given die. */
21648
21649 static void
21650 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21651 bool reverse, dw_die_ref context_die)
21652 {
21653 enum tree_code code = TREE_CODE (type);
21654 dw_die_ref type_die = NULL;
21655
21656 if (debug_info_level <= DINFO_LEVEL_TERSE)
21657 return;
21658
21659 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21660 or fixed-point type, use the inner type. This is because we have no
21661 support for unnamed types in base_type_die. This can happen if this is
21662 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21663 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21664 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21665 type = TREE_TYPE (type), code = TREE_CODE (type);
21666
21667 if (code == ERROR_MARK
21668 /* Handle a special case. For functions whose return type is void, we
21669 generate *no* type attribute. (Note that no object may have type
21670 `void', so this only applies to function return types). */
21671 || code == VOID_TYPE)
21672 return;
21673
21674 type_die = modified_type_die (type,
21675 cv_quals | TYPE_QUALS (type),
21676 reverse,
21677 context_die);
21678
21679 if (type_die != NULL)
21680 add_AT_die_ref (object_die, DW_AT_type, type_die);
21681 }
21682
21683 /* Given an object die, add the calling convention attribute for the
21684 function call type. */
21685 static void
21686 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21687 {
21688 enum dwarf_calling_convention value = DW_CC_normal;
21689
21690 value = ((enum dwarf_calling_convention)
21691 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21692
21693 if (is_fortran ()
21694 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21695 {
21696 /* DWARF 2 doesn't provide a way to identify a program's source-level
21697 entry point. DW_AT_calling_convention attributes are only meant
21698 to describe functions' calling conventions. However, lacking a
21699 better way to signal the Fortran main program, we used this for
21700 a long time, following existing custom. Now, DWARF 4 has
21701 DW_AT_main_subprogram, which we add below, but some tools still
21702 rely on the old way, which we thus keep. */
21703 value = DW_CC_program;
21704
21705 if (dwarf_version >= 4 || !dwarf_strict)
21706 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21707 }
21708
21709 /* Only add the attribute if the backend requests it, and the value
21710 is not DW_CC_normal. */
21711 if (value && (value != DW_CC_normal))
21712 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21713 }
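/* Illustrative example, not part of the original source: for a Fortran
   main program

       program hello
       end program hello

   the subprogram DIE (assembler name "MAIN__") gets
   DW_AT_calling_convention DW_CC_program and, for DWARF 4 or non-strict
   output, DW_AT_main_subprogram 1.  Other functions only get the
   attribute when the target's dwarf_calling_convention hook returns
   something other than DW_CC_normal. */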
21714
21715 /* Given a tree pointer to a struct, class, union, or enum type node, return
21716 a pointer to the (string) tag name for the given type, or zero if the type
21717 was declared without a tag. */
21718
21719 static const char *
21720 type_tag (const_tree type)
21721 {
21722 const char *name = 0;
21723
21724 if (TYPE_NAME (type) != 0)
21725 {
21726 tree t = 0;
21727
21728 /* Find the IDENTIFIER_NODE for the type name. */
21729 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21730 && !TYPE_NAMELESS (type))
21731 t = TYPE_NAME (type);
21732
21733 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21734 a TYPE_DECL node, regardless of whether or not a `typedef' was
21735 involved. */
21736 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21737 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21738 {
21739 /* We want to be extra verbose. Don't call dwarf_name if
21740 DECL_NAME isn't set. The default hook for decl_printable_name
21741 doesn't like that, and in this context it's correct to return
21742 0, instead of "<anonymous>" or the like. */
21743 if (DECL_NAME (TYPE_NAME (type))
21744 && !DECL_NAMELESS (TYPE_NAME (type)))
21745 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21746 }
21747
21748 /* Now get the name as a string, or invent one. */
21749 if (!name && t != 0)
21750 name = IDENTIFIER_POINTER (t);
21751 }
21752
21753 return (name == 0 || *name == '\0') ? 0 : name;
21754 }
21755
21756 /* Return the type associated with a data member, make a special check
21757 for bit field types. */
21758
21759 static inline tree
21760 member_declared_type (const_tree member)
21761 {
21762 return (DECL_BIT_FIELD_TYPE (member)
21763 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21764 }
21765
21766 /* Get the decl's label, as described by its RTL. This may be different
21767 from the DECL_NAME name used in the source file. */
21768
21769 #if 0
21770 static const char *
21771 decl_start_label (tree decl)
21772 {
21773 rtx x;
21774 const char *fnname;
21775
21776 x = DECL_RTL (decl);
21777 gcc_assert (MEM_P (x));
21778
21779 x = XEXP (x, 0);
21780 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21781
21782 fnname = XSTR (x, 0);
21783 return fnname;
21784 }
21785 #endif
21786 \f
21787 /* For variable-length arrays that have been previously generated, but
21788 may be incomplete due to missing subscript info, fill the subscript
21789 info. Return TRUE if this is one of those cases. */
21790 static bool
21791 fill_variable_array_bounds (tree type)
21792 {
21793 if (TREE_ASM_WRITTEN (type)
21794 && TREE_CODE (type) == ARRAY_TYPE
21795 && variably_modified_type_p (type, NULL))
21796 {
21797 dw_die_ref array_die = lookup_type_die (type);
21798 if (!array_die)
21799 return false;
21800 add_subscript_info (array_die, type, !is_ada ());
21801 return true;
21802 }
21803 return false;
21804 }
21805
21806 /* These routines generate the internal representation of the DIE's for
21807 the compilation unit. Debugging information is collected by walking
21808 the declaration trees passed in from dwarf2out_decl(). */
21809
21810 static void
21811 gen_array_type_die (tree type, dw_die_ref context_die)
21812 {
21813 dw_die_ref array_die;
21814
21815 /* GNU compilers represent multidimensional array types as sequences of one
21816 dimensional array types whose element types are themselves array types.
21817 We sometimes squish that down to a single array_type DIE with multiple
21818 subscripts in the Dwarf debugging info. The draft Dwarf specification
21819 says that we are allowed to do this kind of compression in C, because
21820 there is no difference between an array of arrays and a multidimensional
21821 array. We don't do this for Ada, in order to remain as close as possible
21822 to the actual representation, which is especially important given the
21823 language's flexibility with respect to arrays of variable size. */
21824
21825 bool collapse_nested_arrays = !is_ada ();
21826
21827 if (fill_variable_array_bounds (type))
21828 return;
21829
21830 dw_die_ref scope_die = scope_die_for (type, context_die);
21831 tree element_type;
21832
21833 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21834 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21835 if (TREE_CODE (type) == ARRAY_TYPE
21836 && TYPE_STRING_FLAG (type)
21837 && is_fortran ()
21838 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21839 {
21840 HOST_WIDE_INT size;
21841
21842 array_die = new_die (DW_TAG_string_type, scope_die, type);
21843 add_name_attribute (array_die, type_tag (type));
21844 equate_type_number_to_die (type, array_die);
21845 size = int_size_in_bytes (type);
21846 if (size >= 0)
21847 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21848 /* ??? We can't annotate types late, but for LTO we may not
21849 generate a location early either (gfortran.dg/save_6.f90). */
21850 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21851 && TYPE_DOMAIN (type) != NULL_TREE
21852 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21853 {
21854 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21855 tree rszdecl = szdecl;
21856
21857 size = int_size_in_bytes (TREE_TYPE (szdecl));
21858 if (!DECL_P (szdecl))
21859 {
21860 if (TREE_CODE (szdecl) == INDIRECT_REF
21861 && DECL_P (TREE_OPERAND (szdecl, 0)))
21862 {
21863 rszdecl = TREE_OPERAND (szdecl, 0);
21864 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21865 != DWARF2_ADDR_SIZE)
21866 size = 0;
21867 }
21868 else
21869 size = 0;
21870 }
21871 if (size > 0)
21872 {
21873 dw_loc_list_ref loc
21874 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21875 NULL);
21876 if (loc)
21877 {
21878 add_AT_location_description (array_die, DW_AT_string_length,
21879 loc);
21880 if (size != DWARF2_ADDR_SIZE)
21881 add_AT_unsigned (array_die, dwarf_version >= 5
21882 ? DW_AT_string_length_byte_size
21883 : DW_AT_byte_size, size);
21884 }
21885 }
21886 }
21887 return;
21888 }
21889
21890 array_die = new_die (DW_TAG_array_type, scope_die, type);
21891 add_name_attribute (array_die, type_tag (type));
21892 equate_type_number_to_die (type, array_die);
21893
21894 if (TREE_CODE (type) == VECTOR_TYPE)
21895 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21896
21897 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21898 if (is_fortran ()
21899 && TREE_CODE (type) == ARRAY_TYPE
21900 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21901 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21902 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21903
21904 #if 0
21905 /* We default the array ordering. Debuggers will probably do the right
21906 things even if DW_AT_ordering is not present. It's not even an issue
21907 until we start to get into multidimensional arrays anyway. If a debugger
21908 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21909 then we'll have to put the DW_AT_ordering attribute back in. (But if
21910 and when we find out that we need to put these in, we will only do so
21911 for multidimensional arrays.) */
21912 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21913 #endif
21914
21915 if (TREE_CODE (type) == VECTOR_TYPE)
21916 {
21917 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21918 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21919 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21920 add_bound_info (subrange_die, DW_AT_upper_bound,
21921 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21922 }
21923 else
21924 add_subscript_info (array_die, type, collapse_nested_arrays);
21925
21926 /* Add representation of the type of the elements of this array type and
21927 emit the corresponding DIE if we haven't done it already. */
21928 element_type = TREE_TYPE (type);
21929 if (collapse_nested_arrays)
21930 while (TREE_CODE (element_type) == ARRAY_TYPE)
21931 {
21932 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21933 break;
21934 element_type = TREE_TYPE (element_type);
21935 }
21936
21937 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21938 TREE_CODE (type) == ARRAY_TYPE
21939 && TYPE_REVERSE_STORAGE_ORDER (type),
21940 context_die);
21941
21942 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21943 if (TYPE_ARTIFICIAL (type))
21944 add_AT_flag (array_die, DW_AT_artificial, 1);
21945
21946 if (get_AT (array_die, DW_AT_name))
21947 add_pubtype (type, array_die);
21948
21949 add_alignment_attribute (array_die, type);
21950 }
21951
21952 /* This routine generates a DIE for an array with a hidden descriptor; the
21953 details are filled into *info by a langhook. */
21954
21955 static void
21956 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21957 dw_die_ref context_die)
21958 {
21959 const dw_die_ref scope_die = scope_die_for (type, context_die);
21960 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21961 struct loc_descr_context context = { type, info->base_decl, NULL,
21962 false, false };
21963 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21964 int dim;
21965
21966 add_name_attribute (array_die, type_tag (type));
21967 equate_type_number_to_die (type, array_die);
21968
21969 if (info->ndimensions > 1)
21970 switch (info->ordering)
21971 {
21972 case array_descr_ordering_row_major:
21973 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21974 break;
21975 case array_descr_ordering_column_major:
21976 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21977 break;
21978 default:
21979 break;
21980 }
21981
21982 if (dwarf_version >= 3 || !dwarf_strict)
21983 {
21984 if (info->data_location)
21985 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21986 dw_scalar_form_exprloc, &context);
21987 if (info->associated)
21988 add_scalar_info (array_die, DW_AT_associated, info->associated,
21989 dw_scalar_form_constant
21990 | dw_scalar_form_exprloc
21991 | dw_scalar_form_reference, &context);
21992 if (info->allocated)
21993 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21994 dw_scalar_form_constant
21995 | dw_scalar_form_exprloc
21996 | dw_scalar_form_reference, &context);
21997 if (info->stride)
21998 {
21999 const enum dwarf_attribute attr
22000 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
22001 const int forms
22002 = (info->stride_in_bits)
22003 ? dw_scalar_form_constant
22004 : (dw_scalar_form_constant
22005 | dw_scalar_form_exprloc
22006 | dw_scalar_form_reference);
22007
22008 add_scalar_info (array_die, attr, info->stride, forms, &context);
22009 }
22010 }
22011 if (dwarf_version >= 5)
22012 {
22013 if (info->rank)
22014 {
22015 add_scalar_info (array_die, DW_AT_rank, info->rank,
22016 dw_scalar_form_constant
22017 | dw_scalar_form_exprloc, &context);
22018 subrange_tag = DW_TAG_generic_subrange;
22019 context.placeholder_arg = true;
22020 }
22021 }
22022
22023 add_gnat_descriptive_type_attribute (array_die, type, context_die);
22024
22025 for (dim = 0; dim < info->ndimensions; dim++)
22026 {
22027 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
22028
22029 if (info->dimen[dim].bounds_type)
22030 add_type_attribute (subrange_die,
22031 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
22032 false, context_die);
22033 if (info->dimen[dim].lower_bound)
22034 add_bound_info (subrange_die, DW_AT_lower_bound,
22035 info->dimen[dim].lower_bound, &context);
22036 if (info->dimen[dim].upper_bound)
22037 add_bound_info (subrange_die, DW_AT_upper_bound,
22038 info->dimen[dim].upper_bound, &context);
22039 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
22040 add_scalar_info (subrange_die, DW_AT_byte_stride,
22041 info->dimen[dim].stride,
22042 dw_scalar_form_constant
22043 | dw_scalar_form_exprloc
22044 | dw_scalar_form_reference,
22045 &context);
22046 }
22047
22048 gen_type_die (info->element_type, context_die);
22049 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
22050 TREE_CODE (type) == ARRAY_TYPE
22051 && TYPE_REVERSE_STORAGE_ORDER (type),
22052 context_die);
22053
22054 if (get_AT (array_die, DW_AT_name))
22055 add_pubtype (type, array_die);
22056
22057 add_alignment_attribute (array_die, type);
22058 }
22059
22060 #if 0
22061 static void
22062 gen_entry_point_die (tree decl, dw_die_ref context_die)
22063 {
22064 tree origin = decl_ultimate_origin (decl);
22065 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
22066
22067 if (origin != NULL)
22068 add_abstract_origin_attribute (decl_die, origin);
22069 else
22070 {
22071 add_name_and_src_coords_attributes (decl_die, decl);
22072 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
22073 TYPE_UNQUALIFIED, false, context_die);
22074 }
22075
22076 if (DECL_ABSTRACT_P (decl))
22077 equate_decl_number_to_die (decl, decl_die);
22078 else
22079 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
22080 }
22081 #endif
22082
22083 /* Walk through the list of incomplete types again, trying once more to
22084 emit full debugging info for them. */
22085
22086 static void
22087 retry_incomplete_types (void)
22088 {
22089 set_early_dwarf s;
22090 int i;
22091
22092 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22093 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22094 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22095 vec_safe_truncate (incomplete_types, 0);
22096 }
22097
22098 /* Determine what tag to use for a record type. */
22099
22100 static enum dwarf_tag
22101 record_type_tag (tree type)
22102 {
22103 if (! lang_hooks.types.classify_record)
22104 return DW_TAG_structure_type;
22105
22106 switch (lang_hooks.types.classify_record (type))
22107 {
22108 case RECORD_IS_STRUCT:
22109 return DW_TAG_structure_type;
22110
22111 case RECORD_IS_CLASS:
22112 return DW_TAG_class_type;
22113
22114 case RECORD_IS_INTERFACE:
22115 if (dwarf_version >= 3 || !dwarf_strict)
22116 return DW_TAG_interface_type;
22117 return DW_TAG_structure_type;
22118
22119 default:
22120 gcc_unreachable ();
22121 }
22122 }
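/* Illustrative example, not part of the original source: with the C++
   front end's classify_record hook,

       struct S { int i; };    RECORD_IS_STRUCT -> DW_TAG_structure_type
       class  C { int i; };    RECORD_IS_CLASS  -> DW_TAG_class_type

   while front ends that do not provide the hook (plain C, for instance)
   always get DW_TAG_structure_type.  Interface types, in languages that
   have them, map to DW_TAG_interface_type when the DWARF version or
   non-strict mode allows it. */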
22123
22124 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22125 include all of the information about the enumeration values also. Each
22126 enumerated type name/value is listed as a child of the enumerated type
22127 DIE. */
22128
22129 static dw_die_ref
22130 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22131 {
22132 dw_die_ref type_die = lookup_type_die (type);
22133 dw_die_ref orig_type_die = type_die;
22134
22135 if (type_die == NULL)
22136 {
22137 type_die = new_die (DW_TAG_enumeration_type,
22138 scope_die_for (type, context_die), type);
22139 equate_type_number_to_die (type, type_die);
22140 add_name_attribute (type_die, type_tag (type));
22141 if ((dwarf_version >= 4 || !dwarf_strict)
22142 && ENUM_IS_SCOPED (type))
22143 add_AT_flag (type_die, DW_AT_enum_class, 1);
22144 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22145 add_AT_flag (type_die, DW_AT_declaration, 1);
22146 if (!dwarf_strict)
22147 add_AT_unsigned (type_die, DW_AT_encoding,
22148 TYPE_UNSIGNED (type)
22149 ? DW_ATE_unsigned
22150 : DW_ATE_signed);
22151 }
22152 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22153 return type_die;
22154 else
22155 remove_AT (type_die, DW_AT_declaration);
22156
22157 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22158 given enum type is incomplete, do not generate the DW_AT_byte_size
22159 attribute or the DW_AT_element_list attribute. */
22160 if (TYPE_SIZE (type))
22161 {
22162 tree link;
22163
22164 if (!ENUM_IS_OPAQUE (type))
22165 TREE_ASM_WRITTEN (type) = 1;
22166 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22167 add_byte_size_attribute (type_die, type);
22168 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22169 add_alignment_attribute (type_die, type);
22170 if ((dwarf_version >= 3 || !dwarf_strict)
22171 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22172 {
22173 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22174 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22175 context_die);
22176 }
22177 if (TYPE_STUB_DECL (type) != NULL_TREE)
22178 {
22179 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22180 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22181 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22182 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22183 }
22184
22185 /* If the first reference to this type was as the return type of an
22186 inline function, then it may not have a parent. Fix this now. */
22187 if (type_die->die_parent == NULL)
22188 add_child_die (scope_die_for (type, context_die), type_die);
22189
22190 for (link = TYPE_VALUES (type);
22191 link != NULL; link = TREE_CHAIN (link))
22192 {
22193 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22194 tree value = TREE_VALUE (link);
22195
22196 gcc_assert (!ENUM_IS_OPAQUE (type));
22197 add_name_attribute (enum_die,
22198 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22199
22200 if (TREE_CODE (value) == CONST_DECL)
22201 value = DECL_INITIAL (value);
22202
22203 if (simple_type_size_in_bits (TREE_TYPE (value))
22204 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22205 {
22206 /* For constant forms created by add_AT_unsigned, DWARF
22207 consumers (GDB, elfutils, etc.) always zero-extend
22208 the value. Only when the actual value is negative
22209 do we need to use add_AT_int to generate a constant
22210 form that can represent negative values. */
22211 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22212 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22213 add_AT_unsigned (enum_die, DW_AT_const_value,
22214 (unsigned HOST_WIDE_INT) val);
22215 else
22216 add_AT_int (enum_die, DW_AT_const_value, val);
22217 }
22218 else
22219 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22220 that here. TODO: This should be re-worked to use correct
22221 signed/unsigned double tags for all cases. */
22222 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22223 }
22224
22225 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22226 if (TYPE_ARTIFICIAL (type)
22227 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22228 add_AT_flag (type_die, DW_AT_artificial, 1);
22229 }
22230 else
22231 add_AT_flag (type_die, DW_AT_declaration, 1);
22232
22233 add_pubtype (type, type_die);
22234
22235 return type_die;
22236 }
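/* Illustrative example, not part of the original source:

       enum color { RED, GREEN = 5, BLUE };

   yields a DW_TAG_enumeration_type DIE named "color", with the byte size
   of the enum type and one DW_TAG_enumerator child per value:
   "RED" = 0, "GREEN" = 5, "BLUE" = 6.  A C++11 'enum class' additionally
   gets DW_AT_enum_class, and an opaque declaration gets
   DW_AT_declaration, as handled above. */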
22237
22238 /* Generate a DIE to represent either a real live formal parameter decl or to
22239 represent just the type of some formal parameter position in some function
22240 type.
22241
22242 Note that this routine is a bit unusual because its argument may be a
22243 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22244 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22245 node. If it's the former then this function is being called to output a
22246 DIE to represent a formal parameter object (or some inlining thereof). If
22247 it's the latter, then this function is only being called to output a
22248 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22249 argument type of some subprogram type.
22250 If EMIT_NAME_P is true, name and source coordinate attributes
22251 are emitted. */
22252
22253 static dw_die_ref
22254 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22255 dw_die_ref context_die)
22256 {
22257 tree node_or_origin = node ? node : origin;
22258 tree ultimate_origin;
22259 dw_die_ref parm_die = NULL;
22260
22261 if (DECL_P (node_or_origin))
22262 {
22263 parm_die = lookup_decl_die (node);
22264
22265 /* If the contexts differ, we may not be talking about the same
22266 thing.
22267 ??? When in LTO the DIE parent is the "abstract" copy and the
22268 context_die is the specification "copy". */
22269 if (parm_die
22270 && parm_die->die_parent != context_die
22271 && (parm_die->die_parent->die_tag != DW_TAG_GNU_formal_parameter_pack
22272 || parm_die->die_parent->die_parent != context_die)
22273 && !in_lto_p)
22274 {
22275 gcc_assert (!DECL_ABSTRACT_P (node));
22276 /* This can happen when creating a concrete instance, in
22277 which case we need to create a new DIE that will get
22278 annotated with DW_AT_abstract_origin. */
22279 parm_die = NULL;
22280 }
22281
22282 if (parm_die && parm_die->die_parent == NULL)
22283 {
22284 /* Check that parm_die already has the right attributes that
22285 we would have added below. If any attributes are
22286 missing, fall through to add them. */
22287 if (! DECL_ABSTRACT_P (node_or_origin)
22288 && !get_AT (parm_die, DW_AT_location)
22289 && !get_AT (parm_die, DW_AT_const_value))
22290 /* We are missing location info, and are about to add it. */
22291 ;
22292 else
22293 {
22294 add_child_die (context_die, parm_die);
22295 return parm_die;
22296 }
22297 }
22298 }
22299
22300 /* If we have a previously generated DIE, use it, unless this is a
22301 concrete instance (origin != NULL), in which case we need a new
22302 DIE with a corresponding DW_AT_abstract_origin. */
22303 bool reusing_die;
22304 if (parm_die && origin == NULL)
22305 reusing_die = true;
22306 else
22307 {
22308 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22309 reusing_die = false;
22310 }
22311
22312 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22313 {
22314 case tcc_declaration:
22315 ultimate_origin = decl_ultimate_origin (node_or_origin);
22316 if (node || ultimate_origin)
22317 origin = ultimate_origin;
22318
22319 if (reusing_die)
22320 goto add_location;
22321
22322 if (origin != NULL)
22323 add_abstract_origin_attribute (parm_die, origin);
22324 else if (emit_name_p)
22325 add_name_and_src_coords_attributes (parm_die, node);
22326 if (origin == NULL
22327 || (! DECL_ABSTRACT_P (node_or_origin)
22328 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22329 decl_function_context
22330 (node_or_origin))))
22331 {
22332 tree type = TREE_TYPE (node_or_origin);
22333 if (decl_by_reference_p (node_or_origin))
22334 add_type_attribute (parm_die, TREE_TYPE (type),
22335 TYPE_UNQUALIFIED,
22336 false, context_die);
22337 else
22338 add_type_attribute (parm_die, type,
22339 decl_quals (node_or_origin),
22340 false, context_die);
22341 }
22342 if (origin == NULL && DECL_ARTIFICIAL (node))
22343 add_AT_flag (parm_die, DW_AT_artificial, 1);
22344 add_location:
22345 if (node && node != origin)
22346 equate_decl_number_to_die (node, parm_die);
22347 if (! DECL_ABSTRACT_P (node_or_origin))
22348 add_location_or_const_value_attribute (parm_die, node_or_origin,
22349 node == NULL);
22350
22351 break;
22352
22353 case tcc_type:
22354 /* We were called with some kind of a ..._TYPE node. */
22355 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22356 context_die);
22357 break;
22358
22359 default:
22360 gcc_unreachable ();
22361 }
22362
22363 return parm_die;
22364 }
22365
22366 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22367 children DW_TAG_formal_parameter DIEs representing the arguments of the
22368 parameter pack.
22369
22370 PARM_PACK must be a function parameter pack.
22371 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22372 must point to the subsequent arguments of the function PACK_ARG belongs to.
22373 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22374 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22375 following the last one for which a DIE was generated. */
22376
22377 static dw_die_ref
22378 gen_formal_parameter_pack_die (tree parm_pack,
22379 tree pack_arg,
22380 dw_die_ref subr_die,
22381 tree *next_arg)
22382 {
22383 tree arg;
22384 dw_die_ref parm_pack_die;
22385
22386 gcc_assert (parm_pack
22387 && lang_hooks.function_parameter_pack_p (parm_pack)
22388 && subr_die);
22389
22390 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22391 add_src_coords_attributes (parm_pack_die, parm_pack);
22392
22393 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22394 {
22395 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22396 parm_pack))
22397 break;
22398 gen_formal_parameter_die (arg, NULL,
22399 false /* Don't emit name attribute. */,
22400 parm_pack_die);
22401 }
22402 if (next_arg)
22403 *next_arg = arg;
22404 return parm_pack_die;
22405 }
22406
22407 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22408 at the end of an (ANSI prototyped) formal parameters list. */
22409
22410 static void
22411 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22412 {
22413 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22414 }
22415
22416 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22417 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22418 parameters as specified in some function type specification (except for
22419 those which appear as part of a function *definition*). */
22420
22421 static void
22422 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22423 {
22424 tree link;
22425 tree formal_type = NULL;
22426 tree first_parm_type;
22427 tree arg;
22428
22429 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22430 {
22431 arg = DECL_ARGUMENTS (function_or_method_type);
22432 function_or_method_type = TREE_TYPE (function_or_method_type);
22433 }
22434 else
22435 arg = NULL_TREE;
22436
22437 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22438
22439 /* Make our first pass over the list of formal parameter types and output a
22440 DW_TAG_formal_parameter DIE for each one. */
22441 for (link = first_parm_type; link; )
22442 {
22443 dw_die_ref parm_die;
22444
22445 formal_type = TREE_VALUE (link);
22446 if (formal_type == void_type_node)
22447 break;
22448
22449 /* Output a (nameless) DIE to represent the formal parameter itself. */
22450 parm_die = gen_formal_parameter_die (formal_type, NULL,
22451 true /* Emit name attribute. */,
22452 context_die);
22453 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22454 && link == first_parm_type)
22455 {
22456 add_AT_flag (parm_die, DW_AT_artificial, 1);
22457 if (dwarf_version >= 3 || !dwarf_strict)
22458 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22459 }
22460 else if (arg && DECL_ARTIFICIAL (arg))
22461 add_AT_flag (parm_die, DW_AT_artificial, 1);
22462
22463 link = TREE_CHAIN (link);
22464 if (arg)
22465 arg = DECL_CHAIN (arg);
22466 }
22467
22468 /* If this function type has an ellipsis, add a
22469 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22470 if (formal_type != void_type_node)
22471 gen_unspecified_parameters_die (function_or_method_type, context_die);
22472
22473 /* Make our second (and final) pass over the list of formal parameter types
22474 and output DIEs to represent those types (as necessary). */
22475 for (link = TYPE_ARG_TYPES (function_or_method_type);
22476 link && TREE_VALUE (link);
22477 link = TREE_CHAIN (link))
22478 gen_type_die (TREE_VALUE (link), context_die);
22479 }
22480
22481 /* We want to generate the DIE for TYPE so that we can generate the
22482 die for MEMBER, which has been defined; we will need to refer back
22483 to the member declaration nested within TYPE. If we're trying to
22484 generate minimal debug info for TYPE, processing TYPE won't do the
22485 trick; we need to attach the member declaration by hand. */
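
/* For example, given something like

     struct S { void m (); };
     void S::m () { }

   if debug info for S was emitted in reduced form (its member
   declarations suppressed), the declaration DIE for S::m is attached
   under S's DIE here so that the DIE for the out-of-class definition can
   refer back to it (via DW_AT_specification).  */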
22486
22487 static void
22488 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22489 {
22490 gen_type_die (type, context_die);
22491
22492 /* If we're trying to avoid duplicate debug info, we may not have
22493 emitted the member decl for this function. Emit it now. */
22494 if (TYPE_STUB_DECL (type)
22495 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22496 && ! lookup_decl_die (member))
22497 {
22498 dw_die_ref type_die;
22499 gcc_assert (!decl_ultimate_origin (member));
22500
22501 type_die = lookup_type_die_strip_naming_typedef (type);
22502 if (TREE_CODE (member) == FUNCTION_DECL)
22503 gen_subprogram_die (member, type_die);
22504 else if (TREE_CODE (member) == FIELD_DECL)
22505 {
22506 /* Ignore the nameless fields that are used to skip bits but handle
22507 C++ anonymous unions and structs. */
22508 if (DECL_NAME (member) != NULL_TREE
22509 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22510 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22511 {
22512 struct vlr_context vlr_ctx = {
22513 DECL_CONTEXT (member), /* struct_type */
22514 NULL_TREE /* variant_part_offset */
22515 };
22516 gen_type_die (member_declared_type (member), type_die);
22517 gen_field_die (member, &vlr_ctx, type_die);
22518 }
22519 }
22520 else
22521 gen_variable_die (member, NULL_TREE, type_die);
22522 }
22523 }
22524 \f
22525 /* Forward declare this function, because it is mutually recursive
22526 with its set_block_* pairing function. */
22527 static void set_decl_origin_self (tree);
22528
22529 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22530 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22531 that it points to the node itself, thus indicating that the node is its
22532 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22533 the given node is NULL, recursively descend the decl/block tree which
22534 it is the root of, and for each other ..._DECL or BLOCK node contained
22535 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22536 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22537 values to point to themselves. */
22538
22539 static void
22540 set_block_origin_self (tree stmt)
22541 {
22542 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22543 {
22544 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22545
22546 {
22547 tree local_decl;
22548
22549 for (local_decl = BLOCK_VARS (stmt);
22550 local_decl != NULL_TREE;
22551 local_decl = DECL_CHAIN (local_decl))
22552 /* Do not recurse on nested functions since the inlining status
22553 of parent and child can be different as per the DWARF spec. */
22554 if (TREE_CODE (local_decl) != FUNCTION_DECL
22555 && !DECL_EXTERNAL (local_decl))
22556 set_decl_origin_self (local_decl);
22557 }
22558
22559 {
22560 tree subblock;
22561
22562 for (subblock = BLOCK_SUBBLOCKS (stmt);
22563 subblock != NULL_TREE;
22564 subblock = BLOCK_CHAIN (subblock))
22565 set_block_origin_self (subblock); /* Recurse. */
22566 }
22567 }
22568 }
22569
22570 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22571 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22572 node so that it points to the node itself, thus indicating that the
22573 node represents its own (abstract) origin. Additionally, if the
22574 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22575 the decl/block tree of which the given node is the root, and for
22576 each other ..._DECL or BLOCK node contained therein whose
22577 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22578 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22579 point to themselves. */
22580
22581 static void
22582 set_decl_origin_self (tree decl)
22583 {
22584 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22585 {
22586 DECL_ABSTRACT_ORIGIN (decl) = decl;
22587 if (TREE_CODE (decl) == FUNCTION_DECL)
22588 {
22589 tree arg;
22590
22591 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22592 DECL_ABSTRACT_ORIGIN (arg) = arg;
22593 if (DECL_INITIAL (decl) != NULL_TREE
22594 && DECL_INITIAL (decl) != error_mark_node)
22595 set_block_origin_self (DECL_INITIAL (decl));
22596 }
22597 }
22598 }
22599 \f
22600 /* Mark the early DIE for DECL as the abstract instance. */
22601
22602 static void
22603 dwarf2out_abstract_function (tree decl)
22604 {
22605 dw_die_ref old_die;
22606
22607 /* Make sure we have the actual abstract inline, not a clone. */
22608 decl = DECL_ORIGIN (decl);
22609
22610 if (DECL_IGNORED_P (decl))
22611 return;
22612
22613 /* In LTO we're all set. We already created abstract instances
22614 early and we want to avoid creating a concrete instance of that
22615 if we don't output it. */
22616 if (in_lto_p)
22617 return;
22618
22619 old_die = lookup_decl_die (decl);
22620 gcc_assert (old_die != NULL);
22621 if (get_AT (old_die, DW_AT_inline))
22622 /* We've already generated the abstract instance. */
22623 return;
22624
22625 /* Go ahead and put DW_AT_inline on the DIE. */
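/* Roughly: declared inline and possibly inlined (per the cgraph) yields
   DW_INL_declared_inlined; declared inline but never inlined yields
   DW_INL_declared_not_inlined; not declared inline but inlined anyway
   yields DW_INL_inlined; otherwise DW_INL_not_inlined. */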
22626 if (DECL_DECLARED_INLINE_P (decl))
22627 {
22628 if (cgraph_function_possibly_inlined_p (decl))
22629 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22630 else
22631 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22632 }
22633 else
22634 {
22635 if (cgraph_function_possibly_inlined_p (decl))
22636 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22637 else
22638 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22639 }
22640
22641 if (DECL_DECLARED_INLINE_P (decl)
22642 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22643 add_AT_flag (old_die, DW_AT_artificial, 1);
22644
22645 set_decl_origin_self (decl);
22646 }
22647
22648 /* Helper function of premark_used_types() which gets called through
22649 htab_traverse.
22650
22651 Marks the DIE of the given TYPE as perennial, so it never gets
22652 marked as unused by prune_unused_types. */
22653
22654 bool
22655 premark_used_types_helper (tree const &type, void *)
22656 {
22657 dw_die_ref die;
22658
22659 die = lookup_type_die (type);
22660 if (die != NULL)
22661 die->die_perennial_p = 1;
22662 return true;
22663 }
22664
22665 /* Helper function of premark_types_used_by_global_vars which gets called
22666 through htab_traverse.
22667
22668 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22669 marked as unused by prune_unused_types. The DIE of the type is marked
22670 only if the global variable using the type will actually be emitted. */
22671
22672 int
22673 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22674 void *)
22675 {
22676 struct types_used_by_vars_entry *entry;
22677 dw_die_ref die;
22678
22679 entry = (struct types_used_by_vars_entry *) *slot;
22680 gcc_assert (entry->type != NULL
22681 && entry->var_decl != NULL);
22682 die = lookup_type_die (entry->type);
22683 if (die)
22684 {
22685 /* Ask cgraph if the global variable really is to be emitted.
22686 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22687 varpool_node *node = varpool_node::get (entry->var_decl);
22688 if (node && node->definition)
22689 {
22690 die->die_perennial_p = 1;
22691 /* Keep the parent DIEs as well. */
22692 while ((die = die->die_parent) && die->die_perennial_p == 0)
22693 die->die_perennial_p = 1;
22694 }
22695 }
22696 return 1;
22697 }
22698
22699 /* Mark all members of used_types_hash as perennial. */
22700
22701 static void
22702 premark_used_types (struct function *fun)
22703 {
22704 if (fun && fun->used_types_hash)
22705 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22706 }
22707
22708 /* Mark all members of types_used_by_vars_entry as perennial. */
22709
22710 static void
22711 premark_types_used_by_global_vars (void)
22712 {
22713 if (types_used_by_vars_hash)
22714 types_used_by_vars_hash
22715 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22716 }
22717
22718 /* Mark all variables used by the symtab as perennial. */
22719
22720 static void
22721 premark_used_variables (void)
22722 {
22723 /* Mark DIEs in the symtab as used. */
22724 varpool_node *var;
22725 FOR_EACH_VARIABLE (var)
22726 {
22727 dw_die_ref die = lookup_decl_die (var->decl);
22728 if (die)
22729 die->die_perennial_p = 1;
22730 }
22731 }
22732
22733 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22734 for the call arg loc node CA_LOC. */
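
/* Roughly, the DIE built here looks like

     DW_TAG_call_site              (child of the innermost enclosing
                                    block's DIE, or of SUBR_DIE itself)
       DW_AT_call_return_pc  <label after the call>
       DW_AT_call_tail_call  1     (tail calls only)
       DW_AT_call_origin     <callee's DIE, or the callee's address>

   with the corresponding GNU extension forms used for pre-DWARF 5
   output via dwarf_TAG/dwarf_AT.  */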
22735
22736 static dw_die_ref
22737 gen_call_site_die (tree decl, dw_die_ref subr_die,
22738 struct call_arg_loc_node *ca_loc)
22739 {
22740 dw_die_ref stmt_die = NULL, die;
22741 tree block = ca_loc->block;
22742
22743 while (block
22744 && block != DECL_INITIAL (decl)
22745 && TREE_CODE (block) == BLOCK)
22746 {
22747 stmt_die = lookup_block_die (block);
22748 if (stmt_die)
22749 break;
22750 block = BLOCK_SUPERCONTEXT (block);
22751 }
22752 if (stmt_die == NULL)
22753 stmt_die = subr_die;
22754 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22755 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22756 if (ca_loc->tail_call_p)
22757 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22758 if (ca_loc->symbol_ref)
22759 {
22760 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22761 if (tdie)
22762 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22763 else
22764 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22765 false);
22766 }
22767 return die;
22768 }
22769
22770 /* Generate a DIE to represent a declared function (either file-scope or
22771 block-local). */
22772
22773 static void
22774 gen_subprogram_die (tree decl, dw_die_ref context_die)
22775 {
22776 tree origin = decl_ultimate_origin (decl);
22777 dw_die_ref subr_die;
22778 dw_die_ref old_die = lookup_decl_die (decl);
22779
22780 /* This function gets called multiple times for different stages of
22781 the debug process. For example, for func() in this code:
22782
22783 namespace S
22784 {
22785 void func() { ... }
22786 }
22787
22788 ...we get called 4 times. Twice in early debug and twice in
22789 late debug:
22790
22791 Early debug
22792 -----------
22793
22794 1. Once while generating func() within the namespace. This is
22795 the declaration. The declaration bit below is set, as the
22796 context is the namespace.
22797
22798 A new DIE will be generated with DW_AT_declaration set.
22799
22800 2. Once for func() itself. This is the specification. The
22801 declaration bit below is clear as the context is the CU.
22802
22803 We will use the cached DIE from (1) to create a new DIE with
22804 DW_AT_specification pointing to the declaration in (1).
22805
22806 Late debug via rest_of_handle_final()
22807 -------------------------------------
22808
22809 3. Once while generating func() within the namespace. This is also the
22810 declaration, as in (1), but this time we will early exit below
22811 as we have a cached DIE and a declaration needs no additional
22812 annotations (no locations), as the source declaration line
22813 info is enough.
22814
22815 4. Once for func() itself. As in (2), this is the specification,
22816 but this time we will re-use the cached DIE, and just annotate
22817 it with the location information that should now be available.
22818
22819 For something without namespaces, but with abstract instances, we
22820 are also called multiple times:
22821
22822 class Base
22823 {
22824 public:
22825 Base (); // constructor declaration (1)
22826 };
22827
22828 Base::Base () { } // constructor specification (2)
22829
22830 Early debug
22831 -----------
22832
22833 1. Once for the Base() constructor by virtue of it being a
22834 member of the Base class. This is done via
22835 rest_of_type_compilation.
22836
22837 This is a declaration, so a new DIE will be created with
22838 DW_AT_declaration.
22839
22840 2. Once for the Base() constructor definition, but this time
22841 while generating the abstract instance of the base
22842 constructor (__base_ctor) which is being generated via early
22843 debug of reachable functions.
22844
22845 Even though we have a cached version of the declaration (1),
22846 we will create a DW_AT_specification of the declaration DIE
22847 in (1).
22848
22849 3. Once for the __base_ctor itself, but this time, we generate
22850 a DW_AT_abstract_origin version of the DW_AT_specification in
22851 (2).
22852
22853 Late debug via rest_of_handle_final
22854 -----------------------------------
22855
22856 4. One final time for the __base_ctor (which will have a cached
22857 DIE with DW_AT_abstract_origin created in (3)). This time,
22858 we will just annotate the location information now
22859 available.
22860 */
22861 int declaration = (current_function_decl != decl
22862 || (!DECL_INITIAL (decl) && !origin)
22863 || class_or_namespace_scope_p (context_die));
22864
22865 /* A declaration that has been previously dumped needs no
22866 additional information. */
22867 if (old_die && declaration)
22868 return;
22869
22870 /* Now that the C++ front end lazily declares artificial member fns, we
22871 might need to retrofit the declaration into its class. */
22872 if (!declaration && !origin && !old_die
22873 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22874 && !class_or_namespace_scope_p (context_die)
22875 && debug_info_level > DINFO_LEVEL_TERSE)
22876 old_die = force_decl_die (decl);
22877
22878 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22879 if (origin != NULL)
22880 {
22881 gcc_assert (!declaration || local_scope_p (context_die));
22882
22883 /* Fixup die_parent for the abstract instance of a nested
22884 inline function. */
22885 if (old_die && old_die->die_parent == NULL)
22886 add_child_die (context_die, old_die);
22887
22888 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22889 {
22890 /* If we have a DW_AT_abstract_origin we have a working
22891 cached version. */
22892 subr_die = old_die;
22893 }
22894 else
22895 {
22896 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22897 add_abstract_origin_attribute (subr_die, origin);
22898 /* This is where the actual code for a cloned function is.
22899 Let's emit the linkage name attribute for it. This helps
22900 debuggers to, e.g., set breakpoints in
22901 constructors/destructors when the user asks "break
22902 K::K". */
22903 add_linkage_name (subr_die, decl);
22904 }
22905 }
22906 /* A cached copy, possibly from early dwarf generation. Reuse as
22907 much as possible. */
22908 else if (old_die)
22909 {
22910 if (!get_AT_flag (old_die, DW_AT_declaration)
22911 /* We can have a normal definition following an inline one in the
22912 case of redefinition of GNU C extern inlines.
22913 It seems reasonable to use AT_specification in this case. */
22914 && !get_AT (old_die, DW_AT_inline))
22915 {
22916 /* Detect and ignore this case, where we are trying to output
22917 something we have already output. */
22918 if (get_AT (old_die, DW_AT_low_pc)
22919 || get_AT (old_die, DW_AT_ranges))
22920 return;
22921
22922 /* If we have no location information, this must be a
22923 partially generated DIE from early dwarf generation.
22924 Fall through and generate it. */
22925 }
22926
22927 /* If the definition comes from the same place as the declaration,
22928 maybe use the old DIE. We always want the DIE for this function
22929 that has the *_pc attributes to be under comp_unit_die so the
22930 debugger can find it. We also need to do this for abstract
22931 instances of inlines, since the spec requires the out-of-line copy
22932 to have the same parent. For local class methods, this doesn't
22933 apply; we just use the old DIE. */
22934 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22935 struct dwarf_file_data * file_index = lookup_filename (s.file);
22936 if (((is_unit_die (old_die->die_parent)
22937 /* This condition fixes the inconsistency/ICE with the
22938 following Fortran test (or some derivative thereof) while
22939 building libgfortran:
22940
22941 module some_m
22942 contains
22943 logical function funky (FLAG)
22944 funky = .true.
22945 end function
22946 end module
22947 */
22948 || (old_die->die_parent
22949 && old_die->die_parent->die_tag == DW_TAG_module)
22950 || local_scope_p (old_die->die_parent)
22951 || context_die == NULL)
22952 && (DECL_ARTIFICIAL (decl)
22953 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22954 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22955 == (unsigned) s.line)
22956 && (!debug_column_info
22957 || s.column == 0
22958 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22959 == (unsigned) s.column)))))
22960 /* With LTO if there's an abstract instance for
22961 the old DIE, this is a concrete instance and
22962 thus re-use the DIE. */
22963 || get_AT (old_die, DW_AT_abstract_origin))
22964 {
22965 subr_die = old_die;
22966
22967 /* Clear out the declaration attribute, but leave the
22968 parameters so they can be augmented with location
22969 information later. Unless this was a declaration, in
22970 which case, wipe out the nameless parameters and recreate
22971 them further down. */
22972 if (remove_AT (subr_die, DW_AT_declaration))
22973 {
22974
22975 remove_AT (subr_die, DW_AT_object_pointer);
22976 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22977 }
22978 }
22979 /* Make a specification pointing to the previously built
22980 declaration. */
22981 else
22982 {
22983 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22984 add_AT_specification (subr_die, old_die);
22985 add_pubname (decl, subr_die);
22986 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22987 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22988 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22989 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22990 if (debug_column_info
22991 && s.column
22992 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22993 != (unsigned) s.column))
22994 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22995
22996 /* If the prototype had an 'auto' or 'decltype(auto)' in
22997 the return type, emit the real type on the definition DIE. */
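/* For example, for C++14 code like

     struct S { auto get (); };
     auto S::get () { return 42; }

   the in-class declaration DIE refers to the placeholder auto type,
   while the definition DIE built here gets the deduced return type,
   int. */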
22998 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22999 {
23000 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
23001 while (die
23002 && (die->die_tag == DW_TAG_reference_type
23003 || die->die_tag == DW_TAG_rvalue_reference_type
23004 || die->die_tag == DW_TAG_pointer_type
23005 || die->die_tag == DW_TAG_const_type
23006 || die->die_tag == DW_TAG_volatile_type
23007 || die->die_tag == DW_TAG_restrict_type
23008 || die->die_tag == DW_TAG_array_type
23009 || die->die_tag == DW_TAG_ptr_to_member_type
23010 || die->die_tag == DW_TAG_subroutine_type))
23011 die = get_AT_ref (die, DW_AT_type);
23012 if (die == auto_die || die == decltype_auto_die)
23013 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23014 TYPE_UNQUALIFIED, false, context_die);
23015 }
23016
23017 /* When we process the method declaration, we haven't seen
23018 the out-of-class defaulted definition yet, so we have to
23019 recheck now. */
23020 if ((dwarf_version >= 5 || ! dwarf_strict)
23021 && !get_AT (subr_die, DW_AT_defaulted))
23022 {
23023 int defaulted
23024 = lang_hooks.decls.decl_dwarf_attribute (decl,
23025 DW_AT_defaulted);
23026 if (defaulted != -1)
23027 {
23028 /* Other values must have been handled before. */
23029 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
23030 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23031 }
23032 }
23033 }
23034 }
23035 /* Create a fresh DIE for anything else. */
23036 else
23037 {
23038 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23039
23040 if (TREE_PUBLIC (decl))
23041 add_AT_flag (subr_die, DW_AT_external, 1);
23042
23043 add_name_and_src_coords_attributes (subr_die, decl);
23044 add_pubname (decl, subr_die);
23045 if (debug_info_level > DINFO_LEVEL_TERSE)
23046 {
23047 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
23048 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23049 TYPE_UNQUALIFIED, false, context_die);
23050 }
23051
23052 add_pure_or_virtual_attribute (subr_die, decl);
23053 if (DECL_ARTIFICIAL (decl))
23054 add_AT_flag (subr_die, DW_AT_artificial, 1);
23055
23056 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
23057 add_AT_flag (subr_die, DW_AT_noreturn, 1);
23058
23059 add_alignment_attribute (subr_die, decl);
23060
23061 add_accessibility_attribute (subr_die, decl);
23062 }
23063
23064 /* Unless we have an existing non-declaration DIE, equate the new
23065 DIE. */
23066 if (!old_die || is_declaration_die (old_die))
23067 equate_decl_number_to_die (decl, subr_die);
23068
23069 if (declaration)
23070 {
23071 if (!old_die || !get_AT (old_die, DW_AT_inline))
23072 {
23073 add_AT_flag (subr_die, DW_AT_declaration, 1);
23074
23075 /* If this is an explicit function declaration then generate
23076 a DW_AT_explicit attribute. */
23077 if ((dwarf_version >= 3 || !dwarf_strict)
23078 && lang_hooks.decls.decl_dwarf_attribute (decl,
23079 DW_AT_explicit) == 1)
23080 add_AT_flag (subr_die, DW_AT_explicit, 1);
23081
23082 /* If this is a C++11 deleted special function member then generate
23083 a DW_AT_deleted attribute. */
23084 if ((dwarf_version >= 5 || !dwarf_strict)
23085 && lang_hooks.decls.decl_dwarf_attribute (decl,
23086 DW_AT_deleted) == 1)
23087 add_AT_flag (subr_die, DW_AT_deleted, 1);
23088
23089 /* If this is a C++11 defaulted special function member then
23090 generate a DW_AT_defaulted attribute. */
23091 if (dwarf_version >= 5 || !dwarf_strict)
23092 {
23093 int defaulted
23094 = lang_hooks.decls.decl_dwarf_attribute (decl,
23095 DW_AT_defaulted);
23096 if (defaulted != -1)
23097 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23098 }
23099
23100 /* If this is a C++11 non-static member function with & ref-qualifier
23101 then generate a DW_AT_reference attribute. */
23102 if ((dwarf_version >= 5 || !dwarf_strict)
23103 && lang_hooks.decls.decl_dwarf_attribute (decl,
23104 DW_AT_reference) == 1)
23105 add_AT_flag (subr_die, DW_AT_reference, 1);
23106
23107 /* If this is a C++11 non-static member function with &&
23108 ref-qualifier then generate a DW_AT_reference attribute. */
23109 if ((dwarf_version >= 5 || !dwarf_strict)
23110 && lang_hooks.decls.decl_dwarf_attribute (decl,
23111 DW_AT_rvalue_reference)
23112 == 1)
23113 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23114 }
23115 }
23116 /* For non-DECL_EXTERNAL decls, if range information is available, fill
23117 the DIE with it. */
23118 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23119 {
23120 HOST_WIDE_INT cfa_fb_offset;
23121
23122 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23123
23124 if (!crtl->has_bb_partition)
23125 {
23126 dw_fde_ref fde = fun->fde;
23127 if (fde->dw_fde_begin)
23128 {
23129 /* We have already generated the labels. */
23130 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23131 fde->dw_fde_end, false);
23132 }
23133 else
23134 {
23135 /* Create start/end labels and add the range. */
23136 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23137 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23138 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23139 current_function_funcdef_no);
23140 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23141 current_function_funcdef_no);
23142 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23143 false);
23144 }
23145
23146 #if VMS_DEBUGGING_INFO
23147 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23148 Section 2.3 Prologue and Epilogue Attributes:
23149 When a breakpoint is set on entry to a function, it is generally
23150 desirable for execution to be suspended, not on the very first
23151 instruction of the function, but rather at a point after the
23152 function's frame has been set up, after any language defined local
23153 declaration processing has been completed, and before execution of
23154 the first statement of the function begins. Debuggers generally
23155 cannot properly determine where this point is. Similarly for a
23156 breakpoint set on exit from a function. The prologue and epilogue
23157 attributes allow a compiler to communicate the location(s) to use. */
23158
23159 {
23160 if (fde->dw_fde_vms_end_prologue)
23161 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23162 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23163
23164 if (fde->dw_fde_vms_begin_epilogue)
23165 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23166 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23167 }
23168 #endif
23169
23170 }
23171 else
23172 {
23173 /* Generate pubnames entries for the split function code ranges. */
23174 dw_fde_ref fde = fun->fde;
23175
23176 if (fde->dw_fde_second_begin)
23177 {
23178 if (dwarf_version >= 3 || !dwarf_strict)
23179 {
23180 /* We should use ranges for non-contiguous code section
23181 addresses. Use the actual code range for the initial
23182 section, since the HOT/COLD labels might precede an
23183 alignment offset. */
23184 bool range_list_added = false;
23185 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23186 fde->dw_fde_end, &range_list_added,
23187 false);
23188 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23189 fde->dw_fde_second_end,
23190 &range_list_added, false);
23191 if (range_list_added)
23192 add_ranges (NULL);
23193 }
23194 else
23195 {
23196 /* There is no real support in DW2 for this ... so we make
23197 a work-around. First, emit the pub name for the segment
23198 containing the function label. Then make and emit a
23199 simplified subprogram DIE for the second segment with the
23200 name prefixed by __second_sect_of_. We use the same
23201 linkage name for the second DIE so that gdb will find both
23202 sections when given "b foo". */
23203 const char *name = NULL;
23204 tree decl_name = DECL_NAME (decl);
23205 dw_die_ref seg_die;
23206
23207 /* Do the 'primary' section. */
23208 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23209 fde->dw_fde_end, false);
23210
23211 /* Build a minimal DIE for the secondary section. */
23212 seg_die = new_die (DW_TAG_subprogram,
23213 subr_die->die_parent, decl);
23214
23215 if (TREE_PUBLIC (decl))
23216 add_AT_flag (seg_die, DW_AT_external, 1);
23217
23218 if (decl_name != NULL
23219 && IDENTIFIER_POINTER (decl_name) != NULL)
23220 {
23221 name = dwarf2_name (decl, 1);
23222 if (! DECL_ARTIFICIAL (decl))
23223 add_src_coords_attributes (seg_die, decl);
23224
23225 add_linkage_name (seg_die, decl);
23226 }
23227 gcc_assert (name != NULL);
23228 add_pure_or_virtual_attribute (seg_die, decl);
23229 if (DECL_ARTIFICIAL (decl))
23230 add_AT_flag (seg_die, DW_AT_artificial, 1);
23231
23232 name = concat ("__second_sect_of_", name, NULL);
23233 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23234 fde->dw_fde_second_end, false);
23235 add_name_attribute (seg_die, name);
23236 if (want_pubnames ())
23237 add_pubname_string (name, seg_die);
23238 }
23239 }
23240 else
23241 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23242 false);
23243 }
23244
23245 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23246
23247 /* We define the "frame base" as the function's CFA. This is more
23248 convenient for several reasons: (1) It's stable across the prologue
23249 and epilogue, which makes it better than just a frame pointer,
23250 (2) With dwarf3, there exists a one-byte encoding that allows us
23251 to reference the .debug_frame data by proxy, but failing that,
23252 (3) We can at least reuse the code inspection and interpretation
23253 code that determines the CFA position at various points in the
23254 function. */
23255 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23256 {
23257 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23258 add_AT_loc (subr_die, DW_AT_frame_base, op);
23259 }
23260 else
23261 {
23262 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23263 if (list->dw_loc_next)
23264 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23265 else
23266 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23267 }
23268
23269 /* Compute a displacement from the "steady-state frame pointer" to
23270 the CFA. The former is what all stack slots and argument slots
23271 will reference in the rtl; the latter is what we've told the
23272 debugger about. We'll need to adjust all frame_base references
23273 by this displacement. */
23274 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23275
23276 if (fun->static_chain_decl)
23277 {
23278 /* DWARF requires a location expression here that computes the
23279 address of the enclosing subprogram's frame base. The machinery
23280 in tree-nested.c is supposed to store this specific address in the
23281 last field of the FRAME record. */
23282 const tree frame_type
23283 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23284 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23285
23286 tree fb_expr
23287 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23288 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23289 fb_expr, fb_decl, NULL_TREE);
23290
23291 add_AT_location_description (subr_die, DW_AT_static_link,
23292 loc_list_from_tree (fb_expr, 0, NULL));
23293 }
23294
23295 resolve_variable_values ();
23296 }
23297
23298 /* Generate child DIEs for template parameters. */
23299 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23300 gen_generic_params_dies (decl);
23301
23302 /* Now output descriptions of the arguments for this function. This gets
23303 (unnecessarily?) complex because the DECL_ARGUMENTS list
23304 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23305 `...' at the end of the formal parameter list. In order to find out if
23306 there was a trailing ellipsis or not, we must instead look at the type
23307 associated with the FUNCTION_DECL. This will be a node of type
23308 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23309 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23310 an ellipsis at the end. */
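
/* Concretely, the TYPE_ARG_TYPES chain for "int f (int, char *)" ends in
   void_type_node, while the chain for "int g (int, ...)" does not; that
   is how the trailing ellipsis is detected.  */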
23311
23312 /* In the case where we are describing a mere function declaration, all we
23313 need to do here (and all we *can* do here) is to describe the *types* of
23314 its formal parameters. */
23315 if (debug_info_level <= DINFO_LEVEL_TERSE)
23316 ;
23317 else if (declaration)
23318 gen_formal_types_die (decl, subr_die);
23319 else
23320 {
23321 /* Generate DIEs to represent all known formal parameters. */
23322 tree parm = DECL_ARGUMENTS (decl);
23323 tree generic_decl = early_dwarf
23324 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23325 tree generic_decl_parm = generic_decl
23326 ? DECL_ARGUMENTS (generic_decl)
23327 : NULL;
23328
23329 /* Now we want to walk the list of parameters of the function and
23330 emit their relevant DIEs.
23331
23332 We consider the case of DECL being an instance of a generic function
23333 as well as it being a normal function.
23334
23335 If DECL is an instance of a generic function we walk the
23336 parameters of the generic function declaration _and_ the parameters of
23337 DECL itself. This is useful because we want to emit specific DIEs for
23338 function parameter packs and those are declared as part of the
23339 generic function declaration. In that particular case,
23340 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23341 That DIE has child DIEs representing the set of arguments
23342 of the pack. Note that the set of pack arguments can be empty.
23343 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23344 children DIE.
23345
23346 Otherwise, we just consider the parameters of DECL. */
23347 while (generic_decl_parm || parm)
23348 {
23349 if (generic_decl_parm
23350 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23351 gen_formal_parameter_pack_die (generic_decl_parm,
23352 parm, subr_die,
23353 &parm);
23354 else if (parm)
23355 {
23356 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23357
23358 if (early_dwarf
23359 && parm == DECL_ARGUMENTS (decl)
23360 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23361 && parm_die
23362 && (dwarf_version >= 3 || !dwarf_strict))
23363 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23364
23365 parm = DECL_CHAIN (parm);
23366 }
23367
23368 if (generic_decl_parm)
23369 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23370 }
23371
23372 /* Decide whether we need an unspecified_parameters DIE at the end.
23373 There are two cases to do this for: 1) the ANSI `...' declaration -
23374 this is detectable when the end of the arg list is not a
23375 void_type_node; 2) an unprototyped function declaration (not a
23376 definition). This just means that we have no info about the
23377 parameters at all. */
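/* E.g. a variadic definition such as "int f (int x, ...) { ... }" would
   get a DW_TAG_unspecified_parameters child here for the ellipsis. */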
23378 if (early_dwarf)
23379 {
23380 if (prototype_p (TREE_TYPE (decl)))
23381 {
23382 /* This is the prototyped case; check for a trailing ellipsis. */
23383 if (stdarg_p (TREE_TYPE (decl)))
23384 gen_unspecified_parameters_die (decl, subr_die);
23385 }
23386 else if (DECL_INITIAL (decl) == NULL_TREE)
23387 gen_unspecified_parameters_die (decl, subr_die);
23388 }
23389 }
23390
23391 if (subr_die != old_die)
23392 /* Add the calling convention attribute if requested. */
23393 add_calling_convention_attribute (subr_die, decl);
23394
23395 /* Output Dwarf info for all of the stuff within the body of the function
23396 (if it has one - it may be just a declaration).
23397
23398 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23399 a function. This BLOCK actually represents the outermost binding contour
23400 for the function, i.e. the contour in which the function's formal
23401 parameters and labels get declared. Curiously, it appears that the front
23402 end doesn't actually put the PARM_DECL nodes for the current function onto
23403 the BLOCK_VARS list for this outer scope; they are strung off of the
23404 DECL_ARGUMENTS list for the function instead.
23405
23406 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23407 the LABEL_DECL nodes for the function, however, and we output DWARF info
23408 for those in decls_for_scope. Just within the `outer_scope' there will be
23409 a BLOCK node representing the function's outermost pair of curly braces,
23410 and any blocks used for the base and member initializers of a C++
23411 constructor function. */
23412 tree outer_scope = DECL_INITIAL (decl);
23413 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23414 {
23415 int call_site_note_count = 0;
23416 int tail_call_site_note_count = 0;
23417
23418 /* Emit a DW_TAG_variable DIE for a named return value. */
23419 if (DECL_NAME (DECL_RESULT (decl)))
23420 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23421
23422 /* The first time through decls_for_scope we will generate the
23423 DIEs for the locals. The second time, we fill in the
23424 location info. */
23425 decls_for_scope (outer_scope, subr_die);
23426
23427 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23428 {
23429 struct call_arg_loc_node *ca_loc;
23430 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23431 {
23432 dw_die_ref die = NULL;
23433 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23434 rtx arg, next_arg;
23435 tree arg_decl = NULL_TREE;
23436
23437 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23438 ? XEXP (ca_loc->call_arg_loc_note, 0)
23439 : NULL_RTX);
23440 arg; arg = next_arg)
23441 {
23442 dw_loc_descr_ref reg, val;
23443 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23444 dw_die_ref cdie, tdie = NULL;
23445
23446 next_arg = XEXP (arg, 1);
23447 if (REG_P (XEXP (XEXP (arg, 0), 0))
23448 && next_arg
23449 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23450 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23451 && REGNO (XEXP (XEXP (arg, 0), 0))
23452 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23453 next_arg = XEXP (next_arg, 1);
23454 if (mode == VOIDmode)
23455 {
23456 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23457 if (mode == VOIDmode)
23458 mode = GET_MODE (XEXP (arg, 0));
23459 }
23460 if (mode == VOIDmode || mode == BLKmode)
23461 continue;
23462 /* Get dynamic information about the call target only if we
23463 have no static information: we cannot generate both
23464 DW_AT_call_origin and DW_AT_call_target
23465 attributes. */
23466 if (ca_loc->symbol_ref == NULL_RTX)
23467 {
23468 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23469 {
23470 tloc = XEXP (XEXP (arg, 0), 1);
23471 continue;
23472 }
23473 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23474 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23475 {
23476 tlocc = XEXP (XEXP (arg, 0), 1);
23477 continue;
23478 }
23479 }
23480 reg = NULL;
23481 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23482 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23483 VAR_INIT_STATUS_INITIALIZED);
23484 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23485 {
23486 rtx mem = XEXP (XEXP (arg, 0), 0);
23487 reg = mem_loc_descriptor (XEXP (mem, 0),
23488 get_address_mode (mem),
23489 GET_MODE (mem),
23490 VAR_INIT_STATUS_INITIALIZED);
23491 }
23492 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23493 == DEBUG_PARAMETER_REF)
23494 {
23495 tree tdecl
23496 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23497 tdie = lookup_decl_die (tdecl);
23498 if (tdie == NULL)
23499 continue;
23500 arg_decl = tdecl;
23501 }
23502 else
23503 continue;
23504 if (reg == NULL
23505 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23506 != DEBUG_PARAMETER_REF)
23507 continue;
23508 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23509 VOIDmode,
23510 VAR_INIT_STATUS_INITIALIZED);
23511 if (val == NULL)
23512 continue;
23513 if (die == NULL)
23514 die = gen_call_site_die (decl, subr_die, ca_loc);
23515 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23516 NULL_TREE);
23517 add_desc_attribute (cdie, arg_decl);
23518 if (reg != NULL)
23519 add_AT_loc (cdie, DW_AT_location, reg);
23520 else if (tdie != NULL)
23521 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23522 tdie);
23523 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23524 if (next_arg != XEXP (arg, 1))
23525 {
23526 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23527 if (mode == VOIDmode)
23528 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23529 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23530 0), 1),
23531 mode, VOIDmode,
23532 VAR_INIT_STATUS_INITIALIZED);
23533 if (val != NULL)
23534 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23535 val);
23536 }
23537 }
23538 if (die == NULL
23539 && (ca_loc->symbol_ref || tloc))
23540 die = gen_call_site_die (decl, subr_die, ca_loc);
23541 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23542 {
23543 dw_loc_descr_ref tval = NULL;
23544
23545 if (tloc != NULL_RTX)
23546 tval = mem_loc_descriptor (tloc,
23547 GET_MODE (tloc) == VOIDmode
23548 ? Pmode : GET_MODE (tloc),
23549 VOIDmode,
23550 VAR_INIT_STATUS_INITIALIZED);
23551 if (tval)
23552 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23553 else if (tlocc != NULL_RTX)
23554 {
23555 tval = mem_loc_descriptor (tlocc,
23556 GET_MODE (tlocc) == VOIDmode
23557 ? Pmode : GET_MODE (tlocc),
23558 VOIDmode,
23559 VAR_INIT_STATUS_INITIALIZED);
23560 if (tval)
23561 add_AT_loc (die,
23562 dwarf_AT (DW_AT_call_target_clobbered),
23563 tval);
23564 }
23565 }
23566 if (die != NULL)
23567 {
23568 call_site_note_count++;
23569 if (ca_loc->tail_call_p)
23570 tail_call_site_note_count++;
23571 }
23572 }
23573 }
23574 call_arg_locations = NULL;
23575 call_arg_loc_last = NULL;
23576 if (tail_call_site_count >= 0
23577 && tail_call_site_count == tail_call_site_note_count
23578 && (!dwarf_strict || dwarf_version >= 5))
23579 {
23580 if (call_site_count >= 0
23581 && call_site_count == call_site_note_count)
23582 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23583 else
23584 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23585 }
23586 call_site_count = -1;
23587 tail_call_site_count = -1;
23588 }
23589
23590 /* Mark used types after we have created DIEs for the function's scopes. */
23591 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23592 }
23593
23594 /* Returns a hash value for X (which really is a die_struct). */
23595
23596 hashval_t
23597 block_die_hasher::hash (die_struct *d)
23598 {
23599 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23600 }
23601
23602 /* Return nonzero if decl_id and die_parent of die_struct X are the same
23603 as decl_id and die_parent of die_struct Y. */
23604
23605 bool
23606 block_die_hasher::equal (die_struct *x, die_struct *y)
23607 {
23608 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23609 }
23610
23611 /* Hold information about markers for inlined entry points. */
23612 struct GTY ((for_user)) inline_entry_data
23613 {
23614 /* The block that's the inlined_function_outer_scope for an inlined
23615 function. */
23616 tree block;
23617
23618 /* The label at the inlined entry point. */
23619 const char *label_pfx;
23620 unsigned int label_num;
23621
23622 /* The view number to be used as the inlined entry point. */
23623 var_loc_view view;
23624 };
23625
23626 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23627 {
23628 typedef tree compare_type;
23629 static inline hashval_t hash (const inline_entry_data *);
23630 static inline bool equal (const inline_entry_data *, const_tree);
23631 };
23632
23633 /* Hash table routines for inline_entry_data. */
23634
23635 inline hashval_t
23636 inline_entry_data_hasher::hash (const inline_entry_data *data)
23637 {
23638 return htab_hash_pointer (data->block);
23639 }
23640
23641 inline bool
23642 inline_entry_data_hasher::equal (const inline_entry_data *data,
23643 const_tree block)
23644 {
23645 return data->block == block;
23646 }
23647
23648 /* Inlined entry points pending DIE creation in this compilation unit. */
23649
23650 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23651
23652
23653 /* Return TRUE if DECL, which may have been previously generated as
23654 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23655 true if decl (or its origin) is either an extern declaration or a
23656 class/namespace scoped declaration.
23657
23658 The declare_in_namespace support causes us to get two DIEs for one
23659 variable, both of which are declarations. We want to avoid
23660 considering one to be a specification, so we must test for
23661 DECLARATION and DW_AT_declaration. */
23662 static inline bool
23663 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23664 {
23665 return (old_die && TREE_STATIC (decl) && !declaration
23666 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23667 }
23668
23669 /* Return true if DECL is a local static. */
23670
23671 static inline bool
23672 local_function_static (tree decl)
23673 {
23674 gcc_assert (VAR_P (decl));
23675 return TREE_STATIC (decl)
23676 && DECL_CONTEXT (decl)
23677 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23678 }
23679
23680 /* Return true iff DECL overrides (presumably completes) the type of
23681 OLD_DIE within CONTEXT_DIE. */
23682
23683 static bool
23684 override_type_for_decl_p (tree decl, dw_die_ref old_die,
23685 dw_die_ref context_die)
23686 {
23687 tree type = TREE_TYPE (decl);
23688 int cv_quals;
23689
23690 if (decl_by_reference_p (decl))
23691 {
23692 type = TREE_TYPE (type);
23693 cv_quals = TYPE_UNQUALIFIED;
23694 }
23695 else
23696 cv_quals = decl_quals (decl);
23697
23698 dw_die_ref type_die = modified_type_die (type,
23699 cv_quals | TYPE_QUALS (type),
23700 false,
23701 context_die);
23702
23703 dw_die_ref old_type_die = get_AT_ref (old_die, DW_AT_type);
23704
23705 return type_die != old_type_die;
23706 }
23707
23708 /* Generate a DIE to represent a declared data object.
23709 Either DECL or ORIGIN must be non-null. */
23710
23711 static void
23712 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23713 {
23714 HOST_WIDE_INT off = 0;
23715 tree com_decl;
23716 tree decl_or_origin = decl ? decl : origin;
23717 tree ultimate_origin;
23718 dw_die_ref var_die;
23719 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23720 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23721 || class_or_namespace_scope_p (context_die));
23722 bool specialization_p = false;
23723 bool no_linkage_name = false;
23724
23725 /* While C++ inline static data members have definitions inside the
23726 class, force the first DIE to be a declaration, then let gen_member_die
23727 reparent it to the class context and call gen_variable_die again
23728 to create the out-of-class DIE for the definition. */
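/* E.g. for "struct S { static inline int x = 42; };" the first DIE
   created for x is a declaration that gen_member_die reparents into S's
   DIE; the out-of-class DIE for the definition is created by that
   second gen_variable_die call. */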
23729 if (!declaration
23730 && old_die == NULL
23731 && decl
23732 && DECL_CONTEXT (decl)
23733 && TYPE_P (DECL_CONTEXT (decl))
23734 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23735 {
23736 declaration = true;
23737 if (dwarf_version < 5)
23738 no_linkage_name = true;
23739 }
23740
23741 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23742 if (decl || ultimate_origin)
23743 origin = ultimate_origin;
23744 com_decl = fortran_common (decl_or_origin, &off);
23745
23746 /* A symbol in a Fortran common block gets emitted as a child of the common
23747 block, in the form of a data member. */
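/* For example, for Fortran code along the lines of

     integer i, j
     common /blk/ i, j

   "blk" would get a DW_TAG_common_block DIE with DW_TAG_variable
   children for "i" and "j", each located at its offset within the
   block. */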
23748 if (com_decl)
23749 {
23750 dw_die_ref com_die;
23751 dw_loc_list_ref loc = NULL;
23752 die_node com_die_arg;
23753
23754 var_die = lookup_decl_die (decl_or_origin);
23755 if (var_die)
23756 {
23757 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23758 {
23759 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23760 if (loc)
23761 {
23762 if (off)
23763 {
23764 /* Optimize the common case. */
23765 if (single_element_loc_list_p (loc)
23766 && loc->expr->dw_loc_opc == DW_OP_addr
23767 && loc->expr->dw_loc_next == NULL
23768 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23769 == SYMBOL_REF)
23770 {
23771 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23772 loc->expr->dw_loc_oprnd1.v.val_addr
23773 = plus_constant (GET_MODE (x), x , off);
23774 }
23775 else
23776 loc_list_plus_const (loc, off);
23777 }
23778 add_AT_location_description (var_die, DW_AT_location, loc);
23779 remove_AT (var_die, DW_AT_declaration);
23780 }
23781 }
23782 return;
23783 }
23784
23785 if (common_block_die_table == NULL)
23786 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23787
23788 com_die_arg.decl_id = DECL_UID (com_decl);
23789 com_die_arg.die_parent = context_die;
23790 com_die = common_block_die_table->find (&com_die_arg);
23791 if (! early_dwarf)
23792 loc = loc_list_from_tree (com_decl, 2, NULL);
23793 if (com_die == NULL)
23794 {
23795 const char *cnam
23796 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23797 die_node **slot;
23798
23799 com_die = new_die (DW_TAG_common_block, context_die, decl);
23800 add_name_and_src_coords_attributes (com_die, com_decl);
23801 if (loc)
23802 {
23803 add_AT_location_description (com_die, DW_AT_location, loc);
23804 /* Avoid sharing the same loc descriptor between
23805 DW_TAG_common_block and DW_TAG_variable. */
23806 loc = loc_list_from_tree (com_decl, 2, NULL);
23807 }
23808 else if (DECL_EXTERNAL (decl_or_origin))
23809 add_AT_flag (com_die, DW_AT_declaration, 1);
23810 if (want_pubnames ())
23811 add_pubname_string (cnam, com_die); /* ??? needed? */
23812 com_die->decl_id = DECL_UID (com_decl);
23813 slot = common_block_die_table->find_slot (com_die, INSERT);
23814 *slot = com_die;
23815 }
23816 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23817 {
23818 add_AT_location_description (com_die, DW_AT_location, loc);
23819 loc = loc_list_from_tree (com_decl, 2, NULL);
23820 remove_AT (com_die, DW_AT_declaration);
23821 }
23822 var_die = new_die (DW_TAG_variable, com_die, decl);
23823 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23824 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23825 decl_quals (decl_or_origin), false,
23826 context_die);
23827 add_alignment_attribute (var_die, decl);
23828 add_AT_flag (var_die, DW_AT_external, 1);
23829 if (loc)
23830 {
23831 if (off)
23832 {
23833 /* Optimize the common case. */
23834 if (single_element_loc_list_p (loc)
23835 && loc->expr->dw_loc_opc == DW_OP_addr
23836 && loc->expr->dw_loc_next == NULL
23837 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23838 {
23839 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23840 loc->expr->dw_loc_oprnd1.v.val_addr
23841 = plus_constant (GET_MODE (x), x, off);
23842 }
23843 else
23844 loc_list_plus_const (loc, off);
23845 }
23846 add_AT_location_description (var_die, DW_AT_location, loc);
23847 }
23848 else if (DECL_EXTERNAL (decl_or_origin))
23849 add_AT_flag (var_die, DW_AT_declaration, 1);
23850 if (decl)
23851 equate_decl_number_to_die (decl, var_die);
23852 return;
23853 }
23854
23855 if (old_die)
23856 {
23857 if (declaration)
23858 {
23859 /* A declaration that has been previously dumped needs no
23860 further annotations, since it doesn't need location info on
23861 the second pass. */
23862 return;
23863 }
23864 else if (decl_will_get_specification_p (old_die, decl, declaration)
23865 && !get_AT (old_die, DW_AT_specification))
23866 {
23867 /* Fall through so we can make a new variable DIE along with a
23868 DW_AT_specification. */
23869 }
23870 else if (origin && old_die->die_parent != context_die)
23871 {
23872 /* If we will be creating an inlined instance, we need a
23873 new DIE that will get annotated with
23874 DW_AT_abstract_origin. */
23875 gcc_assert (!DECL_ABSTRACT_P (decl));
23876 }
23877 else
23878 {
23879 /* If a DIE was dumped early, it still needs location info.
23880 Skip to where we fill the location bits. */
23881 var_die = old_die;
23882
23883 /* ??? In LTRANS we cannot annotate early created variably
23884 modified type DIEs without copying them and adjusting all
23885 references to them. Thus we dumped them again. Also add a
23886 reference to them, but beware of a -g0 compile and -g link,
23887 in which case the reference will already be present. */
23888 tree type = TREE_TYPE (decl_or_origin);
23889 if (in_lto_p
23890 && ! get_AT (var_die, DW_AT_type)
23891 && variably_modified_type_p
23892 (type, decl_function_context (decl_or_origin)))
23893 {
23894 if (decl_by_reference_p (decl_or_origin))
23895 add_type_attribute (var_die, TREE_TYPE (type),
23896 TYPE_UNQUALIFIED, false, context_die);
23897 else
23898 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23899 false, context_die);
23900 }
23901
23902 goto gen_variable_die_location;
23903 }
23904 }
23905
23906 /* For static data members, the declaration in the class is supposed
23907 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23908 also in DWARF2; the specification should still be DW_TAG_variable
23909 referencing the DW_TAG_member DIE. */
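/* E.g. for

     struct S { static int x; };
     int S::x;

   the in-class declaration of x becomes a DW_TAG_member (for DWARF < 5)
   and the definition a DW_TAG_variable whose DW_AT_specification points
   back at that member DIE. */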
23910 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23911 var_die = new_die (DW_TAG_member, context_die, decl);
23912 else
23913 var_die = new_die (DW_TAG_variable, context_die, decl);
23914
23915 if (origin != NULL)
23916 add_abstract_origin_attribute (var_die, origin);
23917
23918 /* Loop unrolling can create multiple blocks that refer to the same
23919 static variable, so we must test for the DW_AT_declaration flag.
23920
23921 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23922 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23923 sharing them.
23924
23925 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23926 else if (decl_will_get_specification_p (old_die, decl, declaration))
23927 {
23928 /* This is a definition of a C++ class level static. */
23929 add_AT_specification (var_die, old_die);
23930 specialization_p = true;
23931 if (DECL_NAME (decl))
23932 {
23933 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23934 struct dwarf_file_data * file_index = lookup_filename (s.file);
23935
23936 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23937 add_AT_file (var_die, DW_AT_decl_file, file_index);
23938
23939 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23940 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23941
23942 if (debug_column_info
23943 && s.column
23944 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23945 != (unsigned) s.column))
23946 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23947
23948 if (old_die->die_tag == DW_TAG_member)
23949 add_linkage_name (var_die, decl);
23950 }
23951 }
23952 else
23953 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23954
23955 if ((origin == NULL && !specialization_p)
23956 || (origin != NULL
23957 && !DECL_ABSTRACT_P (decl_or_origin)
23958 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23959 decl_function_context
23960 (decl_or_origin)))
23961 || (old_die && specialization_p
23962 && override_type_for_decl_p (decl_or_origin, old_die, context_die)))
23963 {
23964 tree type = TREE_TYPE (decl_or_origin);
23965
23966 if (decl_by_reference_p (decl_or_origin))
23967 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23968 context_die);
23969 else
23970 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23971 context_die);
23972 }
23973
23974 if (origin == NULL && !specialization_p)
23975 {
23976 if (TREE_PUBLIC (decl))
23977 add_AT_flag (var_die, DW_AT_external, 1);
23978
23979 if (DECL_ARTIFICIAL (decl))
23980 add_AT_flag (var_die, DW_AT_artificial, 1);
23981
23982 add_alignment_attribute (var_die, decl);
23983
23984 add_accessibility_attribute (var_die, decl);
23985 }
23986
23987 if (declaration)
23988 add_AT_flag (var_die, DW_AT_declaration, 1);
23989
23990 if (decl && (DECL_ABSTRACT_P (decl)
23991 || !old_die || is_declaration_die (old_die)))
23992 equate_decl_number_to_die (decl, var_die);
23993
23994 gen_variable_die_location:
23995 if (! declaration
23996 && (! DECL_ABSTRACT_P (decl_or_origin)
23997 /* Local static vars are shared between all clones/inlines,
23998 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23999 already set. */
24000 || (VAR_P (decl_or_origin)
24001 && TREE_STATIC (decl_or_origin)
24002 && DECL_RTL_SET_P (decl_or_origin))))
24003 {
24004 if (early_dwarf)
24005 add_pubname (decl_or_origin, var_die);
24006 else
24007 add_location_or_const_value_attribute (var_die, decl_or_origin,
24008 decl == NULL);
24009 }
24010 else
24011 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
24012
24013 if ((dwarf_version >= 4 || !dwarf_strict)
24014 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
24015 DW_AT_const_expr) == 1
24016 && !get_AT (var_die, DW_AT_const_expr)
24017 && !specialization_p)
24018 add_AT_flag (var_die, DW_AT_const_expr, 1);
24019
24020 if (!dwarf_strict)
24021 {
24022 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
24023 DW_AT_inline);
24024 if (inl != -1
24025 && !get_AT (var_die, DW_AT_inline)
24026 && !specialization_p)
24027 add_AT_unsigned (var_die, DW_AT_inline, inl);
24028 }
24029 }
24030
24031 /* Generate a DIE to represent a named constant. */
24032
24033 static void
24034 gen_const_die (tree decl, dw_die_ref context_die)
24035 {
24036 dw_die_ref const_die;
24037 tree type = TREE_TYPE (decl);
24038
24039 const_die = lookup_decl_die (decl);
24040 if (const_die)
24041 return;
24042
24043 const_die = new_die (DW_TAG_constant, context_die, decl);
24044 equate_decl_number_to_die (decl, const_die);
24045 add_name_and_src_coords_attributes (const_die, decl);
24046 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
24047 if (TREE_PUBLIC (decl))
24048 add_AT_flag (const_die, DW_AT_external, 1);
24049 if (DECL_ARTIFICIAL (decl))
24050 add_AT_flag (const_die, DW_AT_artificial, 1);
24051 tree_add_const_value_attribute_for_decl (const_die, decl);
24052 }
24053
24054 /* Generate a DIE to represent a label identifier. */
24055
24056 static void
24057 gen_label_die (tree decl, dw_die_ref context_die)
24058 {
24059 tree origin = decl_ultimate_origin (decl);
24060 dw_die_ref lbl_die = lookup_decl_die (decl);
24061 rtx insn;
24062 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24063
24064 if (!lbl_die)
24065 {
24066 lbl_die = new_die (DW_TAG_label, context_die, decl);
24067 equate_decl_number_to_die (decl, lbl_die);
24068
24069 if (origin != NULL)
24070 add_abstract_origin_attribute (lbl_die, origin);
24071 else
24072 add_name_and_src_coords_attributes (lbl_die, decl);
24073 }
24074
24075 if (DECL_ABSTRACT_P (decl))
24076 equate_decl_number_to_die (decl, lbl_die);
24077 else if (! early_dwarf)
24078 {
24079 insn = DECL_RTL_IF_SET (decl);
24080
24081 /* Deleted labels are programmer-specified labels which have been
24082 eliminated because of various optimizations. We still emit them
24083 here so that it is possible to put breakpoints on them. */
24084 if (insn
24085 && (LABEL_P (insn)
24086 || ((NOTE_P (insn)
24087 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
24088 {
24089 /* When optimization is enabled (via -O) some parts of the compiler
24090 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
24091 represent source-level labels which were explicitly declared by
24092 the user. This really shouldn't be happening though, so catch
24093 it if it ever does happen. */
24094 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
24095
24096 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
24097 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24098 }
24099 else if (insn
24100 && NOTE_P (insn)
24101 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
24102 && CODE_LABEL_NUMBER (insn) != -1)
24103 {
24104 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24105 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24106 }
24107 }
24108 }
24109
24110 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24111 attributes to the DIE for a block STMT, to describe where the inlined
24112 function was called from. This is similar to add_src_coords_attributes. */
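/* For example, if bar is inlined into foo at foo.c:42:7, the
   DW_TAG_inlined_subroutine DIE for that inline instance would get
   roughly DW_AT_call_file (the file table entry for "foo.c"),
   DW_AT_call_line 42 and, with -gcolumn-info, DW_AT_call_column 7,
   letting consumers report "inlined from foo.c:42:7".  */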
24113
24114 static inline void
24115 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24116 {
24117 /* We can end up with BUILTINS_LOCATION here. */
24118 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24119 return;
24120
24121 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24122
24123 if (dwarf_version >= 3 || !dwarf_strict)
24124 {
24125 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24126 add_AT_unsigned (die, DW_AT_call_line, s.line);
24127 if (debug_column_info && s.column)
24128 add_AT_unsigned (die, DW_AT_call_column, s.column);
24129 }
24130 }
24131
24132
24133 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24134 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24135
24136 static inline void
24137 add_high_low_attributes (tree stmt, dw_die_ref die)
24138 {
24139 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24140
24141 if (inline_entry_data **iedp
24142 = !inline_entry_data_table ? NULL
24143 : inline_entry_data_table->find_slot_with_hash (stmt,
24144 htab_hash_pointer (stmt),
24145 NO_INSERT))
24146 {
24147 inline_entry_data *ied = *iedp;
24148 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24149 gcc_assert (debug_inline_points);
24150 gcc_assert (inlined_function_outer_scope_p (stmt));
24151
24152 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24153 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24154
24155 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24156 && !dwarf_strict)
24157 {
24158 if (!output_asm_line_debug_info ())
24159 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24160 else
24161 {
24162 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24163 /* FIXME: this will resolve to a small number. Could we
24164 possibly emit smaller data? Ideally we'd emit a
24165 uleb128, but that would make the size of DIEs
24166 impossible for the compiler to compute, since it's
24167 the assembler that computes the value of the view
24168 label in this case. Ideally, we'd have a single form
24169 encompassing both the address and the view, and
24170 indirecting them through a table might make things
24171 easier, but even that would be more wasteful,
24172 space-wise, than what we have now. */
24173 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24174 }
24175 }
24176
24177 inline_entry_data_table->clear_slot (iedp);
24178 }
24179
24180 if (BLOCK_FRAGMENT_CHAIN (stmt)
24181 && (dwarf_version >= 3 || !dwarf_strict))
24182 {
24183 tree chain, superblock = NULL_TREE;
24184 dw_die_ref pdie;
24185 dw_attr_node *attr = NULL;
24186
24187 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24188 {
24189 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24190 BLOCK_NUMBER (stmt));
24191 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24192 }
24193
24194 /* Optimize duplicate .debug_ranges lists or even tails of
24195 lists.  If this BLOCK has the same ranges as its supercontext,
24196 look up the DW_AT_ranges attribute in the supercontext (and
24197 recursively so), verify that the ranges_table contains the
24198 right values and use it instead of adding a new .debug_ranges entry. */
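/* For instance, when this BLOCK and its supercontext cover the same
   fragments, the BLOCK's DW_AT_ranges can point into the tail of the
   supercontext's already-emitted list instead of emitting a fresh,
   identical list.  This is only a sketch of the intent; the checking
   asserts below verify that the shared entries really match before the
   offset is reused.  */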
24199 for (chain = stmt, pdie = die;
24200 BLOCK_SAME_RANGE (chain);
24201 chain = BLOCK_SUPERCONTEXT (chain))
24202 {
24203 dw_attr_node *new_attr;
24204
24205 pdie = pdie->die_parent;
24206 if (pdie == NULL)
24207 break;
24208 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24209 break;
24210 new_attr = get_AT (pdie, DW_AT_ranges);
24211 if (new_attr == NULL
24212 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24213 break;
24214 attr = new_attr;
24215 superblock = BLOCK_SUPERCONTEXT (chain);
24216 }
24217 if (attr != NULL
24218 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24219 == (int)BLOCK_NUMBER (superblock))
24220 && BLOCK_FRAGMENT_CHAIN (superblock))
24221 {
24222 unsigned long off = attr->dw_attr_val.v.val_offset;
24223 unsigned long supercnt = 0, thiscnt = 0;
24224 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24225 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24226 {
24227 ++supercnt;
24228 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24229 == (int)BLOCK_NUMBER (chain));
24230 }
24231 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24232 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24233 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24234 ++thiscnt;
24235 gcc_assert (supercnt >= thiscnt);
24236 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24237 false);
24238 note_rnglist_head (off + supercnt - thiscnt);
24239 return;
24240 }
24241
24242 unsigned int offset = add_ranges (stmt, true);
24243 add_AT_range_list (die, DW_AT_ranges, offset, false);
24244 note_rnglist_head (offset);
24245
24246 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24247 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24248 do
24249 {
24250 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24251 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24252 chain = BLOCK_FRAGMENT_CHAIN (chain);
24253 }
24254 while (chain);
24255 add_ranges (NULL);
24256 }
24257 else
24258 {
24259 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24260 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24261 BLOCK_NUMBER (stmt));
24262 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24263 BLOCK_NUMBER (stmt));
24264 add_AT_low_high_pc (die, label, label_high, false);
24265 }
24266 }
24267
24268 /* Generate a DIE for a lexical block. */
24269
24270 static void
24271 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24272 {
24273 dw_die_ref old_die = lookup_block_die (stmt);
24274 dw_die_ref stmt_die = NULL;
24275 if (!old_die)
24276 {
24277 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24278 equate_block_to_die (stmt, stmt_die);
24279 }
24280
24281 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24282 {
24283 /* If this is an inlined or concrete instance, create a new lexical
24284 DIE for anything below to attach DW_AT_abstract_origin to.  */
24285 if (old_die)
24286 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24287
24288 tree origin = block_ultimate_origin (stmt);
24289 if (origin != NULL_TREE && (origin != stmt || old_die))
24290 add_abstract_origin_attribute (stmt_die, origin);
24291
24292 old_die = NULL;
24293 }
24294
24295 if (old_die)
24296 stmt_die = old_die;
24297
24298 /* A non-abstract block whose blocks have already been reordered
24299 should have the instruction range for this block. If so, set the
24300 high/low attributes. */
24301 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24302 {
24303 gcc_assert (stmt_die);
24304 add_high_low_attributes (stmt, stmt_die);
24305 }
24306
24307 decls_for_scope (stmt, stmt_die);
24308 }
24309
24310 /* Generate a DIE for an inlined subprogram. */
24311
24312 static void
24313 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24314 {
24315 tree decl = block_ultimate_origin (stmt);
24316
24317 /* Make sure any inlined functions are known to be inlineable. */
24318 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24319 || cgraph_function_possibly_inlined_p (decl));
24320
24321 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24322
24323 if (call_arg_locations || debug_inline_points)
24324 equate_block_to_die (stmt, subr_die);
24325 add_abstract_origin_attribute (subr_die, decl);
24326 if (TREE_ASM_WRITTEN (stmt))
24327 add_high_low_attributes (stmt, subr_die);
24328 add_call_src_coords_attributes (stmt, subr_die);
24329
24330 /* The inliner creates an extra BLOCK for the parameter setup;
24331 we want to merge that with the actual outermost BLOCK of the
24332 inlined function to avoid duplicate locals in consumers.
24333 Do that by performing the subblock recursion on the single
24334 subblock of STMT instead. */
24335 bool unwrap_one = false;
24336 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24337 {
24338 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24339 if (origin
24340 && TREE_CODE (origin) == BLOCK
24341 && BLOCK_SUPERCONTEXT (origin) == decl)
24342 unwrap_one = true;
24343 }
24344 decls_for_scope (stmt, subr_die, !unwrap_one);
24345 if (unwrap_one)
24346 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24347 }
24348
24349 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24350 the comment for VLR_CONTEXT. */
24351
24352 static void
24353 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24354 {
24355 dw_die_ref decl_die;
24356
24357 if (TREE_TYPE (decl) == error_mark_node)
24358 return;
24359
24360 decl_die = new_die (DW_TAG_member, context_die, decl);
24361 add_name_and_src_coords_attributes (decl_die, decl);
24362 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24363 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24364 context_die);
24365
24366 if (DECL_BIT_FIELD_TYPE (decl))
24367 {
24368 add_byte_size_attribute (decl_die, decl);
24369 add_bit_size_attribute (decl_die, decl);
24370 add_bit_offset_attribute (decl_die, decl);
24371 }
24372
24373 add_alignment_attribute (decl_die, decl);
24374
24375 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24376 add_data_member_location_attribute (decl_die, decl, ctx);
24377
24378 if (DECL_ARTIFICIAL (decl))
24379 add_AT_flag (decl_die, DW_AT_artificial, 1);
24380
24381 add_accessibility_attribute (decl_die, decl);
24382
24383 /* Equate decl number to die, so that we can look up this decl later on. */
24384 equate_decl_number_to_die (decl, decl_die);
24385 }
24386
24387 /* Generate a DIE for a pointer to a member type. TYPE can be an
24388 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24389 pointer to member function. */
24390
24391 static void
24392 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24393 {
24394 if (lookup_type_die (type))
24395 return;
24396
24397 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24398 scope_die_for (type, context_die), type);
24399
24400 equate_type_number_to_die (type, ptr_die);
24401 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24402 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24403 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24404 context_die);
24405 add_alignment_attribute (ptr_die, type);
24406
24407 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24408 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24409 {
24410 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24411 add_AT_loc (ptr_die, DW_AT_use_location, op);
24412 }
24413 }
24414
24415 static char *producer_string;
24416
24417 /* Return a heap-allocated producer string, including the command-line
24418 options when -grecord-gcc-switches is enabled. */
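/* For illustration only: for a C translation unit compiled with
   -grecord-gcc-switches, the resulting string might look roughly like
   "GNU C17 10.2.0 -march=x86-64 -g -O2"; the language and version come
   first, and options such as -o, -quiet, -I or -D are filtered out
   below.  */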
24419
24420 static char *
24421 gen_producer_string (void)
24422 {
24423 size_t j;
24424 auto_vec<const char *> switches;
24425 const char *language_string = lang_hooks.name;
24426 char *producer, *tail;
24427 const char *p;
24428 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24429 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24430
24431 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24432 switch (save_decoded_options[j].opt_index)
24433 {
24434 case OPT_o:
24435 case OPT_d:
24436 case OPT_dumpbase:
24437 case OPT_dumpbase_ext:
24438 case OPT_dumpdir:
24439 case OPT_quiet:
24440 case OPT_version:
24441 case OPT_v:
24442 case OPT_w:
24443 case OPT_L:
24444 case OPT_D:
24445 case OPT_I:
24446 case OPT_U:
24447 case OPT_SPECIAL_unknown:
24448 case OPT_SPECIAL_ignore:
24449 case OPT_SPECIAL_warn_removed:
24450 case OPT_SPECIAL_program_name:
24451 case OPT_SPECIAL_input_file:
24452 case OPT_grecord_gcc_switches:
24453 case OPT__output_pch_:
24454 case OPT_fdiagnostics_show_location_:
24455 case OPT_fdiagnostics_show_option:
24456 case OPT_fdiagnostics_show_caret:
24457 case OPT_fdiagnostics_show_labels:
24458 case OPT_fdiagnostics_show_line_numbers:
24459 case OPT_fdiagnostics_color_:
24460 case OPT_fdiagnostics_format_:
24461 case OPT_fverbose_asm:
24462 case OPT____:
24463 case OPT__sysroot_:
24464 case OPT_nostdinc:
24465 case OPT_nostdinc__:
24466 case OPT_fpreprocessed:
24467 case OPT_fltrans_output_list_:
24468 case OPT_fresolution_:
24469 case OPT_fdebug_prefix_map_:
24470 case OPT_fmacro_prefix_map_:
24471 case OPT_ffile_prefix_map_:
24472 case OPT_fcompare_debug:
24473 case OPT_fchecking:
24474 case OPT_fchecking_:
24475 /* Ignore these. */
24476 continue;
24477 case OPT_flto_:
24478 {
24479 const char *lto_canonical = "-flto";
24480 switches.safe_push (lto_canonical);
24481 len += strlen (lto_canonical) + 1;
24482 break;
24483 }
24484 default:
24485 if (cl_options[save_decoded_options[j].opt_index].flags
24486 & CL_NO_DWARF_RECORD)
24487 continue;
24488 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24489 == '-');
24490 switch (save_decoded_options[j].canonical_option[0][1])
24491 {
24492 case 'M':
24493 case 'i':
24494 case 'W':
24495 continue;
24496 case 'f':
24497 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24498 "dump", 4) == 0)
24499 continue;
24500 break;
24501 default:
24502 break;
24503 }
24504 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24505 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24506 break;
24507 }
24508
24509 producer = XNEWVEC (char, plen + 1 + len + 1);
24510 tail = producer;
24511 sprintf (tail, "%s %s", language_string, version_string);
24512 tail += plen;
24513
24514 FOR_EACH_VEC_ELT (switches, j, p)
24515 {
24516 len = strlen (p);
24517 *tail = ' ';
24518 memcpy (tail + 1, p, len);
24519 tail += len + 1;
24520 }
24521
24522 *tail = '\0';
24523 return producer;
24524 }
24525
24526 /* Given two C and/or C++ language/version strings, return the "highest"
24527 one.  C++ is assumed to be "higher" than C in this case.  Used for
24528 merging LTO translation unit languages. */
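/* For instance, highest_c_language ("GNU C17", "GNU C++14") returns
   "GNU C++14", and highest_c_language ("GNU C99", "GNU C11") returns
   "GNU C11".  */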
24529 static const char *
24530 highest_c_language (const char *lang1, const char *lang2)
24531 {
24532 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24533 return "GNU C++17";
24534 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24535 return "GNU C++14";
24536 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24537 return "GNU C++11";
24538 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24539 return "GNU C++98";
24540
24541 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24542 return "GNU C2X";
24543 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24544 return "GNU C17";
24545 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24546 return "GNU C11";
24547 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24548 return "GNU C99";
24549 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24550 return "GNU C89";
24551
24552 gcc_unreachable ();
24553 }
24554
24555
24556 /* Generate the DIE for the compilation unit. */
24557
24558 static dw_die_ref
24559 gen_compile_unit_die (const char *filename)
24560 {
24561 dw_die_ref die;
24562 const char *language_string = lang_hooks.name;
24563 int language;
24564
24565 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24566
24567 if (filename)
24568 {
24569 add_filename_attribute (die, filename);
24570 /* Don't add cwd for <built-in>. */
24571 if (filename[0] != '<')
24572 add_comp_dir_attribute (die);
24573 }
24574
24575 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24576
24577 /* If our producer is LTO, try to figure out a common language to use
24578 from the global list of translation units. */
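/* E.g. an LTO link of a "GNU C11" unit and a "GNU C++14" unit is
   treated as "GNU C++14", while mixing in a unit from a non-C-family
   front end drops the guess and falls back to plain DW_LANG_C.  */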
24579 if (strcmp (language_string, "GNU GIMPLE") == 0)
24580 {
24581 unsigned i;
24582 tree t;
24583 const char *common_lang = NULL;
24584
24585 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24586 {
24587 if (!TRANSLATION_UNIT_LANGUAGE (t))
24588 continue;
24589 if (!common_lang)
24590 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24591 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24592 ;
24593 else if (strncmp (common_lang, "GNU C", 5) == 0
24594 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24595 /* Mixing C and C++ is ok, use C++ in that case. */
24596 common_lang = highest_c_language (common_lang,
24597 TRANSLATION_UNIT_LANGUAGE (t));
24598 else
24599 {
24600 /* Fall back to C. */
24601 common_lang = NULL;
24602 break;
24603 }
24604 }
24605
24606 if (common_lang)
24607 language_string = common_lang;
24608 }
24609
24610 language = DW_LANG_C;
24611 if (strncmp (language_string, "GNU C", 5) == 0
24612 && ISDIGIT (language_string[5]))
24613 {
24614 language = DW_LANG_C89;
24615 if (dwarf_version >= 3 || !dwarf_strict)
24616 {
24617 if (strcmp (language_string, "GNU C89") != 0)
24618 language = DW_LANG_C99;
24619
24620 if (dwarf_version >= 5 /* || !dwarf_strict */)
24621 if (strcmp (language_string, "GNU C11") == 0
24622 || strcmp (language_string, "GNU C17") == 0
24623 || strcmp (language_string, "GNU C2X") == 0)
24624 language = DW_LANG_C11;
24625 }
24626 }
24627 else if (strncmp (language_string, "GNU C++", 7) == 0)
24628 {
24629 language = DW_LANG_C_plus_plus;
24630 if (dwarf_version >= 5 /* || !dwarf_strict */)
24631 {
24632 if (strcmp (language_string, "GNU C++11") == 0)
24633 language = DW_LANG_C_plus_plus_11;
24634 else if (strcmp (language_string, "GNU C++14") == 0)
24635 language = DW_LANG_C_plus_plus_14;
24636 else if (strcmp (language_string, "GNU C++17") == 0)
24637 /* For now. */
24638 language = DW_LANG_C_plus_plus_14;
24639 }
24640 }
24641 else if (strcmp (language_string, "GNU F77") == 0)
24642 language = DW_LANG_Fortran77;
24643 else if (dwarf_version >= 3 || !dwarf_strict)
24644 {
24645 if (strcmp (language_string, "GNU Ada") == 0)
24646 language = DW_LANG_Ada95;
24647 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24648 {
24649 language = DW_LANG_Fortran95;
24650 if (dwarf_version >= 5 /* || !dwarf_strict */)
24651 {
24652 if (strcmp (language_string, "GNU Fortran2003") == 0)
24653 language = DW_LANG_Fortran03;
24654 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24655 language = DW_LANG_Fortran08;
24656 }
24657 }
24658 else if (strcmp (language_string, "GNU Objective-C") == 0)
24659 language = DW_LANG_ObjC;
24660 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24661 language = DW_LANG_ObjC_plus_plus;
24662 else if (strcmp (language_string, "GNU D") == 0)
24663 language = DW_LANG_D;
24664 else if (dwarf_version >= 5 || !dwarf_strict)
24665 {
24666 if (strcmp (language_string, "GNU Go") == 0)
24667 language = DW_LANG_Go;
24668 }
24669 }
24670 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24671 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24672 language = DW_LANG_Fortran90;
24673 /* Likewise for Ada. */
24674 else if (strcmp (language_string, "GNU Ada") == 0)
24675 language = DW_LANG_Ada83;
24676
24677 add_AT_unsigned (die, DW_AT_language, language);
24678
24679 switch (language)
24680 {
24681 case DW_LANG_Fortran77:
24682 case DW_LANG_Fortran90:
24683 case DW_LANG_Fortran95:
24684 case DW_LANG_Fortran03:
24685 case DW_LANG_Fortran08:
24686 /* Fortran has case insensitive identifiers and the front-end
24687 lowercases everything. */
24688 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24689 break;
24690 default:
24691 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24692 break;
24693 }
24694 return die;
24695 }
24696
24697 /* Generate the DIE for a base class. */
24698
24699 static void
24700 gen_inheritance_die (tree binfo, tree access, tree type,
24701 dw_die_ref context_die)
24702 {
24703 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24704 struct vlr_context ctx = { type, NULL };
24705
24706 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24707 context_die);
24708 add_data_member_location_attribute (die, binfo, &ctx);
24709
24710 if (BINFO_VIRTUAL_P (binfo))
24711 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24712
24713 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24714 children; otherwise the default is DW_ACCESS_public.  In DWARF2
24715 the default has always been DW_ACCESS_private. */
24716 if (access == access_public_node)
24717 {
24718 if (dwarf_version == 2
24719 || context_die->die_tag == DW_TAG_class_type)
24720 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24721 }
24722 else if (access == access_protected_node)
24723 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24724 else if (dwarf_version > 2
24725 && context_die->die_tag != DW_TAG_class_type)
24726 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24727 }
24728
24729 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24730 structure. */
24731
24732 static bool
24733 is_variant_part (tree decl)
24734 {
24735 return (TREE_CODE (decl) == FIELD_DECL
24736 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24737 }
24738
24739 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24740 return the FIELD_DECL. Return NULL_TREE otherwise. */
24741
24742 static tree
24743 analyze_discr_in_predicate (tree operand, tree struct_type)
24744 {
24745 while (CONVERT_EXPR_P (operand))
24746 operand = TREE_OPERAND (operand, 0);
24747
24748 /* Match field access to members of struct_type only. */
24749 if (TREE_CODE (operand) == COMPONENT_REF
24750 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24751 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24752 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24753 return TREE_OPERAND (operand, 1);
24754 else
24755 return NULL_TREE;
24756 }
24757
24758 /* Check that SRC is a constant integer that can be represented as a native
24759 integer constant (either signed or unsigned). If so, store it into DEST and
24760 return true. Return false otherwise. */
24761
24762 static bool
24763 get_discr_value (tree src, dw_discr_value *dest)
24764 {
24765 tree discr_type = TREE_TYPE (src);
24766
24767 if (lang_hooks.types.get_debug_type)
24768 {
24769 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24770 if (debug_type != NULL)
24771 discr_type = debug_type;
24772 }
24773
24774 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24775 return false;
24776
24777 /* Signedness can vary between the original type and the debug type. This
24778 can happen for character types in Ada for instance: the character type
24779 used for code generation can be signed, to be compatible with the C one,
24780 but from a debugger point of view, it must be unsigned. */
24781 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24782 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24783
24784 if (is_orig_unsigned != is_debug_unsigned)
24785 src = fold_convert (discr_type, src);
24786
24787 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24788 return false;
24789
24790 dest->pos = is_debug_unsigned;
24791 if (is_debug_unsigned)
24792 dest->v.uval = tree_to_uhwi (src);
24793 else
24794 dest->v.sval = tree_to_shwi (src);
24795
24796 return true;
24797 }
24798
24799 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24800 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24801 store NULL_TREE in DISCR_DECL. Otherwise:
24802
24803 - store the discriminant field in STRUCT_TYPE that controls the variant
24804 part to *DISCR_DECL
24805
24806 - put in *DISCR_LISTS_P an array where for each variant, the item
24807 represents the corresponding matching list of discriminant values.
24808
24809 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24810 the above array.
24811
24812 Note that when the array is allocated (i.e. when the analysis is
24813 successful), it is up to the caller to free the array. */
24814
24815 static void
24816 analyze_variants_discr (tree variant_part_decl,
24817 tree struct_type,
24818 tree *discr_decl,
24819 dw_discr_list_ref **discr_lists_p,
24820 unsigned *discr_lists_length)
24821 {
24822 tree variant_part_type = TREE_TYPE (variant_part_decl);
24823 tree variant;
24824 dw_discr_list_ref *discr_lists;
24825 unsigned i;
24826
24827 /* Compute how many variants there are in this variant part. */
24828 *discr_lists_length = 0;
24829 for (variant = TYPE_FIELDS (variant_part_type);
24830 variant != NULL_TREE;
24831 variant = DECL_CHAIN (variant))
24832 ++*discr_lists_length;
24833
24834 *discr_decl = NULL_TREE;
24835 *discr_lists_p
24836 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24837 sizeof (**discr_lists_p));
24838 discr_lists = *discr_lists_p;
24839
24840 /* And then analyze all variants to extract discriminant information for all
24841 of them. This analysis is conservative: as soon as we detect something we
24842 do not support, abort everything and pretend we found nothing. */
24843 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24844 variant != NULL_TREE;
24845 variant = DECL_CHAIN (variant), ++i)
24846 {
24847 tree match_expr = DECL_QUALIFIER (variant);
24848
24849 /* Now, try to analyze the predicate and deduce a discriminant for
24850 it. */
24851 if (match_expr == boolean_true_node)
24852 /* Typically happens for the default variant: it matches all cases that
24853 previous variants rejected. Don't output any matching value for
24854 this one. */
24855 continue;
24856
24857 /* The following loop tries to iterate over each discriminant
24858 possibility: single values or ranges. */
24859 while (match_expr != NULL_TREE)
24860 {
24861 tree next_round_match_expr;
24862 tree candidate_discr = NULL_TREE;
24863 dw_discr_list_ref new_node = NULL;
24864
24865 /* Possibilities are matched one after the other by nested
24866 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24867 continue with the rest at next iteration. */
24868 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24869 {
24870 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24871 match_expr = TREE_OPERAND (match_expr, 1);
24872 }
24873 else
24874 next_round_match_expr = NULL_TREE;
24875
24876 if (match_expr == boolean_false_node)
24877 /* This sub-expression matches nothing: just wait for the next
24878 one. */
24879 ;
24880
24881 else if (TREE_CODE (match_expr) == EQ_EXPR)
24882 {
24883 /* We are matching: <discr_field> == <integer_cst>
24884 This sub-expression matches a single value. */
24885 tree integer_cst = TREE_OPERAND (match_expr, 1);
24886
24887 candidate_discr
24888 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24889 struct_type);
24890
24891 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24892 if (!get_discr_value (integer_cst,
24893 &new_node->dw_discr_lower_bound))
24894 goto abort;
24895 new_node->dw_discr_range = false;
24896 }
24897
24898 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24899 {
24900 /* We are matching:
24901 <discr_field> > <integer_cst>
24902 && <discr_field> < <integer_cst>.
24903 This sub-expression matches the range of values between the
24904 two matched integer constants. Note that comparisons can be
24905 inclusive or exclusive. */
24906 tree candidate_discr_1, candidate_discr_2;
24907 tree lower_cst, upper_cst;
24908 bool lower_cst_included, upper_cst_included;
24909 tree lower_op = TREE_OPERAND (match_expr, 0);
24910 tree upper_op = TREE_OPERAND (match_expr, 1);
24911
24912 /* When the comparison is exclusive, the integer constant is not
24913 the discriminant range bound we are looking for: we will have
24914 to increment or decrement it. */
24915 if (TREE_CODE (lower_op) == GE_EXPR)
24916 lower_cst_included = true;
24917 else if (TREE_CODE (lower_op) == GT_EXPR)
24918 lower_cst_included = false;
24919 else
24920 goto abort;
24921
24922 if (TREE_CODE (upper_op) == LE_EXPR)
24923 upper_cst_included = true;
24924 else if (TREE_CODE (upper_op) == LT_EXPR)
24925 upper_cst_included = false;
24926 else
24927 goto abort;
24928
24929 /* Extract the discriminant from the first operand and check it
24930 is consistent with the same analysis in the second
24931 operand. */
24932 candidate_discr_1
24933 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24934 struct_type);
24935 candidate_discr_2
24936 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24937 struct_type);
24938 if (candidate_discr_1 == candidate_discr_2)
24939 candidate_discr = candidate_discr_1;
24940 else
24941 goto abort;
24942
24943 /* Extract bounds from both. */
24944 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24945 lower_cst = TREE_OPERAND (lower_op, 1);
24946 upper_cst = TREE_OPERAND (upper_op, 1);
24947
24948 if (!lower_cst_included)
24949 lower_cst
24950 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24951 build_int_cst (TREE_TYPE (lower_cst), 1));
24952 if (!upper_cst_included)
24953 upper_cst
24954 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24955 build_int_cst (TREE_TYPE (upper_cst), 1));
24956
24957 if (!get_discr_value (lower_cst,
24958 &new_node->dw_discr_lower_bound)
24959 || !get_discr_value (upper_cst,
24960 &new_node->dw_discr_upper_bound))
24961 goto abort;
24962
24963 new_node->dw_discr_range = true;
24964 }
24965
24966 else if ((candidate_discr
24967 = analyze_discr_in_predicate (match_expr, struct_type))
24968 && (TREE_TYPE (candidate_discr) == boolean_type_node
24969 || TREE_TYPE (TREE_TYPE (candidate_discr))
24970 == boolean_type_node))
24971 {
24972 /* We are matching: <discr_field> for a boolean discriminant.
24973 This sub-expression matches boolean_true_node. */
24974 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24975 if (!get_discr_value (boolean_true_node,
24976 &new_node->dw_discr_lower_bound))
24977 goto abort;
24978 new_node->dw_discr_range = false;
24979 }
24980
24981 else
24982 /* Unsupported sub-expression: we cannot determine the set of
24983 matching discriminant values. Abort everything. */
24984 goto abort;
24985
24986 /* If the discriminant info is not consistent with what we saw so
24987 far, consider the analysis failed and abort everything. */
24988 if (candidate_discr == NULL_TREE
24989 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24990 goto abort;
24991 else
24992 *discr_decl = candidate_discr;
24993
24994 if (new_node != NULL)
24995 {
24996 new_node->dw_discr_next = discr_lists[i];
24997 discr_lists[i] = new_node;
24998 }
24999 match_expr = next_round_match_expr;
25000 }
25001 }
25002
25003 /* If we reach this point, we could match everything we were interested
25004 in. */
25005 return;
25006
25007 abort:
25008 /* Clean up all data structures and return no result. */
25009 free (*discr_lists_p);
25010 *discr_lists_p = NULL;
25011 *discr_decl = NULL_TREE;
25012 }
25013
25014 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
25015 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
25016 under CONTEXT_DIE.
25017
25018 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
25019 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
25020 this type, which are record types, represent the available variants and each
25021 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
25022 values are inferred from these attributes.
25023
25024 In trees, the offsets for the fields inside these sub-records are relative
25025 to the variant part itself, whereas the corresponding DIEs should have
25026 offset attributes that are relative to the embedding record base address.
25027 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
25028 must be an expression that computes the offset of the variant part to
25029 describe in DWARF. */
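/* Roughly, the DWARF emitted for such a variant part looks like:

     DW_TAG_variant_part
       DW_AT_discr           <reference to the discriminant's DIE>
       DW_TAG_variant
         DW_AT_discr_value   <value>   (or DW_AT_discr_list)
         DW_TAG_member       ...       <fields of this variant>
       DW_TAG_variant
         DW_TAG_member       ...       <default variant: no discr
                                        value/list attribute>

   This sketch leaves out the DWARF procedure that may be generated
   below for non-constant variant part offsets.  */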
25030
25031 static void
25032 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
25033 dw_die_ref context_die)
25034 {
25035 const tree variant_part_type = TREE_TYPE (variant_part_decl);
25036 tree variant_part_offset = vlr_ctx->variant_part_offset;
25037 struct loc_descr_context ctx = {
25038 vlr_ctx->struct_type, /* context_type */
25039 NULL_TREE, /* base_decl */
25040 NULL, /* dpi */
25041 false, /* placeholder_arg */
25042 false /* placeholder_seen */
25043 };
25044
25045 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
25046 NULL_TREE if there is no such field. */
25047 tree discr_decl = NULL_TREE;
25048 dw_discr_list_ref *discr_lists;
25049 unsigned discr_lists_length = 0;
25050 unsigned i;
25051
25052 dw_die_ref dwarf_proc_die = NULL;
25053 dw_die_ref variant_part_die
25054 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
25055
25056 equate_decl_number_to_die (variant_part_decl, variant_part_die);
25057
25058 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
25059 &discr_decl, &discr_lists, &discr_lists_length);
25060
25061 if (discr_decl != NULL_TREE)
25062 {
25063 dw_die_ref discr_die = lookup_decl_die (discr_decl);
25064
25065 if (discr_die)
25066 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
25067 else
25068 /* We have no DIE for the discriminant, so just discard all
25069 discriminant information in the output. */
25070 discr_decl = NULL_TREE;
25071 }
25072
25073 /* If the offset for this variant part is more complex than a constant,
25074 create a DWARF procedure for it so that we will not have to generate DWARF
25075 expressions for it for each member. */
25076 if (TREE_CODE (variant_part_offset) != INTEGER_CST
25077 && (dwarf_version >= 3 || !dwarf_strict))
25078 {
25079 const tree dwarf_proc_fndecl
25080 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
25081 build_function_type (TREE_TYPE (variant_part_offset),
25082 NULL_TREE));
25083 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
25084 const dw_loc_descr_ref dwarf_proc_body
25085 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
25086
25087 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
25088 dwarf_proc_fndecl, context_die);
25089 if (dwarf_proc_die != NULL)
25090 variant_part_offset = dwarf_proc_call;
25091 }
25092
25093 /* Output DIEs for all variants. */
25094 i = 0;
25095 for (tree variant = TYPE_FIELDS (variant_part_type);
25096 variant != NULL_TREE;
25097 variant = DECL_CHAIN (variant), ++i)
25098 {
25099 tree variant_type = TREE_TYPE (variant);
25100 dw_die_ref variant_die;
25101
25102 /* All variants (i.e. members of a variant part) are supposed to be
25103 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25104 under these records. */
25105 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25106
25107 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25108 equate_decl_number_to_die (variant, variant_die);
25109
25110 /* Output discriminant values this variant matches, if any. */
25111 if (discr_decl == NULL || discr_lists[i] == NULL)
25112 /* If we have no discriminant information at all, or no matching
25113 values for this variant, it is probably the default variant: as the
25114 standard says, don't output any discriminant value/list attribute. */
25115 ;
25116 else if (discr_lists[i]->dw_discr_next == NULL
25117 && !discr_lists[i]->dw_discr_range)
25118 /* If there is only one accepted value, don't bother outputting a
25119 list. */
25120 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25121 else
25122 add_discr_list (variant_die, discr_lists[i]);
25123
25124 for (tree member = TYPE_FIELDS (variant_type);
25125 member != NULL_TREE;
25126 member = DECL_CHAIN (member))
25127 {
25128 struct vlr_context vlr_sub_ctx = {
25129 vlr_ctx->struct_type, /* struct_type */
25130 NULL /* variant_part_offset */
25131 };
25132 if (is_variant_part (member))
25133 {
25134 /* All offsets for fields inside variant parts are relative to
25135 the top-level embedding RECORD_TYPE's base address. On the
25136 other hand, offsets in GCC's types are relative to the
25137 nested-most variant part. So we have to sum offsets each time
25138 we recurse. */
25139
25140 vlr_sub_ctx.variant_part_offset
25141 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25142 variant_part_offset, byte_position (member));
25143 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25144 }
25145 else
25146 {
25147 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25148 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25149 }
25150 }
25151 }
25152
25153 free (discr_lists);
25154 }
25155
25156 /* Generate a DIE for a class member. */
25157
25158 static void
25159 gen_member_die (tree type, dw_die_ref context_die)
25160 {
25161 tree member;
25162 tree binfo = TYPE_BINFO (type);
25163
25164 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25165
25166 /* If this is not an incomplete type, output descriptions of each of its
25167 members. Note that as we output the DIEs necessary to represent the
25168 members of this record or union type, we will also be trying to output
25169 DIEs to represent the *types* of those members. However the `type'
25170 function (above) will specifically avoid generating type DIEs for member
25171 types *within* the list of member DIEs for this (containing) type except
25172 for those types (of members) which are explicitly marked as also being
25173 members of this (containing) type themselves. The g++ front- end can
25174 force any given type to be treated as a member of some other (containing)
25175 type by setting the TYPE_CONTEXT of the given (member) type to point to
25176 the TREE node representing the appropriate (containing) type. */
25177
25178 /* First output info about the base classes. */
25179 if (binfo && early_dwarf)
25180 {
25181 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25182 int i;
25183 tree base;
25184
25185 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25186 gen_inheritance_die (base,
25187 (accesses ? (*accesses)[i] : access_public_node),
25188 type,
25189 context_die);
25190 }
25191
25192 /* Now output info about the members. */
25193 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25194 {
25195 /* Ignore clones. */
25196 if (DECL_ABSTRACT_ORIGIN (member))
25197 continue;
25198
25199 struct vlr_context vlr_ctx = { type, NULL_TREE };
25200 bool static_inline_p
25201 = (VAR_P (member)
25202 && TREE_STATIC (member)
25203 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25204 != -1));
25205
25206 /* If we thought we were generating minimal debug info for TYPE
25207 and then changed our minds, some of the member declarations
25208 may have already been defined. Don't define them again, but
25209 do put them in the right order. */
25210
25211 if (dw_die_ref child = lookup_decl_die (member))
25212 {
25213 /* Handle inline static data members, which only have in-class
25214 declarations. */
25215 bool splice = true;
25216
25217 dw_die_ref ref = NULL;
25218 if (child->die_tag == DW_TAG_variable
25219 && child->die_parent == comp_unit_die ())
25220 {
25221 ref = get_AT_ref (child, DW_AT_specification);
25222
25223 /* For C++17 inline static data members followed by redundant
25224 out of class redeclaration, we might get here with
25225 child being the DIE created for the out of class
25226 redeclaration and with its DW_AT_specification being
25227 the DIE created for in-class definition. We want to
25228 reparent the latter, and don't want to create another
25229 DIE with DW_AT_specification in that case, because
25230 we already have one. */
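/* A hypothetical source-level example of the situation described
   above:
     struct S { static inline int x = 1; };
     int S::x;   // redundant out-of-class redeclaration
   Here child may be the DIE for the redeclaration and ref the DIE
   for the in-class definition; the latter is the one that gets
   reparented below.  */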
25231 if (ref
25232 && static_inline_p
25233 && ref->die_tag == DW_TAG_variable
25234 && ref->die_parent == comp_unit_die ()
25235 && get_AT (ref, DW_AT_specification) == NULL)
25236 {
25237 child = ref;
25238 ref = NULL;
25239 static_inline_p = false;
25240 }
25241
25242 if (!ref)
25243 {
25244 reparent_child (child, context_die);
25245 if (dwarf_version < 5)
25246 child->die_tag = DW_TAG_member;
25247 splice = false;
25248 }
25249 }
25250
25251 if (splice)
25252 splice_child_die (context_die, child);
25253 }
25254
25255 /* Do not generate standard DWARF for variant parts if we are generating
25256 the corresponding GNAT encodings: DIEs generated for both would
25257 conflict in our mappings. */
25258 else if (is_variant_part (member)
25259 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25260 {
25261 vlr_ctx.variant_part_offset = byte_position (member);
25262 gen_variant_part (member, &vlr_ctx, context_die);
25263 }
25264 else
25265 {
25266 vlr_ctx.variant_part_offset = NULL_TREE;
25267 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25268 }
25269
25270 /* For C++ inline static data members emit immediately a DW_TAG_variable
25271 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25272 DW_AT_specification. */
25273 if (static_inline_p)
25274 {
25275 int old_extern = DECL_EXTERNAL (member);
25276 DECL_EXTERNAL (member) = 0;
25277 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25278 DECL_EXTERNAL (member) = old_extern;
25279 }
25280 }
25281 }
25282
25283 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25284 is set, we pretend that the type was never defined, so we only get the
25285 member DIEs needed by later specification DIEs. */
25286
25287 static void
25288 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25289 enum debug_info_usage usage)
25290 {
25291 if (TREE_ASM_WRITTEN (type))
25292 {
25293 /* Fill in the bounds of variable-length fields in late dwarf if
25294 still incomplete. */
25295 if (!early_dwarf && variably_modified_type_p (type, NULL))
25296 for (tree member = TYPE_FIELDS (type);
25297 member;
25298 member = DECL_CHAIN (member))
25299 fill_variable_array_bounds (TREE_TYPE (member));
25300 return;
25301 }
25302
25303 dw_die_ref type_die = lookup_type_die (type);
25304 dw_die_ref scope_die = 0;
25305 int nested = 0;
25306 int complete = (TYPE_SIZE (type)
25307 && (! TYPE_STUB_DECL (type)
25308 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25309 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25310 complete = complete && should_emit_struct_debug (type, usage);
25311
25312 if (type_die && ! complete)
25313 return;
25314
25315 if (TYPE_CONTEXT (type) != NULL_TREE
25316 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25317 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25318 nested = 1;
25319
25320 scope_die = scope_die_for (type, context_die);
25321
25322 /* Generate child DIEs for template parameters. */
25323 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25324 schedule_generic_params_dies_gen (type);
25325
25326 if (! type_die || (nested && is_cu_die (scope_die)))
25327 /* First occurrence of type or toplevel definition of nested class. */
25328 {
25329 dw_die_ref old_die = type_die;
25330
25331 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25332 ? record_type_tag (type) : DW_TAG_union_type,
25333 scope_die, type);
25334 equate_type_number_to_die (type, type_die);
25335 if (old_die)
25336 add_AT_specification (type_die, old_die);
25337 else
25338 add_name_attribute (type_die, type_tag (type));
25339 }
25340 else
25341 remove_AT (type_die, DW_AT_declaration);
25342
25343 /* If this type has been completed, then give it a byte_size attribute and
25344 then give a list of members. */
25345 if (complete && !ns_decl)
25346 {
25347 /* Prevent infinite recursion in cases where the type of some member of
25348 this type is expressed in terms of this type itself. */
25349 TREE_ASM_WRITTEN (type) = 1;
25350 add_byte_size_attribute (type_die, type);
25351 add_alignment_attribute (type_die, type);
25352 if (TYPE_STUB_DECL (type) != NULL_TREE)
25353 {
25354 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25355 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25356 }
25357
25358 /* If the first reference to this type was as the return type of an
25359 inline function, then it may not have a parent. Fix this now. */
25360 if (type_die->die_parent == NULL)
25361 add_child_die (scope_die, type_die);
25362
25363 gen_member_die (type, type_die);
25364
25365 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25366 if (TYPE_ARTIFICIAL (type))
25367 add_AT_flag (type_die, DW_AT_artificial, 1);
25368
25369 /* GNU extension: Record what type our vtable lives in. */
25370 if (TYPE_VFIELD (type))
25371 {
25372 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25373
25374 gen_type_die (vtype, context_die);
25375 add_AT_die_ref (type_die, DW_AT_containing_type,
25376 lookup_type_die (vtype));
25377 }
25378 }
25379 else
25380 {
25381 add_AT_flag (type_die, DW_AT_declaration, 1);
25382
25383 /* We don't need to do this for function-local types. */
25384 if (TYPE_STUB_DECL (type)
25385 && ! decl_function_context (TYPE_STUB_DECL (type)))
25386 vec_safe_push (incomplete_types, type);
25387 }
25388
25389 if (get_AT (type_die, DW_AT_name))
25390 add_pubtype (type, type_die);
25391 }
25392
25393 /* Generate a DIE for a subroutine _type_. */
25394
25395 static void
25396 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25397 {
25398 tree return_type = TREE_TYPE (type);
25399 dw_die_ref subr_die
25400 = new_die (DW_TAG_subroutine_type,
25401 scope_die_for (type, context_die), type);
25402
25403 equate_type_number_to_die (type, subr_die);
25404 add_prototyped_attribute (subr_die, type);
25405 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25406 context_die);
25407 add_alignment_attribute (subr_die, type);
25408 gen_formal_types_die (type, subr_die);
25409
25410 if (get_AT (subr_die, DW_AT_name))
25411 add_pubtype (type, subr_die);
25412 if ((dwarf_version >= 5 || !dwarf_strict)
25413 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25414 add_AT_flag (subr_die, DW_AT_reference, 1);
25415 if ((dwarf_version >= 5 || !dwarf_strict)
25416 && lang_hooks.types.type_dwarf_attribute (type,
25417 DW_AT_rvalue_reference) != -1)
25418 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25419 }
25420
25421 /* Generate a DIE for a type definition. */
25422
25423 static void
25424 gen_typedef_die (tree decl, dw_die_ref context_die)
25425 {
25426 dw_die_ref type_die;
25427 tree type;
25428
25429 if (TREE_ASM_WRITTEN (decl))
25430 {
25431 if (DECL_ORIGINAL_TYPE (decl))
25432 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25433 return;
25434 }
25435
25436 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25437 checks in process_scope_var and modified_type_die), this should be called
25438 only for original types. */
25439 gcc_assert (decl_ultimate_origin (decl) == NULL
25440 || decl_ultimate_origin (decl) == decl);
25441
25442 TREE_ASM_WRITTEN (decl) = 1;
25443 type_die = new_die (DW_TAG_typedef, context_die, decl);
25444
25445 add_name_and_src_coords_attributes (type_die, decl);
25446 if (DECL_ORIGINAL_TYPE (decl))
25447 {
25448 type = DECL_ORIGINAL_TYPE (decl);
25449 if (type == error_mark_node)
25450 return;
25451
25452 gcc_assert (type != TREE_TYPE (decl));
25453 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25454 }
25455 else
25456 {
25457 type = TREE_TYPE (decl);
25458 if (type == error_mark_node)
25459 return;
25460
25461 if (is_naming_typedef_decl (TYPE_NAME (type)))
25462 {
25463 /* Here, we are in the case of decl being a typedef naming
25464 an anonymous type, e.g.:
25465 typedef struct {...} foo;
25466 In that case TREE_TYPE (decl) is not a typedef variant
25467 type and TYPE_NAME of the anonymous type is set to the
25468 TYPE_DECL of the typedef. This construct is emitted by
25469 the C++ FE.
25470
25471 TYPE is the anonymous struct named by the typedef
25472 DECL. As we need the DW_AT_type attribute of the
25473 DW_TAG_typedef to point to the DIE of TYPE, let's
25474 generate that DIE right away. add_type_attribute
25475 called below will then pick (via lookup_type_die) that
25476 anonymous struct DIE. */
25477 if (!TREE_ASM_WRITTEN (type))
25478 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25479
25480 /* This is a GNU Extension. We are adding a
25481 DW_AT_linkage_name attribute to the DIE of the
25482 anonymous struct TYPE. The value of that attribute
25483 is the name of the typedef decl naming the anonymous
25484 struct. This greatly eases the work of consumers of
25485 this debug info. */
25486 add_linkage_name_raw (lookup_type_die (type), decl);
25487 }
25488 }
25489
25490 add_type_attribute (type_die, type, decl_quals (decl), false,
25491 context_die);
25492
25493 if (is_naming_typedef_decl (decl))
25494 /* We want that all subsequent calls to lookup_type_die with
25495 TYPE in argument yield the DW_TAG_typedef we have just
25496 created. */
25497 equate_type_number_to_die (type, type_die);
25498
25499 add_alignment_attribute (type_die, TREE_TYPE (decl));
25500
25501 add_accessibility_attribute (type_die, decl);
25502
25503 if (DECL_ABSTRACT_P (decl))
25504 equate_decl_number_to_die (decl, type_die);
25505
25506 if (get_AT (type_die, DW_AT_name))
25507 add_pubtype (decl, type_die);
25508 }
25509
25510 /* Generate a DIE for a struct, class, enum or union type. */
25511
25512 static void
25513 gen_tagged_type_die (tree type,
25514 dw_die_ref context_die,
25515 enum debug_info_usage usage)
25516 {
25517 if (type == NULL_TREE
25518 || !is_tagged_type (type))
25519 return;
25520
25521 if (TREE_ASM_WRITTEN (type))
25522 ;
25523 /* If this is a nested type whose containing class hasn't been written
25524 out yet, writing it out will cover this one, too. This does not apply
25525 to instantiations of member class templates; they need to be added to
25526 the containing class as they are generated. FIXME: This hurts the
25527 idea of combining type decls from multiple TUs, since we can't predict
25528 what set of template instantiations we'll get. */
25529 else if (TYPE_CONTEXT (type)
25530 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25531 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25532 {
25533 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25534
25535 if (TREE_ASM_WRITTEN (type))
25536 return;
25537
25538 /* If that failed, attach ourselves to the stub. */
25539 context_die = lookup_type_die (TYPE_CONTEXT (type));
25540 }
25541 else if (TYPE_CONTEXT (type) != NULL_TREE
25542 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25543 {
25544 /* If this type is local to a function that hasn't been written
25545 out yet, use a NULL context for now; it will be fixed up in
25546 decls_for_scope. */
25547 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25548 /* A declaration DIE doesn't count; nested types need to go in the
25549 specification. */
25550 if (context_die && is_declaration_die (context_die))
25551 context_die = NULL;
25552 }
25553 else
25554 context_die = declare_in_namespace (type, context_die);
25555
25556 if (TREE_CODE (type) == ENUMERAL_TYPE)
25557 {
25558 /* This might have been written out by the call to
25559 declare_in_namespace. */
25560 if (!TREE_ASM_WRITTEN (type))
25561 gen_enumeration_type_die (type, context_die);
25562 }
25563 else
25564 gen_struct_or_union_type_die (type, context_die, usage);
25565
25566 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25567 it up if it is ever completed. gen_*_type_die will set it for us
25568 when appropriate. */
25569 }
25570
25571 /* Generate a type description DIE. */
25572
25573 static void
25574 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25575 enum debug_info_usage usage)
25576 {
25577 struct array_descr_info info;
25578
25579 if (type == NULL_TREE || type == error_mark_node)
25580 return;
25581
25582 if (flag_checking && type)
25583 verify_type (type);
25584
25585 if (TYPE_NAME (type) != NULL_TREE
25586 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25587 && is_redundant_typedef (TYPE_NAME (type))
25588 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25589 /* The DECL of this type is a typedef we don't want to emit debug
25590 info for but we want debug info for its underlying typedef.
25591 This can happen for e.g, the injected-class-name of a C++
25592 type. */
25593 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25594
25595 /* If TYPE is a typedef type variant, let's generate debug info
25596 for the parent typedef which TYPE is a type of. */
25597 if (typedef_variant_p (type))
25598 {
25599 if (TREE_ASM_WRITTEN (type))
25600 return;
25601
25602 tree name = TYPE_NAME (type);
25603 tree origin = decl_ultimate_origin (name);
25604 if (origin != NULL && origin != name)
25605 {
25606 gen_decl_die (origin, NULL, NULL, context_die);
25607 return;
25608 }
25609
25610 /* Prevent broken recursion; we can't hand off to the same type. */
25611 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25612
25613 /* Give typedefs the right scope. */
25614 context_die = scope_die_for (type, context_die);
25615
25616 TREE_ASM_WRITTEN (type) = 1;
25617
25618 gen_decl_die (name, NULL, NULL, context_die);
25619 return;
25620 }
25621
25622 /* If type is an anonymous tagged type named by a typedef, let's
25623 generate debug info for the typedef. */
25624 if (is_naming_typedef_decl (TYPE_NAME (type)))
25625 {
25626 /* Give typedefs the right scope. */
25627 context_die = scope_die_for (type, context_die);
25628
25629 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25630 return;
25631 }
25632
25633 if (lang_hooks.types.get_debug_type)
25634 {
25635 tree debug_type = lang_hooks.types.get_debug_type (type);
25636
25637 if (debug_type != NULL_TREE && debug_type != type)
25638 {
25639 gen_type_die_with_usage (debug_type, context_die, usage);
25640 return;
25641 }
25642 }
25643
25644 /* We are going to output a DIE to represent the unqualified version
25645 of this type (i.e. without any const or volatile qualifiers) so
25646 get the main variant (i.e. the unqualified version) of this type
25647 now. (Vectors and arrays are special because the debugging info is in the
25648 cloned type itself. Similarly function/method types can contain extra
25649 ref-qualification). */
25650 if (TREE_CODE (type) == FUNCTION_TYPE
25651 || TREE_CODE (type) == METHOD_TYPE)
25652 {
25653 /* For function/method types, can't use type_main_variant here,
25654 because that can have different ref-qualifiers for C++,
25655 but try to canonicalize. */
25656 tree main = TYPE_MAIN_VARIANT (type);
25657 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25658 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25659 && check_base_type (t, main)
25660 && check_lang_type (t, type))
25661 {
25662 type = t;
25663 break;
25664 }
25665 }
25666 else if (TREE_CODE (type) != VECTOR_TYPE
25667 && TREE_CODE (type) != ARRAY_TYPE)
25668 type = type_main_variant (type);
25669
25670 /* If this is an array type with hidden descriptor, handle it first. */
25671 if (!TREE_ASM_WRITTEN (type)
25672 && lang_hooks.types.get_array_descr_info)
25673 {
25674 memset (&info, 0, sizeof (info));
25675 if (lang_hooks.types.get_array_descr_info (type, &info))
25676 {
25677 /* Fortran sometimes emits array types with no dimension. */
25678 gcc_assert (info.ndimensions >= 0
25679 && (info.ndimensions
25680 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25681 gen_descr_array_type_die (type, &info, context_die);
25682 TREE_ASM_WRITTEN (type) = 1;
25683 return;
25684 }
25685 }
25686
25687 if (TREE_ASM_WRITTEN (type))
25688 {
25689 /* Variable-length types may be incomplete even if
25690 TREE_ASM_WRITTEN. For such types, fall through to
25691 gen_array_type_die() and possibly fill in
25692 DW_AT_{upper,lower}_bound attributes. */
25693 if ((TREE_CODE (type) != ARRAY_TYPE
25694 && TREE_CODE (type) != RECORD_TYPE
25695 && TREE_CODE (type) != UNION_TYPE
25696 && TREE_CODE (type) != QUAL_UNION_TYPE)
25697 || !variably_modified_type_p (type, NULL))
25698 return;
25699 }
25700
25701 switch (TREE_CODE (type))
25702 {
25703 case ERROR_MARK:
25704 break;
25705
25706 case POINTER_TYPE:
25707 case REFERENCE_TYPE:
25708 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25709 ensures that the gen_type_die recursion will terminate even if the
25710 type is recursive. Recursive types are possible in Ada. */
25711 /* ??? We could perhaps do this for all types before the switch
25712 statement. */
25713 TREE_ASM_WRITTEN (type) = 1;
25714
25715 /* For these types, all that is required is that we output a DIE (or a
25716 set of DIEs) to represent the "basis" type. */
25717 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25718 DINFO_USAGE_IND_USE);
25719 break;
25720
25721 case OFFSET_TYPE:
25722 /* This code is used for C++ pointer-to-data-member types.
25723 Output a description of the relevant class type. */
25724 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25725 DINFO_USAGE_IND_USE);
25726
25727 /* Output a description of the type of the object pointed to. */
25728 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25729 DINFO_USAGE_IND_USE);
25730
25731 /* Now output a DIE to represent this pointer-to-data-member type
25732 itself. */
25733 gen_ptr_to_mbr_type_die (type, context_die);
25734 break;
25735
25736 case FUNCTION_TYPE:
25737 /* Force out return type (in case it wasn't forced out already). */
25738 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25739 DINFO_USAGE_DIR_USE);
25740 gen_subroutine_type_die (type, context_die);
25741 break;
25742
25743 case METHOD_TYPE:
25744 /* Force out return type (in case it wasn't forced out already). */
25745 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25746 DINFO_USAGE_DIR_USE);
25747 gen_subroutine_type_die (type, context_die);
25748 break;
25749
25750 case ARRAY_TYPE:
25751 case VECTOR_TYPE:
25752 gen_array_type_die (type, context_die);
25753 break;
25754
25755 case ENUMERAL_TYPE:
25756 case RECORD_TYPE:
25757 case UNION_TYPE:
25758 case QUAL_UNION_TYPE:
25759 gen_tagged_type_die (type, context_die, usage);
25760 return;
25761
25762 case VOID_TYPE:
25763 case INTEGER_TYPE:
25764 case REAL_TYPE:
25765 case FIXED_POINT_TYPE:
25766 case COMPLEX_TYPE:
25767 case BOOLEAN_TYPE:
25768 /* No DIEs needed for fundamental types. */
25769 break;
25770
25771 case NULLPTR_TYPE:
25772 case LANG_TYPE:
25773 /* Just use DW_TAG_unspecified_type. */
25774 {
25775 dw_die_ref type_die = lookup_type_die (type);
25776 if (type_die == NULL)
25777 {
25778 tree name = TYPE_IDENTIFIER (type);
25779 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25780 type);
25781 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25782 equate_type_number_to_die (type, type_die);
25783 }
25784 }
25785 break;
25786
25787 default:
25788 if (is_cxx_auto (type))
25789 {
25790 tree name = TYPE_IDENTIFIER (type);
25791 dw_die_ref *die = (name == get_identifier ("auto")
25792 ? &auto_die : &decltype_auto_die);
25793 if (!*die)
25794 {
25795 *die = new_die (DW_TAG_unspecified_type,
25796 comp_unit_die (), NULL_TREE);
25797 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25798 }
25799 equate_type_number_to_die (type, *die);
25800 break;
25801 }
25802 gcc_unreachable ();
25803 }
25804
25805 TREE_ASM_WRITTEN (type) = 1;
25806 }
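
/* A small worked case for the typedef handling at the top of this function:
   given

     typedef int myint;
     myint x;

   the type of x is a typedef variant, so the code above hands it off to
   gen_decl_die for the TYPE_DECL "myint", which emits a DW_TAG_typedef DIE
   referring to the DIE for int instead of duplicating the base type.  */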
25807
25808 static void
25809 gen_type_die (tree type, dw_die_ref context_die)
25810 {
25811 if (type != error_mark_node)
25812 {
25813 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25814 if (flag_checking)
25815 {
25816 dw_die_ref die = lookup_type_die (type);
25817 if (die)
25818 check_die (die);
25819 }
25820 }
25821 }
25822
25823 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25824 things which are local to the given block. */
25825
25826 static void
25827 gen_block_die (tree stmt, dw_die_ref context_die)
25828 {
25829 int must_output_die = 0;
25830 bool inlined_func;
25831
25832 /* Ignore blocks that are NULL. */
25833 if (stmt == NULL_TREE)
25834 return;
25835
25836 inlined_func = inlined_function_outer_scope_p (stmt);
25837
25838 /* If the block is one fragment of a non-contiguous block, do not
25839 process the variables, since they will have been done by the
25840 origin block. Do process subblocks. */
25841 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25842 {
25843 tree sub;
25844
25845 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25846 gen_block_die (sub, context_die);
25847
25848 return;
25849 }
25850
25851 /* Determine if we need to output any Dwarf DIEs at all to represent this
25852 block. */
25853 if (inlined_func)
25854 /* The outer scopes for inlinings *must* always be represented. We
25855 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25856 must_output_die = 1;
25857 else if (lookup_block_die (stmt))
25858 /* If we already have a DIE then it was filled early. Meanwhile
25859 we might have pruned all BLOCK_VARS as optimized out, but we
25860 still want to generate high/low PC attributes, so output it. */
25861 must_output_die = 1;
25862 else if (TREE_USED (stmt)
25863 || TREE_ASM_WRITTEN (stmt))
25864 {
25865 /* Determine if this block directly contains any "significant"
25866 local declarations which we will need to output DIEs for. */
25867 if (debug_info_level > DINFO_LEVEL_TERSE)
25868 {
25869 /* We are not in terse mode so any local declaration that
25870 is not ignored for debug purposes counts as being a
25871 "significant" one. */
25872 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25873 must_output_die = 1;
25874 else
25875 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25876 if (!DECL_IGNORED_P (var))
25877 {
25878 must_output_die = 1;
25879 break;
25880 }
25881 }
25882 else if (!dwarf2out_ignore_block (stmt))
25883 must_output_die = 1;
25884 }
25885
25886 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25887 DIE for any block which contains no significant local declarations at
25888 all. Rather, in such cases we just call `decls_for_scope' so that any
25889 needed Dwarf info for any sub-blocks will get properly generated. Note
25890 that in terse mode, our definition of what constitutes a "significant"
25891 local declaration gets restricted to include only inlined function
25892 instances and local (nested) function definitions. */
25893 if (must_output_die)
25894 {
25895 if (inlined_func)
25896 gen_inlined_subroutine_die (stmt, context_die);
25897 else
25898 gen_lexical_block_die (stmt, context_die);
25899 }
25900 else
25901 decls_for_scope (stmt, context_die);
25902 }
25903
25904 /* Process variable DECL (or variable with origin ORIGIN) within
25905 block STMT and add it to CONTEXT_DIE. */
25906 static void
25907 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25908 {
25909 dw_die_ref die;
25910 tree decl_or_origin = decl ? decl : origin;
25911
25912 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25913 die = lookup_decl_die (decl_or_origin);
25914 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25915 {
25916 if (TYPE_DECL_IS_STUB (decl_or_origin))
25917 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25918 else
25919 die = lookup_decl_die (decl_or_origin);
25920 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25921 if (! die && ! early_dwarf)
25922 return;
25923 }
25924 else
25925 die = NULL;
25926
25927 /* Avoid creating DIEs for local typedefs and concrete static variables that
25928 will only be pruned later. */
25929 if ((origin || decl_ultimate_origin (decl))
25930 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25931 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25932 {
25933 origin = decl_ultimate_origin (decl_or_origin);
25934 if (decl && VAR_P (decl) && die != NULL)
25935 {
25936 die = lookup_decl_die (origin);
25937 if (die != NULL)
25938 equate_decl_number_to_die (decl, die);
25939 }
25940 return;
25941 }
25942
25943 if (die != NULL && die->die_parent == NULL)
25944 add_child_die (context_die, die);
25945 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25946 {
25947 if (early_dwarf)
25948 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25949 stmt, context_die);
25950 }
25951 else
25952 {
25953 if (decl && DECL_P (decl))
25954 {
25955 die = lookup_decl_die (decl);
25956
25957 /* Early created DIEs do not have a parent as the decls refer
25958 to the function as DECL_CONTEXT rather than the BLOCK. */
25959 if (die && die->die_parent == NULL)
25960 {
25961 gcc_assert (in_lto_p);
25962 add_child_die (context_die, die);
25963 }
25964 }
25965
25966 gen_decl_die (decl, origin, NULL, context_die);
25967 }
25968 }
25969
25970 /* Generate all of the decls declared within a given scope and (recursively)
25971 all of its sub-blocks. */
25972
25973 static void
25974 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25975 {
25976 tree decl;
25977 unsigned int i;
25978 tree subblocks;
25979
25980 /* Ignore NULL blocks. */
25981 if (stmt == NULL_TREE)
25982 return;
25983
25984 /* Output the DIEs to represent all of the data objects and typedefs
25985 declared directly within this block but not within any nested
25986 sub-blocks. Also, nested function and tag DIEs have been
25987 generated with a parent of NULL; fix that up now. We don't
25988 have to do this if we're at -g1. */
25989 if (debug_info_level > DINFO_LEVEL_TERSE)
25990 {
25991 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25992 process_scope_var (stmt, decl, NULL_TREE, context_die);
25993 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25994 origin - avoid doing this twice as we have no good way to see
25995 if we've done it once already. */
25996 if (! early_dwarf)
25997 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25998 {
25999 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
26000 if (decl == current_function_decl)
26001 /* Ignore declarations of the current function: although they
26002 are declarations, gen_subprogram_die would treat them as
26003 definitions again (because they are equal to
26004 current_function_decl) and endlessly recurse. */;
26005 else if (TREE_CODE (decl) == FUNCTION_DECL)
26006 process_scope_var (stmt, decl, NULL_TREE, context_die);
26007 else
26008 process_scope_var (stmt, NULL_TREE, decl, context_die);
26009 }
26010 }
26011
26012 /* Even if we're at -g1, we need to process the subblocks in order to get
26013 inlined call information. */
26014
26015 /* Output the DIEs to represent all sub-blocks (and the items declared
26016 therein) of this block. */
26017 if (recurse)
26018 for (subblocks = BLOCK_SUBBLOCKS (stmt);
26019 subblocks != NULL;
26020 subblocks = BLOCK_CHAIN (subblocks))
26021 gen_block_die (subblocks, context_die);
26022 }
26023
26024 /* Is this a typedef we can avoid emitting? */
26025
26026 static bool
26027 is_redundant_typedef (const_tree decl)
26028 {
26029 if (TYPE_DECL_IS_STUB (decl))
26030 return true;
26031
26032 if (DECL_ARTIFICIAL (decl)
26033 && DECL_CONTEXT (decl)
26034 && is_tagged_type (DECL_CONTEXT (decl))
26035 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
26036 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
26037 /* Also ignore the artificial member typedef for the class name. */
26038 return true;
26039
26040 return false;
26041 }
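
/* Example of the second case above: for a C++ class

     struct S { };

   the front end gives S an artificial member TYPE_DECL also named S (the
   injected-class-name); emitting a separate typedef DIE for it would only
   duplicate the class DIE, so it is treated as redundant here.  */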
26042
26043 /* Return TRUE if DECL is a typedef that names a type for linkage
26044 purposes. This kind of typedef is produced by the C++ FE for
26045 constructs like:
26046
26047 typedef struct {...} foo;
26048
26049 In that case, there is no typedef variant type produced for foo.
26050 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
26051 struct type. */
26052
26053 static bool
26054 is_naming_typedef_decl (const_tree decl)
26055 {
26056 if (decl == NULL_TREE
26057 || TREE_CODE (decl) != TYPE_DECL
26058 || DECL_NAMELESS (decl)
26059 || !is_tagged_type (TREE_TYPE (decl))
26060 || DECL_IS_UNDECLARED_BUILTIN (decl)
26061 || is_redundant_typedef (decl)
26062 /* It looks like Ada produces TYPE_DECLs that are very similar
26063 to C++ naming typedefs but that have different
26064 semantics. Let's be specific to C++ for now. */
26065 || !is_cxx (decl))
26066 return false;
26067
26068 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
26069 && TYPE_NAME (TREE_TYPE (decl)) == decl
26070 && (TYPE_STUB_DECL (TREE_TYPE (decl))
26071 != TYPE_NAME (TREE_TYPE (decl))));
26072 }
26073
26074 /* Looks up the DIE for a context. */
26075
26076 static inline dw_die_ref
26077 lookup_context_die (tree context)
26078 {
26079 if (context)
26080 {
26081 /* Find die that represents this context. */
26082 if (TYPE_P (context))
26083 {
26084 context = TYPE_MAIN_VARIANT (context);
26085 dw_die_ref ctx = lookup_type_die (context);
26086 if (!ctx)
26087 return NULL;
26088 return strip_naming_typedef (context, ctx);
26089 }
26090 else
26091 return lookup_decl_die (context);
26092 }
26093 return comp_unit_die ();
26094 }
26095
26096 /* Returns the DIE for a context. */
26097
26098 static inline dw_die_ref
26099 get_context_die (tree context)
26100 {
26101 if (context)
26102 {
26103 /* Find die that represents this context. */
26104 if (TYPE_P (context))
26105 {
26106 context = TYPE_MAIN_VARIANT (context);
26107 return strip_naming_typedef (context, force_type_die (context));
26108 }
26109 else
26110 return force_decl_die (context);
26111 }
26112 return comp_unit_die ();
26113 }
26114
26115 /* Returns the DIE for decl. A DIE will always be returned. */
26116
26117 static dw_die_ref
26118 force_decl_die (tree decl)
26119 {
26120 dw_die_ref decl_die;
26121 unsigned saved_external_flag;
26122 tree save_fn = NULL_TREE;
26123 decl_die = lookup_decl_die (decl);
26124 if (!decl_die)
26125 {
26126 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26127
26128 decl_die = lookup_decl_die (decl);
26129 if (decl_die)
26130 return decl_die;
26131
26132 switch (TREE_CODE (decl))
26133 {
26134 case FUNCTION_DECL:
26135 /* Clear current_function_decl, so that gen_subprogram_die thinks
26136 that this is a declaration. At this point, we just want to force
26137 declaration die. */
26138 save_fn = current_function_decl;
26139 current_function_decl = NULL_TREE;
26140 gen_subprogram_die (decl, context_die);
26141 current_function_decl = save_fn;
26142 break;
26143
26144 case VAR_DECL:
26145 /* Set external flag to force declaration die. Restore it after
26146 gen_decl_die() call. */
26147 saved_external_flag = DECL_EXTERNAL (decl);
26148 DECL_EXTERNAL (decl) = 1;
26149 gen_decl_die (decl, NULL, NULL, context_die);
26150 DECL_EXTERNAL (decl) = saved_external_flag;
26151 break;
26152
26153 case NAMESPACE_DECL:
26154 if (dwarf_version >= 3 || !dwarf_strict)
26155 dwarf2out_decl (decl);
26156 else
26157 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
26158 decl_die = comp_unit_die ();
26159 break;
26160
26161 case TRANSLATION_UNIT_DECL:
26162 decl_die = comp_unit_die ();
26163 break;
26164
26165 default:
26166 gcc_unreachable ();
26167 }
26168
26169 /* We should be able to find the DIE now. */
26170 if (!decl_die)
26171 decl_die = lookup_decl_die (decl);
26172 gcc_assert (decl_die);
26173 }
26174
26175 return decl_die;
26176 }
26177
26178 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26179 always returned. */
26180
26181 static dw_die_ref
26182 force_type_die (tree type)
26183 {
26184 dw_die_ref type_die;
26185
26186 type_die = lookup_type_die (type);
26187 if (!type_die)
26188 {
26189 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26190
26191 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26192 false, context_die);
26193 gcc_assert (type_die);
26194 }
26195 return type_die;
26196 }
26197
26198 /* Force out any required namespaces to be able to output DECL,
26199 and return the new context_die for it, if it's changed. */
26200
26201 static dw_die_ref
26202 setup_namespace_context (tree thing, dw_die_ref context_die)
26203 {
26204 tree context = (DECL_P (thing)
26205 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26206 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26207 /* Force out the namespace. */
26208 context_die = force_decl_die (context);
26209
26210 return context_die;
26211 }
26212
26213 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26214 type) within its namespace, if appropriate.
26215
26216 For compatibility with older debuggers, namespace DIEs only contain
26217 declarations; all definitions are emitted at CU scope, with
26218 DW_AT_specification pointing to the declaration (like with class
26219 members). */
26220
26221 static dw_die_ref
26222 declare_in_namespace (tree thing, dw_die_ref context_die)
26223 {
26224 dw_die_ref ns_context;
26225
26226 if (debug_info_level <= DINFO_LEVEL_TERSE)
26227 return context_die;
26228
26229 /* External declarations in the local scope only need to be emitted
26230 once, not once in the namespace and once in the scope.
26231
26232 This avoids declaring the `extern' below in the
26233 namespace DIE as well as in the innermost scope:
26234
26235 namespace S
26236 {
26237 int i=5;
26238 int foo()
26239 {
26240 int i=8;
26241 extern int i;
26242 return i;
26243 }
26244 }
26245 */
26246 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26247 return context_die;
26248
26249 /* If this decl is from an inlined function, then don't try to emit it in its
26250 namespace, as we will get confused. It would have already been emitted
26251 when the abstract instance of the inline function was emitted anyway. */
26252 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26253 return context_die;
26254
26255 ns_context = setup_namespace_context (thing, context_die);
26256
26257 if (ns_context != context_die)
26258 {
26259 if (is_fortran () || is_dlang ())
26260 return ns_context;
26261 if (DECL_P (thing))
26262 gen_decl_die (thing, NULL, NULL, ns_context);
26263 else
26264 gen_type_die (thing, ns_context);
26265 }
26266 return context_die;
26267 }
26268
26269 /* Generate a DIE for a namespace or namespace alias. */
26270
26271 static void
26272 gen_namespace_die (tree decl, dw_die_ref context_die)
26273 {
26274 dw_die_ref namespace_die;
26275
26276 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26277 they are an alias of. */
26278 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26279 {
26280 /* Output a real namespace or module. */
26281 context_die = setup_namespace_context (decl, comp_unit_die ());
26282 namespace_die = new_die (is_fortran () || is_dlang ()
26283 ? DW_TAG_module : DW_TAG_namespace,
26284 context_die, decl);
26285 /* For Fortran modules defined in a different CU, don't add src coords. */
26286 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26287 {
26288 const char *name = dwarf2_name (decl, 0);
26289 if (name)
26290 add_name_attribute (namespace_die, name);
26291 }
26292 else
26293 add_name_and_src_coords_attributes (namespace_die, decl);
26294 if (DECL_EXTERNAL (decl))
26295 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26296 equate_decl_number_to_die (decl, namespace_die);
26297 }
26298 else
26299 {
26300 /* Output a namespace alias. */
26301
26302 /* Force out the namespace we are an alias of, if necessary. */
26303 dw_die_ref origin_die
26304 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26305
26306 if (DECL_FILE_SCOPE_P (decl)
26307 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26308 context_die = setup_namespace_context (decl, comp_unit_die ());
26309 /* Now create the namespace alias DIE. */
26310 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26311 add_name_and_src_coords_attributes (namespace_die, decl);
26312 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26313 equate_decl_number_to_die (decl, namespace_die);
26314 }
26315 if ((dwarf_version >= 5 || !dwarf_strict)
26316 && lang_hooks.decls.decl_dwarf_attribute (decl,
26317 DW_AT_export_symbols) == 1)
26318 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26319
26320 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26321 if (want_pubnames ())
26322 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26323 }
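
/* A sketch of the two cases handled above: a real namespace such as

     namespace N { int i; }

   gets a DW_TAG_namespace DIE (DW_TAG_module for Fortran or D), while a
   namespace alias such as

     namespace M = N;

   gets a DW_TAG_imported_declaration whose DW_AT_import refers to the DIE
   previously forced out for N.  */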
26324
26325 /* Generate Dwarf debug information for a decl described by DECL.
26326 The return value is currently only meaningful for PARM_DECLs;
26327 for all other decls it returns NULL.
26328
26329 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26330 It can be NULL otherwise. */
26331
26332 static dw_die_ref
26333 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26334 dw_die_ref context_die)
26335 {
26336 tree decl_or_origin = decl ? decl : origin;
26337 tree class_origin = NULL, ultimate_origin;
26338
26339 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26340 return NULL;
26341
26342 switch (TREE_CODE (decl_or_origin))
26343 {
26344 case ERROR_MARK:
26345 break;
26346
26347 case CONST_DECL:
26348 if (!is_fortran () && !is_ada () && !is_dlang ())
26349 {
26350 /* The individual enumerators of an enum type get output when we output
26351 the Dwarf representation of the relevant enum type itself. */
26352 break;
26353 }
26354
26355 /* Emit its type. */
26356 gen_type_die (TREE_TYPE (decl), context_die);
26357
26358 /* And its containing namespace. */
26359 context_die = declare_in_namespace (decl, context_die);
26360
26361 gen_const_die (decl, context_die);
26362 break;
26363
26364 case FUNCTION_DECL:
26365 #if 0
26366 /* FIXME */
26367 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26368 on local redeclarations of global functions. That seems broken. */
26369 if (current_function_decl != decl)
26370 /* This is only a declaration. */;
26371 #endif
26372
26373 /* We should have abstract copies already and should not generate
26374 stray type DIEs in late LTO dumping. */
26375 if (! early_dwarf)
26376 ;
26377
26378 /* If we're emitting a clone, emit info for the abstract instance. */
26379 else if (origin || DECL_ORIGIN (decl) != decl)
26380 dwarf2out_abstract_function (origin
26381 ? DECL_ORIGIN (origin)
26382 : DECL_ABSTRACT_ORIGIN (decl));
26383
26384 /* If we're emitting a possibly inlined function emit it as
26385 abstract instance. */
26386 else if (cgraph_function_possibly_inlined_p (decl)
26387 && ! DECL_ABSTRACT_P (decl)
26388 && ! class_or_namespace_scope_p (context_die)
26389 /* dwarf2out_abstract_function won't emit a die if this is just
26390 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26391 that case, because that works only if we have a die. */
26392 && DECL_INITIAL (decl) != NULL_TREE)
26393 dwarf2out_abstract_function (decl);
26394
26395 /* Otherwise we're emitting the primary DIE for this decl. */
26396 else if (debug_info_level > DINFO_LEVEL_TERSE)
26397 {
26398 /* Before we describe the FUNCTION_DECL itself, make sure that we
26399 have its containing type. */
26400 if (!origin)
26401 origin = decl_class_context (decl);
26402 if (origin != NULL_TREE)
26403 gen_type_die (origin, context_die);
26404
26405 /* And its return type. */
26406 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26407
26408 /* And its virtual context. */
26409 if (DECL_VINDEX (decl) != NULL_TREE)
26410 gen_type_die (DECL_CONTEXT (decl), context_die);
26411
26412 /* Make sure we have a member DIE for decl. */
26413 if (origin != NULL_TREE)
26414 gen_type_die_for_member (origin, decl, context_die);
26415
26416 /* And its containing namespace. */
26417 context_die = declare_in_namespace (decl, context_die);
26418 }
26419
26420 /* Now output a DIE to represent the function itself. */
26421 if (decl)
26422 gen_subprogram_die (decl, context_die);
26423 break;
26424
26425 case TYPE_DECL:
26426 /* If we are in terse mode, don't generate any DIEs to represent any
26427 actual typedefs. */
26428 if (debug_info_level <= DINFO_LEVEL_TERSE)
26429 break;
26430
26431 /* In the special case of a TYPE_DECL node representing the declaration
26432 of some type tag, if the given TYPE_DECL is marked as having been
26433 instantiated from some other (original) TYPE_DECL node (e.g. one which
26434 was generated within the original definition of an inline function) we
26435 used to generate a special (abbreviated) DW_TAG_structure_type,
26436 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26437 should actually be referencing those DIEs, as variable DIEs with that
26438 type would be emitted already in the abstract origin, so it was always
26439 removed during unused type pruning. Don't add anything in this
26440 case. */
26441 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26442 break;
26443
26444 if (is_redundant_typedef (decl))
26445 gen_type_die (TREE_TYPE (decl), context_die);
26446 else
26447 /* Output a DIE to represent the typedef itself. */
26448 gen_typedef_die (decl, context_die);
26449 break;
26450
26451 case LABEL_DECL:
26452 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26453 gen_label_die (decl, context_die);
26454 break;
26455
26456 case VAR_DECL:
26457 case RESULT_DECL:
26458 /* If we are in terse mode, don't generate any DIEs to represent any
26459 variable declarations or definitions unless it is external. */
26460 if (debug_info_level < DINFO_LEVEL_TERSE
26461 || (debug_info_level == DINFO_LEVEL_TERSE
26462 && !TREE_PUBLIC (decl_or_origin)))
26463 break;
26464
26465 if (debug_info_level > DINFO_LEVEL_TERSE)
26466 {
26467 /* Avoid generating stray type DIEs during late dwarf dumping.
26468 All types have been dumped early. */
26469 if (early_dwarf
26470 /* ??? But in LTRANS we cannot annotate early created variably
26471 modified type DIEs without copying them and adjusting all
26472 references to them. Dump them again as happens for inlining
26473 which copies both the decl and the types. */
26474 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26475 in VLA bound information for example. */
26476 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26477 current_function_decl)))
26478 {
26479 /* Output any DIEs that are needed to specify the type of this data
26480 object. */
26481 if (decl_by_reference_p (decl_or_origin))
26482 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26483 else
26484 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26485 }
26486
26487 if (early_dwarf)
26488 {
26489 /* And its containing type. */
26490 class_origin = decl_class_context (decl_or_origin);
26491 if (class_origin != NULL_TREE)
26492 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26493
26494 /* And its containing namespace. */
26495 context_die = declare_in_namespace (decl_or_origin, context_die);
26496 }
26497 }
26498
26499 /* Now output the DIE to represent the data object itself. This gets
26500 complicated because of the possibility that the VAR_DECL really
26501 represents an inlined instance of a formal parameter for an inline
26502 function. */
26503 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26504 if (ultimate_origin != NULL_TREE
26505 && TREE_CODE (ultimate_origin) == PARM_DECL)
26506 gen_formal_parameter_die (decl, origin,
26507 true /* Emit name attribute. */,
26508 context_die);
26509 else
26510 gen_variable_die (decl, origin, context_die);
26511 break;
26512
26513 case FIELD_DECL:
26514 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26515 /* Ignore the nameless fields that are used to skip bits but handle C++
26516 anonymous unions and structs. */
26517 if (DECL_NAME (decl) != NULL_TREE
26518 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26519 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26520 {
26521 gen_type_die (member_declared_type (decl), context_die);
26522 gen_field_die (decl, ctx, context_die);
26523 }
26524 break;
26525
26526 case PARM_DECL:
26527 /* Avoid generating stray type DIEs during late dwarf dumping.
26528 All types have been dumped early. */
26529 if (early_dwarf
26530 /* ??? But in LTRANS we cannot annotate early created variably
26531 modified type DIEs without copying them and adjusting all
26532 references to them. Dump them again as happens for inlining
26533 which copies both the decl and the types. */
26534 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26535 in VLA bound information for example. */
26536 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26537 current_function_decl)))
26538 {
26539 if (DECL_BY_REFERENCE (decl_or_origin))
26540 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26541 else
26542 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26543 }
26544 return gen_formal_parameter_die (decl, origin,
26545 true /* Emit name attribute. */,
26546 context_die);
26547
26548 case NAMESPACE_DECL:
26549 if (dwarf_version >= 3 || !dwarf_strict)
26550 gen_namespace_die (decl, context_die);
26551 break;
26552
26553 case IMPORTED_DECL:
26554 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26555 DECL_CONTEXT (decl), context_die);
26556 break;
26557
26558 case NAMELIST_DECL:
26559 gen_namelist_decl (DECL_NAME (decl), context_die,
26560 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26561 break;
26562
26563 default:
26564 /* Probably some frontend-internal decl. Assume we don't care. */
26565 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26566 break;
26567 }
26568
26569 return NULL;
26570 }
26571 \f
26572 /* Output initial debug information for global DECL. Called at the
26573 end of the parsing process.
26574
26575 This is the initial debug generation process. As such, the DIEs
26576 generated may be incomplete. A later debug generation pass
26577 (dwarf2out_late_global_decl) will augment the information generated
26578 in this pass (e.g., with complete location info). */
26579
26580 static void
26581 dwarf2out_early_global_decl (tree decl)
26582 {
26583 set_early_dwarf s;
26584
26585 /* gen_decl_die() will set DECL_ABSTRACT because
26586 cgraph_function_possibly_inlined_p() returns true. This in
26587 turn will cause DW_AT_inline attributes to be set.
26588
26589 This happens because at early dwarf generation, there is no
26590 cgraph information, causing cgraph_function_possibly_inlined_p()
26591 to return true. Trick cgraph_function_possibly_inlined_p()
26592 while we generate dwarf early. */
26593 bool save = symtab->global_info_ready;
26594 symtab->global_info_ready = true;
26595
26596 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26597 other DECLs and they can point to template types or other things
26598 that dwarf2out can't handle when done via dwarf2out_decl. */
26599 if (TREE_CODE (decl) != TYPE_DECL
26600 && TREE_CODE (decl) != PARM_DECL)
26601 {
26602 if (TREE_CODE (decl) == FUNCTION_DECL)
26603 {
26604 tree save_fndecl = current_function_decl;
26605
26606 /* For nested functions, make sure we have DIEs for the parents first
26607 so that all nested DIEs are generated at the proper scope in the
26608 first shot. */
26609 tree context = decl_function_context (decl);
26610 if (context != NULL)
26611 {
26612 dw_die_ref context_die = lookup_decl_die (context);
26613 current_function_decl = context;
26614
26615 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26616 enough so that it lands in its own context. This avoids type
26617 pruning issues later on. */
26618 if (context_die == NULL || is_declaration_die (context_die))
26619 dwarf2out_early_global_decl (context);
26620 }
26621
26622 /* Emit an abstract origin of a function first. This happens
26623 with C++ constructor clones for example and makes
26624 dwarf2out_abstract_function happy which requires the early
26625 DIE of the abstract instance to be present. */
26626 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26627 dw_die_ref origin_die;
26628 if (origin != NULL
26629 /* Do not emit the DIE multiple times but make sure to
26630 process it fully here in case we just saw a declaration. */
26631 && ((origin_die = lookup_decl_die (origin)) == NULL
26632 || is_declaration_die (origin_die)))
26633 {
26634 current_function_decl = origin;
26635 dwarf2out_decl (origin);
26636 }
26637
26638 /* Emit the DIE for decl but avoid doing that multiple times. */
26639 dw_die_ref old_die;
26640 if ((old_die = lookup_decl_die (decl)) == NULL
26641 || is_declaration_die (old_die))
26642 {
26643 current_function_decl = decl;
26644 dwarf2out_decl (decl);
26645 }
26646
26647 current_function_decl = save_fndecl;
26648 }
26649 else
26650 dwarf2out_decl (decl);
26651 }
26652 symtab->global_info_ready = save;
26653 }
26654
26655 /* Return whether EXPR is an expression with the following pattern:
26656 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26657
26658 static bool
26659 is_trivial_indirect_ref (tree expr)
26660 {
26661 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26662 return false;
26663
26664 tree nop = TREE_OPERAND (expr, 0);
26665 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26666 return false;
26667
26668 tree int_cst = TREE_OPERAND (nop, 0);
26669 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26670 }
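
/* A sketch of the only shape accepted above, assuming a DECL_VALUE_EXPR that
   dereferences a literal address (e.g. an object placed at a fixed memory
   location):

     *(volatile int *) 0x1000

   which may be represented as INDIRECT_REF (NOP_EXPR (INTEGER_CST)); such a
   location needs no relocation against text symbols.  */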
26671
26672 /* Output debug information for global decl DECL. Called from
26673 toplev.c after compilation proper has finished. */
26674
26675 static void
26676 dwarf2out_late_global_decl (tree decl)
26677 {
26678 /* Fill in any location information we were unable to determine
26679 on the first pass. */
26680 if (VAR_P (decl))
26681 {
26682 dw_die_ref die = lookup_decl_die (decl);
26683
26684 /* We may have to generate full debug late for LTO in case debug
26685 was not enabled at compile-time or the target doesn't support
26686 the LTO early debug scheme. */
26687 if (! die && in_lto_p)
26688 dwarf2out_decl (decl);
26689 else if (die)
26690 {
26691 /* We get called via the symtab code invoking late_global_decl
26692 for symbols that are optimized out.
26693
26694 Do not add locations for those, except if they have a
26695 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26696 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26697 INDIRECT_REF expression, as this could generate relocations to
26698 text symbols in LTO object files, which is invalid. */
26699 varpool_node *node = varpool_node::get (decl);
26700 if ((! node || ! node->definition)
26701 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26702 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26703 tree_add_const_value_attribute_for_decl (die, decl);
26704 else
26705 add_location_or_const_value_attribute (die, decl, false);
26706 }
26707 }
26708 }
26709
26710 /* Output debug information for type decl DECL. Called from toplev.c
26711 and from language front ends (to record built-in types). */
26712 static void
26713 dwarf2out_type_decl (tree decl, int local)
26714 {
26715 if (!local)
26716 {
26717 set_early_dwarf s;
26718 dwarf2out_decl (decl);
26719 }
26720 }
26721
26722 /* Output debug information for imported module or decl DECL.
26723 NAME is non-NULL name in the lexical block if the decl has been renamed.
26724 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26725 that DECL belongs to.
26726 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26727 static void
26728 dwarf2out_imported_module_or_decl_1 (tree decl,
26729 tree name,
26730 tree lexical_block,
26731 dw_die_ref lexical_block_die)
26732 {
26733 expanded_location xloc;
26734 dw_die_ref imported_die = NULL;
26735 dw_die_ref at_import_die;
26736
26737 if (TREE_CODE (decl) == IMPORTED_DECL)
26738 {
26739 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26740 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26741 gcc_assert (decl);
26742 }
26743 else
26744 xloc = expand_location (input_location);
26745
26746 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26747 {
26748 at_import_die = force_type_die (TREE_TYPE (decl));
26749 /* For namespace N { typedef void T; } using N::T; base_type_die
26750 returns NULL, but DW_TAG_imported_declaration requires
26751 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26752 if (!at_import_die)
26753 {
26754 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26755 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26756 at_import_die = lookup_type_die (TREE_TYPE (decl));
26757 gcc_assert (at_import_die);
26758 }
26759 }
26760 else
26761 {
26762 at_import_die = lookup_decl_die (decl);
26763 if (!at_import_die)
26764 {
26765 /* If we're trying to avoid duplicate debug info, we may not have
26766 emitted the member decl for this field. Emit it now. */
26767 if (TREE_CODE (decl) == FIELD_DECL)
26768 {
26769 tree type = DECL_CONTEXT (decl);
26770
26771 if (TYPE_CONTEXT (type)
26772 && TYPE_P (TYPE_CONTEXT (type))
26773 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26774 DINFO_USAGE_DIR_USE))
26775 return;
26776 gen_type_die_for_member (type, decl,
26777 get_context_die (TYPE_CONTEXT (type)));
26778 }
26779 if (TREE_CODE (decl) == NAMELIST_DECL)
26780 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26781 get_context_die (DECL_CONTEXT (decl)),
26782 NULL_TREE);
26783 else
26784 at_import_die = force_decl_die (decl);
26785 }
26786 }
26787
26788 if (TREE_CODE (decl) == NAMESPACE_DECL)
26789 {
26790 if (dwarf_version >= 3 || !dwarf_strict)
26791 imported_die = new_die (DW_TAG_imported_module,
26792 lexical_block_die,
26793 lexical_block);
26794 else
26795 return;
26796 }
26797 else
26798 imported_die = new_die (DW_TAG_imported_declaration,
26799 lexical_block_die,
26800 lexical_block);
26801
26802 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26803 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26804 if (debug_column_info && xloc.column)
26805 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26806 if (name)
26807 add_AT_string (imported_die, DW_AT_name,
26808 IDENTIFIER_POINTER (name));
26809 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26810 }
26811
26812 /* Output debug information for imported module or decl DECL.
26813 NAME is non-NULL name in context if the decl has been renamed.
26814 CHILD is true if decl is one of the renamed decls as part of
26815 importing whole module.
26816 IMPLICIT is set if this hook is called for an implicit import
26817 such as inline namespace. */
26818
26819 static void
26820 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26821 bool child, bool implicit)
26822 {
26823 /* dw_die_ref at_import_die; */
26824 dw_die_ref scope_die;
26825
26826 if (debug_info_level <= DINFO_LEVEL_TERSE)
26827 return;
26828
26829 gcc_assert (decl);
26830
26831 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26832 should be enough; for DWARF4 and older, even if we emit
26833 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26834 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26835 if (implicit
26836 && dwarf_version >= 5
26837 && lang_hooks.decls.decl_dwarf_attribute (decl,
26838 DW_AT_export_symbols) == 1)
26839 return;
26840
26841 set_early_dwarf s;
26842
26843 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26844 two DIEs: the DIE for the decl being referenced and the scope DIE. First,
26845 get the DIE for the decl itself. */
26846
26847 /* Get the scope DIE for the decl context. Use comp_unit_die for a global
26848 module or decl. If no DIE is found for a non-global, force a new one. */
26849 if (context
26850 && TYPE_P (context)
26851 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26852 return;
26853
26854 scope_die = get_context_die (context);
26855
26856 if (child)
26857 {
26858 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26859 there is nothing we can do here. */
26860 if (dwarf_version < 3 && dwarf_strict)
26861 return;
26862
26863 gcc_assert (scope_die->die_child);
26864 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26865 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26866 scope_die = scope_die->die_child;
26867 }
26868
26869 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26870 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26871 }
26872
26873 /* Output debug information for namelists. */
26874
26875 static dw_die_ref
26876 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26877 {
26878 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26879 tree value;
26880 unsigned i;
26881
26882 if (debug_info_level <= DINFO_LEVEL_TERSE)
26883 return NULL;
26884
26885 gcc_assert (scope_die != NULL);
26886 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26887 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26888
26889 /* If there are no item_decls, we have a nondefining namelist, e.g.
26890 with USE association; hence, set DW_AT_declaration. */
26891 if (item_decls == NULL_TREE)
26892 {
26893 add_AT_flag (nml_die, DW_AT_declaration, 1);
26894 return nml_die;
26895 }
26896
26897 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26898 {
26899 nml_item_ref_die = lookup_decl_die (value);
26900 if (!nml_item_ref_die)
26901 nml_item_ref_die = force_decl_die (value);
26902
26903 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26904 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26905 }
26906 return nml_die;
26907 }
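
/* For reference, a Fortran namelist such as

     NAMELIST /CASEDAT/ A, B

   is expected to yield a DW_TAG_namelist DIE named CASEDAT with one
   DW_TAG_namelist_item child per member, each referring to the member's DIE
   via DW_AT_namelist_items, or just a DW_AT_declaration stub when the
   namelist is only known through USE association.  */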
26908
26909
26910 /* Write the debugging output for DECL. */
26911
26912 static void
26913 dwarf2out_decl (tree decl)
26914 {
26915 dw_die_ref context_die = comp_unit_die ();
26916
26917 switch (TREE_CODE (decl))
26918 {
26919 case ERROR_MARK:
26920 return;
26921
26922 case FUNCTION_DECL:
26923 /* If we're a nested function, initially use a parent of NULL; if we're
26924 a plain function, this will be fixed up in decls_for_scope. If
26925 we're a method, it will be ignored, since we already have a DIE.
26926 Avoid doing this late though since clones of class methods may
26927 otherwise end up in limbo and create type DIEs late. */
26928 if (early_dwarf
26929 && decl_function_context (decl)
26930 /* But if we're in terse mode, we don't care about scope. */
26931 && debug_info_level > DINFO_LEVEL_TERSE)
26932 context_die = NULL;
26933 break;
26934
26935 case VAR_DECL:
26936 /* For local statics, look up the proper context DIE. */
26937 if (local_function_static (decl))
26938 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26939
26940 /* If we are in terse mode, don't generate any DIEs to represent any
26941 variable declarations or definitions unless it is external. */
26942 if (debug_info_level < DINFO_LEVEL_TERSE
26943 || (debug_info_level == DINFO_LEVEL_TERSE
26944 && !TREE_PUBLIC (decl)))
26945 return;
26946 break;
26947
26948 case CONST_DECL:
26949 if (debug_info_level <= DINFO_LEVEL_TERSE)
26950 return;
26951 if (!is_fortran () && !is_ada () && !is_dlang ())
26952 return;
26953 if (TREE_STATIC (decl) && decl_function_context (decl))
26954 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26955 break;
26956
26957 case NAMESPACE_DECL:
26958 case IMPORTED_DECL:
26959 if (debug_info_level <= DINFO_LEVEL_TERSE)
26960 return;
26961 if (lookup_decl_die (decl) != NULL)
26962 return;
26963 break;
26964
26965 case TYPE_DECL:
26966 /* Don't emit stubs for types unless they are needed by other DIEs. */
26967 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26968 return;
26969
26970 /* Don't bother trying to generate any DIEs to represent any of the
26971 normal built-in types for the language we are compiling. */
26972 if (DECL_IS_UNDECLARED_BUILTIN (decl))
26973 return;
26974
26975 /* If we are in terse mode, don't generate any DIEs for types. */
26976 if (debug_info_level <= DINFO_LEVEL_TERSE)
26977 return;
26978
26979 /* If we're a function-scope tag, initially use a parent of NULL;
26980 this will be fixed up in decls_for_scope. */
26981 if (decl_function_context (decl))
26982 context_die = NULL;
26983
26984 break;
26985
26986 case NAMELIST_DECL:
26987 break;
26988
26989 default:
26990 return;
26991 }
26992
26993 gen_decl_die (decl, NULL, NULL, context_die);
26994
26995 if (flag_checking)
26996 {
26997 dw_die_ref die = lookup_decl_die (decl);
26998 if (die)
26999 check_die (die);
27000 }
27001 }
27002
27003 /* Write the debugging output for DECL. */
27004
27005 static void
27006 dwarf2out_function_decl (tree decl)
27007 {
27008 dwarf2out_decl (decl);
27009 call_arg_locations = NULL;
27010 call_arg_loc_last = NULL;
27011 call_site_count = -1;
27012 tail_call_site_count = -1;
27013 decl_loc_table->empty ();
27014 cached_dw_loc_list_table->empty ();
27015 }
27016
27017 /* Output a marker (i.e. a label) for the beginning of the generated code for
27018 a lexical block. */
27019
27020 static void
27021 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
27022 unsigned int blocknum)
27023 {
27024 switch_to_section (current_function_section ());
27025 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
27026 }
27027
27028 /* Output a marker (i.e. a label) for the end of the generated code for a
27029 lexical block. */
27030
27031 static void
27032 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
27033 {
27034 switch_to_section (current_function_section ());
27035 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
27036 }
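
/* These two hooks bracket the code of each lexical block with labels such as
   .LBB4 and .LBE4 (assuming the usual BLOCK_BEGIN_LABEL/BLOCK_END_LABEL
   prefixes "LBB" and "LBE"); the DW_AT_low_pc/DW_AT_high_pc attributes of
   the corresponding DW_TAG_lexical_block DIEs later refer to these
   labels.  */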
27037
27038 /* Returns true if it is appropriate not to emit any debugging
27039 information for BLOCK, because it doesn't contain any instructions.
27040
27041 Don't allow this for blocks with nested functions or local classes
27042 as we would end up with orphans, and in the presence of scheduling
27043 we may end up calling them anyway. */
27044
27045 static bool
27046 dwarf2out_ignore_block (const_tree block)
27047 {
27048 tree decl;
27049 unsigned int i;
27050
27051 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
27052 if (TREE_CODE (decl) == FUNCTION_DECL
27053 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27054 return false;
27055 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
27056 {
27057 decl = BLOCK_NONLOCALIZED_VAR (block, i);
27058 if (TREE_CODE (decl) == FUNCTION_DECL
27059 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27060 return false;
27061 }
27062
27063 return true;
27064 }
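
/* In terse mode this is what lets an ordinary scope such as

     { int tmp = f (); g (tmp); }

   go without a DW_TAG_lexical_block DIE, while a block that declares a
   nested function or a local class stub is never ignored, so that those
   children still get a proper parent scope.  */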
27065
27066 /* Hash table routines for file_hash. */
27067
27068 bool
27069 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
27070 {
27071 return filename_cmp (p1->filename, p2) == 0;
27072 }
27073
27074 hashval_t
27075 dwarf_file_hasher::hash (dwarf_file_data *p)
27076 {
27077 return htab_hash_string (p->filename);
27078 }
27079
27080 /* Lookup FILE_NAME (in the list of filenames that we know about here in
27081 dwarf2out.c) and return its "index". The index of each (known) filename is
27082 just a unique number which is associated with only that one filename. We
27083 need such numbers for the sake of generating labels (in the .debug_sfnames
27084 section) and references to those file numbers (in the .debug_srcinfo
27085 and .debug_macinfo sections). If the filename given as an argument is not
27086 found in our current list, add it to the list and assign it the next
27087 available unique index number. */
27088
27089 static struct dwarf_file_data *
27090 lookup_filename (const char *file_name)
27091 {
27092 struct dwarf_file_data * created;
27093
27094 if (!file_name)
27095 return NULL;
27096
27097 if (!file_name[0])
27098 file_name = "<stdin>";
27099
27100 dwarf_file_data **slot
27101 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27102 INSERT);
27103 if (*slot)
27104 return *slot;
27105
27106 created = ggc_alloc<dwarf_file_data> ();
27107 created->filename = file_name;
27108 created->emitted_number = 0;
27109 *slot = created;
27110 return created;
27111 }
27112
27113 /* If the assembler will construct the file table, then translate the compiler
27114 internal file table number into the assembler file table number, and emit
27115 a .file directive if we haven't already emitted one yet. The file table
27116 numbers are different because we prune debug info for unused variables and
27117 types, which may include filenames. */
27118
27119 static int
27120 maybe_emit_file (struct dwarf_file_data * fd)
27121 {
27122 if (! fd->emitted_number)
27123 {
27124 if (last_emitted_file)
27125 fd->emitted_number = last_emitted_file->emitted_number + 1;
27126 else
27127 fd->emitted_number = 1;
27128 last_emitted_file = fd;
27129
27130 if (output_asm_line_debug_info ())
27131 {
27132 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27133 output_quoted_string (asm_out_file,
27134 remap_debug_filename (fd->filename));
27135 fputc ('\n', asm_out_file);
27136 }
27137 }
27138
27139 return fd->emitted_number;
27140 }
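
/* When the assembler builds the line table itself, the directive emitted
   above looks something like

     .file 2 "lib/util.c"

   where 2 is the emitted_number assigned here and the file name may already
   have been remapped (e.g. by -fdebug-prefix-map or -ffile-prefix-map) via
   remap_debug_filename.  */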
27141
27142 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27143 That generation should happen after function debug info has been
27144 generated. The value of the attribute is the constant value of ARG. */
27145
27146 static void
27147 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27148 {
27149 die_arg_entry entry;
27150
27151 if (!die || !arg)
27152 return;
27153
27154 gcc_assert (early_dwarf);
27155
27156 if (!tmpl_value_parm_die_table)
27157 vec_alloc (tmpl_value_parm_die_table, 32);
27158
27159 entry.die = die;
27160 entry.arg = arg;
27161 vec_safe_push (tmpl_value_parm_die_table, entry);
27162 }
27163
27164 /* Return TRUE if T is an instance of a generic type, FALSE
27165 otherwise. */
27166
27167 static bool
27168 generic_type_p (tree t)
27169 {
27170 if (t == NULL_TREE || !TYPE_P (t))
27171 return false;
27172 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27173 }
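
/* "Generic" here means a parameterized type whose parameters the language
   hook can still report, e.g. a C++ class template instance:

     template <typename T> struct box { T v; };
     box<int> b;

   Such instances are later given DW_TAG_template_type_parameter and
   DW_TAG_template_value_parameter children by gen_generic_params_dies.  */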
27174
27175 /* Schedule the generation of the generic parameter dies for the
27176 instance of generic type T. The proper generation itself is later
27177 done by gen_scheduled_generic_parms_dies. */
27178
27179 static void
27180 schedule_generic_params_dies_gen (tree t)
27181 {
27182 if (!generic_type_p (t))
27183 return;
27184
27185 gcc_assert (early_dwarf);
27186
27187 if (!generic_type_instances)
27188 vec_alloc (generic_type_instances, 256);
27189
27190 vec_safe_push (generic_type_instances, t);
27191 }
27192
27193 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27194 by append_entry_to_tmpl_value_parm_die_table. This function must
27195 be called after function DIEs have been generated. */
27196
27197 static void
27198 gen_remaining_tmpl_value_param_die_attribute (void)
27199 {
27200 if (tmpl_value_parm_die_table)
27201 {
27202 unsigned i, j;
27203 die_arg_entry *e;
27204
27205 /* We do this in two phases: first get the cases we can
27206 handle during early-finish, preserving those we cannot
27207 (containing symbolic constants where we don't yet know
27208 whether we are going to output the referenced symbols).
27209 For those we try again at late-finish. */
27210 j = 0;
27211 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27212 {
27213 if (!e->die->removed
27214 && !tree_add_const_value_attribute (e->die, e->arg))
27215 {
27216 dw_loc_descr_ref loc = NULL;
27217 if (! early_dwarf
27218 && (dwarf_version >= 5 || !dwarf_strict))
27219 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27220 if (loc)
27221 add_AT_loc (e->die, DW_AT_location, loc);
27222 else
27223 (*tmpl_value_parm_die_table)[j++] = *e;
27224 }
27225 }
27226 tmpl_value_parm_die_table->truncate (j);
27227 }
27228 }
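
/* A concrete case of the two-phase scheme above: for

     template <int N> struct A {};
     A<42> a;

   the DW_AT_const_value 42 can be attached during early finish, while an
   argument such as &some_global (a symbolic constant) may have to wait for
   late finish, once it is known whether the referenced symbol is output,
   and is then expressed as a DW_AT_location when possible.  */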
27229
27230 /* Generate generic parameters DIEs for instances of generic types
27231 that have been previously scheduled by
27232 schedule_generic_params_dies_gen. This function must be called
27233 after all the types of the CU have been laid out. */
27234
27235 static void
27236 gen_scheduled_generic_parms_dies (void)
27237 {
27238 unsigned i;
27239 tree t;
27240
27241 if (!generic_type_instances)
27242 return;
27243
27244 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27245 if (COMPLETE_TYPE_P (t))
27246 gen_generic_params_dies (t);
27247
27248 generic_type_instances = NULL;
27249 }
27250
27251
27252 /* Replace DW_AT_name for the decl with name. */
27253
27254 static void
27255 dwarf2out_set_name (tree decl, tree name)
27256 {
27257 dw_die_ref die;
27258 dw_attr_node *attr;
27259 const char *dname;
27260
27261 die = TYPE_SYMTAB_DIE (decl);
27262 if (!die)
27263 return;
27264
27265 dname = dwarf2_name (name, 0);
27266 if (!dname)
27267 return;
27268
27269 attr = get_AT (die, DW_AT_name);
27270 if (attr)
27271 {
27272 struct indirect_string_node *node;
27273
27274 node = find_AT_string (dname);
27275 /* Replace the string. */
27276 attr->dw_attr_val.v.val_str = node;
27277 }
27278
27279 else
27280 add_name_attribute (die, dname);
27281 }
27282
27283 /* True if before or during processing of the first function being emitted. */
27284 static bool in_first_function_p = true;
27285 /* True if loc_note during dwarf2out_var_location call might still be
27286 before first real instruction at address equal to .Ltext0. */
27287 static bool maybe_at_text_label_p = true;
27288 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27289 static unsigned int first_loclabel_num_not_at_text_label;
27290
27291 /* Look ahead for a real insn. */
27292
27293 static rtx_insn *
27294 dwarf2out_next_real_insn (rtx_insn *loc_note)
27295 {
27296 rtx_insn *next_real = NEXT_INSN (loc_note);
27297
27298 while (next_real)
27299 if (INSN_P (next_real))
27300 break;
27301 else
27302 next_real = NEXT_INSN (next_real);
27303
27304 return next_real;
27305 }
27306
27307 /* Called by the final INSN scan whenever we see a var location. We
27308 use it to drop labels in the right places, and throw the location in
27309 our lookup table. */
27310
27311 static void
27312 dwarf2out_var_location (rtx_insn *loc_note)
27313 {
27314 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27315 struct var_loc_node *newloc;
27316 rtx_insn *next_real;
27317 rtx_insn *call_insn = NULL;
27318 static const char *last_label;
27319 static const char *last_postcall_label;
27320 static bool last_in_cold_section_p;
27321 static rtx_insn *expected_next_loc_note;
27322 tree decl;
27323 bool var_loc_p;
27324 var_loc_view view = 0;
27325
27326 if (!NOTE_P (loc_note))
27327 {
27328 if (CALL_P (loc_note))
27329 {
27330 maybe_reset_location_view (loc_note, cur_line_info_table);
27331 call_site_count++;
27332 if (SIBLING_CALL_P (loc_note))
27333 tail_call_site_count++;
27334 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27335 {
27336 call_insn = loc_note;
27337 loc_note = NULL;
27338 var_loc_p = false;
27339
27340 next_real = dwarf2out_next_real_insn (call_insn);
27341 cached_next_real_insn = NULL;
27342 goto create_label;
27343 }
27344 if (optimize == 0 && !flag_var_tracking)
27345 {
27346 /* When the var-tracking pass is not running, there is no note
27347 for indirect calls whose target is compile-time known. In this
27348 case, process such calls specifically so that we generate call
27349 sites for them anyway. */
27350 rtx x = PATTERN (loc_note);
27351 if (GET_CODE (x) == PARALLEL)
27352 x = XVECEXP (x, 0, 0);
27353 if (GET_CODE (x) == SET)
27354 x = SET_SRC (x);
27355 if (GET_CODE (x) == CALL)
27356 x = XEXP (x, 0);
27357 if (!MEM_P (x)
27358 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27359 || !SYMBOL_REF_DECL (XEXP (x, 0))
27360 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27361 != FUNCTION_DECL))
27362 {
27363 call_insn = loc_note;
27364 loc_note = NULL;
27365 var_loc_p = false;
27366
27367 next_real = dwarf2out_next_real_insn (call_insn);
27368 cached_next_real_insn = NULL;
27369 goto create_label;
27370 }
27371 }
27372 }
27373 else if (!debug_variable_location_views)
27374 gcc_unreachable ();
27375 else
27376 maybe_reset_location_view (loc_note, cur_line_info_table);
27377
27378 return;
27379 }
27380
27381 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27382 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27383 return;
27384
27385 /* Optimize processing a large consecutive sequence of location
27386 notes so we don't spend too much time in next_real_insn. If the
27387 next insn is another location note, remember the next_real_insn
27388 calculation for next time. */
27389 next_real = cached_next_real_insn;
27390 if (next_real)
27391 {
27392 if (expected_next_loc_note != loc_note)
27393 next_real = NULL;
27394 }
27395
27396 if (! next_real)
27397 next_real = dwarf2out_next_real_insn (loc_note);
27398
27399 if (next_real)
27400 {
27401 rtx_insn *next_note = NEXT_INSN (loc_note);
27402 while (next_note != next_real)
27403 {
27404 if (! next_note->deleted ()
27405 && NOTE_P (next_note)
27406 && NOTE_KIND (next_note) == NOTE_INSN_VAR_LOCATION)
27407 break;
27408 next_note = NEXT_INSN (next_note);
27409 }
27410
27411 if (next_note == next_real)
27412 cached_next_real_insn = NULL;
27413 else
27414 {
27415 expected_next_loc_note = next_note;
27416 cached_next_real_insn = next_real;
27417 }
27418 }
27419 else
27420 cached_next_real_insn = NULL;
27421
27422 /* If there are no instructions which would be affected by this note,
27423 don't do anything. */
27424 if (var_loc_p
27425 && next_real == NULL_RTX
27426 && !NOTE_DURING_CALL_P (loc_note))
27427 return;
27428
27429 create_label:
27430
27431 if (next_real == NULL_RTX)
27432 next_real = get_last_insn ();
27433
27434 /* If there were any real insns between the note we processed last time
27435 and this note (or if it is the first note), clear
27436 last_{,postcall_}label so that they are not reused this time. */
27437 if (last_var_location_insn == NULL_RTX
27438 || last_var_location_insn != next_real
27439 || last_in_cold_section_p != in_cold_section_p)
27440 {
27441 last_label = NULL;
27442 last_postcall_label = NULL;
27443 }
27444
27445 if (var_loc_p)
27446 {
27447 const char *label
27448 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27449 view = cur_line_info_table->view;
27450 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27451 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27452 if (newloc == NULL)
27453 return;
27454 }
27455 else
27456 {
27457 decl = NULL_TREE;
27458 newloc = NULL;
27459 }
27460
27461 /* If there were no real insns between the note we processed last time
27462 and this note, use the label we emitted last time. Otherwise
27463 create a new label and emit it. */
27464 if (last_label == NULL)
27465 {
27466 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27467 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27468 loclabel_num++;
27469 last_label = ggc_strdup (loclabel);
27470 /* See if loclabel might be equal to .Ltext0. If yes,
27471 bump first_loclabel_num_not_at_text_label. */
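/* The scan below walks backwards from LOC_NOTE to the point checked on the
   previous call; if every insn in between may occupy zero bytes (USE or
   CLOBBER patterns, empty inline asm, zero-length insns), LOCLABEL may
   still coincide with .Ltext0.  */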
27472 if (!have_multiple_function_sections
27473 && in_first_function_p
27474 && maybe_at_text_label_p)
27475 {
27476 static rtx_insn *last_start;
27477 rtx_insn *insn;
27478 for (insn = loc_note; insn; insn = previous_insn (insn))
27479 if (insn == last_start)
27480 break;
27481 else if (!NONDEBUG_INSN_P (insn))
27482 continue;
27483 else
27484 {
27485 rtx body = PATTERN (insn);
27486 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27487 continue;
27488 /* Inline asm could occupy zero bytes. */
27489 else if (GET_CODE (body) == ASM_INPUT
27490 || asm_noperands (body) >= 0)
27491 continue;
27492 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27493 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27494 continue;
27495 #endif
27496 else
27497 {
27498 /* Assume insn has non-zero length. */
27499 maybe_at_text_label_p = false;
27500 break;
27501 }
27502 }
27503 if (maybe_at_text_label_p)
27504 {
27505 last_start = loc_note;
27506 first_loclabel_num_not_at_text_label = loclabel_num;
27507 }
27508 }
27509 }
27510
27511 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27512 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27513
27514 if (!var_loc_p)
27515 {
27516 struct call_arg_loc_node *ca_loc
27517 = ggc_cleared_alloc<call_arg_loc_node> ();
27518 rtx_insn *prev = call_insn;
27519
27520 ca_loc->call_arg_loc_note
27521 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27522 ca_loc->next = NULL;
27523 ca_loc->label = last_label;
27524 gcc_assert (prev
27525 && (CALL_P (prev)
27526 || (NONJUMP_INSN_P (prev)
27527 && GET_CODE (PATTERN (prev)) == SEQUENCE
27528 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27529 if (!CALL_P (prev))
27530 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27531 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27532
27533 /* Look for a SYMBOL_REF in the "prev" instruction. */
27534 rtx x = get_call_rtx_from (prev);
27535 if (x)
27536 {
27537 /* Try to get the call symbol, if any. */
27538 if (MEM_P (XEXP (x, 0)))
27539 x = XEXP (x, 0);
27540 /* First, look for a memory access to a symbol_ref. */
27541 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27542 && SYMBOL_REF_DECL (XEXP (x, 0))
27543 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27544 ca_loc->symbol_ref = XEXP (x, 0);
27545 /* Otherwise, look at a compile-time known user-level function
27546 declaration. */
27547 else if (MEM_P (x)
27548 && MEM_EXPR (x)
27549 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27550 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27551 }
27552
27553 ca_loc->block = insn_scope (prev);
27554 if (call_arg_locations)
27555 call_arg_loc_last->next = ca_loc;
27556 else
27557 call_arg_locations = ca_loc;
27558 call_arg_loc_last = ca_loc;
27559 }
27560 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27561 {
27562 newloc->label = last_label;
27563 newloc->view = view;
27564 }
27565 else
27566 {
27567 if (!last_postcall_label)
27568 {
27569 sprintf (loclabel, "%s-1", last_label);
27570 last_postcall_label = ggc_strdup (loclabel);
27571 }
27572 newloc->label = last_postcall_label;
27573 /* ??? This view is at last_label, not last_label-1, but we
27574 could only assume view at last_label-1 is zero if we could
27575 assume calls always have length greater than one. This is
27576 probably true in general, though there might be a rare
27577 exception to this rule, e.g. if a call insn is optimized out
27578 by target magic. Then, even the -1 in the label will be
27579 wrong, which might invalidate the range. Anyway, using view,
27580 though technically possibly incorrect, will work as far as
27581 ranges go: since L-1 is in the middle of the call insn,
27582 (L-1).0 and (L-1).V shouldn't make any difference, and having
27583 the loclist entry refer to the .loc entry might be useful, so
27584 leave it like this. */
27585 newloc->view = view;
27586 }
27587
27588 if (var_loc_p && flag_debug_asm)
27589 {
27590 const char *name, *sep, *patstr;
27591 if (decl && DECL_NAME (decl))
27592 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27593 else
27594 name = "";
27595 if (NOTE_VAR_LOCATION_LOC (loc_note))
27596 {
27597 sep = " => ";
27598 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27599 }
27600 else
27601 {
27602 sep = " ";
27603 patstr = "RESET";
27604 }
27605 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27606 name, sep, patstr);
27607 }
27608
27609 last_var_location_insn = next_real;
27610 last_in_cold_section_p = in_cold_section_p;
27611 }
27612
27613 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27614 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27615 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27616 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27617 BLOCK_FRAGMENT_ORIGIN links. */
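/* For example, given a tree  OUTER -> { A -> { BLOCK } }, the first walk
   follows BLOCK -> A -> OUTER via BLOCK_SUPERCONTEXT; with BOTHWAYS it also
   checks that BLOCK is on A's BLOCK_SUBBLOCKS/BLOCK_CHAIN list and A on
   OUTER's, looking through BLOCK_FRAGMENT_ORIGIN for block fragments.  */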
27618 static bool
27619 block_within_block_p (tree block, tree outer, bool bothways)
27620 {
27621 if (block == outer)
27622 return true;
27623
27624 /* Quickly check that OUTER is somewhere up BLOCK's supercontext chain. */
27625 for (tree context = BLOCK_SUPERCONTEXT (block);
27626 context != outer;
27627 context = BLOCK_SUPERCONTEXT (context))
27628 if (!context || TREE_CODE (context) != BLOCK)
27629 return false;
27630
27631 if (!bothways)
27632 return true;
27633
27634 /* Now check that each block is actually referenced by its
27635 parent. */
27636 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27637 context = BLOCK_SUPERCONTEXT (context))
27638 {
27639 if (BLOCK_FRAGMENT_ORIGIN (context))
27640 {
27641 gcc_assert (!BLOCK_SUBBLOCKS (context));
27642 context = BLOCK_FRAGMENT_ORIGIN (context);
27643 }
27644 for (tree sub = BLOCK_SUBBLOCKS (context);
27645 sub != block;
27646 sub = BLOCK_CHAIN (sub))
27647 if (!sub)
27648 return false;
27649 if (context == outer)
27650 return true;
27651 else
27652 block = context;
27653 }
27654 }
27655
27656 /* Called during final while assembling the marker of the entry point
27657 for an inlined function. */
27658
27659 static void
27660 dwarf2out_inline_entry (tree block)
27661 {
27662 gcc_assert (debug_inline_points);
27663
27664 /* If we can't represent it, don't bother. */
27665 if (!(dwarf_version >= 3 || !dwarf_strict))
27666 return;
27667
27668 gcc_assert (DECL_P (block_ultimate_origin (block)));
27669
27670 /* Sanity check the block tree. This would catch a case in which
27671 BLOCK got removed from the tree reachable from the outermost
27672 lexical block, but got retained in markers. It would still link
27673 back to its parents, but some ancestor would be missing a link
27674 down the path to the sub BLOCK. If the block got removed, its
27675 BLOCK_NUMBER will not be a usable value. */
27676 if (flag_checking)
27677 gcc_assert (block_within_block_p (block,
27678 DECL_INITIAL (current_function_decl),
27679 true));
27680
27681 gcc_assert (inlined_function_outer_scope_p (block));
27682 gcc_assert (!lookup_block_die (block));
27683
27684 if (BLOCK_FRAGMENT_ORIGIN (block))
27685 block = BLOCK_FRAGMENT_ORIGIN (block);
27686 /* Can the entry point ever not be at the beginning of an
27687 unfragmented lexical block? */
27688 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27689 || (cur_line_info_table
27690 && !ZERO_VIEW_P (cur_line_info_table->view))))
27691 return;
27692
27693 if (!inline_entry_data_table)
27694 inline_entry_data_table
27695 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27696
27697
27698 inline_entry_data **iedp
27699 = inline_entry_data_table->find_slot_with_hash (block,
27700 htab_hash_pointer (block),
27701 INSERT);
27702 if (*iedp)
27703 /* ??? Ideally, we'd record all entry points for the same inlined
27704 function (some may have been duplicated by e.g. unrolling), but
27705 we have no way to represent that ATM. */
27706 return;
27707
27708 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27709 ied->block = block;
27710 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27711 ied->label_num = BLOCK_NUMBER (block);
27712 if (cur_line_info_table)
27713 ied->view = cur_line_info_table->view;
27714
27715 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
27716 BLOCK_NUMBER (block));
27717 }
27718
27719 /* Called from finalize_size_functions for size functions so that their body
27720 can be encoded in the debug info to describe the layout of variable-length
27721 structures. */
27722
27723 static void
27724 dwarf2out_size_function (tree decl)
27725 {
27726 set_early_dwarf s;
27727 function_to_dwarf_procedure (decl);
27728 }
27729
27730 /* Note in one location list that text section has changed. */
27731
27732 int
27733 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27734 {
27735 var_loc_list *list = *slot;
27736 if (list->first)
27737 list->last_before_switch
27738 = list->last->next ? list->last->next : list->last;
27739 return 1;
27740 }
27741
27742 /* Note in all location lists that text section has changed. */
27743
27744 static void
27745 var_location_switch_text_section (void)
27746 {
27747 if (decl_loc_table == NULL)
27748 return;
27749
27750 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27751 }
27752
27753 /* Create a new line number table. */
27754
27755 static dw_line_info_table *
27756 new_line_info_table (void)
27757 {
27758 dw_line_info_table *table;
27759
27760 table = ggc_cleared_alloc<dw_line_info_table> ();
27761 table->file_num = 1;
27762 table->line_num = 1;
27763 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27764 FORCE_RESET_NEXT_VIEW (table->view);
27765 table->symviews_since_reset = 0;
27766
27767 return table;
27768 }
27769
27770 /* Look up the "current" table into which we emit line info, so
27771 that we don't have to do it for every source line. */
27772
27773 static void
27774 set_cur_line_info_table (section *sec)
27775 {
27776 dw_line_info_table *table;
27777
27778 if (sec == text_section)
27779 table = text_section_line_info;
27780 else if (sec == cold_text_section)
27781 {
27782 table = cold_text_section_line_info;
27783 if (!table)
27784 {
27785 cold_text_section_line_info = table = new_line_info_table ();
27786 table->end_label = cold_end_label;
27787 }
27788 }
27789 else
27790 {
27791 const char *end_label;
27792
27793 if (crtl->has_bb_partition)
27794 {
27795 if (in_cold_section_p)
27796 end_label = crtl->subsections.cold_section_end_label;
27797 else
27798 end_label = crtl->subsections.hot_section_end_label;
27799 }
27800 else
27801 {
27802 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27803 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27804 current_function_funcdef_no);
27805 end_label = ggc_strdup (label);
27806 }
27807
27808 table = new_line_info_table ();
27809 table->end_label = end_label;
27810
27811 vec_safe_push (separate_line_info, table);
27812 }
27813
27814 if (output_asm_line_debug_info ())
27815 table->is_stmt = (cur_line_info_table
27816 ? cur_line_info_table->is_stmt
27817 : DWARF_LINE_DEFAULT_IS_STMT_START);
27818 cur_line_info_table = table;
27819 }
27820
27821
27822 /* We need to reset the locations at the beginning of each
27823 function. We can't do this in the end_function hook, because the
27824 declarations that use the locations won't have been output when
27825 that hook is called. Also compute have_multiple_function_sections here. */
27826
27827 static void
27828 dwarf2out_begin_function (tree fun)
27829 {
27830 section *sec = function_section (fun);
27831
27832 if (sec != text_section)
27833 have_multiple_function_sections = true;
27834
27835 if (crtl->has_bb_partition && !cold_text_section)
27836 {
27837 gcc_assert (current_function_decl == fun);
27838 cold_text_section = unlikely_text_section ();
27839 switch_to_section (cold_text_section);
27840 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27841 switch_to_section (sec);
27842 }
27843
27844 dwarf2out_note_section_used ();
27845 call_site_count = 0;
27846 tail_call_site_count = 0;
27847
27848 set_cur_line_info_table (sec);
27849 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27850 }
27851
27852 /* Helper function of dwarf2out_end_function, called only after emitting
27853 the very first function into assembly. Check if some .debug_loc range
27854 might end with a .LVL* label that could be equal to .Ltext0.
27855 In that case we must force using absolute addresses in .debug_loc ranges,
27856 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27857 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27858 list terminator.
27859 Set have_multiple_function_sections to true in that case and
27860 terminate htab traversal. */
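/* For illustration only (the exact directives are target-dependent): a
   text-relative location range such as
       .quad  .LVL0-.Ltext0, .LVL1-.Ltext0
   degenerates to "0, 0" when .LVL0 == .LVL1 == .Ltext0, which a consumer
   would read as the end-of-list marker.  */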
27861
27862 int
27863 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27864 {
27865 var_loc_list *entry = *slot;
27866 struct var_loc_node *node;
27867
27868 node = entry->first;
27869 if (node && node->next && node->next->label)
27870 {
27871 unsigned int i;
27872 const char *label = node->next->label;
27873 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27874
27875 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27876 {
27877 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27878 if (strcmp (label, loclabel) == 0)
27879 {
27880 have_multiple_function_sections = true;
27881 return 0;
27882 }
27883 }
27884 }
27885 return 1;
27886 }
27887
27888 /* Hook called after emitting a function into assembly.
27889 This does something only for the very first function emitted. */
27890
27891 static void
27892 dwarf2out_end_function (unsigned int)
27893 {
27894 if (in_first_function_p
27895 && !have_multiple_function_sections
27896 && first_loclabel_num_not_at_text_label
27897 && decl_loc_table)
27898 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27899 in_first_function_p = false;
27900 maybe_at_text_label_p = false;
27901 }
27902
27903 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27904 front-ends register a translation unit even before dwarf2out_init is
27905 called. */
27906 static tree main_translation_unit = NULL_TREE;
27907
27908 /* Hook called by front-ends after they have built their main translation
27909 unit.  Associate comp_unit_die with UNIT. */
27910
27911 static void
27912 dwarf2out_register_main_translation_unit (tree unit)
27913 {
27914 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27915 && main_translation_unit == NULL_TREE);
27916 main_translation_unit = unit;
27917 /* If dwarf2out_init has not been called yet, it will perform the association
27918 itself looking at main_translation_unit. */
27919 if (decl_die_table != NULL)
27920 equate_decl_number_to_die (unit, comp_unit_die ());
27921 }
27922
27923 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27924
27925 static void
27926 push_dw_line_info_entry (dw_line_info_table *table,
27927 enum dw_line_info_opcode opcode, unsigned int val)
27928 {
27929 dw_line_info_entry e;
27930 e.opcode = opcode;
27931 e.val = val;
27932 vec_safe_push (table->entries, e);
27933 }
27934
27935 /* Output a label to mark the beginning of a source code line entry
27936 and record information relating to this source line, in
27937 'line_info_table' for later output of the .debug_line section. */
27938 /* ??? The discriminator parameter ought to be unsigned. */
27939
27940 static void
27941 dwarf2out_source_line (unsigned int line, unsigned int column,
27942 const char *filename,
27943 int discriminator, bool is_stmt)
27944 {
27945 unsigned int file_num;
27946 dw_line_info_table *table;
27947 static var_loc_view lvugid;
27948
27949 if (debug_info_level < DINFO_LEVEL_TERSE)
27950 return;
27951
27952 table = cur_line_info_table;
27953
27954 if (line == 0)
27955 {
27956 if (debug_variable_location_views
27957 && output_asm_line_debug_info ()
27958 && table && !RESETTING_VIEW_P (table->view))
27959 {
27960 /* If we're using the assembler to compute view numbers, we
27961 can't issue a .loc directive for line zero, so we can't
27962 get a view number at this point. We might attempt to
27963 compute it from the previous view, or equate it to a
27964 subsequent view (though it might not be there!), but
27965 since we're omitting the line number entry, we might as
27966 well omit the view number as well. That means pretending
27967 it's a view number zero, which might very well turn out
27968 to be correct. ??? Extend the assembler so that the
27969 compiler could emit e.g. ".locview .LVU#", to output a
27970 view without changing line number information. We'd then
27971 have to count it in symviews_since_reset; when it's omitted,
27972 it doesn't count. */
27973 if (!zero_view_p)
27974 zero_view_p = BITMAP_GGC_ALLOC ();
27975 bitmap_set_bit (zero_view_p, table->view);
27976 if (flag_debug_asm)
27977 {
27978 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27979 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27980 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27981 ASM_COMMENT_START);
27982 assemble_name (asm_out_file, label);
27983 putc ('\n', asm_out_file);
27984 }
27985 table->view = ++lvugid;
27986 }
27987 return;
27988 }
27989
27990 /* The discriminator column was added in DWARF 4.  Simplify the code
27991 below by dropping the discriminator if we're not supposed to output it. */
27992 if (dwarf_version < 4 && dwarf_strict)
27993 discriminator = 0;
27994
27995 if (!debug_column_info)
27996 column = 0;
27997
27998 file_num = maybe_emit_file (lookup_filename (filename));
27999
28000 /* ??? TODO: Elide duplicate line number entries. Traditionally,
28001 the debugger has used the second (possibly duplicate) line number
28002 at the beginning of the function to mark the end of the prologue.
28003 We could eliminate any other duplicates within the function. For
28004 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
28005 that second line number entry. */
28006 /* Recall that this end-of-prologue indication is *not* the same thing
28007 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
28008 to which the hook corresponds, follows the last insn that was
28009 emitted by gen_prologue. What we need is to precede the first insn
28010 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
28011 insn that corresponds to something the user wrote. These may be
28012 very different locations once scheduling is enabled. */
28013
28014 if (0 && file_num == table->file_num
28015 && line == table->line_num
28016 && column == table->column_num
28017 && discriminator == table->discrim_num
28018 && is_stmt == table->is_stmt)
28019 return;
28020
28021 switch_to_section (current_function_section ());
28022
28023 /* If requested, emit something human-readable. */
28024 if (flag_debug_asm)
28025 {
28026 if (debug_column_info)
28027 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
28028 filename, line, column);
28029 else
28030 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
28031 filename, line);
28032 }
28033
28034 if (output_asm_line_debug_info ())
28035 {
28036 /* Emit the .loc directive understood by GNU as:
28037 "\t.loc %u %u %u", file_num, line, column, optionally followed by
28038 is_stmt, discriminator and view operands as emitted below. */
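/* For example (illustrative only; the operands actually present depend on
   the command-line flags and the assembler):
     .loc 1 42 7 is_stmt 1 discriminator 2 view .LVU3  */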
28039 fputs ("\t.loc ", asm_out_file);
28040 fprint_ul (asm_out_file, file_num);
28041 putc (' ', asm_out_file);
28042 fprint_ul (asm_out_file, line);
28043 putc (' ', asm_out_file);
28044 fprint_ul (asm_out_file, column);
28045
28046 if (is_stmt != table->is_stmt)
28047 {
28048 #if HAVE_GAS_LOC_STMT
28049 fputs (" is_stmt ", asm_out_file);
28050 putc (is_stmt ? '1' : '0', asm_out_file);
28051 #endif
28052 }
28053 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
28054 {
28055 gcc_assert (discriminator > 0);
28056 fputs (" discriminator ", asm_out_file);
28057 fprint_ul (asm_out_file, (unsigned long) discriminator);
28058 }
28059 if (debug_variable_location_views)
28060 {
28061 if (!RESETTING_VIEW_P (table->view))
28062 {
28063 table->symviews_since_reset++;
28064 if (table->symviews_since_reset > symview_upper_bound)
28065 symview_upper_bound = table->symviews_since_reset;
28066 /* When we're using the assembler to compute view
28067 numbers, we output symbolic labels after "view" in
28068 .loc directives, and the assembler will set them for
28069 us, so that we can refer to the view numbers in
28070 location lists. The only exceptions are when we know
28071 a view will be zero: "-0" is a forced reset, used
28072 e.g. in the beginning of functions, whereas "0" tells
28073 the assembler to check that there was a PC change
28074 since the previous view, in a way that implicitly
28075 resets the next view. */
28076 fputs (" view ", asm_out_file);
28077 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28078 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
28079 assemble_name (asm_out_file, label);
28080 table->view = ++lvugid;
28081 }
28082 else
28083 {
28084 table->symviews_since_reset = 0;
28085 if (FORCE_RESETTING_VIEW_P (table->view))
28086 fputs (" view -0", asm_out_file);
28087 else
28088 fputs (" view 0", asm_out_file);
28089 /* Mark the present view as a zero view. Earlier debug
28090 binds may have already added its id to loclists to be
28091 emitted later, so we can't reuse the id for something
28092 else. However, it's good to know whether a view is
28093 known to be zero, because then we may be able to
28094 optimize out locviews that are all zeros, so take
28095 note of it in zero_view_p. */
28096 if (!zero_view_p)
28097 zero_view_p = BITMAP_GGC_ALLOC ();
28098 bitmap_set_bit (zero_view_p, lvugid);
28099 table->view = ++lvugid;
28100 }
28101 }
28102 putc ('\n', asm_out_file);
28103 }
28104 else
28105 {
28106 unsigned int label_num = ++line_info_label_num;
28107
28108 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28109
28110 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28111 push_dw_line_info_entry (table, LI_adv_address, label_num);
28112 else
28113 push_dw_line_info_entry (table, LI_set_address, label_num);
28114 if (debug_variable_location_views)
28115 {
28116 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28117 if (resetting)
28118 table->view = 0;
28119
28120 if (flag_debug_asm)
28121 fprintf (asm_out_file, "\t%s view %s%d\n",
28122 ASM_COMMENT_START,
28123 resetting ? "-" : "",
28124 table->view);
28125
28126 table->view++;
28127 }
28128 if (file_num != table->file_num)
28129 push_dw_line_info_entry (table, LI_set_file, file_num);
28130 if (discriminator != table->discrim_num)
28131 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28132 if (is_stmt != table->is_stmt)
28133 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28134 push_dw_line_info_entry (table, LI_set_line, line);
28135 if (debug_column_info)
28136 push_dw_line_info_entry (table, LI_set_column, column);
28137 }
28138
28139 table->file_num = file_num;
28140 table->line_num = line;
28141 table->column_num = column;
28142 table->discrim_num = discriminator;
28143 table->is_stmt = is_stmt;
28144 table->in_use = true;
28145 }
28146
28147 /* Record the beginning of a new source file. */
28148
28149 static void
28150 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28151 {
28152 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28153 {
28154 macinfo_entry e;
28155 e.code = DW_MACINFO_start_file;
28156 e.lineno = lineno;
28157 e.info = ggc_strdup (filename);
28158 vec_safe_push (macinfo_table, e);
28159 }
28160 }
28161
28162 /* Record the end of a source file. */
28163
28164 static void
28165 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28166 {
28167 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28168 {
28169 macinfo_entry e;
28170 e.code = DW_MACINFO_end_file;
28171 e.lineno = lineno;
28172 e.info = NULL;
28173 vec_safe_push (macinfo_table, e);
28174 }
28175 }
28176
28177 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28178 the tail part of the directive line, i.e. the part which is past the
28179 initial whitespace, #, whitespace, directive-name, whitespace part. */
28180
28181 static void
28182 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28183 const char *buffer ATTRIBUTE_UNUSED)
28184 {
28185 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28186 {
28187 macinfo_entry e;
28188 /* Insert a dummy first entry to be able to optimize the whole
28189 predefined macro block using DW_MACRO_import. */
28190 if (macinfo_table->is_empty () && lineno <= 1)
28191 {
28192 e.code = 0;
28193 e.lineno = 0;
28194 e.info = NULL;
28195 vec_safe_push (macinfo_table, e);
28196 }
28197 e.code = DW_MACINFO_define;
28198 e.lineno = lineno;
28199 e.info = ggc_strdup (buffer);
28200 vec_safe_push (macinfo_table, e);
28201 }
28202 }
28203
28204 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28205 the tail part of the directive line, i.e. the part which is past the
28206 initial whitespace, #, whitespace, directive-name, whitespace part. */
28207
28208 static void
28209 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28210 const char *buffer ATTRIBUTE_UNUSED)
28211 {
28212 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28213 {
28214 macinfo_entry e;
28215 /* Insert a dummy first entry to be able to optimize the whole
28216 predefined macro block using DW_MACRO_import. */
28217 if (macinfo_table->is_empty () && lineno <= 1)
28218 {
28219 e.code = 0;
28220 e.lineno = 0;
28221 e.info = NULL;
28222 vec_safe_push (macinfo_table, e);
28223 }
28224 e.code = DW_MACINFO_undef;
28225 e.lineno = lineno;
28226 e.info = ggc_strdup (buffer);
28227 vec_safe_push (macinfo_table, e);
28228 }
28229 }
28230
28231 /* Helpers to manipulate a hash table of macinfo entries. */
28232
28233 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28234 {
28235 static inline hashval_t hash (const macinfo_entry *);
28236 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28237 };
28238
28239 inline hashval_t
28240 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28241 {
28242 return htab_hash_string (entry->info);
28243 }
28244
28245 inline bool
28246 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28247 const macinfo_entry *entry2)
28248 {
28249 return !strcmp (entry1->info, entry2->info);
28250 }
28251
28252 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28253
28254 /* Output a single .debug_macinfo entry. */
28255
28256 static void
28257 output_macinfo_op (macinfo_entry *ref)
28258 {
28259 int file_num;
28260 size_t len;
28261 struct indirect_string_node *node;
28262 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28263 struct dwarf_file_data *fd;
28264
28265 switch (ref->code)
28266 {
28267 case DW_MACINFO_start_file:
28268 fd = lookup_filename (ref->info);
28269 file_num = maybe_emit_file (fd);
28270 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28271 dw2_asm_output_data_uleb128 (ref->lineno,
28272 "Included from line number %lu",
28273 (unsigned long) ref->lineno);
28274 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28275 break;
28276 case DW_MACINFO_end_file:
28277 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28278 break;
28279 case DW_MACINFO_define:
28280 case DW_MACINFO_undef:
28281 len = strlen (ref->info) + 1;
28282 if (!dwarf_strict
28283 && len > DWARF_OFFSET_SIZE
28284 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28285 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28286 {
28287 ref->code = ref->code == DW_MACINFO_define
28288 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28289 output_macinfo_op (ref);
28290 return;
28291 }
28292 dw2_asm_output_data (1, ref->code,
28293 ref->code == DW_MACINFO_define
28294 ? "Define macro" : "Undefine macro");
28295 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28296 (unsigned long) ref->lineno);
28297 dw2_asm_output_nstring (ref->info, -1, "The macro");
28298 break;
28299 case DW_MACRO_define_strp:
28300 case DW_MACRO_undef_strp:
28301 /* NB: dwarf2out_finish performs:
28302 1. save_macinfo_strings
28303 2. hash table traverse of index_string
28304 3. output_macinfo -> output_macinfo_op
28305 4. output_indirect_strings
28306 -> hash table traverse of output_index_string
28307
28308 When output_macinfo_op is called, all index strings have been
28309 added to hash table by save_macinfo_strings and we can't pass
28310 INSERT to find_slot_with_hash which may expand hash table, even
28311 if no insertion is needed, and change hash table traverse order
28312 between index_string and output_index_string. */
28313 node = find_AT_string (ref->info, NO_INSERT);
28314 gcc_assert (node
28315 && (node->form == DW_FORM_strp
28316 || node->form == dwarf_FORM (DW_FORM_strx)));
28317 dw2_asm_output_data (1, ref->code,
28318 ref->code == DW_MACRO_define_strp
28319 ? "Define macro strp"
28320 : "Undefine macro strp");
28321 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28322 (unsigned long) ref->lineno);
28323 if (node->form == DW_FORM_strp)
28324 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28325 debug_str_section, "The macro: \"%s\"",
28326 ref->info);
28327 else
28328 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28329 ref->info);
28330 break;
28331 case DW_MACRO_import:
28332 dw2_asm_output_data (1, ref->code, "Import");
28333 ASM_GENERATE_INTERNAL_LABEL (label,
28334 DEBUG_MACRO_SECTION_LABEL,
28335 ref->lineno + macinfo_label_base);
28336 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28337 break;
28338 default:
28339 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28340 ASM_COMMENT_START, (unsigned long) ref->code);
28341 break;
28342 }
28343 }
28344
28345 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28346 other compilation units' .debug_macinfo sections.  IDX is the index of
28347 the first define/undef; return the number of ops that should be
28348 emitted in a comdat .debug_macinfo section and emit
28349 a DW_MACRO_import entry referencing it.
28350 If the define/undef entry should be emitted normally, return 0. */
28351
28352 static unsigned
28353 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28354 macinfo_hash_type **macinfo_htab)
28355 {
28356 macinfo_entry *first, *second, *cur, *inc;
28357 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28358 unsigned char checksum[16];
28359 struct md5_ctx ctx;
28360 char *grp_name, *tail;
28361 const char *base;
28362 unsigned int i, count, encoded_filename_len, linebuf_len;
28363 macinfo_entry **slot;
28364
28365 first = &(*macinfo_table)[idx];
28366 second = &(*macinfo_table)[idx + 1];
28367
28368 /* Optimize only if there are at least two consecutive define/undef ops,
28369 and either all of them are before first DW_MACINFO_start_file
28370 with lineno {0,1} (i.e. predefined macro block), or all of them are
28371 in some included header file. */
28372 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28373 return 0;
28374 if (vec_safe_is_empty (files))
28375 {
28376 if (first->lineno > 1 || second->lineno > 1)
28377 return 0;
28378 }
28379 else if (first->lineno == 0)
28380 return 0;
28381
28382 /* Find the last define/undef entry that can be grouped together
28383 with first and at the same time compute md5 checksum of their
28384 codes, linenumbers and strings. */
28385 md5_init_ctx (&ctx);
28386 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28387 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28388 break;
28389 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28390 break;
28391 else
28392 {
28393 unsigned char code = cur->code;
28394 md5_process_bytes (&code, 1, &ctx);
28395 checksum_uleb128 (cur->lineno, &ctx);
28396 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28397 }
28398 md5_finish_ctx (&ctx, checksum);
28399 count = i - idx;
28400
28401 /* From the containing include filename (if any) pick up just
28402 usable characters from its basename. */
28403 if (vec_safe_is_empty (files))
28404 base = "";
28405 else
28406 base = lbasename (files->last ().info);
28407 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28408 if (ISIDNUM (base[i]) || base[i] == '.')
28409 encoded_filename_len++;
28410 /* Account for the '.' separator appended at the end. */
28411 if (encoded_filename_len)
28412 encoded_filename_len++;
28413
28414 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28415 linebuf_len = strlen (linebuf);
28416
28417 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
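/* E.g. a group of defines/undefs from a header "foo.h" starting at line 1
   might be named "wm4.foo.h.1.<md5>", where <md5> stands for 32 hex digits
   (hypothetical example; "wm8." is used for 64-bit DWARF).  */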
28418 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28419 + 16 * 2 + 1);
28420 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28421 tail = grp_name + 4;
28422 if (encoded_filename_len)
28423 {
28424 for (i = 0; base[i]; i++)
28425 if (ISIDNUM (base[i]) || base[i] == '.')
28426 *tail++ = base[i];
28427 *tail++ = '.';
28428 }
28429 memcpy (tail, linebuf, linebuf_len);
28430 tail += linebuf_len;
28431 *tail++ = '.';
28432 for (i = 0; i < 16; i++)
28433 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28434
28435 /* Construct a macinfo_entry for DW_MACRO_import
28436 in the empty vector entry before the first define/undef. */
28437 inc = &(*macinfo_table)[idx - 1];
28438 inc->code = DW_MACRO_import;
28439 inc->lineno = 0;
28440 inc->info = ggc_strdup (grp_name);
28441 if (!*macinfo_htab)
28442 *macinfo_htab = new macinfo_hash_type (10);
28443 /* Avoid emitting duplicates. */
28444 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28445 if (*slot != NULL)
28446 {
28447 inc->code = 0;
28448 inc->info = NULL;
28449 /* If such an entry has been used before, just emit
28450 a DW_MACRO_import op. */
28451 inc = *slot;
28452 output_macinfo_op (inc);
28453 /* And clear all macinfo_entry in the range to avoid emitting them
28454 in the second pass. */
28455 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28456 {
28457 cur->code = 0;
28458 cur->info = NULL;
28459 }
28460 }
28461 else
28462 {
28463 *slot = inc;
28464 inc->lineno = (*macinfo_htab)->elements ();
28465 output_macinfo_op (inc);
28466 }
28467 return count;
28468 }
28469
28470 /* Save any strings needed by the macinfo table in the debug str
28471 table. All strings must be collected into the table by the time
28472 index_string is called. */
28473
28474 static void
28475 save_macinfo_strings (void)
28476 {
28477 unsigned len;
28478 unsigned i;
28479 macinfo_entry *ref;
28480
28481 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28482 {
28483 switch (ref->code)
28484 {
28485 /* Match the logic in output_macinfo_op to decide on
28486 indirect strings. */
28487 case DW_MACINFO_define:
28488 case DW_MACINFO_undef:
28489 len = strlen (ref->info) + 1;
28490 if (!dwarf_strict
28491 && len > DWARF_OFFSET_SIZE
28492 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28493 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28494 set_indirect_string (find_AT_string (ref->info));
28495 break;
28496 case DW_MACINFO_start_file:
28497 /* -gsplit-dwarf -g3 will also output filename as indirect
28498 string. */
28499 if (!dwarf_split_debug_info)
28500 break;
28501 /* Fall through. */
28502 case DW_MACRO_define_strp:
28503 case DW_MACRO_undef_strp:
28504 set_indirect_string (find_AT_string (ref->info));
28505 break;
28506 default:
28507 break;
28508 }
28509 }
28510 }
28511
28512 /* Output macinfo section(s). */
28513
28514 static void
28515 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28516 {
28517 unsigned i;
28518 unsigned long length = vec_safe_length (macinfo_table);
28519 macinfo_entry *ref;
28520 vec<macinfo_entry, va_gc> *files = NULL;
28521 macinfo_hash_type *macinfo_htab = NULL;
28522 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28523
28524 if (! length)
28525 return;
28526
28527 /* output_macinfo* uses these interchangeably. */
28528 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28529 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28530 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28531 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28532
28533 /* AIX Assembler inserts the length, so adjust the reference to match the
28534 offset expected by debuggers. */
28535 strcpy (dl_section_ref, debug_line_label);
28536 if (XCOFF_DEBUGGING_INFO)
28537 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28538
28539 /* For .debug_macro emit the section header. */
28540 if (!dwarf_strict || dwarf_version >= 5)
28541 {
28542 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28543 "DWARF macro version number");
28544 if (DWARF_OFFSET_SIZE == 8)
28545 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28546 else
28547 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28548 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28549 debug_line_section, NULL);
28550 }
28551
28552 /* The first loop emits the primary .debug_macinfo section and clears
28553 each macinfo_entry after it has been output.  If a longer range of
28554 define/undef ops can be optimized using DW_MACRO_import, the
28555 DW_MACRO_import op is emitted and kept in the vector entry just before
28556 the first define/undef of the range, while the define/undef ops
28557 themselves are not emitted here but are kept for the second loop. */
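/* Sketch of the two passes (illustrative):
     pass 1: emit start_file/end_file/define/undef ops in place, except that
	     an optimizable define/undef range is replaced here by a single
	     DW_MACRO_import op that stays in the vector;
     pass 2: for each kept DW_MACRO_import, switch to its comdat
	     .debug_macinfo section and emit the deferred define/undef ops.  */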
28558 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28559 {
28560 switch (ref->code)
28561 {
28562 case DW_MACINFO_start_file:
28563 vec_safe_push (files, *ref);
28564 break;
28565 case DW_MACINFO_end_file:
28566 if (!vec_safe_is_empty (files))
28567 files->pop ();
28568 break;
28569 case DW_MACINFO_define:
28570 case DW_MACINFO_undef:
28571 if ((!dwarf_strict || dwarf_version >= 5)
28572 && HAVE_COMDAT_GROUP
28573 && vec_safe_length (files) != 1
28574 && i > 0
28575 && i + 1 < length
28576 && (*macinfo_table)[i - 1].code == 0)
28577 {
28578 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28579 if (count)
28580 {
28581 i += count - 1;
28582 continue;
28583 }
28584 }
28585 break;
28586 case 0:
28587 /* A dummy entry may be inserted at the beginning to be able
28588 to optimize the whole block of predefined macros. */
28589 if (i == 0)
28590 continue;
28591 default:
28592 break;
28593 }
28594 output_macinfo_op (ref);
28595 ref->info = NULL;
28596 ref->code = 0;
28597 }
28598
28599 if (!macinfo_htab)
28600 return;
28601
28602 /* Save the number of transparent includes so we can adjust the
28603 label number for the fat LTO object DWARF. */
28604 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28605
28606 delete macinfo_htab;
28607 macinfo_htab = NULL;
28608
28609 /* If any DW_MACRO_import ops were used, terminate the current chain at
28610 each such entry, switch to a new comdat .debug_macinfo section, and
28611 emit the deferred define/undef entries within it. */
28612 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28613 switch (ref->code)
28614 {
28615 case 0:
28616 continue;
28617 case DW_MACRO_import:
28618 {
28619 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28620 tree comdat_key = get_identifier (ref->info);
28621 /* Terminate the previous .debug_macinfo section. */
28622 dw2_asm_output_data (1, 0, "End compilation unit");
28623 targetm.asm_out.named_section (debug_macinfo_section_name,
28624 SECTION_DEBUG
28625 | SECTION_LINKONCE
28626 | (early_lto_debug
28627 ? SECTION_EXCLUDE : 0),
28628 comdat_key);
28629 ASM_GENERATE_INTERNAL_LABEL (label,
28630 DEBUG_MACRO_SECTION_LABEL,
28631 ref->lineno + macinfo_label_base);
28632 ASM_OUTPUT_LABEL (asm_out_file, label);
28633 ref->code = 0;
28634 ref->info = NULL;
28635 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28636 "DWARF macro version number");
28637 if (DWARF_OFFSET_SIZE == 8)
28638 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28639 else
28640 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28641 }
28642 break;
28643 case DW_MACINFO_define:
28644 case DW_MACINFO_undef:
28645 output_macinfo_op (ref);
28646 ref->code = 0;
28647 ref->info = NULL;
28648 break;
28649 default:
28650 gcc_unreachable ();
28651 }
28652
28653 macinfo_label_base += macinfo_label_base_adj;
28654 }
28655
28656 /* Initialize the various sections and labels for dwarf output; use the
28657 early LTO debug sections instead if EARLY_LTO_DEBUG.  Returns the
28658 generation (zero-based count of times the function has been called). */
28659
28660 static unsigned
28661 init_sections_and_labels (bool early_lto_debug)
28662 {
28663 /* As we may get called multiple times have a generation count for
28664 labels. */
28665 static unsigned generation = 0;
28666
28667 if (early_lto_debug)
28668 {
28669 if (!dwarf_split_debug_info)
28670 {
28671 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28672 SECTION_DEBUG | SECTION_EXCLUDE,
28673 NULL);
28674 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28675 SECTION_DEBUG | SECTION_EXCLUDE,
28676 NULL);
28677 debug_macinfo_section_name
28678 = ((dwarf_strict && dwarf_version < 5)
28679 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28680 debug_macinfo_section = get_section (debug_macinfo_section_name,
28681 SECTION_DEBUG
28682 | SECTION_EXCLUDE, NULL);
28683 }
28684 else
28685 {
28686 /* ??? Which of the following do we need early? */
28687 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28688 SECTION_DEBUG | SECTION_EXCLUDE,
28689 NULL);
28690 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28691 SECTION_DEBUG | SECTION_EXCLUDE,
28692 NULL);
28693 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28694 SECTION_DEBUG
28695 | SECTION_EXCLUDE, NULL);
28696 debug_skeleton_abbrev_section
28697 = get_section (DEBUG_LTO_ABBREV_SECTION,
28698 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28699 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28700 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28701 generation);
28702
28703 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28704 stay in the main .o, but the skeleton_line goes into the split
28705 off dwo. */
28706 debug_skeleton_line_section
28707 = get_section (DEBUG_LTO_LINE_SECTION,
28708 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28709 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28710 DEBUG_SKELETON_LINE_SECTION_LABEL,
28711 generation);
28712 debug_str_offsets_section
28713 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28714 SECTION_DEBUG | SECTION_EXCLUDE,
28715 NULL);
28716 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28717 DEBUG_SKELETON_INFO_SECTION_LABEL,
28718 generation);
28719 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28720 DEBUG_STR_DWO_SECTION_FLAGS,
28721 NULL);
28722 debug_macinfo_section_name
28723 = ((dwarf_strict && dwarf_version < 5)
28724 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28725 debug_macinfo_section = get_section (debug_macinfo_section_name,
28726 SECTION_DEBUG | SECTION_EXCLUDE,
28727 NULL);
28728 }
28729 /* For macro info and the file table we have to refer to a
28730 debug_line section. */
28731 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28732 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28733 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28734 DEBUG_LINE_SECTION_LABEL, generation);
28735
28736 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28737 DEBUG_STR_SECTION_FLAGS
28738 | SECTION_EXCLUDE, NULL);
28739 if (!dwarf_split_debug_info)
28740 debug_line_str_section
28741 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28742 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28743 }
28744 else
28745 {
28746 if (!dwarf_split_debug_info)
28747 {
28748 debug_info_section = get_section (DEBUG_INFO_SECTION,
28749 SECTION_DEBUG, NULL);
28750 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28751 SECTION_DEBUG, NULL);
28752 debug_loc_section = get_section (dwarf_version >= 5
28753 ? DEBUG_LOCLISTS_SECTION
28754 : DEBUG_LOC_SECTION,
28755 SECTION_DEBUG, NULL);
28756 debug_macinfo_section_name
28757 = ((dwarf_strict && dwarf_version < 5)
28758 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28759 debug_macinfo_section = get_section (debug_macinfo_section_name,
28760 SECTION_DEBUG, NULL);
28761 }
28762 else
28763 {
28764 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28765 SECTION_DEBUG | SECTION_EXCLUDE,
28766 NULL);
28767 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28768 SECTION_DEBUG | SECTION_EXCLUDE,
28769 NULL);
28770 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28771 SECTION_DEBUG, NULL);
28772 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28773 SECTION_DEBUG, NULL);
28774 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28775 SECTION_DEBUG, NULL);
28776 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28777 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28778 generation);
28779
28780 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28781 stay in the main .o, but the skeleton_line goes into the
28782 split off dwo. */
28783 debug_skeleton_line_section
28784 = get_section (DEBUG_DWO_LINE_SECTION,
28785 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28786 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28787 DEBUG_SKELETON_LINE_SECTION_LABEL,
28788 generation);
28789 debug_str_offsets_section
28790 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28791 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28792 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28793 DEBUG_SKELETON_INFO_SECTION_LABEL,
28794 generation);
28795 debug_loc_section = get_section (dwarf_version >= 5
28796 ? DEBUG_DWO_LOCLISTS_SECTION
28797 : DEBUG_DWO_LOC_SECTION,
28798 SECTION_DEBUG | SECTION_EXCLUDE,
28799 NULL);
28800 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28801 DEBUG_STR_DWO_SECTION_FLAGS,
28802 NULL);
28803 debug_macinfo_section_name
28804 = ((dwarf_strict && dwarf_version < 5)
28805 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28806 debug_macinfo_section = get_section (debug_macinfo_section_name,
28807 SECTION_DEBUG | SECTION_EXCLUDE,
28808 NULL);
28809 }
28810 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28811 SECTION_DEBUG, NULL);
28812 debug_line_section = get_section (DEBUG_LINE_SECTION,
28813 SECTION_DEBUG, NULL);
28814 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28815 SECTION_DEBUG, NULL);
28816 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28817 SECTION_DEBUG, NULL);
28818 debug_str_section = get_section (DEBUG_STR_SECTION,
28819 DEBUG_STR_SECTION_FLAGS, NULL);
28820 if ((!dwarf_split_debug_info && !output_asm_line_debug_info ())
28821 || asm_outputs_debug_line_str ())
28822 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28823 DEBUG_STR_SECTION_FLAGS, NULL);
28824
28825 debug_ranges_section = get_section (dwarf_version >= 5
28826 ? DEBUG_RNGLISTS_SECTION
28827 : DEBUG_RANGES_SECTION,
28828 SECTION_DEBUG, NULL);
28829 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28830 SECTION_DEBUG, NULL);
28831 }
28832
28833 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28834 DEBUG_ABBREV_SECTION_LABEL, generation);
28835 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28836 DEBUG_INFO_SECTION_LABEL, generation);
28837 info_section_emitted = false;
28838 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28839 DEBUG_LINE_SECTION_LABEL, generation);
28840 /* There are up to 4 unique range labels per generation.
28841 See also output_rnglists. */
28842 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28843 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28844 if (dwarf_version >= 5 && dwarf_split_debug_info)
28845 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28846 DEBUG_RANGES_SECTION_LABEL,
28847 1 + generation * 4);
28848 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28849 DEBUG_ADDR_SECTION_LABEL, generation);
28850 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28851 (dwarf_strict && dwarf_version < 5)
28852 ? DEBUG_MACINFO_SECTION_LABEL
28853 : DEBUG_MACRO_SECTION_LABEL, generation);
28854 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28855 generation);
28856
28857 ++generation;
28858 return generation - 1;
28859 }
28860
28861 /* Set up for Dwarf output at the start of compilation. */
28862
28863 static void
28864 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28865 {
28866 /* Allocate the file_table. */
28867 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28868
28869 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28870 /* Allocate the decl_die_table. */
28871 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28872
28873 /* Allocate the decl_loc_table. */
28874 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28875
28876 /* Allocate the cached_dw_loc_list_table. */
28877 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28878
28879 /* Allocate the initial hunk of the abbrev_die_table. */
28880 vec_alloc (abbrev_die_table, 256);
28881 /* Zero-th entry is allocated, but unused. */
28882 abbrev_die_table->quick_push (NULL);
28883
28884 /* Allocate the dwarf_proc_stack_usage_map. */
28885 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28886
28887 /* Allocate the pubtypes and pubnames vectors. */
28888 vec_alloc (pubname_table, 32);
28889 vec_alloc (pubtype_table, 32);
28890
28891 vec_alloc (incomplete_types, 64);
28892
28893 vec_alloc (used_rtx_array, 32);
28894
28895 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28896 vec_alloc (macinfo_table, 64);
28897 #endif
28898
28899 /* If front-ends already registered a main translation unit but we were not
28900 ready to perform the association, do this now. */
28901 if (main_translation_unit != NULL_TREE)
28902 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28903 }
28904
28905 /* Called before compile () starts outputting functions, variables
28906 and toplevel asms into assembly. */
28907
28908 static void
28909 dwarf2out_assembly_start (void)
28910 {
28911 if (text_section_line_info)
28912 return;
28913
28914 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28915 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28916 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28917 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28918 COLD_TEXT_SECTION_LABEL, 0);
28919 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28920
28921 switch_to_section (text_section);
28922 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28923 #endif
28924
28925 /* Make sure the line number table for .text always exists. */
28926 text_section_line_info = new_line_info_table ();
28927 text_section_line_info->end_label = text_end_label;
28928
28929 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28930 cur_line_info_table = text_section_line_info;
28931 #endif
28932
28933 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28934 && dwarf2out_do_cfi_asm ()
28935 && !dwarf2out_do_eh_frame ())
28936 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28937 }
28938
28939 /* A helper function for dwarf2out_finish called through
28940 htab_traverse. Assign a string its index. All strings must be
28941 collected into the table by the time index_string is called,
28942 because the indexing code relies on htab_traverse to traverse nodes
28943 in the same order for each run. */
28944
28945 int
28946 index_string (indirect_string_node **h, unsigned int *index)
28947 {
28948 indirect_string_node *node = *h;
28949
28950 find_string_form (node);
28951 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28952 {
28953 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28954 node->index = *index;
28955 *index += 1;
28956 }
28957 return 1;
28958 }
28959
28960 /* A helper function for output_indirect_strings called through
28961 htab_traverse. Output the offset to a string and update the
28962 current offset. */
28963
28964 int
28965 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28966 {
28967 indirect_string_node *node = *h;
28968
28969 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28970 {
28971 /* Assert that this node has been assigned an index. */
28972 gcc_assert (node->index != NO_INDEX_ASSIGNED
28973 && node->index != NOT_INDEXED);
28974 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28975 "indexed string 0x%x: %s", node->index, node->str);
28976 *offset += strlen (node->str) + 1;
28977 }
28978 return 1;
28979 }
28980
28981 /* A helper function for dwarf2out_finish called through
28982 htab_traverse. Output the indexed string. */
28983
28984 int
28985 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28986 {
28987 struct indirect_string_node *node = *h;
28988
28989 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28990 {
28991 /* Assert that the strings are output in the same order as their
28992 indexes were assigned. */
28993 gcc_assert (*cur_idx == node->index);
28994 assemble_string (node->str, strlen (node->str) + 1);
28995 *cur_idx += 1;
28996 }
28997 return 1;
28998 }
28999
29000 /* A helper function for output_indirect_strings.  Counts the number
29001 of indexed string offsets.  Must match the logic of the functions
29002 output_index_string[_offset] above. */
29003 int
29004 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
29005 {
29006 struct indirect_string_node *node = *h;
29007
29008 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
29009 *last_idx += 1;
29010 return 1;
29011 }
29012
29013 /* A helper function for dwarf2out_finish called through
29014 htab_traverse. Emit one queued .debug_str string. */
29015
29016 int
29017 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
29018 {
29019 struct indirect_string_node *node = *h;
29020
29021 node->form = find_string_form (node);
29022 if (node->form == form && node->refcount > 0)
29023 {
29024 ASM_OUTPUT_LABEL (asm_out_file, node->label);
29025 assemble_string (node->str, strlen (node->str) + 1);
29026 }
29027
29028 return 1;
29029 }
29030
29031 /* Output the indexed string table. */
29032
29033 static void
29034 output_indirect_strings (void)
29035 {
29036 switch_to_section (debug_str_section);
29037 if (!dwarf_split_debug_info)
29038 debug_str_hash->traverse<enum dwarf_form,
29039 output_indirect_string> (DW_FORM_strp);
29040 else
29041 {
29042 unsigned int offset = 0;
29043 unsigned int cur_idx = 0;
29044
29045 if (skeleton_debug_str_hash)
29046 skeleton_debug_str_hash->traverse<enum dwarf_form,
29047 output_indirect_string> (DW_FORM_strp);
29048
29049 switch_to_section (debug_str_offsets_section);
29050 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
29051 	       header.  Note that we don't need to generate a label for the
29052 	       actual index table following the header here, because this is
29053 	       for the split dwarf case only.  In a .dwo file there is only
29054 	       one string offsets table (and one debug info section).  But
29055 	       if we ever start using string offset tables for the main (or
29056 	       skeleton) unit, then we will have to add a DW_AT_str_offsets_base
29057 pointing to the actual index after the header. Split dwarf
29058 units will never have a string offsets base attribute. When
29059 a split unit is moved into a .dwp file the string offsets can
29060 be found through the .debug_cu_index section table. */
29061 if (dwarf_version >= 5)
29062 {
29063 unsigned int last_idx = 0;
29064 unsigned long str_offsets_length;
29065
29066 debug_str_hash->traverse_noresize
29067 <unsigned int *, count_index_strings> (&last_idx);
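	    /* The unit length excludes the length field itself: the extra 4
	       bytes cover the 2-byte version and 2-byte padding emitted
	       below, followed by one DWARF_OFFSET_SIZE offset per indexed
	       string.  For example (a sketch; 32-bit DWARF, three indexed
	       strings): length = 3 * 4 + 4 = 0x10, then version 5, padding 0
	       and the three string offsets.  */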
29068 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
29069 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29070 dw2_asm_output_data (4, 0xffffffff,
29071 "Escape value for 64-bit DWARF extension");
29072 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
29073 "Length of string offsets unit");
29074 dw2_asm_output_data (2, 5, "DWARF string offsets version");
29075 dw2_asm_output_data (2, 0, "Header zero padding");
29076 }
29077 debug_str_hash->traverse_noresize
29078 <unsigned int *, output_index_string_offset> (&offset);
29079 switch_to_section (debug_str_dwo_section);
29080 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
29081 (&cur_idx);
29082 }
29083 }
29084
29085 /* Callback for htab_traverse to assign an index to an entry in the
29086 table, and to write that entry to the .debug_addr section. */
29087
29088 int
29089 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
29090 {
29091 addr_table_entry *entry = *slot;
29092
29093 if (entry->refcount == 0)
29094 {
29095 gcc_assert (entry->index == NO_INDEX_ASSIGNED
29096 || entry->index == NOT_INDEXED);
29097 return 1;
29098 }
29099
29100 gcc_assert (entry->index == *cur_index);
29101 (*cur_index)++;
29102
29103 switch (entry->kind)
29104 {
29105 case ate_kind_rtx:
29106 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
29107 "0x%x", entry->index);
29108 break;
29109 case ate_kind_rtx_dtprel:
29110 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
29111 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
29112 DWARF2_ADDR_SIZE,
29113 entry->addr.rtl);
29114 fputc ('\n', asm_out_file);
29115 break;
29116 case ate_kind_label:
29117 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29118 "0x%x", entry->index);
29119 break;
29120 default:
29121 gcc_unreachable ();
29122 }
29123 return 1;
29124 }
29125
29126 /* A helper function for dwarf2out_finish. Counts the number
29127    of indexed addresses.  Must match the logic of the function
29128    output_addr_table_entry above. */
29129 int
29130 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29131 {
29132 addr_table_entry *entry = *slot;
29133
29134 if (entry->refcount > 0)
29135 *last_idx += 1;
29136 return 1;
29137 }
29138
29139 /* Produce the .debug_addr section. */
29140
29141 static void
29142 output_addr_table (void)
29143 {
29144 unsigned int index = 0;
29145 if (addr_index_table == NULL || addr_index_table->size () == 0)
29146 return;
29147
29148 switch_to_section (debug_addr_section);
29149   /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
29150      which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
29151      predating DWARF5, didn't have a header for .debug_addr units.
29152 DWARF5 specifies a small header when address tables are used. */
29153 if (dwarf_version >= 5)
29154 {
29155 unsigned int last_idx = 0;
29156 unsigned long addrs_length;
29157
29158 addr_index_table->traverse_noresize
29159 <unsigned int *, count_index_addrs> (&last_idx);
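      /* The unit length excludes the length field itself; the extra 4 bytes
	 cover the 2-byte version, the 1-byte address size and the 1-byte
	 segment selector size emitted below, after which the address table
	 entries follow.  */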
29160 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
29161
29162 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29163 dw2_asm_output_data (4, 0xffffffff,
29164 "Escape value for 64-bit DWARF extension");
29165 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
29166 "Length of Address Unit");
29167 dw2_asm_output_data (2, 5, "DWARF addr version");
29168 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
29169 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
29170 }
29171 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
29172
29173 addr_index_table
29174 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29175 }
29176
29177 #if ENABLE_ASSERT_CHECKING
29178 /* Verify that all marks are clear. */
29179
29180 static void
29181 verify_marks_clear (dw_die_ref die)
29182 {
29183 dw_die_ref c;
29184
29185 gcc_assert (! die->die_mark);
29186 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29187 }
29188 #endif /* ENABLE_ASSERT_CHECKING */
29189
29190 /* Clear the marks for a die and its children.
29191 Be cool if the mark isn't set. */
29192
29193 static void
29194 prune_unmark_dies (dw_die_ref die)
29195 {
29196 dw_die_ref c;
29197
29198 if (die->die_mark)
29199 die->die_mark = 0;
29200 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29201 }
29202
29203 /* Given LOC that is referenced by a DIE we're marking as used, find all
29204    DWARF procedures it references and mark them as used. */
29205
29206 static void
29207 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29208 {
29209 for (; loc != NULL; loc = loc->dw_loc_next)
29210 switch (loc->dw_loc_opc)
29211 {
29212 case DW_OP_implicit_pointer:
29213 case DW_OP_convert:
29214 case DW_OP_reinterpret:
29215 case DW_OP_GNU_implicit_pointer:
29216 case DW_OP_GNU_convert:
29217 case DW_OP_GNU_reinterpret:
29218 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29219 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29220 break;
29221 case DW_OP_GNU_variable_value:
29222 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29223 {
29224 dw_die_ref ref
29225 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29226 if (ref == NULL)
29227 break;
29228 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29229 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29230 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29231 }
29232 /* FALLTHRU */
29233 case DW_OP_call2:
29234 case DW_OP_call4:
29235 case DW_OP_call_ref:
29236 case DW_OP_const_type:
29237 case DW_OP_GNU_const_type:
29238 case DW_OP_GNU_parameter_ref:
29239 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29240 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29241 break;
29242 case DW_OP_regval_type:
29243 case DW_OP_deref_type:
29244 case DW_OP_GNU_regval_type:
29245 case DW_OP_GNU_deref_type:
29246 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29247 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29248 break;
29249 case DW_OP_entry_value:
29250 case DW_OP_GNU_entry_value:
29251 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29252 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29253 break;
29254 default:
29255 break;
29256 }
29257 }
29258
29259 /* Given DIE that we're marking as used, find any other dies
29260 it references as attributes and mark them as used. */
29261
29262 static void
29263 prune_unused_types_walk_attribs (dw_die_ref die)
29264 {
29265 dw_attr_node *a;
29266 unsigned ix;
29267
29268 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29269 {
29270 switch (AT_class (a))
29271 {
29272 /* Make sure DWARF procedures referenced by location descriptions will
29273 get emitted. */
29274 case dw_val_class_loc:
29275 prune_unused_types_walk_loc_descr (AT_loc (a));
29276 break;
29277 case dw_val_class_loc_list:
29278 for (dw_loc_list_ref list = AT_loc_list (a);
29279 list != NULL;
29280 list = list->dw_loc_next)
29281 prune_unused_types_walk_loc_descr (list->expr);
29282 break;
29283
29284 case dw_val_class_view_list:
29285 /* This points to a loc_list in another attribute, so it's
29286 already covered. */
29287 break;
29288
29289 case dw_val_class_die_ref:
29290 /* A reference to another DIE.
29291 Make sure that it will get emitted.
29292 If it was broken out into a comdat group, don't follow it. */
29293 if (! AT_ref (a)->comdat_type_p
29294 || a->dw_attr == DW_AT_specification)
29295 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29296 break;
29297
29298 case dw_val_class_str:
29299 /* Set the string's refcount to 0 so that prune_unused_types_mark
29300 accounts properly for it. */
29301 a->dw_attr_val.v.val_str->refcount = 0;
29302 break;
29303
29304 default:
29305 break;
29306 }
29307 }
29308 }
29309
29310 /* Mark the children DIEs of DIE that describe its generic parms and args. */
29311
29312 static void
29313 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29314 {
29315 dw_die_ref c;
29316
29317 if (die == NULL || die->die_child == NULL)
29318 return;
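  /* Walk the circular list of DIE's children once, marking any template
     parameter DIEs.  */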
29319 c = die->die_child;
29320 do
29321 {
29322 if (is_template_parameter (c))
29323 prune_unused_types_mark (c, 1);
29324 c = c->die_sib;
29325 } while (c && c != die->die_child);
29326 }
29327
29328 /* Mark DIE as being used. If DOKIDS is true, then walk down
29329 to DIE's children. */
29330
29331 static void
29332 prune_unused_types_mark (dw_die_ref die, int dokids)
29333 {
29334 dw_die_ref c;
29335
29336 if (die->die_mark == 0)
29337 {
29338 /* We haven't done this node yet. Mark it as used. */
29339 die->die_mark = 1;
29340 /* If this is the DIE of a generic type instantiation,
29341 mark the children DIEs that describe its generic parms and
29342 args. */
29343 prune_unused_types_mark_generic_parms_dies (die);
29344
29345 /* We also have to mark its parents as used.
29346 (But we don't want to mark our parent's kids due to this,
29347 unless it is a class.) */
29348 if (die->die_parent)
29349 prune_unused_types_mark (die->die_parent,
29350 class_scope_p (die->die_parent));
29351
29352 /* Mark any referenced nodes. */
29353 prune_unused_types_walk_attribs (die);
29354
29355 /* If this node is a specification,
29356 also mark the definition, if it exists. */
29357 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29358 prune_unused_types_mark (die->die_definition, 1);
29359 }
29360
29361 if (dokids && die->die_mark != 2)
29362 {
29363 /* We need to walk the children, but haven't done so yet.
29364 Remember that we've walked the kids. */
29365 die->die_mark = 2;
29366
29367 /* If this is an array type, we need to make sure our
29368 kids get marked, even if they're types. If we're
29369 breaking out types into comdat sections, do this
29370 for all type definitions. */
29371 if (die->die_tag == DW_TAG_array_type
29372 || (use_debug_types
29373 && is_type_die (die) && ! is_declaration_die (die)))
29374 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29375 else
29376 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29377 }
29378 }
29379
29380 /* For local classes, check whether any static member functions were emitted
29381 and if so, mark them. */
29382
29383 static void
29384 prune_unused_types_walk_local_classes (dw_die_ref die)
29385 {
29386 dw_die_ref c;
29387
29388 if (die->die_mark == 2)
29389 return;
29390
29391 switch (die->die_tag)
29392 {
29393 case DW_TAG_structure_type:
29394 case DW_TAG_union_type:
29395 case DW_TAG_class_type:
29396 case DW_TAG_interface_type:
29397 break;
29398
29399 case DW_TAG_subprogram:
29400 if (!get_AT_flag (die, DW_AT_declaration)
29401 || die->die_definition != NULL)
29402 prune_unused_types_mark (die, 1);
29403 return;
29404
29405 default:
29406 return;
29407 }
29408
29409 /* Mark children. */
29410 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29411 }
29412
29413 /* Walk the tree DIE and mark types that we actually use. */
29414
29415 static void
29416 prune_unused_types_walk (dw_die_ref die)
29417 {
29418 dw_die_ref c;
29419
29420 /* Don't do anything if this node is already marked and
29421 children have been marked as well. */
29422 if (die->die_mark == 2)
29423 return;
29424
29425 switch (die->die_tag)
29426 {
29427 case DW_TAG_structure_type:
29428 case DW_TAG_union_type:
29429 case DW_TAG_class_type:
29430 case DW_TAG_interface_type:
29431 if (die->die_perennial_p)
29432 break;
29433
29434 for (c = die->die_parent; c; c = c->die_parent)
29435 if (c->die_tag == DW_TAG_subprogram)
29436 break;
29437
29438 /* Finding used static member functions inside of classes
29439 is needed just for local classes, because for other classes
29440 static member function DIEs with DW_AT_specification
29441 are emitted outside of the DW_TAG_*_type. If we ever change
29442 it, we'd need to call this even for non-local classes. */
29443 if (c)
29444 prune_unused_types_walk_local_classes (die);
29445
29446 /* It's a type node --- don't mark it. */
29447 return;
29448
29449 case DW_TAG_const_type:
29450 case DW_TAG_packed_type:
29451 case DW_TAG_pointer_type:
29452 case DW_TAG_reference_type:
29453 case DW_TAG_rvalue_reference_type:
29454 case DW_TAG_volatile_type:
29455 case DW_TAG_typedef:
29456 case DW_TAG_array_type:
29457 case DW_TAG_friend:
29458 case DW_TAG_enumeration_type:
29459 case DW_TAG_subroutine_type:
29460 case DW_TAG_string_type:
29461 case DW_TAG_set_type:
29462 case DW_TAG_subrange_type:
29463 case DW_TAG_ptr_to_member_type:
29464 case DW_TAG_file_type:
29465 /* Type nodes are useful only when other DIEs reference them --- don't
29466 mark them. */
29467 /* FALLTHROUGH */
29468
29469 case DW_TAG_dwarf_procedure:
29470 /* Likewise for DWARF procedures. */
29471
29472 if (die->die_perennial_p)
29473 break;
29474
29475 return;
29476
29477 case DW_TAG_variable:
29478 if (flag_debug_only_used_symbols)
29479 {
29480 if (die->die_perennial_p)
29481 break;
29482
29483 /* premark_used_variables marks external variables --- don't mark
29484 them here. But function-local externals are always considered
29485 used. */
29486 if (get_AT (die, DW_AT_external))
29487 {
29488 for (c = die->die_parent; c; c = c->die_parent)
29489 if (c->die_tag == DW_TAG_subprogram)
29490 break;
29491 if (!c)
29492 return;
29493 }
29494 }
29495 /* FALLTHROUGH */
29496
29497 default:
29498 /* Mark everything else. */
29499 break;
29500 }
29501
29502 if (die->die_mark == 0)
29503 {
29504 die->die_mark = 1;
29505
29506 /* Now, mark any dies referenced from here. */
29507 prune_unused_types_walk_attribs (die);
29508 }
29509
29510 die->die_mark = 2;
29511
29512 /* Mark children. */
29513 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29514 }
29515
29516 /* Increment the string counts on strings referred to from DIE's
29517 attributes. */
29518
29519 static void
29520 prune_unused_types_update_strings (dw_die_ref die)
29521 {
29522 dw_attr_node *a;
29523 unsigned ix;
29524
29525 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29526 if (AT_class (a) == dw_val_class_str)
29527 {
29528 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29529 s->refcount++;
29530 /* Avoid unnecessarily putting strings that are used less than
29531 twice in the hash table. */
29532 if (s->refcount
29533 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29534 {
29535 indirect_string_node **slot
29536 = debug_str_hash->find_slot_with_hash (s->str,
29537 htab_hash_string (s->str),
29538 INSERT);
29539 gcc_assert (*slot == NULL);
29540 *slot = s;
29541 }
29542 }
29543 }
29544
29545 /* Mark DIE and its children as removed. */
29546
29547 static void
29548 mark_removed (dw_die_ref die)
29549 {
29550 dw_die_ref c;
29551 die->removed = true;
29552 FOR_EACH_CHILD (die, c, mark_removed (c));
29553 }
29554
29555 /* Remove from the tree DIE any dies that aren't marked. */
29556
29557 static void
29558 prune_unused_types_prune (dw_die_ref die)
29559 {
29560 dw_die_ref c;
29561
29562 gcc_assert (die->die_mark);
29563 prune_unused_types_update_strings (die);
29564
29565 if (! die->die_child)
29566 return;
29567
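  /* Walk the circular sibling list of DIE's children, splicing out any
     runs of unmarked children; removed subtrees are flagged via
     mark_removed.  */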
29568 c = die->die_child;
29569 do {
29570 dw_die_ref prev = c, next;
29571 for (c = c->die_sib; ! c->die_mark; c = next)
29572 if (c == die->die_child)
29573 {
29574 /* No marked children between 'prev' and the end of the list. */
29575 if (prev == c)
29576 /* No marked children at all. */
29577 die->die_child = NULL;
29578 else
29579 {
29580 prev->die_sib = c->die_sib;
29581 die->die_child = prev;
29582 }
29583 c->die_sib = NULL;
29584 mark_removed (c);
29585 return;
29586 }
29587 else
29588 {
29589 next = c->die_sib;
29590 c->die_sib = NULL;
29591 mark_removed (c);
29592 }
29593
29594 if (c != prev->die_sib)
29595 prev->die_sib = c;
29596 prune_unused_types_prune (c);
29597 } while (c != die->die_child);
29598 }
29599
29600 /* Remove dies representing declarations that we never use. */
29601
29602 static void
29603 prune_unused_types (void)
29604 {
29605 unsigned int i;
29606 limbo_die_node *node;
29607 comdat_type_node *ctnode;
29608 pubname_entry *pub;
29609 dw_die_ref base_type;
29610
29611 #if ENABLE_ASSERT_CHECKING
29612 /* All the marks should already be clear. */
29613 verify_marks_clear (comp_unit_die ());
29614 for (node = limbo_die_list; node; node = node->next)
29615 verify_marks_clear (node->die);
29616 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29617 verify_marks_clear (ctnode->root_die);
29618 #endif /* ENABLE_ASSERT_CHECKING */
29619
29620 /* Mark types that are used in global variables. */
29621 premark_types_used_by_global_vars ();
29622
29623 /* Mark variables used in the symtab. */
29624 if (flag_debug_only_used_symbols)
29625 premark_used_variables ();
29626
29627 /* Set the mark on nodes that are actually used. */
29628 prune_unused_types_walk (comp_unit_die ());
29629 for (node = limbo_die_list; node; node = node->next)
29630 prune_unused_types_walk (node->die);
29631 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29632 {
29633 prune_unused_types_walk (ctnode->root_die);
29634 prune_unused_types_mark (ctnode->type_die, 1);
29635 }
29636
29637 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29638 are unusual in that they are pubnames that are the children of pubtypes.
29639 They should only be marked via their parent DW_TAG_enumeration_type die,
29640 not as roots in themselves. */
29641 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29642 if (pub->die->die_tag != DW_TAG_enumerator)
29643 prune_unused_types_mark (pub->die, 1);
29644 for (i = 0; base_types.iterate (i, &base_type); i++)
29645 prune_unused_types_mark (base_type, 1);
29646
29647 /* Also set the mark on nodes that could be referenced by
29648 DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
29649 by DW_TAG_inlined_subroutine origins. */
29650 cgraph_node *cnode;
29651 FOR_EACH_FUNCTION (cnode)
29652 if (cnode->referred_to_p (false))
29653 {
29654 dw_die_ref die = lookup_decl_die (cnode->decl);
29655 if (die == NULL || die->die_mark)
29656 continue;
29657 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29658 if (e->caller != cnode)
29659 {
29660 prune_unused_types_mark (die, 1);
29661 break;
29662 }
29663 }
29664
29665 if (debug_str_hash)
29666 debug_str_hash->empty ();
29667 if (skeleton_debug_str_hash)
29668 skeleton_debug_str_hash->empty ();
29669 prune_unused_types_prune (comp_unit_die ());
29670 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29671 {
29672 node = *pnode;
29673 if (!node->die->die_mark)
29674 *pnode = node->next;
29675 else
29676 {
29677 prune_unused_types_prune (node->die);
29678 pnode = &node->next;
29679 }
29680 }
29681 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29682 prune_unused_types_prune (ctnode->root_die);
29683
29684 /* Leave the marks clear. */
29685 prune_unmark_dies (comp_unit_die ());
29686 for (node = limbo_die_list; node; node = node->next)
29687 prune_unmark_dies (node->die);
29688 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29689 prune_unmark_dies (ctnode->root_die);
29690 }
29691
29692 /* Helpers to manipulate hash table of comdat type units. */
29693
29694 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29695 {
29696 static inline hashval_t hash (const comdat_type_node *);
29697 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29698 };
29699
29700 inline hashval_t
29701 comdat_type_hasher::hash (const comdat_type_node *type_node)
29702 {
29703 hashval_t h;
29704 memcpy (&h, type_node->signature, sizeof (h));
29705 return h;
29706 }
29707
29708 inline bool
29709 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29710 const comdat_type_node *type_node_2)
29711 {
29712 return (! memcmp (type_node_1->signature, type_node_2->signature,
29713 DWARF_TYPE_SIGNATURE_SIZE));
29714 }
29715
29716 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to a dw_die_ref
29717    to the location where it would have been added had we known its
29718    DECL_ASSEMBLER_NAME when we added the other attributes.  This will
29719    probably improve the compactness of debug info by removing equivalent
29720    abbrevs, and hide any differences caused by deferring the
29721    computation of the assembler name, triggered by e.g. PCH. */
29722
29723 static inline void
29724 move_linkage_attr (dw_die_ref die)
29725 {
29726 unsigned ix = vec_safe_length (die->die_attr);
29727 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29728
29729 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29730 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29731
29732 while (--ix > 0)
29733 {
29734 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29735
29736 if (prev->dw_attr == DW_AT_decl_line
29737 || prev->dw_attr == DW_AT_decl_column
29738 || prev->dw_attr == DW_AT_name)
29739 break;
29740 }
29741
29742 if (ix != vec_safe_length (die->die_attr) - 1)
29743 {
29744 die->die_attr->pop ();
29745 die->die_attr->quick_insert (ix, linkage);
29746 }
29747 }
29748
29749 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29750 referenced from typed stack ops and count how often they are used. */
29751
29752 static void
29753 mark_base_types (dw_loc_descr_ref loc)
29754 {
29755 dw_die_ref base_type = NULL;
29756
29757 for (; loc; loc = loc->dw_loc_next)
29758 {
29759 switch (loc->dw_loc_opc)
29760 {
29761 case DW_OP_regval_type:
29762 case DW_OP_deref_type:
29763 case DW_OP_GNU_regval_type:
29764 case DW_OP_GNU_deref_type:
29765 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29766 break;
29767 case DW_OP_convert:
29768 case DW_OP_reinterpret:
29769 case DW_OP_GNU_convert:
29770 case DW_OP_GNU_reinterpret:
29771 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29772 continue;
29773 /* FALLTHRU */
29774 case DW_OP_const_type:
29775 case DW_OP_GNU_const_type:
29776 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29777 break;
29778 case DW_OP_entry_value:
29779 case DW_OP_GNU_entry_value:
29780 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29781 continue;
29782 default:
29783 continue;
29784 }
29785 gcc_assert (base_type->die_parent == comp_unit_die ());
29786 if (base_type->die_mark)
29787 base_type->die_mark++;
29788 else
29789 {
29790 base_types.safe_push (base_type);
29791 base_type->die_mark = 1;
29792 }
29793 }
29794 }
29795
29796 /* Comparison function for sorting marked base types. */
29797
29798 static int
29799 base_type_cmp (const void *x, const void *y)
29800 {
29801 dw_die_ref dx = *(const dw_die_ref *) x;
29802 dw_die_ref dy = *(const dw_die_ref *) y;
29803 unsigned int byte_size1, byte_size2;
29804 unsigned int encoding1, encoding2;
29805 unsigned int align1, align2;
29806 if (dx->die_mark > dy->die_mark)
29807 return -1;
29808 if (dx->die_mark < dy->die_mark)
29809 return 1;
29810 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29811 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29812 if (byte_size1 < byte_size2)
29813 return 1;
29814 if (byte_size1 > byte_size2)
29815 return -1;
29816 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29817 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29818 if (encoding1 < encoding2)
29819 return 1;
29820 if (encoding1 > encoding2)
29821 return -1;
29822 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29823 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29824 if (align1 < align2)
29825 return 1;
29826 if (align1 > align2)
29827 return -1;
29828 return 0;
29829 }
29830
29831 /* Move base types marked by mark_base_types as early as possible
29832 in the CU, sorted by decreasing usage count both to make the
29833 uleb128 references as small as possible and to make sure they
29834 will have die_offset already computed by calc_die_sizes when
29835    the sizes of typed stack loc ops are computed. */
29836
29837 static void
29838 move_marked_base_types (void)
29839 {
29840 unsigned int i;
29841 dw_die_ref base_type, die, c;
29842
29843 if (base_types.is_empty ())
29844 return;
29845
29846 /* Sort by decreasing usage count, they will be added again in that
29847 order later on. */
29848 base_types.qsort (base_type_cmp);
29849 die = comp_unit_die ();
29850 c = die->die_child;
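  /* First unlink every marked DW_TAG_base_type from the CU's child list;
     the loop further below re-inserts them in order of decreasing use so
     that they appear as early as possible in the CU.  */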
29851 do
29852 {
29853 dw_die_ref prev = c;
29854 c = c->die_sib;
29855 while (c->die_mark)
29856 {
29857 remove_child_with_prev (c, prev);
29858 /* As base types got marked, there must be at least
29859 one node other than DW_TAG_base_type. */
29860 gcc_assert (die->die_child != NULL);
29861 c = prev->die_sib;
29862 }
29863 }
29864 while (c != die->die_child);
29865 gcc_assert (die->die_child);
29866 c = die->die_child;
29867 for (i = 0; base_types.iterate (i, &base_type); i++)
29868 {
29869 base_type->die_mark = 0;
29870 base_type->die_sib = c->die_sib;
29871 c->die_sib = base_type;
29872 c = base_type;
29873 }
29874 }
29875
29876 /* Helper function for resolve_addr: attempt to resolve
29877    one CONST_STRING and return true if successful.  Similarly, verify that
29878 SYMBOL_REFs refer to variables emitted in the current CU. */
29879
29880 static bool
29881 resolve_one_addr (rtx *addr)
29882 {
29883 rtx rtl = *addr;
29884
29885 if (GET_CODE (rtl) == CONST_STRING)
29886 {
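      /* Rebuild the STRING_CST and look up its constant pool entry; keep
	 the address only if that entry exists and has been written out.  */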
29887 size_t len = strlen (XSTR (rtl, 0)) + 1;
29888 tree t = build_string (len, XSTR (rtl, 0));
29889 tree tlen = size_int (len - 1);
29890 TREE_TYPE (t)
29891 = build_array_type (char_type_node, build_index_type (tlen));
29892 rtl = lookup_constant_def (t);
29893 if (!rtl || !MEM_P (rtl))
29894 return false;
29895 rtl = XEXP (rtl, 0);
29896 if (GET_CODE (rtl) == SYMBOL_REF
29897 && SYMBOL_REF_DECL (rtl)
29898 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29899 return false;
29900 vec_safe_push (used_rtx_array, rtl);
29901 *addr = rtl;
29902 return true;
29903 }
29904
29905 if (GET_CODE (rtl) == SYMBOL_REF
29906 && SYMBOL_REF_DECL (rtl))
29907 {
29908 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29909 {
29910 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29911 return false;
29912 }
29913 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29914 return false;
29915 }
29916
29917 if (GET_CODE (rtl) == CONST)
29918 {
29919 subrtx_ptr_iterator::array_type array;
29920 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29921 if (!resolve_one_addr (*iter))
29922 return false;
29923 }
29924
29925 return true;
29926 }
29927
29928 /* For a STRING_CST, return the SYMBOL_REF of its constant pool entry,
29929    if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29930 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29931
29932 static rtx
29933 string_cst_pool_decl (tree t)
29934 {
29935 rtx rtl = output_constant_def (t, 1);
29936 unsigned char *array;
29937 dw_loc_descr_ref l;
29938 tree decl;
29939 size_t len;
29940 dw_die_ref ref;
29941
29942 if (!rtl || !MEM_P (rtl))
29943 return NULL_RTX;
29944 rtl = XEXP (rtl, 0);
29945 if (GET_CODE (rtl) != SYMBOL_REF
29946 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29947 return NULL_RTX;
29948
29949 decl = SYMBOL_REF_DECL (rtl);
29950 if (!lookup_decl_die (decl))
29951 {
29952 len = TREE_STRING_LENGTH (t);
29953 vec_safe_push (used_rtx_array, rtl);
29954 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29955 array = ggc_vec_alloc<unsigned char> (len);
29956 memcpy (array, TREE_STRING_POINTER (t), len);
29957 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29958 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29959 l->dw_loc_oprnd2.v.val_vec.length = len;
29960 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29961 l->dw_loc_oprnd2.v.val_vec.array = array;
29962 add_AT_loc (ref, DW_AT_location, l);
29963 equate_decl_number_to_die (decl, ref);
29964 }
29965 return rtl;
29966 }
29967
29968 /* Helper function of resolve_addr_in_expr. LOC is
29969 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29970 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29971 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29972    with DW_OP_implicit_pointer if possible and return true;
29973    if unsuccessful, return false. */
29974
29975 static bool
29976 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29977 {
29978 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29979 HOST_WIDE_INT offset = 0;
29980 dw_die_ref ref = NULL;
29981 tree decl;
29982
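  /* Peel a constant offset off (const (plus SYM OFF)) so that SYM can be
     looked up below and OFF remembered as the DW_OP_implicit_pointer
     offset.  */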
29983 if (GET_CODE (rtl) == CONST
29984 && GET_CODE (XEXP (rtl, 0)) == PLUS
29985 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29986 {
29987 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29988 rtl = XEXP (XEXP (rtl, 0), 0);
29989 }
29990 if (GET_CODE (rtl) == CONST_STRING)
29991 {
29992 size_t len = strlen (XSTR (rtl, 0)) + 1;
29993 tree t = build_string (len, XSTR (rtl, 0));
29994 tree tlen = size_int (len - 1);
29995
29996 TREE_TYPE (t)
29997 = build_array_type (char_type_node, build_index_type (tlen));
29998 rtl = string_cst_pool_decl (t);
29999 if (!rtl)
30000 return false;
30001 }
30002 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
30003 {
30004 decl = SYMBOL_REF_DECL (rtl);
30005 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
30006 {
30007 ref = lookup_decl_die (decl);
30008 if (ref && (get_AT (ref, DW_AT_location)
30009 || get_AT (ref, DW_AT_const_value)))
30010 {
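		  /* Rewrite the DW_OP_addr in place as DW_OP_implicit_pointer
		     referencing REF and drop the following
		     DW_OP_stack_value.  */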
30011 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
30012 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30013 loc->dw_loc_oprnd1.val_entry = NULL;
30014 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30015 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30016 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30017 loc->dw_loc_oprnd2.v.val_int = offset;
30018 return true;
30019 }
30020 }
30021 }
30022 return false;
30023 }
30024
30025 /* Helper function for resolve_addr: handle one location
30026    expression and return false if at least one CONST_STRING or SYMBOL_REF in
30027 the location list couldn't be resolved. */
30028
30029 static bool
30030 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
30031 {
30032 dw_loc_descr_ref keep = NULL;
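  /* KEEP records a DW_OP_convert that must not be optimized away because
     it follows a non-integer (e.g. floating point) typed stack entry; see
     the DW_OP_convert handling below.  */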
30033 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
30034 switch (loc->dw_loc_opc)
30035 {
30036 case DW_OP_addr:
30037 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30038 {
30039 if ((prev == NULL
30040 || prev->dw_loc_opc == DW_OP_piece
30041 || prev->dw_loc_opc == DW_OP_bit_piece)
30042 && loc->dw_loc_next
30043 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
30044 && (!dwarf_strict || dwarf_version >= 5)
30045 && optimize_one_addr_into_implicit_ptr (loc))
30046 break;
30047 return false;
30048 }
30049 break;
30050 case DW_OP_GNU_addr_index:
30051 case DW_OP_addrx:
30052 case DW_OP_GNU_const_index:
30053 case DW_OP_constx:
30054 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
30055 || loc->dw_loc_opc == DW_OP_addrx)
30056 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
30057 || loc->dw_loc_opc == DW_OP_constx)
30058 && loc->dtprel))
30059 {
30060 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
30061 if (!resolve_one_addr (&rtl))
30062 return false;
30063 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
30064 loc->dw_loc_oprnd1.val_entry
30065 = add_addr_table_entry (rtl, ate_kind_rtx);
30066 }
30067 break;
30068 case DW_OP_const4u:
30069 case DW_OP_const8u:
30070 if (loc->dtprel
30071 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30072 return false;
30073 break;
30074 case DW_OP_plus_uconst:
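      /* If emitting the operand as a separate constant op followed by
	 DW_OP_plus would be smaller than DW_OP_plus_uconst with a large
	 ULEB128 operand, rewrite this op in place.  */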
30075 if (size_of_loc_descr (loc)
30076 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
30077 + 1
30078 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
30079 {
30080 dw_loc_descr_ref repl
30081 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
30082 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
30083 add_loc_descr (&repl, loc->dw_loc_next);
30084 *loc = *repl;
30085 }
30086 break;
30087 case DW_OP_implicit_value:
30088 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
30089 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
30090 return false;
30091 break;
30092 case DW_OP_implicit_pointer:
30093 case DW_OP_GNU_implicit_pointer:
30094 case DW_OP_GNU_parameter_ref:
30095 case DW_OP_GNU_variable_value:
30096 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30097 {
30098 dw_die_ref ref
30099 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
30100 if (ref == NULL)
30101 return false;
30102 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30103 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30104 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30105 }
30106 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
30107 {
30108 if (prev == NULL
30109 && loc->dw_loc_next == NULL
30110 && AT_class (a) == dw_val_class_loc)
30111 switch (a->dw_attr)
30112 {
30113 		  /* The following attributes allow both exprloc and reference
30114 		     classes, so if the whole expression is DW_OP_GNU_variable_value
30115 		     alone we can transform it into a reference. */
30116 case DW_AT_byte_size:
30117 case DW_AT_bit_size:
30118 case DW_AT_lower_bound:
30119 case DW_AT_upper_bound:
30120 case DW_AT_bit_stride:
30121 case DW_AT_count:
30122 case DW_AT_allocated:
30123 case DW_AT_associated:
30124 case DW_AT_byte_stride:
30125 a->dw_attr_val.val_class = dw_val_class_die_ref;
30126 a->dw_attr_val.val_entry = NULL;
30127 a->dw_attr_val.v.val_die_ref.die
30128 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30129 a->dw_attr_val.v.val_die_ref.external = 0;
30130 return true;
30131 default:
30132 break;
30133 }
30134 if (dwarf_strict)
30135 return false;
30136 }
30137 break;
30138 case DW_OP_const_type:
30139 case DW_OP_regval_type:
30140 case DW_OP_deref_type:
30141 case DW_OP_convert:
30142 case DW_OP_reinterpret:
30143 case DW_OP_GNU_const_type:
30144 case DW_OP_GNU_regval_type:
30145 case DW_OP_GNU_deref_type:
30146 case DW_OP_GNU_convert:
30147 case DW_OP_GNU_reinterpret:
30148 while (loc->dw_loc_next
30149 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30150 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30151 {
30152 dw_die_ref base1, base2;
30153 unsigned enc1, enc2, size1, size2;
30154 if (loc->dw_loc_opc == DW_OP_regval_type
30155 || loc->dw_loc_opc == DW_OP_deref_type
30156 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30157 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30158 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30159 else if (loc->dw_loc_oprnd1.val_class
30160 == dw_val_class_unsigned_const)
30161 break;
30162 else
30163 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30164 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30165 == dw_val_class_unsigned_const)
30166 break;
30167 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30168 gcc_assert (base1->die_tag == DW_TAG_base_type
30169 && base2->die_tag == DW_TAG_base_type);
30170 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30171 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30172 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30173 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30174 if (size1 == size2
30175 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30176 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30177 && loc != keep)
30178 || enc1 == enc2))
30179 {
30180 /* Optimize away next DW_OP_convert after
30181 adjusting LOC's base type die reference. */
30182 if (loc->dw_loc_opc == DW_OP_regval_type
30183 || loc->dw_loc_opc == DW_OP_deref_type
30184 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30185 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30186 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30187 else
30188 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30189 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30190 continue;
30191 }
30192 /* Don't change integer DW_OP_convert after e.g. floating
30193 point typed stack entry. */
30194 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30195 keep = loc->dw_loc_next;
30196 break;
30197 }
30198 break;
30199 default:
30200 break;
30201 }
30202 return true;
30203 }
30204
30205 /* Helper function of resolve_addr. DIE had DW_AT_location of
30206 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
30207 and DW_OP_addr couldn't be resolved. resolve_addr has already
30208 removed the DW_AT_location attribute. This function attempts to
30209    add a new DW_AT_location attribute with DW_OP_implicit_pointer
30210    to it, or a DW_AT_const_value attribute, if possible. */
30211
30212 static void
30213 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30214 {
30215 if (!VAR_P (decl)
30216 || lookup_decl_die (decl) != die
30217 || DECL_EXTERNAL (decl)
30218 || !TREE_STATIC (decl)
30219 || DECL_INITIAL (decl) == NULL_TREE
30220 || DECL_P (DECL_INITIAL (decl))
30221 || get_AT (die, DW_AT_const_value))
30222 return;
30223
30224 tree init = DECL_INITIAL (decl);
30225 HOST_WIDE_INT offset = 0;
30226 /* For variables that have been optimized away and thus
30227 don't have a memory location, see if we can emit
30228 DW_AT_const_value instead. */
30229 if (tree_add_const_value_attribute (die, init))
30230 return;
30231 if (dwarf_strict && dwarf_version < 5)
30232 return;
30233 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30234 and ADDR_EXPR refers to a decl that has DW_AT_location or
30235 DW_AT_const_value (but isn't addressable, otherwise
30236 resolving the original DW_OP_addr wouldn't fail), see if
30237 we can add DW_OP_implicit_pointer. */
30238 STRIP_NOPS (init);
30239 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30240 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30241 {
30242 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30243 init = TREE_OPERAND (init, 0);
30244 STRIP_NOPS (init);
30245 }
30246 if (TREE_CODE (init) != ADDR_EXPR)
30247 return;
30248 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30249 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30250 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30251 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30252 && TREE_OPERAND (init, 0) != decl))
30253 {
30254 dw_die_ref ref;
30255 dw_loc_descr_ref l;
30256
30257 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30258 {
30259 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30260 if (!rtl)
30261 return;
30262 decl = SYMBOL_REF_DECL (rtl);
30263 }
30264 else
30265 decl = TREE_OPERAND (init, 0);
30266 ref = lookup_decl_die (decl);
30267 if (ref == NULL
30268 || (!get_AT (ref, DW_AT_location)
30269 && !get_AT (ref, DW_AT_const_value)))
30270 return;
30271 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30272 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30273 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30274 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30275 add_AT_loc (die, DW_AT_location, l);
30276 }
30277 }
30278
30279 /* Return NULL if L is a valid DWARF expression, or the first op that is
30280    not valid in a DWARF expression otherwise. */
30281
30282 static dw_loc_descr_ref
30283 non_dwarf_expression (dw_loc_descr_ref l)
30284 {
30285 while (l)
30286 {
30287 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30288 return l;
30289 switch (l->dw_loc_opc)
30290 {
30291 case DW_OP_regx:
30292 case DW_OP_implicit_value:
30293 case DW_OP_stack_value:
30294 case DW_OP_implicit_pointer:
30295 case DW_OP_GNU_implicit_pointer:
30296 case DW_OP_GNU_parameter_ref:
30297 case DW_OP_piece:
30298 case DW_OP_bit_piece:
30299 return l;
30300 default:
30301 break;
30302 }
30303 l = l->dw_loc_next;
30304 }
30305 return NULL;
30306 }
30307
30308 /* Return an adjusted copy of EXPR:
30309    If it is an empty DWARF expression, return it.
30310    If it is a valid non-empty DWARF expression,
30311    return a copy of EXPR with DW_OP_deref appended to it.
30312    If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30313    copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30314    If it is a DWARF expression followed by DW_OP_stack_value, return a
30315    copy of the DWARF expression with nothing appended.
30316    Otherwise, return NULL. */
30317
30318 static dw_loc_descr_ref
30319 copy_deref_exprloc (dw_loc_descr_ref expr)
30320 {
30321 dw_loc_descr_ref tail = NULL;
30322
30323 if (expr == NULL)
30324 return NULL;
30325
30326 dw_loc_descr_ref l = non_dwarf_expression (expr);
30327 if (l && l->dw_loc_next)
30328 return NULL;
30329
30330 if (l)
30331 {
30332 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30333 tail = new_loc_descr ((enum dwarf_location_atom)
30334 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30335 0, 0);
30336 else
30337 switch (l->dw_loc_opc)
30338 {
30339 case DW_OP_regx:
30340 tail = new_loc_descr (DW_OP_bregx,
30341 l->dw_loc_oprnd1.v.val_unsigned, 0);
30342 break;
30343 case DW_OP_stack_value:
30344 break;
30345 default:
30346 return NULL;
30347 }
30348 }
30349 else
30350 tail = new_loc_descr (DW_OP_deref, 0, 0);
30351
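  /* Copy EXPR up to (but not including) the non-expression tail op L,
     then append TAIL (DW_OP_deref, DW_OP_breg*, or nothing for the
     DW_OP_stack_value case).  */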
30352 dw_loc_descr_ref ret = NULL, *p = &ret;
30353 while (expr != l)
30354 {
30355 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30356 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30357 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30358 p = &(*p)->dw_loc_next;
30359 expr = expr->dw_loc_next;
30360 }
30361 *p = tail;
30362 return ret;
30363 }
30364
30365 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30366    reference to a variable or argument, adjust it if needed and return:
30367    -1 if the DW_AT_string_length attribute (and the
30368    DW_AT_{string_length_,}byte_size attribute, if present) should be removed,
30369    0 if the attribute should be kept, perhaps with minor modifications
30370    (no need to rescan), or 1 if the attribute has been successfully adjusted. */
30371
30372 static int
30373 optimize_string_length (dw_attr_node *a)
30374 {
30375 dw_loc_descr_ref l = AT_loc (a), lv;
30376 dw_die_ref die;
30377 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30378 {
30379 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30380 die = lookup_decl_die (decl);
30381 if (die)
30382 {
30383 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30384 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30385 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30386 }
30387 else
30388 return -1;
30389 }
30390 else
30391 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30392
30393 /* DWARF5 allows reference class, so we can then reference the DIE.
30394 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30395 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30396 {
30397 a->dw_attr_val.val_class = dw_val_class_die_ref;
30398 a->dw_attr_val.val_entry = NULL;
30399 a->dw_attr_val.v.val_die_ref.die = die;
30400 a->dw_attr_val.v.val_die_ref.external = 0;
30401 return 0;
30402 }
30403
30404 dw_attr_node *av = get_AT (die, DW_AT_location);
30405 dw_loc_list_ref d;
30406 bool non_dwarf_expr = false;
30407
30408 if (av == NULL)
30409 return dwarf_strict ? -1 : 0;
30410 switch (AT_class (av))
30411 {
30412 case dw_val_class_loc_list:
30413 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30414 if (d->expr && non_dwarf_expression (d->expr))
30415 non_dwarf_expr = true;
30416 break;
30417 case dw_val_class_view_list:
30418 gcc_unreachable ();
30419 case dw_val_class_loc:
30420 lv = AT_loc (av);
30421 if (lv == NULL)
30422 return dwarf_strict ? -1 : 0;
30423 if (non_dwarf_expression (lv))
30424 non_dwarf_expr = true;
30425 break;
30426 default:
30427 return dwarf_strict ? -1 : 0;
30428 }
30429
30430 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30431 into DW_OP_call4 or DW_OP_GNU_variable_value into
30432 DW_OP_call4 DW_OP_deref, do so. */
30433 if (!non_dwarf_expr
30434 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30435 {
30436 l->dw_loc_opc = DW_OP_call4;
30437 if (l->dw_loc_next)
30438 l->dw_loc_next = NULL;
30439 else
30440 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30441 return 0;
30442 }
30443
30444 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30445 copy over the DW_AT_location attribute from die to a. */
30446 if (l->dw_loc_next != NULL)
30447 {
30448 a->dw_attr_val = av->dw_attr_val;
30449 return 1;
30450 }
30451
30452 dw_loc_list_ref list, *p;
30453 switch (AT_class (av))
30454 {
30455 case dw_val_class_loc_list:
30456 p = &list;
30457 list = NULL;
30458 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30459 {
30460 lv = copy_deref_exprloc (d->expr);
30461 if (lv)
30462 {
30463 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30464 p = &(*p)->dw_loc_next;
30465 }
30466 else if (!dwarf_strict && d->expr)
30467 return 0;
30468 }
30469 if (list == NULL)
30470 return dwarf_strict ? -1 : 0;
30471 a->dw_attr_val.val_class = dw_val_class_loc_list;
30472 gen_llsym (list);
30473 *AT_loc_list_ptr (a) = list;
30474 return 1;
30475 case dw_val_class_loc:
30476 lv = copy_deref_exprloc (AT_loc (av));
30477 if (lv == NULL)
30478 return dwarf_strict ? -1 : 0;
30479 a->dw_attr_val.v.val_loc = lv;
30480 return 1;
30481 default:
30482 gcc_unreachable ();
30483 }
30484 }
30485
30486 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30487    an address in the .rodata section if the string literal is emitted
30488    there; if it isn't found in .rodata, remove the containing location
30489    list, or replace DW_AT_const_value with DW_AT_location and an empty
30490    location expression.  Similarly for SYMBOL_REFs, keep only those
30491    that refer to something that has been emitted in the current CU. */
30492
30493 static void
30494 resolve_addr (dw_die_ref die)
30495 {
30496 dw_die_ref c;
30497 dw_attr_node *a;
30498 dw_loc_list_ref *curr, *start, loc;
30499 unsigned ix;
30500 bool remove_AT_byte_size = false;
30501
30502 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30503 switch (AT_class (a))
30504 {
30505 case dw_val_class_loc_list:
30506 start = curr = AT_loc_list_ptr (a);
30507 loc = *curr;
30508 gcc_assert (loc);
30509 /* The same list can be referenced more than once. See if we have
30510 already recorded the result from a previous pass. */
30511 if (loc->replaced)
30512 *curr = loc->dw_loc_next;
30513 else if (!loc->resolved_addr)
30514 {
30515 /* As things stand, we do not expect or allow one die to
30516 reference a suffix of another die's location list chain.
30517 References must be identical or completely separate.
30518 There is therefore no need to cache the result of this
30519 pass on any list other than the first; doing so
30520 would lead to unnecessary writes. */
30521 while (*curr)
30522 {
30523 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30524 if (!resolve_addr_in_expr (a, (*curr)->expr))
30525 {
30526 dw_loc_list_ref next = (*curr)->dw_loc_next;
30527 dw_loc_descr_ref l = (*curr)->expr;
30528
30529 if (next && (*curr)->ll_symbol)
30530 {
30531 gcc_assert (!next->ll_symbol);
30532 next->ll_symbol = (*curr)->ll_symbol;
30533 next->vl_symbol = (*curr)->vl_symbol;
30534 }
30535 if (dwarf_split_debug_info)
30536 remove_loc_list_addr_table_entries (l);
30537 *curr = next;
30538 }
30539 else
30540 {
30541 mark_base_types ((*curr)->expr);
30542 curr = &(*curr)->dw_loc_next;
30543 }
30544 }
30545 if (loc == *start)
30546 loc->resolved_addr = 1;
30547 else
30548 {
30549 loc->replaced = 1;
30550 loc->dw_loc_next = *start;
30551 }
30552 }
30553 if (!*start)
30554 {
30555 remove_AT (die, a->dw_attr);
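	    /* remove_AT shrank the attribute vector; step IX back so the
	       FOR_EACH_VEC_SAFE_ELT loop does not skip the attribute that
	       now occupies this slot.  */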
30556 ix--;
30557 }
30558 break;
30559 case dw_val_class_view_list:
30560 {
30561 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30562 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30563 dw_val_node *llnode
30564 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30565 /* If we no longer have a loclist, or it no longer needs
30566 views, drop this attribute. */
30567 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30568 {
30569 remove_AT (die, a->dw_attr);
30570 ix--;
30571 }
30572 break;
30573 }
30574 case dw_val_class_loc:
30575 {
30576 dw_loc_descr_ref l = AT_loc (a);
30577 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30578 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30579 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30580 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30581 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30582 with DW_FORM_ref referencing the same DIE as
30583 DW_OP_GNU_variable_value used to reference. */
30584 if (a->dw_attr == DW_AT_string_length
30585 && l
30586 && l->dw_loc_opc == DW_OP_GNU_variable_value
30587 && (l->dw_loc_next == NULL
30588 || (l->dw_loc_next->dw_loc_next == NULL
30589 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30590 {
30591 switch (optimize_string_length (a))
30592 {
30593 case -1:
30594 remove_AT (die, a->dw_attr);
30595 ix--;
30596 /* If we drop DW_AT_string_length, we need to drop also
30597 DW_AT_{string_length_,}byte_size. */
30598 remove_AT_byte_size = true;
30599 continue;
30600 default:
30601 break;
30602 case 1:
30603 /* Even if we keep the optimized DW_AT_string_length,
30604 it might have changed AT_class, so process it again. */
30605 ix--;
30606 continue;
30607 }
30608 }
30609 /* For -gdwarf-2 don't attempt to optimize
30610 DW_AT_data_member_location containing
30611 DW_OP_plus_uconst - older consumers might
30612 rely on it being that op instead of a more complex,
30613 but shorter, location description. */
30614 if ((dwarf_version > 2
30615 || a->dw_attr != DW_AT_data_member_location
30616 || l == NULL
30617 || l->dw_loc_opc != DW_OP_plus_uconst
30618 || l->dw_loc_next != NULL)
30619 && !resolve_addr_in_expr (a, l))
30620 {
30621 if (dwarf_split_debug_info)
30622 remove_loc_list_addr_table_entries (l);
30623 if (l != NULL
30624 && l->dw_loc_next == NULL
30625 && l->dw_loc_opc == DW_OP_addr
30626 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30627 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30628 && a->dw_attr == DW_AT_location)
30629 {
30630 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30631 remove_AT (die, a->dw_attr);
30632 ix--;
30633 optimize_location_into_implicit_ptr (die, decl);
30634 break;
30635 }
30636 if (a->dw_attr == DW_AT_string_length)
30637 /* If we drop DW_AT_string_length, we need to drop also
30638 DW_AT_{string_length_,}byte_size. */
30639 remove_AT_byte_size = true;
30640 remove_AT (die, a->dw_attr);
30641 ix--;
30642 }
30643 else
30644 mark_base_types (l);
30645 }
30646 break;
30647 case dw_val_class_addr:
30648 if (a->dw_attr == DW_AT_const_value
30649 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30650 {
30651 if (AT_index (a) != NOT_INDEXED)
30652 remove_addr_table_entry (a->dw_attr_val.val_entry);
30653 remove_AT (die, a->dw_attr);
30654 ix--;
30655 }
30656 if ((die->die_tag == DW_TAG_call_site
30657 && a->dw_attr == DW_AT_call_origin)
30658 || (die->die_tag == DW_TAG_GNU_call_site
30659 && a->dw_attr == DW_AT_abstract_origin))
30660 {
30661 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30662 dw_die_ref tdie = lookup_decl_die (tdecl);
30663 dw_die_ref cdie;
30664 if (tdie == NULL
30665 && DECL_EXTERNAL (tdecl)
30666 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30667 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30668 {
30669 dw_die_ref pdie = cdie;
30670 /* Make sure we don't add these DIEs into type units.
30671 We could emit skeleton DIEs for context (namespaces,
30672 outer structs/classes) and a skeleton DIE for the
30673 innermost context with DW_AT_signature pointing to the
30674 type unit. See PR78835. */
30675 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30676 pdie = pdie->die_parent;
30677 if (pdie == NULL)
30678 {
30679 		  /* Creating a full DIE for tdecl is overly expensive and
30680 		     at this point even wrong when in the LTO phase,
30681 		     as it can end up generating new type DIEs we didn't
30682 		     output, and optimize_external_refs would then crash. */
30683 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30684 add_AT_flag (tdie, DW_AT_external, 1);
30685 add_AT_flag (tdie, DW_AT_declaration, 1);
30686 add_linkage_attr (tdie, tdecl);
30687 add_name_and_src_coords_attributes (tdie, tdecl, true);
30688 equate_decl_number_to_die (tdecl, tdie);
30689 }
30690 }
30691 if (tdie)
30692 {
30693 a->dw_attr_val.val_class = dw_val_class_die_ref;
30694 a->dw_attr_val.v.val_die_ref.die = tdie;
30695 a->dw_attr_val.v.val_die_ref.external = 0;
30696 }
30697 else
30698 {
30699 if (AT_index (a) != NOT_INDEXED)
30700 remove_addr_table_entry (a->dw_attr_val.val_entry);
30701 remove_AT (die, a->dw_attr);
30702 ix--;
30703 }
30704 }
30705 break;
30706 default:
30707 break;
30708 }
30709
30710 if (remove_AT_byte_size)
30711 remove_AT (die, dwarf_version >= 5
30712 ? DW_AT_string_length_byte_size
30713 : DW_AT_byte_size);
30714
30715 FOR_EACH_CHILD (die, c, resolve_addr (c));
30716 }
30717 \f
30718 /* Helper routines for optimize_location_lists.
30719    This pass tries to share identical location lists in the .debug_loc
30720    section. */
30721
30722 /* Iteratively hash operands of LOC opcode into HSTATE. */
30723
30724 static void
30725 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30726 {
30727 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30728 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30729
30730 switch (loc->dw_loc_opc)
30731 {
30732 case DW_OP_const4u:
30733 case DW_OP_const8u:
30734 if (loc->dtprel)
30735 goto hash_addr;
30736 /* FALLTHRU */
30737 case DW_OP_const1u:
30738 case DW_OP_const1s:
30739 case DW_OP_const2u:
30740 case DW_OP_const2s:
30741 case DW_OP_const4s:
30742 case DW_OP_const8s:
30743 case DW_OP_constu:
30744 case DW_OP_consts:
30745 case DW_OP_pick:
30746 case DW_OP_plus_uconst:
30747 case DW_OP_breg0:
30748 case DW_OP_breg1:
30749 case DW_OP_breg2:
30750 case DW_OP_breg3:
30751 case DW_OP_breg4:
30752 case DW_OP_breg5:
30753 case DW_OP_breg6:
30754 case DW_OP_breg7:
30755 case DW_OP_breg8:
30756 case DW_OP_breg9:
30757 case DW_OP_breg10:
30758 case DW_OP_breg11:
30759 case DW_OP_breg12:
30760 case DW_OP_breg13:
30761 case DW_OP_breg14:
30762 case DW_OP_breg15:
30763 case DW_OP_breg16:
30764 case DW_OP_breg17:
30765 case DW_OP_breg18:
30766 case DW_OP_breg19:
30767 case DW_OP_breg20:
30768 case DW_OP_breg21:
30769 case DW_OP_breg22:
30770 case DW_OP_breg23:
30771 case DW_OP_breg24:
30772 case DW_OP_breg25:
30773 case DW_OP_breg26:
30774 case DW_OP_breg27:
30775 case DW_OP_breg28:
30776 case DW_OP_breg29:
30777 case DW_OP_breg30:
30778 case DW_OP_breg31:
30779 case DW_OP_regx:
30780 case DW_OP_fbreg:
30781 case DW_OP_piece:
30782 case DW_OP_deref_size:
30783 case DW_OP_xderef_size:
30784 hstate.add_object (val1->v.val_int);
30785 break;
30786 case DW_OP_skip:
30787 case DW_OP_bra:
30788 {
30789 int offset;
30790
30791 gcc_assert (val1->val_class == dw_val_class_loc);
30792 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30793 hstate.add_object (offset);
30794 }
30795 break;
30796 case DW_OP_implicit_value:
30797 hstate.add_object (val1->v.val_unsigned);
30798 switch (val2->val_class)
30799 {
30800 case dw_val_class_const:
30801 hstate.add_object (val2->v.val_int);
30802 break;
30803 case dw_val_class_vec:
30804 {
30805 unsigned int elt_size = val2->v.val_vec.elt_size;
30806 unsigned int len = val2->v.val_vec.length;
30807
30808 hstate.add_int (elt_size);
30809 hstate.add_int (len);
30810 hstate.add (val2->v.val_vec.array, len * elt_size);
30811 }
30812 break;
30813 case dw_val_class_const_double:
30814 hstate.add_object (val2->v.val_double.low);
30815 hstate.add_object (val2->v.val_double.high);
30816 break;
30817 case dw_val_class_wide_int:
30818 hstate.add (val2->v.val_wide->get_val (),
30819 get_full_len (*val2->v.val_wide)
30820 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30821 break;
30822 case dw_val_class_addr:
30823 inchash::add_rtx (val2->v.val_addr, hstate);
30824 break;
30825 default:
30826 gcc_unreachable ();
30827 }
30828 break;
30829 case DW_OP_bregx:
30830 case DW_OP_bit_piece:
30831 hstate.add_object (val1->v.val_int);
30832 hstate.add_object (val2->v.val_int);
30833 break;
30834 case DW_OP_addr:
30835 hash_addr:
30836 if (loc->dtprel)
30837 {
30838 unsigned char dtprel = 0xd1;
30839 hstate.add_object (dtprel);
30840 }
30841 inchash::add_rtx (val1->v.val_addr, hstate);
30842 break;
30843 case DW_OP_GNU_addr_index:
30844 case DW_OP_addrx:
30845 case DW_OP_GNU_const_index:
30846 case DW_OP_constx:
30847 {
30848 if (loc->dtprel)
30849 {
30850 unsigned char dtprel = 0xd1;
30851 hstate.add_object (dtprel);
30852 }
30853 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30854 }
30855 break;
30856 case DW_OP_implicit_pointer:
30857 case DW_OP_GNU_implicit_pointer:
30858 hstate.add_int (val2->v.val_int);
30859 break;
30860 case DW_OP_entry_value:
30861 case DW_OP_GNU_entry_value:
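/* The nested expression is hashed by pointer identity; structurally equal
   but distinct copies may therefore hash differently, which at worst loses
   a sharing opportunity.  */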
30862 hstate.add_object (val1->v.val_loc);
30863 break;
30864 case DW_OP_regval_type:
30865 case DW_OP_deref_type:
30866 case DW_OP_GNU_regval_type:
30867 case DW_OP_GNU_deref_type:
30868 {
30869 unsigned int byte_size
30870 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30871 unsigned int encoding
30872 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30873 hstate.add_object (val1->v.val_int);
30874 hstate.add_object (byte_size);
30875 hstate.add_object (encoding);
30876 }
30877 break;
30878 case DW_OP_convert:
30879 case DW_OP_reinterpret:
30880 case DW_OP_GNU_convert:
30881 case DW_OP_GNU_reinterpret:
30882 if (val1->val_class == dw_val_class_unsigned_const)
30883 {
30884 hstate.add_object (val1->v.val_unsigned);
30885 break;
30886 }
30887 /* FALLTHRU */
30888 case DW_OP_const_type:
30889 case DW_OP_GNU_const_type:
30890 {
30891 unsigned int byte_size
30892 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30893 unsigned int encoding
30894 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30895 hstate.add_object (byte_size);
30896 hstate.add_object (encoding);
30897 if (loc->dw_loc_opc != DW_OP_const_type
30898 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30899 break;
30900 hstate.add_object (val2->val_class);
30901 switch (val2->val_class)
30902 {
30903 case dw_val_class_const:
30904 hstate.add_object (val2->v.val_int);
30905 break;
30906 case dw_val_class_vec:
30907 {
30908 unsigned int elt_size = val2->v.val_vec.elt_size;
30909 unsigned int len = val2->v.val_vec.length;
30910
30911 hstate.add_object (elt_size);
30912 hstate.add_object (len);
30913 hstate.add (val2->v.val_vec.array, len * elt_size);
30914 }
30915 break;
30916 case dw_val_class_const_double:
30917 hstate.add_object (val2->v.val_double.low);
30918 hstate.add_object (val2->v.val_double.high);
30919 break;
30920 case dw_val_class_wide_int:
30921 hstate.add (val2->v.val_wide->get_val (),
30922 get_full_len (*val2->v.val_wide)
30923 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30924 break;
30925 default:
30926 gcc_unreachable ();
30927 }
30928 }
30929 break;
30930
30931 default:
30932 /* Other codes have no operands. */
30933 break;
30934 }
30935 }
30936
30937 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30938
30939 static inline void
30940 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30941 {
30942 dw_loc_descr_ref l;
30943 bool sizes_computed = false;
30944 /* Sizes are computed lazily in the loop below (via size_of_locs), only
30945 when a DW_OP_skip/DW_OP_bra actually needs to be checksummed. */
30946
30947 for (l = loc; l != NULL; l = l->dw_loc_next)
30948 {
30949 enum dwarf_location_atom opc = l->dw_loc_opc;
30950 hstate.add_object (opc);
30951 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30952 {
30953 size_of_locs (loc);
30954 sizes_computed = true;
30955 }
30956 hash_loc_operands (l, hstate);
30957 }
30958 }
30959
30960 /* Compute hash of the whole location list LIST_HEAD. */
30961
30962 static inline void
30963 hash_loc_list (dw_loc_list_ref list_head)
30964 {
30965 dw_loc_list_ref curr = list_head;
30966 inchash::hash hstate;
30967
30968 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30969 {
30970 hstate.add (curr->begin, strlen (curr->begin) + 1);
30971 hstate.add (curr->end, strlen (curr->end) + 1);
30972 hstate.add_object (curr->vbegin);
30973 hstate.add_object (curr->vend);
30974 if (curr->section)
30975 hstate.add (curr->section, strlen (curr->section) + 1);
30976 hash_locs (curr->expr, hstate);
30977 }
30978 list_head->hash = hstate.end ();
30979 }
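
/* Note that the begin/end labels, the section name and the view numbers all
   participate in the hash: two lists can only be shared when they cover the
   same ranges and views with the same expressions, mirroring the comparison
   done in loc_list_hasher::equal below.  */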
30980
30981 /* Return true if X and Y opcodes have the same operands. */
30982
30983 static inline bool
30984 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30985 {
30986 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30987 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30988 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30989 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30990
30991 switch (x->dw_loc_opc)
30992 {
30993 case DW_OP_const4u:
30994 case DW_OP_const8u:
30995 if (x->dtprel)
30996 goto hash_addr;
30997 /* FALLTHRU */
30998 case DW_OP_const1u:
30999 case DW_OP_const1s:
31000 case DW_OP_const2u:
31001 case DW_OP_const2s:
31002 case DW_OP_const4s:
31003 case DW_OP_const8s:
31004 case DW_OP_constu:
31005 case DW_OP_consts:
31006 case DW_OP_pick:
31007 case DW_OP_plus_uconst:
31008 case DW_OP_breg0:
31009 case DW_OP_breg1:
31010 case DW_OP_breg2:
31011 case DW_OP_breg3:
31012 case DW_OP_breg4:
31013 case DW_OP_breg5:
31014 case DW_OP_breg6:
31015 case DW_OP_breg7:
31016 case DW_OP_breg8:
31017 case DW_OP_breg9:
31018 case DW_OP_breg10:
31019 case DW_OP_breg11:
31020 case DW_OP_breg12:
31021 case DW_OP_breg13:
31022 case DW_OP_breg14:
31023 case DW_OP_breg15:
31024 case DW_OP_breg16:
31025 case DW_OP_breg17:
31026 case DW_OP_breg18:
31027 case DW_OP_breg19:
31028 case DW_OP_breg20:
31029 case DW_OP_breg21:
31030 case DW_OP_breg22:
31031 case DW_OP_breg23:
31032 case DW_OP_breg24:
31033 case DW_OP_breg25:
31034 case DW_OP_breg26:
31035 case DW_OP_breg27:
31036 case DW_OP_breg28:
31037 case DW_OP_breg29:
31038 case DW_OP_breg30:
31039 case DW_OP_breg31:
31040 case DW_OP_regx:
31041 case DW_OP_fbreg:
31042 case DW_OP_piece:
31043 case DW_OP_deref_size:
31044 case DW_OP_xderef_size:
31045 return valx1->v.val_int == valy1->v.val_int;
31046 case DW_OP_skip:
31047 case DW_OP_bra:
31048 /* If splitting debug info, the use of DW_OP_GNU_addr_index
31049 can cause irrelevant differences in dw_loc_addr. */
31050 gcc_assert (valx1->val_class == dw_val_class_loc
31051 && valy1->val_class == dw_val_class_loc
31052 && (dwarf_split_debug_info
31053 || x->dw_loc_addr == y->dw_loc_addr));
31054 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
31055 case DW_OP_implicit_value:
31056 if (valx1->v.val_unsigned != valy1->v.val_unsigned
31057 || valx2->val_class != valy2->val_class)
31058 return false;
31059 switch (valx2->val_class)
31060 {
31061 case dw_val_class_const:
31062 return valx2->v.val_int == valy2->v.val_int;
31063 case dw_val_class_vec:
31064 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31065 && valx2->v.val_vec.length == valy2->v.val_vec.length
31066 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31067 valx2->v.val_vec.elt_size
31068 * valx2->v.val_vec.length) == 0;
31069 case dw_val_class_const_double:
31070 return valx2->v.val_double.low == valy2->v.val_double.low
31071 && valx2->v.val_double.high == valy2->v.val_double.high;
31072 case dw_val_class_wide_int:
31073 return *valx2->v.val_wide == *valy2->v.val_wide;
31074 case dw_val_class_addr:
31075 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
31076 default:
31077 gcc_unreachable ();
31078 }
31079 case DW_OP_bregx:
31080 case DW_OP_bit_piece:
31081 return valx1->v.val_int == valy1->v.val_int
31082 && valx2->v.val_int == valy2->v.val_int;
31083 case DW_OP_addr:
31084 hash_addr:
31085 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
31086 case DW_OP_GNU_addr_index:
31087 case DW_OP_addrx:
31088 case DW_OP_GNU_const_index:
31089 case DW_OP_constx:
31090 {
31091 rtx ax1 = valx1->val_entry->addr.rtl;
31092 rtx ay1 = valy1->val_entry->addr.rtl;
31093 return rtx_equal_p (ax1, ay1);
31094 }
31095 case DW_OP_implicit_pointer:
31096 case DW_OP_GNU_implicit_pointer:
31097 return valx1->val_class == dw_val_class_die_ref
31098 && valx1->val_class == valy1->val_class
31099 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
31100 && valx2->v.val_int == valy2->v.val_int;
31101 case DW_OP_entry_value:
31102 case DW_OP_GNU_entry_value:
31103 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
31104 case DW_OP_const_type:
31105 case DW_OP_GNU_const_type:
31106 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
31107 || valx2->val_class != valy2->val_class)
31108 return false;
31109 switch (valx2->val_class)
31110 {
31111 case dw_val_class_const:
31112 return valx2->v.val_int == valy2->v.val_int;
31113 case dw_val_class_vec:
31114 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31115 && valx2->v.val_vec.length == valy2->v.val_vec.length
31116 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31117 valx2->v.val_vec.elt_size
31118 * valx2->v.val_vec.length) == 0;
31119 case dw_val_class_const_double:
31120 return valx2->v.val_double.low == valy2->v.val_double.low
31121 && valx2->v.val_double.high == valy2->v.val_double.high;
31122 case dw_val_class_wide_int:
31123 return *valx2->v.val_wide == *valy2->v.val_wide;
31124 default:
31125 gcc_unreachable ();
31126 }
31127 case DW_OP_regval_type:
31128 case DW_OP_deref_type:
31129 case DW_OP_GNU_regval_type:
31130 case DW_OP_GNU_deref_type:
31131 return valx1->v.val_int == valy1->v.val_int
31132 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31133 case DW_OP_convert:
31134 case DW_OP_reinterpret:
31135 case DW_OP_GNU_convert:
31136 case DW_OP_GNU_reinterpret:
31137 if (valx1->val_class != valy1->val_class)
31138 return false;
31139 if (valx1->val_class == dw_val_class_unsigned_const)
31140 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31141 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31142 case DW_OP_GNU_parameter_ref:
31143 return valx1->val_class == dw_val_class_die_ref
31144 && valx1->val_class == valy1->val_class
31145 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31146 default:
31147 /* Other codes have no operands. */
31148 return true;
31149 }
31150 }
31151
31152 /* Return true if DWARF location expressions X and Y are the same. */
31153
31154 static inline bool
31155 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31156 {
31157 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31158 if (x->dw_loc_opc != y->dw_loc_opc
31159 || x->dtprel != y->dtprel
31160 || !compare_loc_operands (x, y))
31161 break;
31162 return x == NULL && y == NULL;
31163 }
31164
31165 /* Hashtable helpers. */
31166
31167 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31168 {
31169 static inline hashval_t hash (const dw_loc_list_struct *);
31170 static inline bool equal (const dw_loc_list_struct *,
31171 const dw_loc_list_struct *);
31172 };
31173
31174 /* Return precomputed hash of location list X. */
31175
31176 inline hashval_t
31177 loc_list_hasher::hash (const dw_loc_list_struct *x)
31178 {
31179 return x->hash;
31180 }
31181
31182 /* Return true if location lists A and B are the same. */
31183
31184 inline bool
31185 loc_list_hasher::equal (const dw_loc_list_struct *a,
31186 const dw_loc_list_struct *b)
31187 {
31188 if (a == b)
31189 return 1;
31190 if (a->hash != b->hash)
31191 return 0;
31192 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31193 if (strcmp (a->begin, b->begin) != 0
31194 || strcmp (a->end, b->end) != 0
31195 || (a->section == NULL) != (b->section == NULL)
31196 || (a->section && strcmp (a->section, b->section) != 0)
31197 || a->vbegin != b->vbegin || a->vend != b->vend
31198 || !compare_locs (a->expr, b->expr))
31199 break;
31200 return a == NULL && b == NULL;
31201 }
31202
31203 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31204
31205
31206 /* Recursively optimize location lists referenced from DIE
31207 children and share them whenever possible. */
31208
31209 static void
31210 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31211 {
31212 dw_die_ref c;
31213 dw_attr_node *a;
31214 unsigned ix;
31215 dw_loc_list_struct **slot;
31216 bool drop_locviews = false;
31217 bool has_locviews = false;
31218
31219 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31220 if (AT_class (a) == dw_val_class_loc_list)
31221 {
31222 dw_loc_list_ref list = AT_loc_list (a);
31223 /* TODO: perform some optimizations here, before hashing
31224 the list and storing it into the hash table. */
31225 hash_loc_list (list);
31226 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31227 if (*slot == NULL)
31228 {
31229 *slot = list;
31230 if (loc_list_has_views (list))
31231 gcc_assert (list->vl_symbol);
31232 else if (list->vl_symbol)
31233 {
31234 drop_locviews = true;
31235 list->vl_symbol = NULL;
31236 }
31237 }
31238 else
31239 {
31240 if (list->vl_symbol && !(*slot)->vl_symbol)
31241 drop_locviews = true;
31242 a->dw_attr_val.v.val_loc_list = *slot;
31243 }
31244 }
31245 else if (AT_class (a) == dw_val_class_view_list)
31246 {
31247 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31248 has_locviews = true;
31249 }
31250
31251
31252 if (drop_locviews && has_locviews)
31253 remove_AT (die, DW_AT_GNU_locviews);
31254
31255 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31256 }
31257
31258
31259 /* Recursively assign each location list a unique index into the debug_addr
31260 section. */
31261
31262 static void
31263 index_location_lists (dw_die_ref die)
31264 {
31265 dw_die_ref c;
31266 dw_attr_node *a;
31267 unsigned ix;
31268
31269 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31270 if (AT_class (a) == dw_val_class_loc_list)
31271 {
31272 dw_loc_list_ref list = AT_loc_list (a);
31273 dw_loc_list_ref curr;
31274 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31275 {
31276 /* Don't index an entry that has already been indexed
31277 or won't be output. Make sure skip_loc_list_entry doesn't
31278 call size_of_locs, because that could create a circular dependency:
31279 index_location_lists would then require the address table indexes
31280 to be computed already, yet it keeps adding new entries through
31281 add_addr_table_entry, and the address table index computation
31282 requires that no new entries be added afterwards. In the rare case
31283 of a DWARF[234] location expression >= 64KB, we'll just waste an
31284 unused address table entry for it. */
31285 if (curr->begin_entry != NULL
31286 || skip_loc_list_entry (curr))
31287 continue;
31288
31289 curr->begin_entry
31290 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31291 }
31292 }
31293
31294 FOR_EACH_CHILD (die, c, index_location_lists (c));
31295 }
31296
31297 /* Optimize location lists referenced from DIE
31298 children and share them whenever possible. */
31299
31300 static void
31301 optimize_location_lists (dw_die_ref die)
31302 {
31303 loc_list_hash_type htab (500);
31304 optimize_location_lists_1 (die, &htab);
31305 }
31306 \f
31307 /* Traverse the limbo die list, and add parent/child links. The only
31308 dies without parents that should be here are concrete instances of
31309 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31310 For concrete instances, we can get the parent die from the abstract
31311 instance. */
31312
31313 static void
31314 flush_limbo_die_list (void)
31315 {
31316 limbo_die_node *node;
31317
31318 /* get_context_die calls force_decl_die, which can put new DIEs on the
31319 limbo list in LTO mode when nested functions are put in a different
31320 partition than that of their parent function. */
31321 while ((node = limbo_die_list))
31322 {
31323 dw_die_ref die = node->die;
31324 limbo_die_list = node->next;
31325
31326 if (die->die_parent == NULL)
31327 {
31328 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31329
31330 if (origin && origin->die_parent)
31331 add_child_die (origin->die_parent, die);
31332 else if (is_cu_die (die))
31333 ;
31334 else if (seen_error ())
31335 /* It's OK to be confused by errors in the input. */
31336 add_child_die (comp_unit_die (), die);
31337 else
31338 {
31339 /* In certain situations, the lexical block containing a
31340 nested function can be optimized away, which results
31341 in the nested function die being orphaned. Likewise
31342 with the return type of that nested function. Force
31343 this to be a child of the containing function.
31344
31345 It may happen that even the containing function got fully
31346 inlined and optimized out. In that case we are lost and
31347 assign the empty child. This should not be a big issue, as
31348 the function is likely unreachable too. */
31349 gcc_assert (node->created_for);
31350
31351 if (DECL_P (node->created_for))
31352 origin = get_context_die (DECL_CONTEXT (node->created_for));
31353 else if (TYPE_P (node->created_for))
31354 origin = scope_die_for (node->created_for, comp_unit_die ());
31355 else
31356 origin = comp_unit_die ();
31357
31358 add_child_die (origin, die);
31359 }
31360 }
31361 }
31362 }
31363
31364 /* Reset DIEs so we can output them again. */
31365
31366 static void
31367 reset_dies (dw_die_ref die)
31368 {
31369 dw_die_ref c;
31370
31371 /* Remove stuff we re-generate. */
31372 die->die_mark = 0;
31373 die->die_offset = 0;
31374 die->die_abbrev = 0;
31375 remove_AT (die, DW_AT_sibling);
31376
31377 FOR_EACH_CHILD (die, c, reset_dies (c));
31378 }
31379
31380 /* Output stuff that dwarf requires at the end of every file,
31381 and generate the DWARF-2 debugging info. */
31382
31383 static void
31384 dwarf2out_finish (const char *filename)
31385 {
31386 comdat_type_node *ctnode;
31387 dw_die_ref main_comp_unit_die;
31388 unsigned char checksum[16];
31389 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31390
31391 /* Flush out any latecomers to the limbo party. */
31392 flush_limbo_die_list ();
31393
31394 if (inline_entry_data_table)
31395 gcc_assert (inline_entry_data_table->is_empty ());
31396
31397 if (flag_checking)
31398 {
31399 verify_die (comp_unit_die ());
31400 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31401 verify_die (node->die);
31402 }
31403
31404 /* We shouldn't have any symbols with delayed asm names for
31405 DIEs generated after early finish. */
31406 gcc_assert (deferred_asm_name == NULL);
31407
31408 gen_remaining_tmpl_value_param_die_attribute ();
31409
31410 if (flag_generate_lto || flag_generate_offload)
31411 {
31412 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31413
31414 /* Prune stuff so that dwarf2out_finish runs successfully
31415 for the fat part of the object. */
31416 reset_dies (comp_unit_die ());
31417 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31418 reset_dies (node->die);
31419
31420 hash_table<comdat_type_hasher> comdat_type_table (100);
31421 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31422 {
31423 comdat_type_node **slot
31424 = comdat_type_table.find_slot (ctnode, INSERT);
31425
31426 /* Don't reset types twice. */
31427 if (*slot != HTAB_EMPTY_ENTRY)
31428 continue;
31429
31430 /* Remove the pointer to the line table. */
31431 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31432
31433 if (debug_info_level >= DINFO_LEVEL_TERSE)
31434 reset_dies (ctnode->root_die);
31435
31436 *slot = ctnode;
31437 }
31438
31439 /* Reset die CU symbol so we don't output it twice. */
31440 comp_unit_die ()->die_id.die_symbol = NULL;
31441
31442 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31443 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31444 if (have_macinfo)
31445 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31446
31447 /* Remove indirect string decisions. */
31448 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31449 if (debug_line_str_hash)
31450 {
31451 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31452 debug_line_str_hash = NULL;
31453 }
31454 }
31455
31456 #if ENABLE_ASSERT_CHECKING
31457 {
31458 dw_die_ref die = comp_unit_die (), c;
31459 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31460 }
31461 #endif
31462 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31463 resolve_addr (ctnode->root_die);
31464 resolve_addr (comp_unit_die ());
31465 move_marked_base_types ();
31466
31467 if (dump_file)
31468 {
31469 fprintf (dump_file, "DWARF for %s\n", filename);
31470 print_die (comp_unit_die (), dump_file);
31471 }
31472
31473 /* Initialize sections and labels used for actual assembler output. */
31474 unsigned generation = init_sections_and_labels (false);
31475
31476 /* Traverse the DIEs and add sibling attributes to those DIEs that
31477 have children. */
31478 add_sibling_attributes (comp_unit_die ());
31479 limbo_die_node *node;
31480 for (node = cu_die_list; node; node = node->next)
31481 add_sibling_attributes (node->die);
31482 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31483 add_sibling_attributes (ctnode->root_die);
31484
31485 /* When splitting DWARF info, we put some attributes in the
31486 skeleton compile_unit DIE that remains in the .o, while
31487 most attributes go in the DWO compile_unit_die. */
31488 if (dwarf_split_debug_info)
31489 {
31490 limbo_die_node *cu;
31491 main_comp_unit_die = gen_compile_unit_die (NULL);
31492 if (dwarf_version >= 5)
31493 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31494 cu = limbo_die_list;
31495 gcc_assert (cu->die == main_comp_unit_die);
31496 limbo_die_list = limbo_die_list->next;
31497 cu->next = cu_die_list;
31498 cu_die_list = cu;
31499 }
31500 else
31501 main_comp_unit_die = comp_unit_die ();
31502
31503 /* Output a terminator label for the .text section. */
31504 switch_to_section (text_section);
31505 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31506 if (cold_text_section)
31507 {
31508 switch_to_section (cold_text_section);
31509 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31510 }
31511
31512 /* We can only use the low/high_pc attributes if all of the code was
31513 in .text. */
31514 if (!have_multiple_function_sections
31515 || (dwarf_version < 3 && dwarf_strict))
31516 {
31517 /* Don't add if the CU has no associated code. */
31518 if (text_section_used)
31519 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31520 text_end_label, true);
31521 }
31522 else
31523 {
31524 unsigned fde_idx;
31525 dw_fde_ref fde;
31526 bool range_list_added = false;
31527
31528 if (text_section_used)
31529 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31530 text_end_label, &range_list_added, true);
31531 if (cold_text_section_used)
31532 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31533 cold_end_label, &range_list_added, true);
31534
31535 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31536 {
31537 if (DECL_IGNORED_P (fde->decl))
31538 continue;
31539 if (!fde->in_std_section)
31540 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31541 fde->dw_fde_end, &range_list_added,
31542 true);
31543 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31544 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31545 fde->dw_fde_second_end, &range_list_added,
31546 true);
31547 }
31548
31549 if (range_list_added)
31550 {
31551 /* We need to give .debug_loc and .debug_ranges an appropriate
31552 "base address". Use zero so that these addresses become
31553 absolute. Historically, we've emitted the unexpected
31554 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31555 Emit both to give time for other tools to adapt. */
31556 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31557 if (! dwarf_strict && dwarf_version < 4)
31558 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31559
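/* Passing NULL adds the terminating entry for this range list.  */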
31560 add_ranges (NULL);
31561 }
31562 }
31563
31564 /* AIX Assembler inserts the length, so adjust the reference to match the
31565 offset expected by debuggers. */
31566 strcpy (dl_section_ref, debug_line_section_label);
31567 if (XCOFF_DEBUGGING_INFO)
31568 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31569
31570 if (debug_info_level >= DINFO_LEVEL_TERSE)
31571 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31572 dl_section_ref);
31573
31574 if (have_macinfo)
31575 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31576 macinfo_section_label);
31577
31578 if (dwarf_split_debug_info)
31579 {
31580 if (have_location_lists)
31581 {
31582 /* Since we generate the loclists in the split DWARF .dwo
31583 file itself, we don't need to generate a loclists_base
31584 attribute for the split compile unit DIE. That attribute
31585 (and using relocatable sec_offset FORMs) isn't allowed
31586 for a split compile unit. Only if the .debug_loclists
31587 section was in the main file, would we need to generate a
31588 loclists_base attribute here (for the full or skeleton
31589 unit DIE). */
31590
31591 /* optimize_location_lists calculates the size of the lists,
31592 so index them first, and assign indices to the entries.
31593 Although optimize_location_lists will remove entries from
31594 the table, it only does so for duplicates, and therefore
31595 only reduces ref_counts to 1. */
31596 index_location_lists (comp_unit_die ());
31597 }
31598
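/* Assign each address table entry its final index; these indexes are
   what the DW_FORM_addrx / DW_OP_addrx references (and their GNU_
   equivalents) emitted later will use.  */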
31599 if (addr_index_table != NULL)
31600 {
31601 unsigned int index = 0;
31602 addr_index_table
31603 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31604 (&index);
31605 }
31606 }
31607
31608 loc_list_idx = 0;
31609 if (have_location_lists)
31610 {
31611 optimize_location_lists (comp_unit_die ());
31612 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31613 if (dwarf_version >= 5 && dwarf_split_debug_info)
31614 assign_location_list_indexes (comp_unit_die ());
31615 }
31616
31617 save_macinfo_strings ();
31618
31619 if (dwarf_split_debug_info)
31620 {
31621 unsigned int index = 0;
31622
31623 /* Add attributes common to skeleton compile_units and
31624 type_units. Because these attributes include strings, it
31625 must be done before freezing the string table. Top-level
31626 skeleton die attrs are added when the skeleton type unit is
31627 created, so ensure it is created by this point. */
31628 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31629 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31630 }
31631
31632 /* Output all of the compilation units. We put the main one last so that
31633 the offsets are available to output_pubnames. */
31634 for (node = cu_die_list; node; node = node->next)
31635 output_comp_unit (node->die, 0, NULL);
31636
31637 hash_table<comdat_type_hasher> comdat_type_table (100);
31638 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31639 {
31640 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31641
31642 /* Don't output duplicate types. */
31643 if (*slot != HTAB_EMPTY_ENTRY)
31644 continue;
31645
31646 /* Add a pointer to the line table for the main compilation unit
31647 so that the debugger can make sense of DW_AT_decl_file
31648 attributes. */
31649 if (debug_info_level >= DINFO_LEVEL_TERSE)
31650 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31651 (!dwarf_split_debug_info
31652 ? dl_section_ref
31653 : debug_skeleton_line_section_label));
31654
31655 output_comdat_type_unit (ctnode, false);
31656 *slot = ctnode;
31657 }
31658
31659 if (dwarf_split_debug_info)
31660 {
31661 int mark;
31662 struct md5_ctx ctx;
31663
31664 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31665 index_rnglists ();
31666
31667 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31668 md5_init_ctx (&ctx);
31669 mark = 0;
31670 die_checksum (comp_unit_die (), &ctx, &mark);
31671 unmark_all_dies (comp_unit_die ());
31672 md5_finish_ctx (&ctx, checksum);
31673
31674 if (dwarf_version < 5)
31675 {
31676 /* Use the first 8 bytes of the checksum as the dwo_id,
31677 and add it to both comp-unit DIEs. */
31678 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31679 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31680 }
31681
31682 /* Add the base offset of the ranges table to the skeleton
31683 comp-unit DIE. */
31684 if (!vec_safe_is_empty (ranges_table))
31685 {
31686 if (dwarf_version >= 5)
31687 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31688 ranges_base_label);
31689 else
31690 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31691 ranges_section_label);
31692 }
31693
31694 output_addr_table ();
31695 }
31696
31697 /* Output the main compilation unit if non-empty or if .debug_macinfo
31698 or .debug_macro will be emitted. */
31699 output_comp_unit (comp_unit_die (), have_macinfo,
31700 dwarf_split_debug_info ? checksum : NULL);
31701
31702 if (dwarf_split_debug_info && info_section_emitted)
31703 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31704
31705 /* Output the abbreviation table. */
31706 if (vec_safe_length (abbrev_die_table) != 1)
31707 {
31708 switch_to_section (debug_abbrev_section);
31709 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31710 output_abbrev_section ();
31711 }
31712
31713 /* Output location list section if necessary. */
31714 if (have_location_lists)
31715 {
31716 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31717 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31718 /* Output the location lists info. */
31719 switch_to_section (debug_loc_section);
31720 if (dwarf_version >= 5)
31721 {
31722 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31723 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31724 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31725 dw2_asm_output_data (4, 0xffffffff,
31726 "Initial length escape value indicating "
31727 "64-bit DWARF extension");
31728 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31729 "Length of Location Lists");
31730 ASM_OUTPUT_LABEL (asm_out_file, l1);
31731 output_dwarf_version ();
31732 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31733 dw2_asm_output_data (1, 0, "Segment Size");
31734 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31735 "Offset Entry Count");
31736 }
31737 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31738 if (dwarf_version >= 5 && dwarf_split_debug_info)
31739 {
31740 unsigned int save_loc_list_idx = loc_list_idx;
31741 loc_list_idx = 0;
31742 output_loclists_offsets (comp_unit_die ());
31743 gcc_assert (save_loc_list_idx == loc_list_idx);
31744 }
31745 output_location_lists (comp_unit_die ());
31746 if (dwarf_version >= 5)
31747 ASM_OUTPUT_LABEL (asm_out_file, l2);
31748 }
31749
31750 output_pubtables ();
31751
31752 /* Output the address range information if a CU (.debug_info section)
31753 was emitted. We output an empty table even if we had no functions
31754 to put in it. This is because the consumer has no way to tell the
31755 difference between an empty table that we omitted and a failure to
31756 generate a table that would have contained data. */
31757 if (info_section_emitted)
31758 {
31759 switch_to_section (debug_aranges_section);
31760 output_aranges ();
31761 }
31762
31763 /* Output ranges section if necessary. */
31764 if (!vec_safe_is_empty (ranges_table))
31765 {
31766 if (dwarf_version >= 5)
31767 output_rnglists (generation);
31768 else
31769 output_ranges ();
31770 }
31771
31772 /* Have to end the macro section. */
31773 if (have_macinfo)
31774 {
31775 switch_to_section (debug_macinfo_section);
31776 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31777 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31778 : debug_skeleton_line_section_label, false);
31779 dw2_asm_output_data (1, 0, "End compilation unit");
31780 }
31781
31782 /* Output the source line correspondence table. We must do this
31783 even if there is no line information. Otherwise, on an empty
31784 translation unit, we will generate a present, but empty,
31785 .debug_info section. IRIX 6.5 `nm' will then complain when
31786 examining the file. This is done late so that any filenames
31787 used by the debug_info section are marked as 'used'. */
31788 switch_to_section (debug_line_section);
31789 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31790 if (! output_asm_line_debug_info ())
31791 output_line_info (false);
31792 else if (asm_outputs_debug_line_str ())
31793 {
31794 /* When gas outputs DWARF5 .debug_line[_str] then we have to
31795 tell it the comp_dir and main file name for the zero entry
31796 line table. */
31797 const char *comp_dir, *filename0;
31798
31799 comp_dir = comp_dir_string ();
31800 if (comp_dir == NULL)
31801 comp_dir = "";
31802
31803 filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
31804 if (filename0 == NULL)
31805 filename0 = "";
31806
31807 fprintf (asm_out_file, "\t.file 0 ");
31808 output_quoted_string (asm_out_file, remap_debug_filename (comp_dir));
31809 fputc (' ', asm_out_file);
31810 output_quoted_string (asm_out_file, remap_debug_filename (filename0));
31811 fputc ('\n', asm_out_file);
31812 }
31813
31814 if (dwarf_split_debug_info && info_section_emitted)
31815 {
31816 switch_to_section (debug_skeleton_line_section);
31817 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31818 output_line_info (true);
31819 }
31820
31821 /* If we emitted any indirect strings, output the string table too. */
31822 if (debug_str_hash || skeleton_debug_str_hash)
31823 output_indirect_strings ();
31824 if (debug_line_str_hash)
31825 {
31826 switch_to_section (debug_line_str_section);
31827 const enum dwarf_form form = DW_FORM_line_strp;
31828 debug_line_str_hash->traverse<enum dwarf_form,
31829 output_indirect_string> (form);
31830 }
31831
31832 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31833 symview_upper_bound = 0;
31834 if (zero_view_p)
31835 bitmap_clear (zero_view_p);
31836 }
31837
31838 /* Returns a hash value for X (which really is a variable_value_struct). */
31839
31840 inline hashval_t
31841 variable_value_hasher::hash (variable_value_struct *x)
31842 {
31843 return (hashval_t) x->decl_id;
31844 }
31845
31846 /* Return nonzero if decl_id of variable_value_struct X is the same as
31847 UID of decl Y. */
31848
31849 inline bool
31850 variable_value_hasher::equal (variable_value_struct *x, tree y)
31851 {
31852 return x->decl_id == DECL_UID (y);
31853 }
31854
31855 /* Helper function for resolve_variable_value: handle
31856 DW_OP_GNU_variable_value in one location expression.
31857 Return true if the exprloc has been changed into a loclist. */
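
/* For a DW_OP_GNU_variable_value whose decl belongs to the current function,
   the cases handled below are, in order:
   - the decl already has a DIE: rewrite the operand into a DIE reference;
   - its location is a multi-entry location list: either turn the whole
     attribute into a loclist (for attributes that accept one), or emit a
     DW_TAG_variable DIE for the operand to refer to;
   - its location is a single expression: splice that expression in place
     of the DW_OP_GNU_variable_value operation.  */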
31858
31859 static bool
31860 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31861 {
31862 dw_loc_descr_ref next;
31863 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31864 {
31865 next = loc->dw_loc_next;
31866 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31867 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31868 continue;
31869
31870 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31871 if (DECL_CONTEXT (decl) != current_function_decl)
31872 continue;
31873
31874 dw_die_ref ref = lookup_decl_die (decl);
31875 if (ref)
31876 {
31877 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31878 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31879 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31880 continue;
31881 }
31882 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31883 if (l == NULL)
31884 continue;
31885 if (l->dw_loc_next)
31886 {
31887 if (AT_class (a) != dw_val_class_loc)
31888 continue;
31889 switch (a->dw_attr)
31890 {
31891 /* The following attributes allow both exprloc and loclist
31892 classes, so we can change the attribute into a loclist. */
31893 case DW_AT_location:
31894 case DW_AT_string_length:
31895 case DW_AT_return_addr:
31896 case DW_AT_data_member_location:
31897 case DW_AT_frame_base:
31898 case DW_AT_segment:
31899 case DW_AT_static_link:
31900 case DW_AT_use_location:
31901 case DW_AT_vtable_elem_location:
31902 if (prev)
31903 {
31904 prev->dw_loc_next = NULL;
31905 prepend_loc_descr_to_each (l, AT_loc (a));
31906 }
31907 if (next)
31908 add_loc_descr_to_each (l, next);
31909 a->dw_attr_val.val_class = dw_val_class_loc_list;
31910 a->dw_attr_val.val_entry = NULL;
31911 a->dw_attr_val.v.val_loc_list = l;
31912 have_location_lists = true;
31913 return true;
31914 /* The following attributes allow both exprloc and reference
31915 classes, so if the whole expression is a single
31916 DW_OP_GNU_variable_value we could transform it into a reference. */
31917 case DW_AT_byte_size:
31918 case DW_AT_bit_size:
31919 case DW_AT_lower_bound:
31920 case DW_AT_upper_bound:
31921 case DW_AT_bit_stride:
31922 case DW_AT_count:
31923 case DW_AT_allocated:
31924 case DW_AT_associated:
31925 case DW_AT_byte_stride:
31926 if (prev == NULL && next == NULL)
31927 break;
31928 /* FALLTHRU */
31929 default:
31930 if (dwarf_strict)
31931 continue;
31932 break;
31933 }
31934 /* Create DW_TAG_variable that we can refer to. */
31935 gen_decl_die (decl, NULL_TREE, NULL,
31936 lookup_decl_die (current_function_decl));
31937 ref = lookup_decl_die (decl);
31938 if (ref)
31939 {
31940 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31941 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31942 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31943 }
31944 continue;
31945 }
31946 if (prev)
31947 {
31948 prev->dw_loc_next = l->expr;
31949 add_loc_descr (&prev->dw_loc_next, next);
31950 free_loc_descr (loc, NULL);
31951 next = prev->dw_loc_next;
31952 }
31953 else
31954 {
31955 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31956 add_loc_descr (&loc, next);
31957 next = loc;
31958 }
31959 loc = prev;
31960 }
31961 return false;
31962 }
31963
31964 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31965
31966 static void
31967 resolve_variable_value (dw_die_ref die)
31968 {
31969 dw_attr_node *a;
31970 dw_loc_list_ref loc;
31971 unsigned ix;
31972
31973 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31974 switch (AT_class (a))
31975 {
31976 case dw_val_class_loc:
31977 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31978 break;
31979 /* FALLTHRU */
31980 case dw_val_class_loc_list:
31981 loc = AT_loc_list (a);
31982 gcc_assert (loc);
31983 for (; loc; loc = loc->dw_loc_next)
31984 resolve_variable_value_in_expr (a, loc->expr);
31985 break;
31986 default:
31987 break;
31988 }
31989 }
31990
31991 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31992 temporaries in the current function. */
31993
31994 static void
31995 resolve_variable_values (void)
31996 {
31997 if (!variable_value_hash || !current_function_decl)
31998 return;
31999
32000 struct variable_value_struct *node
32001 = variable_value_hash->find_with_hash (current_function_decl,
32002 DECL_UID (current_function_decl));
32003
32004 if (node == NULL)
32005 return;
32006
32007 unsigned int i;
32008 dw_die_ref die;
32009 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
32010 resolve_variable_value (die);
32011 }
32012
32013 /* Helper function for note_variable_value, handle one location
32014 expression. */
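
/* A DIE noted here still refers to a function-local variable that has no
   DIE yet; it is queued in variable_value_hash, keyed by the variable's
   containing function, so that resolve_variable_values can retry it when
   that function is the one being compiled.  */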
32015
32016 static void
32017 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
32018 {
32019 for (; loc; loc = loc->dw_loc_next)
32020 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
32021 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
32022 {
32023 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
32024 dw_die_ref ref = lookup_decl_die (decl);
32025 if (! ref && (flag_generate_lto || flag_generate_offload))
32026 {
32027 /* ??? This is somewhat a hack, because we do not create DIEs
32028 for variables not in BLOCK trees early; but when generating
32029 early LTO output we need the dw_val_class_decl_ref to be
32030 fully resolved. For fat LTO objects we'd also like to
32031 undo this after the LTO dwarf output. */
32032 gcc_assert (DECL_CONTEXT (decl));
32033 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
32034 gcc_assert (ctx != NULL);
32035 gen_decl_die (decl, NULL_TREE, NULL, ctx);
32036 ref = lookup_decl_die (decl);
32037 gcc_assert (ref != NULL);
32038 }
32039 if (ref)
32040 {
32041 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
32042 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
32043 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
32044 continue;
32045 }
32046 if (VAR_P (decl)
32047 && DECL_CONTEXT (decl)
32048 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
32049 && lookup_decl_die (DECL_CONTEXT (decl)))
32050 {
32051 if (!variable_value_hash)
32052 variable_value_hash
32053 = hash_table<variable_value_hasher>::create_ggc (10);
32054
32055 tree fndecl = DECL_CONTEXT (decl);
32056 struct variable_value_struct *node;
32057 struct variable_value_struct **slot
32058 = variable_value_hash->find_slot_with_hash (fndecl,
32059 DECL_UID (fndecl),
32060 INSERT);
32061 if (*slot == NULL)
32062 {
32063 node = ggc_cleared_alloc<variable_value_struct> ();
32064 node->decl_id = DECL_UID (fndecl);
32065 *slot = node;
32066 }
32067 else
32068 node = *slot;
32069
32070 vec_safe_push (node->dies, die);
32071 }
32072 }
32073 }
32074
32075 /* Walk the tree DIE and note DIEs that still have a
32076 DW_OP_GNU_variable_value with a dw_val_class_decl_ref operand. */
32077
32078 static void
32079 note_variable_value (dw_die_ref die)
32080 {
32081 dw_die_ref c;
32082 dw_attr_node *a;
32083 dw_loc_list_ref loc;
32084 unsigned ix;
32085
32086 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
32087 switch (AT_class (a))
32088 {
32089 case dw_val_class_loc_list:
32090 loc = AT_loc_list (a);
32091 gcc_assert (loc);
32092 if (!loc->noted_variable_value)
32093 {
32094 loc->noted_variable_value = 1;
32095 for (; loc; loc = loc->dw_loc_next)
32096 note_variable_value_in_expr (die, loc->expr);
32097 }
32098 break;
32099 case dw_val_class_loc:
32100 note_variable_value_in_expr (die, AT_loc (a));
32101 break;
32102 default:
32103 break;
32104 }
32105
32106 /* Mark children. */
32107 FOR_EACH_CHILD (die, c, note_variable_value (c));
32108 }
32109
32110 /* Perform any cleanups needed after the early debug generation pass
32111 has run. */
32112
32113 static void
32114 dwarf2out_early_finish (const char *filename)
32115 {
32116 set_early_dwarf s;
32117 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
32118
32119 /* PCH might result in the DW_AT_producer string being restored from the
32120 header compilation, so always fill it with an empty string initially
32121 and overwrite it only here. */
32122 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
32123 producer_string = gen_producer_string ();
32124 producer->dw_attr_val.v.val_str->refcount--;
32125 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
32126
32127 /* Add the name for the main input file now. We delayed this from
32128 dwarf2out_init to avoid complications with PCH. */
32129 add_filename_attribute (comp_unit_die (), remap_debug_filename (filename));
32130 add_comp_dir_attribute (comp_unit_die ());
32131
32132 /* With LTO early dwarf was really finished at compile-time, so make
32133 sure to adjust the phase after annotating the LTRANS CU DIE. */
32134 if (in_lto_p)
32135 {
32136 early_dwarf_finished = true;
32137 if (dump_file)
32138 {
32139 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32140 print_die (comp_unit_die (), dump_file);
32141 }
32142 return;
32143 }
32144
32145 /* Walk through the list of incomplete types again, trying once more to
32146 emit full debugging info for them. */
32147 retry_incomplete_types ();
32148
32149 /* The point here is to flush out the limbo list so that it is empty
32150 and we don't need to stream it for LTO. */
32151 flush_limbo_die_list ();
32152
32153 gen_scheduled_generic_parms_dies ();
32154 gen_remaining_tmpl_value_param_die_attribute ();
32155
32156 /* Add DW_AT_linkage_name for all deferred DIEs. */
32157 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32158 {
32159 tree decl = node->created_for;
32160 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32161 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32162 ended up in deferred_asm_name before we knew it was
32163 constant and never written to disk. */
32164 && DECL_ASSEMBLER_NAME (decl))
32165 {
32166 add_linkage_attr (node->die, decl);
32167 move_linkage_attr (node->die);
32168 }
32169 }
32170 deferred_asm_name = NULL;
32171
32172 if (flag_eliminate_unused_debug_types)
32173 prune_unused_types ();
32174
32175 /* Generate separate COMDAT sections for type DIEs. */
32176 if (use_debug_types)
32177 {
32178 break_out_comdat_types (comp_unit_die ());
32179
32180 /* Each new type_unit DIE was added to the limbo die list when created.
32181 Since these have all been added to comdat_type_list, clear the
32182 limbo die list. */
32183 limbo_die_list = NULL;
32184
32185 /* For each new comdat type unit, copy declarations for incomplete
32186 types to make the new unit self-contained (i.e., no direct
32187 references to the main compile unit). */
32188 for (comdat_type_node *ctnode = comdat_type_list;
32189 ctnode != NULL; ctnode = ctnode->next)
32190 copy_decls_for_unworthy_types (ctnode->root_die);
32191 copy_decls_for_unworthy_types (comp_unit_die ());
32192
32193 /* In the process of copying declarations from one unit to another,
32194 we may have left some declarations behind that are no longer
32195 referenced. Prune them. */
32196 prune_unused_types ();
32197 }
32198
32199 /* Traverse the DIEs and note those that still have a
32200 DW_OP_GNU_variable_value with a dw_val_class_decl_ref operand. */
32201 note_variable_value (comp_unit_die ());
32202 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32203 note_variable_value (node->die);
32204 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32205 ctnode = ctnode->next)
32206 note_variable_value (ctnode->root_die);
32207 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32208 note_variable_value (node->die);
32209
32210 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32211 both the main_cu and all skeleton TUs. Making this call unconditional
32212 would end up either adding a second copy of the AT_pubnames attribute, or
32213 requiring a special case in add_top_level_skeleton_die_attrs. */
32214 if (!dwarf_split_debug_info)
32215 add_AT_pubnames (comp_unit_die ());
32216
32217 /* The early debug phase is now finished. */
32218 early_dwarf_finished = true;
32219 if (dump_file)
32220 {
32221 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32222 print_die (comp_unit_die (), dump_file);
32223 }
32224
32225 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32226 if ((!flag_generate_lto && !flag_generate_offload)
32227 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32228 copy_lto_debug_sections operation of the simple object support in
32229 libiberty is not implemented for them yet. */
32230 || TARGET_PECOFF || TARGET_COFF)
32231 return;
32232
32233 /* Now that we are going to output for LTO, initialize sections and labels
32234 to the LTO variants. We don't need a random-seed postfix like other
32235 LTO sections have, since linking the LTO debug sections into one in a
32236 partial link is fine. */
32237 init_sections_and_labels (true);
32238
32239 /* The output below is modeled after dwarf2out_finish with all
32240 location related output removed and some LTO specific changes.
32241 Some refactoring might make both smaller and easier to match up. */
32242
32243 /* Traverse the DIEs and add sibling attributes to those DIEs
32244 that have children. */
32245 add_sibling_attributes (comp_unit_die ());
32246 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32247 add_sibling_attributes (node->die);
32248 for (comdat_type_node *ctnode = comdat_type_list;
32249 ctnode != NULL; ctnode = ctnode->next)
32250 add_sibling_attributes (ctnode->root_die);
32251
32252 /* AIX Assembler inserts the length, so adjust the reference to match the
32253 offset expected by debuggers. */
32254 strcpy (dl_section_ref, debug_line_section_label);
32255 if (XCOFF_DEBUGGING_INFO)
32256 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32257
32258 if (debug_info_level >= DINFO_LEVEL_TERSE)
32259 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32260
32261 if (have_macinfo)
32262 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32263 macinfo_section_label);
32264
32265 save_macinfo_strings ();
32266
32267 if (dwarf_split_debug_info)
32268 {
32269 unsigned int index = 0;
32270 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32271 }
32272
32273 /* Output all of the compilation units. We put the main one last so that
32274 the offsets are available to output_pubnames. */
32275 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32276 output_comp_unit (node->die, 0, NULL);
32277
32278 hash_table<comdat_type_hasher> comdat_type_table (100);
32279 for (comdat_type_node *ctnode = comdat_type_list;
32280 ctnode != NULL; ctnode = ctnode->next)
32281 {
32282 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32283
32284 /* Don't output duplicate types. */
32285 if (*slot != HTAB_EMPTY_ENTRY)
32286 continue;
32287
32288 /* Add a pointer to the line table for the main compilation unit
32289 so that the debugger can make sense of DW_AT_decl_file
32290 attributes. */
32291 if (debug_info_level >= DINFO_LEVEL_TERSE)
32292 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32293 (!dwarf_split_debug_info
32294 ? debug_line_section_label
32295 : debug_skeleton_line_section_label));
32296
32297 output_comdat_type_unit (ctnode, true);
32298 *slot = ctnode;
32299 }
32300
32301 /* Stick a unique symbol to the main debuginfo section. */
32302 compute_comp_unit_symbol (comp_unit_die ());
32303
32304 /* Output the main compilation unit. We always need it if only for
32305 the CU symbol. */
32306 output_comp_unit (comp_unit_die (), true, NULL);
32307
32308 /* Output the abbreviation table. */
32309 if (vec_safe_length (abbrev_die_table) != 1)
32310 {
32311 switch_to_section (debug_abbrev_section);
32312 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32313 output_abbrev_section ();
32314 }
32315
32316 /* Have to end the macro section. */
32317 if (have_macinfo)
32318 {
32319 /* We have to save macinfo state if we need to output it again
32320 for the FAT part of the object. */
32321 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32322 if (flag_fat_lto_objects)
32323 macinfo_table = macinfo_table->copy ();
32324
32325 switch_to_section (debug_macinfo_section);
32326 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32327 output_macinfo (debug_line_section_label, true);
32328 dw2_asm_output_data (1, 0, "End compilation unit");
32329
32330 if (flag_fat_lto_objects)
32331 {
32332 vec_free (macinfo_table);
32333 macinfo_table = saved_macinfo_table;
32334 }
32335 }
32336
32337 /* Emit a skeleton debug_line section. */
32338 switch_to_section (debug_line_section);
32339 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32340 output_line_info (true);
32341
32342 /* If we emitted any indirect strings, output the string table too. */
32343 if (debug_str_hash || skeleton_debug_str_hash)
32344 output_indirect_strings ();
32345 if (debug_line_str_hash)
32346 {
32347 switch_to_section (debug_line_str_section);
32348 const enum dwarf_form form = DW_FORM_line_strp;
32349 debug_line_str_hash->traverse<enum dwarf_form,
32350 output_indirect_string> (form);
32351 }
32352
32353 /* Switch back to the text section. */
32354 switch_to_section (text_section);
32355 }
32356
32357 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32358 within the same process. For use by toplev::finalize. */
32359
32360 void
32361 dwarf2out_c_finalize (void)
32362 {
32363 last_var_location_insn = NULL;
32364 cached_next_real_insn = NULL;
32365 used_rtx_array = NULL;
32366 incomplete_types = NULL;
32367 debug_info_section = NULL;
32368 debug_skeleton_info_section = NULL;
32369 debug_abbrev_section = NULL;
32370 debug_skeleton_abbrev_section = NULL;
32371 debug_aranges_section = NULL;
32372 debug_addr_section = NULL;
32373 debug_macinfo_section = NULL;
32374 debug_line_section = NULL;
32375 debug_skeleton_line_section = NULL;
32376 debug_loc_section = NULL;
32377 debug_pubnames_section = NULL;
32378 debug_pubtypes_section = NULL;
32379 debug_str_section = NULL;
32380 debug_line_str_section = NULL;
32381 debug_str_dwo_section = NULL;
32382 debug_str_offsets_section = NULL;
32383 debug_ranges_section = NULL;
32384 debug_frame_section = NULL;
32385 fde_vec = NULL;
32386 debug_str_hash = NULL;
32387 debug_line_str_hash = NULL;
32388 skeleton_debug_str_hash = NULL;
32389 dw2_string_counter = 0;
32390 have_multiple_function_sections = false;
32391 text_section_used = false;
32392 cold_text_section_used = false;
32393 cold_text_section = NULL;
32394 current_unit_personality = NULL;
32395
32396 early_dwarf = false;
32397 early_dwarf_finished = false;
32398
32399 next_die_offset = 0;
32400 single_comp_unit_die = NULL;
32401 comdat_type_list = NULL;
32402 limbo_die_list = NULL;
32403 file_table = NULL;
32404 decl_die_table = NULL;
32405 common_block_die_table = NULL;
32406 decl_loc_table = NULL;
32407 call_arg_locations = NULL;
32408 call_arg_loc_last = NULL;
32409 call_site_count = -1;
32410 tail_call_site_count = -1;
32411 cached_dw_loc_list_table = NULL;
32412 abbrev_die_table = NULL;
32413 delete dwarf_proc_stack_usage_map;
32414 dwarf_proc_stack_usage_map = NULL;
32415 line_info_label_num = 0;
32416 cur_line_info_table = NULL;
32417 text_section_line_info = NULL;
32418 cold_text_section_line_info = NULL;
32419 separate_line_info = NULL;
32420 info_section_emitted = false;
32421 pubname_table = NULL;
32422 pubtype_table = NULL;
32423 macinfo_table = NULL;
32424 ranges_table = NULL;
32425 ranges_by_label = NULL;
32426 rnglist_idx = 0;
32427 have_location_lists = false;
32428 loclabel_num = 0;
32429 poc_label_num = 0;
32430 last_emitted_file = NULL;
32431 label_num = 0;
32432 tmpl_value_parm_die_table = NULL;
32433 generic_type_instances = NULL;
32434 frame_pointer_fb_offset = 0;
32435 frame_pointer_fb_offset_valid = false;
32436 base_types.release ();
32437 XDELETEVEC (producer_string);
32438 producer_string = NULL;
32439 }
32440
32441 #include "gt-dwarf2out.h"