1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
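/* Worked example (illustrative only, target-dependent): on a target whose
   call instruction pushes the return address, the callee's CFA is the SP
   value from just before the call, so once the prologue has pushed N more
   bytes the unwind rule is typically "CFA = SP + N + size of the return
   address".  The CFA value itself is fixed for the lifetime of the frame;
   only the (register, offset) rule used to recompute it changes.  */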
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types needs to be a
149 vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
157 define type declaration DIE's. */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
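/* For example, with 64-bit DWARF (DWARF_OFFSET_SIZE == 8) an initial length
   is emitted as the 4-byte escape value 0xffffffff followed by an 8-byte
   length, which is what the output routines below do whenever
   DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4.  */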
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
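/* E.g. DWARF_ROUND (5, 4) == 8 and DWARF_ROUND (8, 4) == 8; sizes that are
   already a multiple of BOUNDARY are left unchanged.  */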
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
254 Rather than forcing all the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of the current unit. Used only when the assembler does
286 not support personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
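/* For instance, with a 64-bit HOST_WIDE_INT a value whose minimum unsigned
   precision is 70 bits needs (70 + 63) / 64 == 2 HOST_WIDE_INTs, while a
   small constant held in a wide_int with a large uniform precision still
   needs only one.  */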
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
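/* For example, DW_CFA_offset takes a register number as its first operand
   and a data-alignment-factored offset as its second, so the two functions
   above return dw_cfi_oprnd_reg_num and dw_cfi_oprnd_offset for it,
   respectively.  */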
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
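	      /* -offset & (PTR_SIZE - 1) is the distance up to the next
		 multiple of PTR_SIZE: e.g. with PTR_SIZE == 8 and
		 offset == 13, pad == 3 so that offset + pad == 16.  */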
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697 do it now. This is typically performed to make sure that tables
698 associated with functions are dragged along with them and not discarded by
699 link-time garbage collection. We need to do this on a per-function basis to
700 cope with -ffunction-sections. */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
739 /* Output the call frame information used to record information
740 that relates to calculating the frame pointer, and records the
741 location of saved registers. */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822 /* Use CIE version 3 for DWARF 3; allow DWARF 2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
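      /* For example, a CIE that needs all of these carries the string "zPLR"
	 (the letters are appended in that order below), and the augmentation
	 data that follows is laid out in the same order: personality encoding
	 and routine, then the LSDA encoding, then the FDE pointer encoding.  */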
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942 /* Loop through all of the FDE's. */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the end of the code generated for the prologue
1133 of this function. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the beginning of the code generated for the
1156 epilogue of this function. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
1294 typedef unsigned int var_loc_view;
1295
1296 /* Location lists are ranges + location descriptions for that range,
1297 so you can track variables that are in different places over
1298 their entire life. */
1299 typedef struct GTY(()) dw_loc_list_struct {
1300 dw_loc_list_ref dw_loc_next;
1301 const char *begin; /* Label and addr_entry for start of range */
1302 addr_table_entry *begin_entry;
1303 const char *end; /* Label for end of range */
1304 char *ll_symbol; /* Label for beginning of location list.
1305 Only on head of list. */
1306 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1307 const char *section; /* Section this loclist is relative to */
1308 dw_loc_descr_ref expr;
1309 var_loc_view vbegin, vend;
1310 hashval_t hash;
1311 /* True if all addresses in this and subsequent lists are known to be
1312 resolved. */
1313 bool resolved_addr;
1314 /* True if this list has been replaced by dw_loc_next. */
1315 bool replaced;
1316 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1317 section. */
1318 unsigned char emitted : 1;
1319 /* True if hash field is index rather than hash value. */
1320 unsigned char num_assigned : 1;
1321 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1322 unsigned char offset_emitted : 1;
1323 /* True if note_variable_value_in_expr has been called on it. */
1324 unsigned char noted_variable_value : 1;
1325 /* True if the range should be emitted even if begin and end
1326 are the same. */
1327 bool force;
1328 } dw_loc_list_node;
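/* For example, a variable that lives in a register over one address range
   and in a stack slot over a later range is described by a two-node list;
   each node supplies the begin/end labels of its range together with the
   location expression (the EXPR field above) that is valid across it.  */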
1329
1330 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1331 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1332
1333 /* Convert a DWARF stack opcode into its string name. */
1334
1335 static const char *
1336 dwarf_stack_op_name (unsigned int op)
1337 {
1338 const char *name = get_DW_OP_name (op);
1339
1340 if (name != NULL)
1341 return name;
1342
1343 return "OP_<unknown>";
1344 }
1345
1346 /* Return TRUE iff we're to output location view lists as a separate
1347 attribute next to the location lists, as an extension compatible
1348 with DWARF 2 and above. */
1349
1350 static inline bool
1351 dwarf2out_locviews_in_attribute ()
1352 {
1353 return debug_variable_location_views == 1;
1354 }
1355
1356 /* Return TRUE iff we're to output location view lists as part of the
1357 location lists, as proposed for standardization after DWARF 5. */
1358
1359 static inline bool
1360 dwarf2out_locviews_in_loclist ()
1361 {
1362 #ifndef DW_LLE_view_pair
1363 return false;
1364 #else
1365 return debug_variable_location_views == -1;
1366 #endif
1367 }
1368
1369 /* Return a pointer to a newly allocated location description. Location
1370 descriptions are simple expression terms that can be strung
1371 together to form more complicated location (address) descriptions. */
1372
1373 static inline dw_loc_descr_ref
1374 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1375 unsigned HOST_WIDE_INT oprnd2)
1376 {
1377 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1378
1379 descr->dw_loc_opc = op;
1380 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1381 descr->dw_loc_oprnd1.val_entry = NULL;
1382 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1383 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd2.val_entry = NULL;
1385 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1386
1387 return descr;
1388 }
1389
1390 /* Add a location description term to a location description expression. */
1391
1392 static inline void
1393 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1394 {
1395 dw_loc_descr_ref *d;
1396
1397 /* Find the end of the chain. */
1398 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1399 ;
1400
1401 *d = descr;
1402 }
1403
1404 /* Compare two location operands for exact equality. */
1405
1406 static bool
1407 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1408 {
1409 if (a->val_class != b->val_class)
1410 return false;
1411 switch (a->val_class)
1412 {
1413 case dw_val_class_none:
1414 return true;
1415 case dw_val_class_addr:
1416 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1417
1418 case dw_val_class_offset:
1419 case dw_val_class_unsigned_const:
1420 case dw_val_class_const:
1421 case dw_val_class_unsigned_const_implicit:
1422 case dw_val_class_const_implicit:
1423 case dw_val_class_range_list:
1424 /* These are all HOST_WIDE_INT, signed or unsigned. */
1425 return a->v.val_unsigned == b->v.val_unsigned;
1426
1427 case dw_val_class_loc:
1428 return a->v.val_loc == b->v.val_loc;
1429 case dw_val_class_loc_list:
1430 return a->v.val_loc_list == b->v.val_loc_list;
1431 case dw_val_class_view_list:
1432 return a->v.val_view_list == b->v.val_view_list;
1433 case dw_val_class_die_ref:
1434 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1435 case dw_val_class_fde_ref:
1436 return a->v.val_fde_index == b->v.val_fde_index;
1437 case dw_val_class_symview:
1438 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1439 case dw_val_class_lbl_id:
1440 case dw_val_class_lineptr:
1441 case dw_val_class_macptr:
1442 case dw_val_class_loclistsptr:
1443 case dw_val_class_high_pc:
1444 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1445 case dw_val_class_str:
1446 return a->v.val_str == b->v.val_str;
1447 case dw_val_class_flag:
1448 return a->v.val_flag == b->v.val_flag;
1449 case dw_val_class_file:
1450 case dw_val_class_file_implicit:
1451 return a->v.val_file == b->v.val_file;
1452 case dw_val_class_decl_ref:
1453 return a->v.val_decl_ref == b->v.val_decl_ref;
1454
1455 case dw_val_class_const_double:
1456 return (a->v.val_double.high == b->v.val_double.high
1457 && a->v.val_double.low == b->v.val_double.low);
1458
1459 case dw_val_class_wide_int:
1460 return *a->v.val_wide == *b->v.val_wide;
1461
1462 case dw_val_class_vec:
1463 {
1464 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1465 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1466
1467 return (a_len == b_len
1468 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1469 }
1470
1471 case dw_val_class_data8:
1472 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1473
1474 case dw_val_class_vms_delta:
1475 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1476 && !strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1));
1477
1478 case dw_val_class_discr_value:
1479 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1480 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1481 case dw_val_class_discr_list:
1482 /* It makes no sense to compare two discriminant value lists. */
1483 return false;
1484 }
1485 gcc_unreachable ();
1486 }
1487
1488 /* Compare two location atoms for exact equality. */
1489
1490 static bool
1491 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1492 {
1493 if (a->dw_loc_opc != b->dw_loc_opc)
1494 return false;
1495
1496 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1497 address size, but since we always allocate cleared storage it
1498 should be zero for other types of locations. */
1499 if (a->dtprel != b->dtprel)
1500 return false;
1501
1502 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1503 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1504 }
1505
1506 /* Compare two complete location expressions for exact equality. */
1507
1508 bool
1509 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1510 {
1511 while (1)
1512 {
1513 if (a == b)
1514 return true;
1515 if (a == NULL || b == NULL)
1516 return false;
1517 if (!loc_descr_equal_p_1 (a, b))
1518 return false;
1519
1520 a = a->dw_loc_next;
1521 b = b->dw_loc_next;
1522 }
1523 }
1524
1525
1526 /* Add a constant POLY_OFFSET to a location expression. */
1527
1528 static void
1529 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1530 {
1531 dw_loc_descr_ref loc;
1532 HOST_WIDE_INT *p;
1533
1534 gcc_assert (*list_head != NULL);
1535
1536 if (known_eq (poly_offset, 0))
1537 return;
1538
1539 /* Find the end of the chain. */
1540 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1541 ;
1542
1543 HOST_WIDE_INT offset;
1544 if (!poly_offset.is_constant (&offset))
1545 {
1546 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1547 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1548 return;
1549 }
1550
1551 p = NULL;
1552 if (loc->dw_loc_opc == DW_OP_fbreg
1553 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1554 p = &loc->dw_loc_oprnd1.v.val_int;
1555 else if (loc->dw_loc_opc == DW_OP_bregx)
1556 p = &loc->dw_loc_oprnd2.v.val_int;
1557
1558 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1559 offset. Don't optimize if a signed integer overflow would happen. */
1560 if (p != NULL
1561 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1562 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1563 *p += offset;
1564
1565 else if (offset > 0)
1566 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1567
1568 else
1569 {
1570 loc->dw_loc_next
1571 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1572 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1573 }
1574 }
1575
1576 /* Return a pointer to a newly allocated location description for
1577 REG and OFFSET. */
1578
1579 static inline dw_loc_descr_ref
1580 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1581 {
1582 HOST_WIDE_INT const_offset;
1583 if (offset.is_constant (&const_offset))
1584 {
1585 if (reg <= 31)
1586 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1587 const_offset, 0);
1588 else
1589 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1590 }
1591 else
1592 {
1593 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1594 loc_descr_plus_const (&ret, offset);
1595 return ret;
1596 }
1597 }
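/* Rough usage sketch, for illustration only (REG is a placeholder register
   number; nothing below uses this snippet): an expression meaning "value of
   register REG plus 16" could be built with the helpers above as

     dw_loc_descr_ref x = new_reg_loc_descr (reg, 0);
     add_loc_descr (&x, new_loc_descr (DW_OP_plus_uconst, 16, 0));

   which is semantically equivalent to new_reg_loc_descr (reg, 16), where the
   constant is folded straight into the breg operand.  */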
1598
1599 /* Add a constant OFFSET to a location list. */
1600
1601 static void
1602 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1603 {
1604 dw_loc_list_ref d;
1605 for (d = list_head; d != NULL; d = d->dw_loc_next)
1606 loc_descr_plus_const (&d->expr, offset);
1607 }
1608
1609 #define DWARF_REF_SIZE \
1610 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1611
1612 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1613 In DWARF 4 and earlier it is DW_FORM_data8 with 64 bits; in DWARF 5 it is
1614 DW_FORM_data16 with 128 bits. */
1615 #define DWARF_LARGEST_DATA_FORM_BITS \
1616 (dwarf_version >= 5 ? 128 : 64)
1617
1618 /* Utility inline function for constructing ops that were GNU extensions
1619 before DWARF 5. */
1620 static inline enum dwarf_location_atom
1621 dwarf_OP (enum dwarf_location_atom op)
1622 {
1623 switch (op)
1624 {
1625 case DW_OP_implicit_pointer:
1626 if (dwarf_version < 5)
1627 return DW_OP_GNU_implicit_pointer;
1628 break;
1629
1630 case DW_OP_entry_value:
1631 if (dwarf_version < 5)
1632 return DW_OP_GNU_entry_value;
1633 break;
1634
1635 case DW_OP_const_type:
1636 if (dwarf_version < 5)
1637 return DW_OP_GNU_const_type;
1638 break;
1639
1640 case DW_OP_regval_type:
1641 if (dwarf_version < 5)
1642 return DW_OP_GNU_regval_type;
1643 break;
1644
1645 case DW_OP_deref_type:
1646 if (dwarf_version < 5)
1647 return DW_OP_GNU_deref_type;
1648 break;
1649
1650 case DW_OP_convert:
1651 if (dwarf_version < 5)
1652 return DW_OP_GNU_convert;
1653 break;
1654
1655 case DW_OP_reinterpret:
1656 if (dwarf_version < 5)
1657 return DW_OP_GNU_reinterpret;
1658 break;
1659
1660 default:
1661 break;
1662 }
1663 return op;
1664 }
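/* For example, dwarf_OP (DW_OP_entry_value) yields DW_OP_GNU_entry_value when
   emitting DWARF before version 5 and DW_OP_entry_value otherwise; dwarf_AT
   and dwarf_TAG below apply the same mapping to attributes and tags.  */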
1665
1666 /* Similarly for attributes. */
1667 static inline enum dwarf_attribute
1668 dwarf_AT (enum dwarf_attribute at)
1669 {
1670 switch (at)
1671 {
1672 case DW_AT_call_return_pc:
1673 if (dwarf_version < 5)
1674 return DW_AT_low_pc;
1675 break;
1676
1677 case DW_AT_call_tail_call:
1678 if (dwarf_version < 5)
1679 return DW_AT_GNU_tail_call;
1680 break;
1681
1682 case DW_AT_call_origin:
1683 if (dwarf_version < 5)
1684 return DW_AT_abstract_origin;
1685 break;
1686
1687 case DW_AT_call_target:
1688 if (dwarf_version < 5)
1689 return DW_AT_GNU_call_site_target;
1690 break;
1691
1692 case DW_AT_call_target_clobbered:
1693 if (dwarf_version < 5)
1694 return DW_AT_GNU_call_site_target_clobbered;
1695 break;
1696
1697 case DW_AT_call_parameter:
1698 if (dwarf_version < 5)
1699 return DW_AT_abstract_origin;
1700 break;
1701
1702 case DW_AT_call_value:
1703 if (dwarf_version < 5)
1704 return DW_AT_GNU_call_site_value;
1705 break;
1706
1707 case DW_AT_call_data_value:
1708 if (dwarf_version < 5)
1709 return DW_AT_GNU_call_site_data_value;
1710 break;
1711
1712 case DW_AT_call_all_calls:
1713 if (dwarf_version < 5)
1714 return DW_AT_GNU_all_call_sites;
1715 break;
1716
1717 case DW_AT_call_all_tail_calls:
1718 if (dwarf_version < 5)
1719 return DW_AT_GNU_all_tail_call_sites;
1720 break;
1721
1722 case DW_AT_dwo_name:
1723 if (dwarf_version < 5)
1724 return DW_AT_GNU_dwo_name;
1725 break;
1726
1727 default:
1728 break;
1729 }
1730 return at;
1731 }
1732
1733 /* And similarly for tags. */
1734 static inline enum dwarf_tag
1735 dwarf_TAG (enum dwarf_tag tag)
1736 {
1737 switch (tag)
1738 {
1739 case DW_TAG_call_site:
1740 if (dwarf_version < 5)
1741 return DW_TAG_GNU_call_site;
1742 break;
1743
1744 case DW_TAG_call_site_parameter:
1745 if (dwarf_version < 5)
1746 return DW_TAG_GNU_call_site_parameter;
1747 break;
1748
1749 default:
1750 break;
1751 }
1752 return tag;
1753 }
1754
1755 static unsigned long int get_base_type_offset (dw_die_ref);
1756
1757 /* Return the size of a location descriptor. */
1758
1759 static unsigned long
1760 size_of_loc_descr (dw_loc_descr_ref loc)
1761 {
1762 unsigned long size = 1;
1763
1764 switch (loc->dw_loc_opc)
1765 {
1766 case DW_OP_addr:
1767 size += DWARF2_ADDR_SIZE;
1768 break;
1769 case DW_OP_GNU_addr_index:
1770 case DW_OP_GNU_const_index:
1771 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1772 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1773 break;
1774 case DW_OP_const1u:
1775 case DW_OP_const1s:
1776 size += 1;
1777 break;
1778 case DW_OP_const2u:
1779 case DW_OP_const2s:
1780 size += 2;
1781 break;
1782 case DW_OP_const4u:
1783 case DW_OP_const4s:
1784 size += 4;
1785 break;
1786 case DW_OP_const8u:
1787 case DW_OP_const8s:
1788 size += 8;
1789 break;
1790 case DW_OP_constu:
1791 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1792 break;
1793 case DW_OP_consts:
1794 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1795 break;
1796 case DW_OP_pick:
1797 size += 1;
1798 break;
1799 case DW_OP_plus_uconst:
1800 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1801 break;
1802 case DW_OP_skip:
1803 case DW_OP_bra:
1804 size += 2;
1805 break;
1806 case DW_OP_breg0:
1807 case DW_OP_breg1:
1808 case DW_OP_breg2:
1809 case DW_OP_breg3:
1810 case DW_OP_breg4:
1811 case DW_OP_breg5:
1812 case DW_OP_breg6:
1813 case DW_OP_breg7:
1814 case DW_OP_breg8:
1815 case DW_OP_breg9:
1816 case DW_OP_breg10:
1817 case DW_OP_breg11:
1818 case DW_OP_breg12:
1819 case DW_OP_breg13:
1820 case DW_OP_breg14:
1821 case DW_OP_breg15:
1822 case DW_OP_breg16:
1823 case DW_OP_breg17:
1824 case DW_OP_breg18:
1825 case DW_OP_breg19:
1826 case DW_OP_breg20:
1827 case DW_OP_breg21:
1828 case DW_OP_breg22:
1829 case DW_OP_breg23:
1830 case DW_OP_breg24:
1831 case DW_OP_breg25:
1832 case DW_OP_breg26:
1833 case DW_OP_breg27:
1834 case DW_OP_breg28:
1835 case DW_OP_breg29:
1836 case DW_OP_breg30:
1837 case DW_OP_breg31:
1838 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1839 break;
1840 case DW_OP_regx:
1841 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1842 break;
1843 case DW_OP_fbreg:
1844 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1845 break;
1846 case DW_OP_bregx:
1847 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1848 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1849 break;
1850 case DW_OP_piece:
1851 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1852 break;
1853 case DW_OP_bit_piece:
1854 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1855 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1856 break;
1857 case DW_OP_deref_size:
1858 case DW_OP_xderef_size:
1859 size += 1;
1860 break;
1861 case DW_OP_call2:
1862 size += 2;
1863 break;
1864 case DW_OP_call4:
1865 size += 4;
1866 break;
1867 case DW_OP_call_ref:
1868 case DW_OP_GNU_variable_value:
1869 size += DWARF_REF_SIZE;
1870 break;
1871 case DW_OP_implicit_value:
1872 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1873 + loc->dw_loc_oprnd1.v.val_unsigned;
1874 break;
1875 case DW_OP_implicit_pointer:
1876 case DW_OP_GNU_implicit_pointer:
1877 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1878 break;
1879 case DW_OP_entry_value:
1880 case DW_OP_GNU_entry_value:
1881 {
1882 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1883 size += size_of_uleb128 (op_size) + op_size;
1884 break;
1885 }
1886 case DW_OP_const_type:
1887 case DW_OP_GNU_const_type:
1888 {
1889 unsigned long o
1890 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1891 size += size_of_uleb128 (o) + 1;
1892 switch (loc->dw_loc_oprnd2.val_class)
1893 {
1894 case dw_val_class_vec:
1895 size += loc->dw_loc_oprnd2.v.val_vec.length
1896 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1897 break;
1898 case dw_val_class_const:
1899 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1900 break;
1901 case dw_val_class_const_double:
1902 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1903 break;
1904 case dw_val_class_wide_int:
1905 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1906 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1907 break;
1908 default:
1909 gcc_unreachable ();
1910 }
1911 break;
1912 }
1913 case DW_OP_regval_type:
1914 case DW_OP_GNU_regval_type:
1915 {
1916 unsigned long o
1917 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1918 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1919 + size_of_uleb128 (o);
1920 }
1921 break;
1922 case DW_OP_deref_type:
1923 case DW_OP_GNU_deref_type:
1924 {
1925 unsigned long o
1926 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1927 size += 1 + size_of_uleb128 (o);
1928 }
1929 break;
1930 case DW_OP_convert:
1931 case DW_OP_reinterpret:
1932 case DW_OP_GNU_convert:
1933 case DW_OP_GNU_reinterpret:
1934 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1935 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1936 else
1937 {
1938 unsigned long o
1939 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1940 size += size_of_uleb128 (o);
1941 }
1942 break;
1943 case DW_OP_GNU_parameter_ref:
1944 size += 4;
1945 break;
1946 default:
1947 break;
1948 }
1949
1950 return size;
1951 }
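
/* A couple of worked examples of the above (illustrative only):
   DW_OP_plus_uconst with operand 400 occupies 1 opcode byte plus
   size_of_uleb128 (400) == 2 bytes, i.e. 3 bytes in total, while
   DW_OP_breg5 with offset -8 occupies 1 + size_of_sleb128 (-8) == 2
   bytes.  */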
1952
1953 /* Return the size of a series of location descriptors. */
1954
1955 unsigned long
1956 size_of_locs (dw_loc_descr_ref loc)
1957 {
1958 dw_loc_descr_ref l;
1959 unsigned long size;
1960
1961 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1962 field, to avoid writing to a PCH file. */
1963 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1964 {
1965 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1966 break;
1967 size += size_of_loc_descr (l);
1968 }
1969 if (! l)
1970 return size;
1971
1972 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1973 {
1974 l->dw_loc_addr = size;
1975 size += size_of_loc_descr (l);
1976 }
1977
1978 return size;
1979 }
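
/* Note that the dw_loc_addr values assigned by the second loop above are
   what output_loc_operands later uses to compute the 2-byte operand of
   DW_OP_skip/DW_OP_bra: target->dw_loc_addr - (this->dw_loc_addr + 3),
   the 3 accounting for the opcode byte plus the 2-byte offset itself.  */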
1980
1981 /* Return the size of the value in a DW_AT_discr_value attribute. */
1982
1983 static int
1984 size_of_discr_value (dw_discr_value *discr_value)
1985 {
1986 if (discr_value->pos)
1987 return size_of_uleb128 (discr_value->v.uval);
1988 else
1989 return size_of_sleb128 (discr_value->v.sval);
1990 }
1991
1992 /* Return the size of the value in a DW_AT_discr_list attribute. */
1993
1994 static int
1995 size_of_discr_list (dw_discr_list_ref discr_list)
1996 {
1997 int size = 0;
1998
1999 for (dw_discr_list_ref list = discr_list;
2000 list != NULL;
2001 list = list->dw_discr_next)
2002 {
2003 /* One byte for the discriminant value descriptor, and then one or two
2004 LEB128 numbers, depending on whether it's a single case label or a
2005 range label. */
2006 size += 1;
2007 size += size_of_discr_value (&list->dw_discr_lower_bound);
2008 if (list->dw_discr_range != 0)
2009 size += size_of_discr_value (&list->dw_discr_upper_bound);
2010 }
2011 return size;
2012 }
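
/* For instance (illustrative only), a discriminant list containing a
   single label 3 and a range 10..20 sizes to (1 + 1) + (1 + 1 + 1) = 5
   bytes, assuming each value fits in one LEB128 byte.  */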
2013
2014 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2015 static void get_ref_die_offset_label (char *, dw_die_ref);
2016 static unsigned long int get_ref_die_offset (dw_die_ref);
2017
2018 /* Output location description stack opcode's operands (if any).
2019 The for_eh_or_skip parameter controls whether register numbers are
2020 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2021 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2022 info). This should be suppressed for the cases that have not been converted
2023 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2024
2025 static void
2026 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2027 {
2028 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2029 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2030
2031 switch (loc->dw_loc_opc)
2032 {
2033 #ifdef DWARF2_DEBUGGING_INFO
2034 case DW_OP_const2u:
2035 case DW_OP_const2s:
2036 dw2_asm_output_data (2, val1->v.val_int, NULL);
2037 break;
2038 case DW_OP_const4u:
2039 if (loc->dtprel)
2040 {
2041 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2042 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2043 val1->v.val_addr);
2044 fputc ('\n', asm_out_file);
2045 break;
2046 }
2047 /* FALLTHRU */
2048 case DW_OP_const4s:
2049 dw2_asm_output_data (4, val1->v.val_int, NULL);
2050 break;
2051 case DW_OP_const8u:
2052 if (loc->dtprel)
2053 {
2054 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2055 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2056 val1->v.val_addr);
2057 fputc ('\n', asm_out_file);
2058 break;
2059 }
2060 /* FALLTHRU */
2061 case DW_OP_const8s:
2062 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2063 dw2_asm_output_data (8, val1->v.val_int, NULL);
2064 break;
2065 case DW_OP_skip:
2066 case DW_OP_bra:
2067 {
2068 int offset;
2069
2070 gcc_assert (val1->val_class == dw_val_class_loc);
2071 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2072
2073 dw2_asm_output_data (2, offset, NULL);
2074 }
2075 break;
2076 case DW_OP_implicit_value:
2077 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2078 switch (val2->val_class)
2079 {
2080 case dw_val_class_const:
2081 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2082 break;
2083 case dw_val_class_vec:
2084 {
2085 unsigned int elt_size = val2->v.val_vec.elt_size;
2086 unsigned int len = val2->v.val_vec.length;
2087 unsigned int i;
2088 unsigned char *p;
2089
2090 if (elt_size > sizeof (HOST_WIDE_INT))
2091 {
2092 elt_size /= 2;
2093 len *= 2;
2094 }
2095 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2096 i < len;
2097 i++, p += elt_size)
2098 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2099 "fp or vector constant word %u", i);
2100 }
2101 break;
2102 case dw_val_class_const_double:
2103 {
2104 unsigned HOST_WIDE_INT first, second;
2105
2106 if (WORDS_BIG_ENDIAN)
2107 {
2108 first = val2->v.val_double.high;
2109 second = val2->v.val_double.low;
2110 }
2111 else
2112 {
2113 first = val2->v.val_double.low;
2114 second = val2->v.val_double.high;
2115 }
2116 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2117 first, NULL);
2118 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2119 second, NULL);
2120 }
2121 break;
2122 case dw_val_class_wide_int:
2123 {
2124 int i;
2125 int len = get_full_len (*val2->v.val_wide);
2126 if (WORDS_BIG_ENDIAN)
2127 for (i = len - 1; i >= 0; --i)
2128 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2129 val2->v.val_wide->elt (i), NULL);
2130 else
2131 for (i = 0; i < len; ++i)
2132 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2133 val2->v.val_wide->elt (i), NULL);
2134 }
2135 break;
2136 case dw_val_class_addr:
2137 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2138 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2139 break;
2140 default:
2141 gcc_unreachable ();
2142 }
2143 break;
2144 #else
2145 case DW_OP_const2u:
2146 case DW_OP_const2s:
2147 case DW_OP_const4u:
2148 case DW_OP_const4s:
2149 case DW_OP_const8u:
2150 case DW_OP_const8s:
2151 case DW_OP_skip:
2152 case DW_OP_bra:
2153 case DW_OP_implicit_value:
2154 /* We currently don't make any attempt to make sure these are
2155 aligned properly like we do for the main unwind info, so
2156 don't support emitting things larger than a byte if we're
2157 only doing unwinding. */
2158 gcc_unreachable ();
2159 #endif
2160 case DW_OP_const1u:
2161 case DW_OP_const1s:
2162 dw2_asm_output_data (1, val1->v.val_int, NULL);
2163 break;
2164 case DW_OP_constu:
2165 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2166 break;
2167 case DW_OP_consts:
2168 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2169 break;
2170 case DW_OP_pick:
2171 dw2_asm_output_data (1, val1->v.val_int, NULL);
2172 break;
2173 case DW_OP_plus_uconst:
2174 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2175 break;
2176 case DW_OP_breg0:
2177 case DW_OP_breg1:
2178 case DW_OP_breg2:
2179 case DW_OP_breg3:
2180 case DW_OP_breg4:
2181 case DW_OP_breg5:
2182 case DW_OP_breg6:
2183 case DW_OP_breg7:
2184 case DW_OP_breg8:
2185 case DW_OP_breg9:
2186 case DW_OP_breg10:
2187 case DW_OP_breg11:
2188 case DW_OP_breg12:
2189 case DW_OP_breg13:
2190 case DW_OP_breg14:
2191 case DW_OP_breg15:
2192 case DW_OP_breg16:
2193 case DW_OP_breg17:
2194 case DW_OP_breg18:
2195 case DW_OP_breg19:
2196 case DW_OP_breg20:
2197 case DW_OP_breg21:
2198 case DW_OP_breg22:
2199 case DW_OP_breg23:
2200 case DW_OP_breg24:
2201 case DW_OP_breg25:
2202 case DW_OP_breg26:
2203 case DW_OP_breg27:
2204 case DW_OP_breg28:
2205 case DW_OP_breg29:
2206 case DW_OP_breg30:
2207 case DW_OP_breg31:
2208 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2209 break;
2210 case DW_OP_regx:
2211 {
2212 unsigned r = val1->v.val_unsigned;
2213 if (for_eh_or_skip >= 0)
2214 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2215 gcc_assert (size_of_uleb128 (r)
2216 == size_of_uleb128 (val1->v.val_unsigned));
2217 dw2_asm_output_data_uleb128 (r, NULL);
2218 }
2219 break;
2220 case DW_OP_fbreg:
2221 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2222 break;
2223 case DW_OP_bregx:
2224 {
2225 unsigned r = val1->v.val_unsigned;
2226 if (for_eh_or_skip >= 0)
2227 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2228 gcc_assert (size_of_uleb128 (r)
2229 == size_of_uleb128 (val1->v.val_unsigned));
2230 dw2_asm_output_data_uleb128 (r, NULL);
2231 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2232 }
2233 break;
2234 case DW_OP_piece:
2235 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2236 break;
2237 case DW_OP_bit_piece:
2238 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2239 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2240 break;
2241 case DW_OP_deref_size:
2242 case DW_OP_xderef_size:
2243 dw2_asm_output_data (1, val1->v.val_int, NULL);
2244 break;
2245
2246 case DW_OP_addr:
2247 if (loc->dtprel)
2248 {
2249 if (targetm.asm_out.output_dwarf_dtprel)
2250 {
2251 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2252 DWARF2_ADDR_SIZE,
2253 val1->v.val_addr);
2254 fputc ('\n', asm_out_file);
2255 }
2256 else
2257 gcc_unreachable ();
2258 }
2259 else
2260 {
2261 #ifdef DWARF2_DEBUGGING_INFO
2262 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2263 #else
2264 gcc_unreachable ();
2265 #endif
2266 }
2267 break;
2268
2269 case DW_OP_GNU_addr_index:
2270 case DW_OP_GNU_const_index:
2271 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2272 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2273 "(index into .debug_addr)");
2274 break;
2275
2276 case DW_OP_call2:
2277 case DW_OP_call4:
2278 {
2279 unsigned long die_offset
2280 = get_ref_die_offset (val1->v.val_die_ref.die);
2281 /* Make sure the offset has been computed and that we can encode it as
2282 an operand. */
2283 gcc_assert (die_offset > 0
2284 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2285 ? 0xffff
2286 : 0xffffffff));
2287 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2288 die_offset, NULL);
2289 }
2290 break;
2291
2292 case DW_OP_call_ref:
2293 case DW_OP_GNU_variable_value:
2294 {
2295 char label[MAX_ARTIFICIAL_LABEL_BYTES
2296 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2297 gcc_assert (val1->val_class == dw_val_class_die_ref);
2298 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2299 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2300 }
2301 break;
2302
2303 case DW_OP_implicit_pointer:
2304 case DW_OP_GNU_implicit_pointer:
2305 {
2306 char label[MAX_ARTIFICIAL_LABEL_BYTES
2307 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2308 gcc_assert (val1->val_class == dw_val_class_die_ref);
2309 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2310 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2311 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2312 }
2313 break;
2314
2315 case DW_OP_entry_value:
2316 case DW_OP_GNU_entry_value:
2317 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2318 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2319 break;
2320
2321 case DW_OP_const_type:
2322 case DW_OP_GNU_const_type:
2323 {
2324 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2325 gcc_assert (o);
2326 dw2_asm_output_data_uleb128 (o, NULL);
2327 switch (val2->val_class)
2328 {
2329 case dw_val_class_const:
2330 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2331 dw2_asm_output_data (1, l, NULL);
2332 dw2_asm_output_data (l, val2->v.val_int, NULL);
2333 break;
2334 case dw_val_class_vec:
2335 {
2336 unsigned int elt_size = val2->v.val_vec.elt_size;
2337 unsigned int len = val2->v.val_vec.length;
2338 unsigned int i;
2339 unsigned char *p;
2340
2341 l = len * elt_size;
2342 dw2_asm_output_data (1, l, NULL);
2343 if (elt_size > sizeof (HOST_WIDE_INT))
2344 {
2345 elt_size /= 2;
2346 len *= 2;
2347 }
2348 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2349 i < len;
2350 i++, p += elt_size)
2351 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2352 "fp or vector constant word %u", i);
2353 }
2354 break;
2355 case dw_val_class_const_double:
2356 {
2357 unsigned HOST_WIDE_INT first, second;
2358 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2359
2360 dw2_asm_output_data (1, 2 * l, NULL);
2361 if (WORDS_BIG_ENDIAN)
2362 {
2363 first = val2->v.val_double.high;
2364 second = val2->v.val_double.low;
2365 }
2366 else
2367 {
2368 first = val2->v.val_double.low;
2369 second = val2->v.val_double.high;
2370 }
2371 dw2_asm_output_data (l, first, NULL);
2372 dw2_asm_output_data (l, second, NULL);
2373 }
2374 break;
2375 case dw_val_class_wide_int:
2376 {
2377 int i;
2378 int len = get_full_len (*val2->v.val_wide);
2379 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2380
2381 dw2_asm_output_data (1, len * l, NULL);
2382 if (WORDS_BIG_ENDIAN)
2383 for (i = len - 1; i >= 0; --i)
2384 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2385 else
2386 for (i = 0; i < len; ++i)
2387 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2388 }
2389 break;
2390 default:
2391 gcc_unreachable ();
2392 }
2393 }
2394 break;
2395 case DW_OP_regval_type:
2396 case DW_OP_GNU_regval_type:
2397 {
2398 unsigned r = val1->v.val_unsigned;
2399 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2400 gcc_assert (o);
2401 if (for_eh_or_skip >= 0)
2402 {
2403 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2404 gcc_assert (size_of_uleb128 (r)
2405 == size_of_uleb128 (val1->v.val_unsigned));
2406 }
2407 dw2_asm_output_data_uleb128 (r, NULL);
2408 dw2_asm_output_data_uleb128 (o, NULL);
2409 }
2410 break;
2411 case DW_OP_deref_type:
2412 case DW_OP_GNU_deref_type:
2413 {
2414 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2415 gcc_assert (o);
2416 dw2_asm_output_data (1, val1->v.val_int, NULL);
2417 dw2_asm_output_data_uleb128 (o, NULL);
2418 }
2419 break;
2420 case DW_OP_convert:
2421 case DW_OP_reinterpret:
2422 case DW_OP_GNU_convert:
2423 case DW_OP_GNU_reinterpret:
2424 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2425 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2426 else
2427 {
2428 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2429 gcc_assert (o);
2430 dw2_asm_output_data_uleb128 (o, NULL);
2431 }
2432 break;
2433
2434 case DW_OP_GNU_parameter_ref:
2435 {
2436 unsigned long o;
2437 gcc_assert (val1->val_class == dw_val_class_die_ref);
2438 o = get_ref_die_offset (val1->v.val_die_ref.die);
2439 dw2_asm_output_data (4, o, NULL);
2440 }
2441 break;
2442
2443 default:
2444 /* Other codes have no operands. */
2445 break;
2446 }
2447 }
2448
2449 /* Output a sequence of location operations.
2450 The for_eh_or_skip parameter controls whether register numbers are
2451 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2452 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2453 info). This should be suppressed for the cases that have not been converted
2454 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2455
2456 void
2457 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2458 {
2459 for (; loc != NULL; loc = loc->dw_loc_next)
2460 {
2461 enum dwarf_location_atom opc = loc->dw_loc_opc;
2462 /* Output the opcode. */
2463 if (for_eh_or_skip >= 0
2464 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2465 {
2466 unsigned r = (opc - DW_OP_breg0);
2467 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2468 gcc_assert (r <= 31);
2469 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2470 }
2471 else if (for_eh_or_skip >= 0
2472 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2473 {
2474 unsigned r = (opc - DW_OP_reg0);
2475 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2476 gcc_assert (r <= 31);
2477 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2478 }
2479
2480 dw2_asm_output_data (1, opc,
2481 "%s", dwarf_stack_op_name (opc));
2482
2483 /* Output the operand(s) (if any). */
2484 output_loc_operands (loc, for_eh_or_skip);
2485 }
2486 }
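
/* As an example (not tied to any particular target), a descriptor for
   DW_OP_fbreg -16 comes out as the opcode byte 0x91 followed by the
   single SLEB128 byte 0x70.  When FOR_EH_OR_SKIP is >= 0, the DW_OP_reg*
   and DW_OP_breg* opcodes themselves are remapped through
   DWARF2_FRAME_REG_OUT before being emitted.  */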
2487
2488 /* Output location description stack opcode's operands (if any).
2489 The output is single bytes on a line, suitable for .cfi_escape. */
2490
2491 static void
2492 output_loc_operands_raw (dw_loc_descr_ref loc)
2493 {
2494 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2495 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2496
2497 switch (loc->dw_loc_opc)
2498 {
2499 case DW_OP_addr:
2500 case DW_OP_GNU_addr_index:
2501 case DW_OP_GNU_const_index:
2502 case DW_OP_implicit_value:
2503 /* We cannot output addresses in .cfi_escape, only bytes. */
2504 gcc_unreachable ();
2505
2506 case DW_OP_const1u:
2507 case DW_OP_const1s:
2508 case DW_OP_pick:
2509 case DW_OP_deref_size:
2510 case DW_OP_xderef_size:
2511 fputc (',', asm_out_file);
2512 dw2_asm_output_data_raw (1, val1->v.val_int);
2513 break;
2514
2515 case DW_OP_const2u:
2516 case DW_OP_const2s:
2517 fputc (',', asm_out_file);
2518 dw2_asm_output_data_raw (2, val1->v.val_int);
2519 break;
2520
2521 case DW_OP_const4u:
2522 case DW_OP_const4s:
2523 fputc (',', asm_out_file);
2524 dw2_asm_output_data_raw (4, val1->v.val_int);
2525 break;
2526
2527 case DW_OP_const8u:
2528 case DW_OP_const8s:
2529 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2530 fputc (',', asm_out_file);
2531 dw2_asm_output_data_raw (8, val1->v.val_int);
2532 break;
2533
2534 case DW_OP_skip:
2535 case DW_OP_bra:
2536 {
2537 int offset;
2538
2539 gcc_assert (val1->val_class == dw_val_class_loc);
2540 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2541
2542 fputc (',', asm_out_file);
2543 dw2_asm_output_data_raw (2, offset);
2544 }
2545 break;
2546
2547 case DW_OP_regx:
2548 {
2549 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2550 gcc_assert (size_of_uleb128 (r)
2551 == size_of_uleb128 (val1->v.val_unsigned));
2552 fputc (',', asm_out_file);
2553 dw2_asm_output_data_uleb128_raw (r);
2554 }
2555 break;
2556
2557 case DW_OP_constu:
2558 case DW_OP_plus_uconst:
2559 case DW_OP_piece:
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2562 break;
2563
2564 case DW_OP_bit_piece:
2565 fputc (',', asm_out_file);
2566 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2567 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2568 break;
2569
2570 case DW_OP_consts:
2571 case DW_OP_breg0:
2572 case DW_OP_breg1:
2573 case DW_OP_breg2:
2574 case DW_OP_breg3:
2575 case DW_OP_breg4:
2576 case DW_OP_breg5:
2577 case DW_OP_breg6:
2578 case DW_OP_breg7:
2579 case DW_OP_breg8:
2580 case DW_OP_breg9:
2581 case DW_OP_breg10:
2582 case DW_OP_breg11:
2583 case DW_OP_breg12:
2584 case DW_OP_breg13:
2585 case DW_OP_breg14:
2586 case DW_OP_breg15:
2587 case DW_OP_breg16:
2588 case DW_OP_breg17:
2589 case DW_OP_breg18:
2590 case DW_OP_breg19:
2591 case DW_OP_breg20:
2592 case DW_OP_breg21:
2593 case DW_OP_breg22:
2594 case DW_OP_breg23:
2595 case DW_OP_breg24:
2596 case DW_OP_breg25:
2597 case DW_OP_breg26:
2598 case DW_OP_breg27:
2599 case DW_OP_breg28:
2600 case DW_OP_breg29:
2601 case DW_OP_breg30:
2602 case DW_OP_breg31:
2603 case DW_OP_fbreg:
2604 fputc (',', asm_out_file);
2605 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2606 break;
2607
2608 case DW_OP_bregx:
2609 {
2610 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2611 gcc_assert (size_of_uleb128 (r)
2612 == size_of_uleb128 (val1->v.val_unsigned));
2613 fputc (',', asm_out_file);
2614 dw2_asm_output_data_uleb128_raw (r);
2615 fputc (',', asm_out_file);
2616 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2617 }
2618 break;
2619
2620 case DW_OP_implicit_pointer:
2621 case DW_OP_entry_value:
2622 case DW_OP_const_type:
2623 case DW_OP_regval_type:
2624 case DW_OP_deref_type:
2625 case DW_OP_convert:
2626 case DW_OP_reinterpret:
2627 case DW_OP_GNU_implicit_pointer:
2628 case DW_OP_GNU_entry_value:
2629 case DW_OP_GNU_const_type:
2630 case DW_OP_GNU_regval_type:
2631 case DW_OP_GNU_deref_type:
2632 case DW_OP_GNU_convert:
2633 case DW_OP_GNU_reinterpret:
2634 case DW_OP_GNU_parameter_ref:
2635 gcc_unreachable ();
2636 break;
2637
2638 default:
2639 /* Other codes have no operands. */
2640 break;
2641 }
2642 }
2643
2644 void
2645 output_loc_sequence_raw (dw_loc_descr_ref loc)
2646 {
2647 while (1)
2648 {
2649 enum dwarf_location_atom opc = loc->dw_loc_opc;
2650 /* Output the opcode. */
2651 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2652 {
2653 unsigned r = (opc - DW_OP_breg0);
2654 r = DWARF2_FRAME_REG_OUT (r, 1);
2655 gcc_assert (r <= 31);
2656 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2657 }
2658 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2659 {
2660 unsigned r = (opc - DW_OP_reg0);
2661 r = DWARF2_FRAME_REG_OUT (r, 1);
2662 gcc_assert (r <= 31);
2663 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2664 }
2665 /* Output the opcode. */
2666 fprintf (asm_out_file, "%#x", opc);
2667 output_loc_operands_raw (loc);
2668
2669 if (!loc->dw_loc_next)
2670 break;
2671 loc = loc->dw_loc_next;
2672
2673 fputc (',', asm_out_file);
2674 }
2675 }
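
/* The raw form is what ends up inside .cfi_escape directives; e.g. a
   DW_OP_breg6 descriptor with offset 8 is printed roughly as the byte
   list "0x76,0x8", with successive descriptors likewise separated by
   commas.  */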
2676
2677 /* This function builds a dwarf location descriptor sequence from a
2678 dw_cfa_location, adding the given OFFSET to the result of the
2679 expression. */
2680
2681 struct dw_loc_descr_node *
2682 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2683 {
2684 struct dw_loc_descr_node *head, *tmp;
2685
2686 offset += cfa->offset;
2687
2688 if (cfa->indirect)
2689 {
2690 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2691 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2692 head->dw_loc_oprnd1.val_entry = NULL;
2693 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2694 add_loc_descr (&head, tmp);
2695 loc_descr_plus_const (&head, offset);
2696 }
2697 else
2698 head = new_reg_loc_descr (cfa->reg, offset);
2699
2700 return head;
2701 }
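
/* For example, with a non-indirect CFA of register 7 plus 16 and an
   extra OFFSET of 8, the result is a single register-relative descriptor
   for register 7 with offset 24 (DW_OP_breg7 or DW_OP_bregx, depending
   on the register number); in the indirect case the base register
   descriptor is followed by DW_OP_deref and then the constant
   addition.  */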
2702
2703 /* This function builds a dwarf location descriptor sequence for
2704 the address at OFFSET from the CFA when stack is aligned to
2705 ALIGNMENT byte. */
2706
2707 struct dw_loc_descr_node *
2708 build_cfa_aligned_loc (dw_cfa_location *cfa,
2709 poly_int64 offset, HOST_WIDE_INT alignment)
2710 {
2711 struct dw_loc_descr_node *head;
2712 unsigned int dwarf_fp
2713 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2714
2715 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2716 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2717 {
2718 head = new_reg_loc_descr (dwarf_fp, 0);
2719 add_loc_descr (&head, int_loc_descriptor (alignment));
2720 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2721 loc_descr_plus_const (&head, offset);
2722 }
2723 else
2724 head = new_reg_loc_descr (dwarf_fp, offset);
2725 return head;
2726 }
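
/* In the realigned case the sequence above computes, in effect,
   (FP & ALIGNMENT) + OFFSET, ALIGNMENT presumably being an alignment
   mask such as -16 for a 16-byte boundary; otherwise the address is
   simply the frame pointer plus OFFSET.  */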
2727 \f
2728 /* And now, the support for symbolic debugging information. */
2729
2730 /* .debug_str support. */
2731
2732 static void dwarf2out_init (const char *);
2733 static void dwarf2out_finish (const char *);
2734 static void dwarf2out_early_finish (const char *);
2735 static void dwarf2out_assembly_start (void);
2736 static void dwarf2out_define (unsigned int, const char *);
2737 static void dwarf2out_undef (unsigned int, const char *);
2738 static void dwarf2out_start_source_file (unsigned, const char *);
2739 static void dwarf2out_end_source_file (unsigned);
2740 static void dwarf2out_function_decl (tree);
2741 static void dwarf2out_begin_block (unsigned, unsigned);
2742 static void dwarf2out_end_block (unsigned, unsigned);
2743 static bool dwarf2out_ignore_block (const_tree);
2744 static void dwarf2out_early_global_decl (tree);
2745 static void dwarf2out_late_global_decl (tree);
2746 static void dwarf2out_type_decl (tree, int);
2747 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2748 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2749 dw_die_ref);
2750 static void dwarf2out_abstract_function (tree);
2751 static void dwarf2out_var_location (rtx_insn *);
2752 static void dwarf2out_inline_entry (tree);
2753 static void dwarf2out_size_function (tree);
2754 static void dwarf2out_begin_function (tree);
2755 static void dwarf2out_end_function (unsigned int);
2756 static void dwarf2out_register_main_translation_unit (tree unit);
2757 static void dwarf2out_set_name (tree, tree);
2758 static void dwarf2out_register_external_die (tree decl, const char *sym,
2759 unsigned HOST_WIDE_INT off);
2760 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2761 unsigned HOST_WIDE_INT *off);
2762
2763 /* The debug hooks structure. */
2764
2765 const struct gcc_debug_hooks dwarf2_debug_hooks =
2766 {
2767 dwarf2out_init,
2768 dwarf2out_finish,
2769 dwarf2out_early_finish,
2770 dwarf2out_assembly_start,
2771 dwarf2out_define,
2772 dwarf2out_undef,
2773 dwarf2out_start_source_file,
2774 dwarf2out_end_source_file,
2775 dwarf2out_begin_block,
2776 dwarf2out_end_block,
2777 dwarf2out_ignore_block,
2778 dwarf2out_source_line,
2779 dwarf2out_begin_prologue,
2780 #if VMS_DEBUGGING_INFO
2781 dwarf2out_vms_end_prologue,
2782 dwarf2out_vms_begin_epilogue,
2783 #else
2784 debug_nothing_int_charstar,
2785 debug_nothing_int_charstar,
2786 #endif
2787 dwarf2out_end_epilogue,
2788 dwarf2out_begin_function,
2789 dwarf2out_end_function, /* end_function */
2790 dwarf2out_register_main_translation_unit,
2791 dwarf2out_function_decl, /* function_decl */
2792 dwarf2out_early_global_decl,
2793 dwarf2out_late_global_decl,
2794 dwarf2out_type_decl, /* type_decl */
2795 dwarf2out_imported_module_or_decl,
2796 dwarf2out_die_ref_for_decl,
2797 dwarf2out_register_external_die,
2798 debug_nothing_tree, /* deferred_inline_function */
2799 /* The DWARF 2 backend tries to reduce debugging bloat by not
2800 emitting the abstract description of inline functions until
2801 something tries to reference them. */
2802 dwarf2out_abstract_function, /* outlining_inline_function */
2803 debug_nothing_rtx_code_label, /* label */
2804 debug_nothing_int, /* handle_pch */
2805 dwarf2out_var_location,
2806 dwarf2out_inline_entry, /* inline_entry */
2807 dwarf2out_size_function, /* size_function */
2808 dwarf2out_switch_text_section,
2809 dwarf2out_set_name,
2810 1, /* start_end_main_source_file */
2811 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2812 };
2813
2814 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2815 {
2816 dwarf2out_init,
2817 debug_nothing_charstar,
2818 debug_nothing_charstar,
2819 dwarf2out_assembly_start,
2820 debug_nothing_int_charstar,
2821 debug_nothing_int_charstar,
2822 debug_nothing_int_charstar,
2823 debug_nothing_int,
2824 debug_nothing_int_int, /* begin_block */
2825 debug_nothing_int_int, /* end_block */
2826 debug_true_const_tree, /* ignore_block */
2827 dwarf2out_source_line, /* source_line */
2828 debug_nothing_int_int_charstar, /* begin_prologue */
2829 debug_nothing_int_charstar, /* end_prologue */
2830 debug_nothing_int_charstar, /* begin_epilogue */
2831 debug_nothing_int_charstar, /* end_epilogue */
2832 debug_nothing_tree, /* begin_function */
2833 debug_nothing_int, /* end_function */
2834 debug_nothing_tree, /* register_main_translation_unit */
2835 debug_nothing_tree, /* function_decl */
2836 debug_nothing_tree, /* early_global_decl */
2837 debug_nothing_tree, /* late_global_decl */
2838 debug_nothing_tree_int, /* type_decl */
2839 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2840 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2841 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2842 debug_nothing_tree, /* deferred_inline_function */
2843 debug_nothing_tree, /* outlining_inline_function */
2844 debug_nothing_rtx_code_label, /* label */
2845 debug_nothing_int, /* handle_pch */
2846 debug_nothing_rtx_insn, /* var_location */
2847 debug_nothing_tree, /* inline_entry */
2848 debug_nothing_tree, /* size_function */
2849 debug_nothing_void, /* switch_text_section */
2850 debug_nothing_tree_tree, /* set_name */
2851 0, /* start_end_main_source_file */
2852 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2853 };
2854 \f
2855 /* NOTE: In the comments in this file, many references are made to
2856 "Debugging Information Entries". This term is abbreviated as `DIE'
2857 throughout the remainder of this file. */
2858
2859 /* An internal representation of the DWARF output is built, and then
2860 walked to generate the DWARF debugging info. The walk of the internal
2861 representation is done after the entire program has been compiled.
2862 The types below are used to describe the internal representation. */
2863
2864 /* Whether to put type DIEs into their own section .debug_types instead
2865    of making them part of the .debug_info section.  Only supported for
2866    Dwarf V4 or higher, and only when the user hasn't disabled it through
2867    -fno-debug-types-section.  It is more efficient to put them in
2868    separate comdat sections since the linker will then be able to
2869    remove duplicates.  But not all tools support .debug_types sections
2870    yet.  For Dwarf V5 or higher .debug_types doesn't exist any more;
2871    such units use the DW_UT_type unit type in the .debug_info section.  */
2872
2873 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2874
2875 /* Various DIE's use offsets relative to the beginning of the
2876 .debug_info section to refer to each other. */
2877
2878 typedef long int dw_offset;
2879
2880 struct comdat_type_node;
2881
2882 /* The entries in the line_info table more-or-less mirror the opcodes
2883 that are used in the real dwarf line table. Arrays of these entries
2884 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2885 supported. */
2886
2887 enum dw_line_info_opcode {
2888 /* Emit DW_LNE_set_address; the operand is the label index. */
2889 LI_set_address,
2890
2891 /* Emit a row to the matrix with the given line. This may be done
2892 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2893 special opcodes. */
2894 LI_set_line,
2895
2896 /* Emit a DW_LNS_set_file. */
2897 LI_set_file,
2898
2899 /* Emit a DW_LNS_set_column. */
2900 LI_set_column,
2901
2902 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2903 LI_negate_stmt,
2904
2905 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2906 LI_set_prologue_end,
2907 LI_set_epilogue_begin,
2908
2909 /* Emit a DW_LNE_set_discriminator. */
2910 LI_set_discriminator,
2911
2912 /* Output a Fixed Advance PC; the target PC is the label index; the
2913 base PC is the previous LI_adv_address or LI_set_address entry.
2914 We only use this when emitting debug views without assembler
2915 support, at explicit user request. Ideally, we should only use
2916 it when the offset might be zero but we can't tell: it's the only
2917 way to maybe change the PC without resetting the view number. */
2918 LI_adv_address
2919 };
2920
2921 typedef struct GTY(()) dw_line_info_struct {
2922 enum dw_line_info_opcode opcode;
2923 unsigned int val;
2924 } dw_line_info_entry;
2925
2926
2927 struct GTY(()) dw_line_info_table {
2928 /* The label that marks the end of this section. */
2929 const char *end_label;
2930
2931 /* The values for the last row of the matrix, as collected in the table.
2932 These are used to minimize the changes to the next row. */
2933 unsigned int file_num;
2934 unsigned int line_num;
2935 unsigned int column_num;
2936 int discrim_num;
2937 bool is_stmt;
2938 bool in_use;
2939
2940 /* This denotes the NEXT view number.
2941
2942 If it is 0, it is known that the NEXT view will be the first view
2943 at the given PC.
2944
2945 If it is -1, we're forcing the view number to be reset, e.g. at a
2946 function entry.
2947
2948 The meaning of other nonzero values depends on whether we're
2949 computing views internally or leaving it for the assembler to do
2950 so. If we're emitting them internally, view denotes the view
2951 number since the last known advance of PC. If we're leaving it
2952 for the assembler, it denotes the LVU label number that we're
2953 going to ask the assembler to assign. */
2954 var_loc_view view;
2955
2956 /* This counts the number of symbolic views emitted in this table
2957 since the latest view reset. Its max value, over all tables,
2958 sets symview_upper_bound. */
2959 var_loc_view symviews_since_reset;
2960
2961 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2962 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2963 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
2964 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
2965
2966 vec<dw_line_info_entry, va_gc> *entries;
2967 };
2968
2969 /* This is an upper bound for view numbers that the assembler may
2970 assign to symbolic views output in this translation. It is used to
2971 decide how big a field to use to represent view numbers in
2972 symview-classed attributes. */
2973
2974 static var_loc_view symview_upper_bound;
2975
2976 /* If we're keeping track of location views and their reset points, and
2977 INSN is a reset point (i.e., it necessarily advances the PC), mark
2978 the next view in TABLE as reset. */
2979
2980 static void
2981 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
2982 {
2983 if (!debug_internal_reset_location_views)
2984 return;
2985
2986 /* Maybe turn (part of?) this test into a default target hook. */
2987 int reset = 0;
2988
2989 if (targetm.reset_location_view)
2990 reset = targetm.reset_location_view (insn);
2991
2992 if (reset)
2993 ;
2994 else if (JUMP_TABLE_DATA_P (insn))
2995 reset = 1;
2996 else if (GET_CODE (insn) == USE
2997 || GET_CODE (insn) == CLOBBER
2998 || GET_CODE (insn) == ASM_INPUT
2999 || asm_noperands (insn) >= 0)
3000 ;
3001 else if (get_attr_min_length (insn) > 0)
3002 reset = 1;
3003
3004 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3005 RESET_NEXT_VIEW (table->view);
3006 }
3007
3008 /* Each DIE attribute has a field specifying the attribute kind,
3009 a link to the next attribute in the chain, and an attribute value.
3010 Attributes are typically linked below the DIE they modify. */
3011
3012 typedef struct GTY(()) dw_attr_struct {
3013 enum dwarf_attribute dw_attr;
3014 dw_val_node dw_attr_val;
3015 }
3016 dw_attr_node;
3017
3018
3019 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3020 The children of each node form a circular list linked by
3021 die_sib. die_child points to the node *before* the "first" child node. */
3022
3023 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3024 union die_symbol_or_type_node
3025 {
3026 const char * GTY ((tag ("0"))) die_symbol;
3027 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3028 }
3029 GTY ((desc ("%0.comdat_type_p"))) die_id;
3030 vec<dw_attr_node, va_gc> *die_attr;
3031 dw_die_ref die_parent;
3032 dw_die_ref die_child;
3033 dw_die_ref die_sib;
3034 dw_die_ref die_definition; /* ref from a specification to its definition */
3035 dw_offset die_offset;
3036 unsigned long die_abbrev;
3037 int die_mark;
3038 unsigned int decl_id;
3039 enum dwarf_tag die_tag;
3040 /* Die is used and must not be pruned as unused. */
3041 BOOL_BITFIELD die_perennial_p : 1;
3042 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3043 /* For an external ref to die_symbol if die_offset contains an extra
3044 offset to that symbol. */
3045 BOOL_BITFIELD with_offset : 1;
3046 /* Whether this DIE was removed from the DIE tree, for example via
3047      prune_unused_types.  The DIE lookup routines do not consider
3048      such DIEs to be present.  */
3049 BOOL_BITFIELD removed : 1;
3050 /* Lots of spare bits. */
3051 }
3052 die_node;
3053
3054 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3055 static bool early_dwarf;
3056 static bool early_dwarf_finished;
3057 struct set_early_dwarf {
3058 bool saved;
3059 set_early_dwarf () : saved(early_dwarf)
3060 {
3061 gcc_assert (! early_dwarf_finished);
3062 early_dwarf = true;
3063 }
3064 ~set_early_dwarf () { early_dwarf = saved; }
3065 };
3066
3067 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3068 #define FOR_EACH_CHILD(die, c, expr) do { \
3069 c = die->die_child; \
3070 if (c) do { \
3071 c = c->die_sib; \
3072 expr; \
3073 } while (c != die->die_child); \
3074 } while (0)
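
/* For instance, counting the children of DIE can be written as:

     dw_die_ref c;
     unsigned count = 0;
     FOR_EACH_CHILD (die, c, count++);

   Because the list is circular and die_child names the last child, the
   macro starts from c->die_sib, i.e. the first child.  */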
3075
3076 /* The pubname structure */
3077
3078 typedef struct GTY(()) pubname_struct {
3079 dw_die_ref die;
3080 const char *name;
3081 }
3082 pubname_entry;
3083
3084
3085 struct GTY(()) dw_ranges {
3086 const char *label;
3087 /* If this is positive, it's a block number, otherwise it's a
3088 bitwise-negated index into dw_ranges_by_label. */
3089 int num;
3090 /* Index for the range list for DW_FORM_rnglistx. */
3091 unsigned int idx : 31;
3092   /* True if this range might be in a different section from the
3093      previous entry.  */
3094 unsigned int maybe_new_sec : 1;
3095 };
3096
3097 /* A structure to hold a macinfo entry. */
3098
3099 typedef struct GTY(()) macinfo_struct {
3100 unsigned char code;
3101 unsigned HOST_WIDE_INT lineno;
3102 const char *info;
3103 }
3104 macinfo_entry;
3105
3106
3107 struct GTY(()) dw_ranges_by_label {
3108 const char *begin;
3109 const char *end;
3110 };
3111
3112 /* The comdat type node structure. */
3113 struct GTY(()) comdat_type_node
3114 {
3115 dw_die_ref root_die;
3116 dw_die_ref type_die;
3117 dw_die_ref skeleton_die;
3118 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3119 comdat_type_node *next;
3120 };
3121
3122 /* A list of DIEs for which we can't determine ancestry (parent_die
3123 field) just yet. Later in dwarf2out_finish we will fill in the
3124 missing bits. */
3125 typedef struct GTY(()) limbo_die_struct {
3126 dw_die_ref die;
3127 /* The tree for which this DIE was created. We use this to
3128 determine ancestry later. */
3129 tree created_for;
3130 struct limbo_die_struct *next;
3131 }
3132 limbo_die_node;
3133
3134 typedef struct skeleton_chain_struct
3135 {
3136 dw_die_ref old_die;
3137 dw_die_ref new_die;
3138 struct skeleton_chain_struct *parent;
3139 }
3140 skeleton_chain_node;
3141
3142 /* Define a macro which returns nonzero for a TYPE_DECL which was
3143 implicitly generated for a type.
3144
3145 Note that, unlike the C front-end (which generates a NULL named
3146 TYPE_DECL node for each complete tagged type, each array type,
3147 and each function type node created) the C++ front-end generates
3148 a _named_ TYPE_DECL node for each tagged type node created.
3149 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3150 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3151 front-end, but for each type, tagged or not. */
3152
3153 #define TYPE_DECL_IS_STUB(decl) \
3154 (DECL_NAME (decl) == NULL_TREE \
3155 || (DECL_ARTIFICIAL (decl) \
3156 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3157 /* This is necessary for stub decls that \
3158 appear in nested inline functions. */ \
3159 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3160 && (decl_ultimate_origin (decl) \
3161 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3162
3163 /* Information concerning the compilation unit's programming
3164 language, and compiler version. */
3165
3166 /* Fixed size portion of the DWARF compilation unit header. */
3167 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3168 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3169 + (dwarf_version >= 5 ? 4 : 3))
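/* E.g. with 32-bit DWARF (4-byte initial length, 4-byte offsets) this is
   4 + 4 + 3 = 11 bytes for DWARF 2-4 (version, abbrev offset, address
   size) and 12 bytes for DWARF 5, which adds a unit type byte.  */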
3170
3171 /* Fixed size portion of the DWARF comdat type unit header. */
3172 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3173 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3174 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3175
3176 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3177 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3178 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3179
3180 /* Fixed size portion of public names info. */
3181 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3182
3183 /* Fixed size portion of the address range info. */
3184 #define DWARF_ARANGES_HEADER_SIZE \
3185 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3186 DWARF2_ADDR_SIZE * 2) \
3187 - DWARF_INITIAL_LENGTH_SIZE)
3188
3189 /* Size of padding portion in the address range info. It must be
3190 aligned to twice the pointer size. */
3191 #define DWARF_ARANGES_PAD_SIZE \
3192 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3193 DWARF2_ADDR_SIZE * 2) \
3194 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
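/* E.g. with 32-bit DWARF and 8-byte addresses the unrounded header is
   4 + 4 + 4 = 12 bytes (the final 4 covering version, address size and
   segment selector size); DWARF_ROUND brings that to 16, so
   DWARF_ARANGES_HEADER_SIZE is 12 and DWARF_ARANGES_PAD_SIZE is 4.  */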
3195
3196 /* Use assembler line directives if available. */
3197 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3198 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3199 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3200 #else
3201 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3202 #endif
3203 #endif
3204
3205 /* Use assembler views in line directives if available. */
3206 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3207 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3208 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3209 #else
3210 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3211 #endif
3212 #endif
3213
3214 /* Return true if GCC configure detected assembler support for .loc. */
3215
3216 bool
3217 dwarf2out_default_as_loc_support (void)
3218 {
3219 return DWARF2_ASM_LINE_DEBUG_INFO;
3220 #if (GCC_VERSION >= 3000)
3221 # undef DWARF2_ASM_LINE_DEBUG_INFO
3222 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3223 #endif
3224 }
3225
3226 /* Return true if GCC configure detected assembler support for views
3227 in .loc directives. */
3228
3229 bool
3230 dwarf2out_default_as_locview_support (void)
3231 {
3232 return DWARF2_ASM_VIEW_DEBUG_INFO;
3233 #if (GCC_VERSION >= 3000)
3234 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3235 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3236 #endif
3237 }
3238
3239 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3240 view computation, and it refers to a view identifier for which we
3241 will not emit a label because it is known to map to a view number
3242 zero. We won't allocate the bitmap if we're not using assembler
3243 support for location views, but we have to make the variable
3244 visible for GGC and for code that will be optimized out for lack of
3245 support but that's still parsed and compiled. We could abstract it
3246 out with macros, but it's not worth it. */
3247 static GTY(()) bitmap zero_view_p;
3248
3249 /* Evaluate to TRUE iff N is known to identify the first location view
3250 at its PC. When not using assembler location view computation,
3251 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3252    and view label numbers recorded in it are the ones known to be
3253 zero. */
3254 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3255 || (N) == (var_loc_view)-1 \
3256 || (zero_view_p \
3257 && bitmap_bit_p (zero_view_p, (N))))
3258
3259 /* Return true iff we're to emit .loc directives for the assembler to
3260 generate line number sections.
3261
3262 When we're not emitting views, all we need from the assembler is
3263 support for .loc directives.
3264
3265 If we are emitting views, we can only use the assembler's .loc
3266 support if it also supports views.
3267
3268 When the compiler is emitting the line number programs and
3269 computing view numbers itself, it resets view numbers at known PC
3270 changes and counts from that, and then it emits view numbers as
3271 literal constants in locviewlists. There are cases in which the
3272 compiler is not sure about PC changes, e.g. when extra alignment is
3273 requested for a label. In these cases, the compiler may not reset
3274 the view counter, and the potential PC advance in the line number
3275 program will use an opcode that does not reset the view counter
3276 even if the PC actually changes, so that compiler and debug info
3277 consumer can keep view numbers in sync.
3278
3279 When the compiler defers view computation to the assembler, it
3280 emits symbolic view numbers in locviewlists, with the exception of
3281 views known to be zero (forced resets, or reset after
3282 compiler-visible PC changes): instead of emitting symbols for
3283 these, we emit literal zero and assert the assembler agrees with
3284 the compiler's assessment. We could use symbolic views everywhere,
3285 instead of special-casing zero views, but then we'd be unable to
3286 optimize out locviewlists that contain only zeros. */
3287
3288 static bool
3289 output_asm_line_debug_info (void)
3290 {
3291 return (dwarf2out_as_loc_support
3292 && (dwarf2out_as_locview_support
3293 || !debug_variable_location_views));
3294 }
3295
3296 /* Minimum line offset in a special line info. opcode.
3297 This value was chosen to give a reasonable range of values. */
3298 #define DWARF_LINE_BASE -10
3299
3300 /* First special line opcode - leave room for the standard opcodes. */
3301 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3302
3303 /* Range of line offsets in a special line info. opcode. */
3304 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
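/* With these definitions DWARF_LINE_OPCODE_BASE is 13 (DW_LNS_set_isa
   being 0x0c) and DWARF_LINE_RANGE is 242.  Per the DWARF line number
   program encoding, a special opcode then advances the line by
   DWARF_LINE_BASE + ((opcode - opcode_base) % line_range) and the
   address by (opcode - opcode_base) / line_range minimum instruction
   lengths.  */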
3305
3306 /* Flag that indicates the initial value of the is_stmt_start flag.
3307 In the present implementation, we do not mark any lines as
3308 the beginning of a source statement, because that information
3309 is not made available by the GCC front-end. */
3310 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3311
3312 /* Maximum number of operations per instruction bundle. */
3313 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3314 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3315 #endif
3316
3317 /* This location is used by calc_die_sizes() to keep track of
3318    the offset of each DIE within the .debug_info section.  */
3319 static unsigned long next_die_offset;
3320
3321 /* Record the root of the DIE's built for the current compilation unit. */
3322 static GTY(()) dw_die_ref single_comp_unit_die;
3323
3324 /* A list of type DIEs that have been separated into comdat sections. */
3325 static GTY(()) comdat_type_node *comdat_type_list;
3326
3327 /* A list of CU DIEs that have been separated. */
3328 static GTY(()) limbo_die_node *cu_die_list;
3329
3330 /* A list of DIEs with a NULL parent waiting to be relocated. */
3331 static GTY(()) limbo_die_node *limbo_die_list;
3332
3333 /* A list of DIEs for which we may have to generate
3334 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3335 static GTY(()) limbo_die_node *deferred_asm_name;
3336
3337 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3338 {
3339 typedef const char *compare_type;
3340
3341 static hashval_t hash (dwarf_file_data *);
3342 static bool equal (dwarf_file_data *, const char *);
3343 };
3344
3345 /* Filenames referenced by this compilation unit. */
3346 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3347
3348 struct decl_die_hasher : ggc_ptr_hash<die_node>
3349 {
3350 typedef tree compare_type;
3351
3352 static hashval_t hash (die_node *);
3353 static bool equal (die_node *, tree);
3354 };
3355 /* A hash table of references to DIE's that describe declarations.
3356 The key is a DECL_UID() which is a unique number identifying each decl. */
3357 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3358
3359 struct GTY ((for_user)) variable_value_struct {
3360 unsigned int decl_id;
3361 vec<dw_die_ref, va_gc> *dies;
3362 };
3363
3364 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3365 {
3366 typedef tree compare_type;
3367
3368 static hashval_t hash (variable_value_struct *);
3369 static bool equal (variable_value_struct *, tree);
3370 };
3371 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3372    dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are the
3373    DECL_CONTEXT of the referenced VAR_DECLs.  */
3374 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3375
3376 struct block_die_hasher : ggc_ptr_hash<die_struct>
3377 {
3378 static hashval_t hash (die_struct *);
3379 static bool equal (die_struct *, die_struct *);
3380 };
3381
3382 /* A hash table of references to DIE's that describe COMMON blocks.
3383 The key is DECL_UID() ^ die_parent. */
3384 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3385
3386 typedef struct GTY(()) die_arg_entry_struct {
3387 dw_die_ref die;
3388 tree arg;
3389 } die_arg_entry;
3390
3391
3392 /* Node of the variable location list. */
3393 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3394 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3395 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3396 in mode of the EXPR_LIST node and first EXPR_LIST operand
3397 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3398 location or NULL for padding. For larger bitsizes,
3399 mode is 0 and first operand is a CONCAT with bitsize
3400 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3401 NULL as second operand. */
3402 rtx GTY (()) loc;
3403 const char * GTY (()) label;
3404 struct var_loc_node * GTY (()) next;
3405 var_loc_view view;
3406 };
3407
3408 /* Variable location list. */
3409 struct GTY ((for_user)) var_loc_list_def {
3410 struct var_loc_node * GTY (()) first;
3411
3412   /* Pointer to the last-but-one or last element of the
3413      chained list.  If the list is empty, both first and
3414      last are NULL.  If the list contains just one node,
3415      or the last node is certainly not redundant, this points
3416      to the last node; otherwise it points to the last but one.
3417 Do not mark it for GC because it is marked through the chain. */
3418 struct var_loc_node * GTY ((skip ("%h"))) last;
3419
3420 /* Pointer to the last element before section switch,
3421 if NULL, either sections weren't switched or first
3422 is after section switch. */
3423 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3424
3425 /* DECL_UID of the variable decl. */
3426 unsigned int decl_id;
3427 };
3428 typedef struct var_loc_list_def var_loc_list;
3429
3430 /* Call argument location list. */
3431 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3432 rtx GTY (()) call_arg_loc_note;
3433 const char * GTY (()) label;
3434 tree GTY (()) block;
3435 bool tail_call_p;
3436 rtx GTY (()) symbol_ref;
3437 struct call_arg_loc_node * GTY (()) next;
3438 };
3439
3440
3441 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3442 {
3443 typedef const_tree compare_type;
3444
3445 static hashval_t hash (var_loc_list *);
3446 static bool equal (var_loc_list *, const_tree);
3447 };
3448
3449 /* Table of decl location linked lists. */
3450 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3451
3452 /* Head and tail of call_arg_loc chain. */
3453 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3454 static struct call_arg_loc_node *call_arg_loc_last;
3455
3456 /* Number of call sites in the current function. */
3457 static int call_site_count = -1;
3458 /* Number of tail call sites in the current function. */
3459 static int tail_call_site_count = -1;
3460
3461 /* A cached location list. */
3462 struct GTY ((for_user)) cached_dw_loc_list_def {
3463 /* The DECL_UID of the decl that this entry describes. */
3464 unsigned int decl_id;
3465
3466 /* The cached location list. */
3467 dw_loc_list_ref loc_list;
3468 };
3469 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3470
3471 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3472 {
3473
3474 typedef const_tree compare_type;
3475
3476 static hashval_t hash (cached_dw_loc_list *);
3477 static bool equal (cached_dw_loc_list *, const_tree);
3478 };
3479
3480 /* Table of cached location lists. */
3481 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3482
3483 /* A vector of references to DIE's that are uniquely identified by their tag,
3484 presence/absence of children DIE's, and list of attribute/value pairs. */
3485 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3486
3487 /* A hash map to remember the stack usage for DWARF procedures. The value
3488 stored is the stack size difference between before the DWARF procedure
3489    invocation and after it returned.  In other words, for a DWARF procedure
3490 that consumes N stack slots and that pushes M ones, this stores M - N. */
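/* So, for instance, a DWARF procedure that pops its single argument and
   pushes one result maps to 0, and one that pushes a value without
   consuming anything maps to 1.  */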
3491 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3492
3493 /* A global counter for generating labels for line number data. */
3494 static unsigned int line_info_label_num;
3495
3496 /* The current table to which we should emit line number information
3497 for the current function. This will be set up at the beginning of
3498 assembly for the function. */
3499 static GTY(()) dw_line_info_table *cur_line_info_table;
3500
3501 /* The two default tables of line number info. */
3502 static GTY(()) dw_line_info_table *text_section_line_info;
3503 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3504
3505 /* The set of all non-default tables of line number info. */
3506 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3507
3508 /* A flag to tell pubnames/types export if there is an info section to
3509 refer to. */
3510 static bool info_section_emitted;
3511
3512 /* A pointer to the base of a table that contains a list of publicly
3513 accessible names. */
3514 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3515
3516 /* A pointer to the base of a table that contains a list of publicly
3517 accessible types. */
3518 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3519
3520 /* A pointer to the base of a table that contains a list of macro
3521 defines/undefines (and file start/end markers). */
3522 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3523
3524 /* True if .debug_macinfo or .debug_macros section is going to be
3525 emitted. */
3526 #define have_macinfo \
3527 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3528 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3529 && !macinfo_table->is_empty ())
3530
3531 /* Vector of dies for which we should generate .debug_ranges info. */
3532 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3533
3534 /* Vector of pairs of labels referenced in ranges_table. */
3535 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3536
3537 /* Whether we have location lists that need outputting. */
3538 static GTY(()) bool have_location_lists;
3539
3540 /* Unique label counter. */
3541 static GTY(()) unsigned int loclabel_num;
3542
3543 /* Unique label counter for point-of-call tables. */
3544 static GTY(()) unsigned int poc_label_num;
3545
3546 /* The last file entry emitted by maybe_emit_file(). */
3547 static GTY(()) struct dwarf_file_data * last_emitted_file;
3548
3549 /* Number of internal labels generated by gen_internal_sym(). */
3550 static GTY(()) int label_num;
3551
3552 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3553
3554 /* Instances of generic types for which we need to generate debug
3555 info that describes their generic parameters and arguments. That
3556 generation needs to happen once all types are properly laid out so
3557 we do it at the end of compilation. */
3558 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3559
3560 /* Offset from the "steady-state frame pointer" to the frame base,
3561 within the current function. */
3562 static poly_int64 frame_pointer_fb_offset;
3563 static bool frame_pointer_fb_offset_valid;
3564
3565 static vec<dw_die_ref> base_types;
3566
3567 /* Flags to represent a set of attribute classes for attributes that represent
3568 a scalar value (bounds, pointers, ...). */
3569 enum dw_scalar_form
3570 {
3571 dw_scalar_form_constant = 0x01,
3572 dw_scalar_form_exprloc = 0x02,
3573 dw_scalar_form_reference = 0x04
3574 };
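/* The enumerators above are distinct bits, so callers can OR them into a
   mask of acceptable forms and test membership with a bitwise AND.  A
   minimal, hypothetical sketch kept under #if 0:  */
#if 0
static bool
example_scalar_form_allowed_p (int allowed_forms, enum dw_scalar_form form)
{
  /* E.g. allowed_forms = dw_scalar_form_constant | dw_scalar_form_exprloc
     accepts constants and expression locations, but not references.  */
  return (allowed_forms & form) != 0;
}
#endif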
3575
3576 /* Forward declarations for functions defined in this file. */
3577
3578 static int is_pseudo_reg (const_rtx);
3579 static tree type_main_variant (tree);
3580 static int is_tagged_type (const_tree);
3581 static const char *dwarf_tag_name (unsigned);
3582 static const char *dwarf_attr_name (unsigned);
3583 static const char *dwarf_form_name (unsigned);
3584 static tree decl_ultimate_origin (const_tree);
3585 static tree decl_class_context (tree);
3586 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3587 static inline enum dw_val_class AT_class (dw_attr_node *);
3588 static inline unsigned int AT_index (dw_attr_node *);
3589 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3590 static inline unsigned AT_flag (dw_attr_node *);
3591 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3592 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3593 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3594 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3595 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3596 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3597 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3598 unsigned int, unsigned char *);
3599 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3600 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3601 static inline const char *AT_string (dw_attr_node *);
3602 static enum dwarf_form AT_string_form (dw_attr_node *);
3603 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3604 static void add_AT_specification (dw_die_ref, dw_die_ref);
3605 static inline dw_die_ref AT_ref (dw_attr_node *);
3606 static inline int AT_ref_external (dw_attr_node *);
3607 static inline void set_AT_ref_external (dw_attr_node *, int);
3608 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3609 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3610 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3611 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3612 dw_loc_list_ref);
3613 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3614 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3615 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3616 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3617 static void remove_addr_table_entry (addr_table_entry *);
3618 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3619 static inline rtx AT_addr (dw_attr_node *);
3620 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3621 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3622 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3623 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3624 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3625 const char *);
3626 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3627 unsigned HOST_WIDE_INT);
3628 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3629 unsigned long, bool);
3630 static inline const char *AT_lbl (dw_attr_node *);
3631 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3632 static const char *get_AT_low_pc (dw_die_ref);
3633 static const char *get_AT_hi_pc (dw_die_ref);
3634 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3635 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3636 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3637 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3638 static bool is_cxx (void);
3639 static bool is_cxx (const_tree);
3640 static bool is_fortran (void);
3641 static bool is_ada (void);
3642 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3643 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3644 static void add_child_die (dw_die_ref, dw_die_ref);
3645 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3646 static dw_die_ref lookup_type_die (tree);
3647 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3648 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3649 static void equate_type_number_to_die (tree, dw_die_ref);
3650 static dw_die_ref lookup_decl_die (tree);
3651 static var_loc_list *lookup_decl_loc (const_tree);
3652 static void equate_decl_number_to_die (tree, dw_die_ref);
3653 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3654 static void print_spaces (FILE *);
3655 static void print_die (dw_die_ref, FILE *);
3656 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3657 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3658 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3659 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3660 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3661 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3662 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3663 struct md5_ctx *, int *);
3664 struct checksum_attributes;
3665 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3666 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3667 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3668 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3669 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3670 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3671 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3672 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3673 static int is_type_die (dw_die_ref);
3674 static int is_comdat_die (dw_die_ref);
3675 static inline bool is_template_instantiation (dw_die_ref);
3676 static int is_declaration_die (dw_die_ref);
3677 static int should_move_die_to_comdat (dw_die_ref);
3678 static dw_die_ref clone_as_declaration (dw_die_ref);
3679 static dw_die_ref clone_die (dw_die_ref);
3680 static dw_die_ref clone_tree (dw_die_ref);
3681 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3682 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3683 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3684 static dw_die_ref generate_skeleton (dw_die_ref);
3685 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3686 dw_die_ref,
3687 dw_die_ref);
3688 static void break_out_comdat_types (dw_die_ref);
3689 static void copy_decls_for_unworthy_types (dw_die_ref);
3690
3691 static void add_sibling_attributes (dw_die_ref);
3692 static void output_location_lists (dw_die_ref);
3693 static int constant_size (unsigned HOST_WIDE_INT);
3694 static unsigned long size_of_die (dw_die_ref);
3695 static void calc_die_sizes (dw_die_ref);
3696 static void calc_base_type_die_sizes (void);
3697 static void mark_dies (dw_die_ref);
3698 static void unmark_dies (dw_die_ref);
3699 static void unmark_all_dies (dw_die_ref);
3700 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3701 static unsigned long size_of_aranges (void);
3702 static enum dwarf_form value_format (dw_attr_node *);
3703 static void output_value_format (dw_attr_node *);
3704 static void output_abbrev_section (void);
3705 static void output_die_abbrevs (unsigned long, dw_die_ref);
3706 static void output_die (dw_die_ref);
3707 static void output_compilation_unit_header (enum dwarf_unit_type);
3708 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3709 static void output_comdat_type_unit (comdat_type_node *);
3710 static const char *dwarf2_name (tree, int);
3711 static void add_pubname (tree, dw_die_ref);
3712 static void add_enumerator_pubname (const char *, dw_die_ref);
3713 static void add_pubname_string (const char *, dw_die_ref);
3714 static void add_pubtype (tree, dw_die_ref);
3715 static void output_pubnames (vec<pubname_entry, va_gc> *);
3716 static void output_aranges (void);
3717 static unsigned int add_ranges (const_tree, bool = false);
3718 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3719 bool *, bool);
3720 static void output_ranges (void);
3721 static dw_line_info_table *new_line_info_table (void);
3722 static void output_line_info (bool);
3723 static void output_file_names (void);
3724 static dw_die_ref base_type_die (tree, bool);
3725 static int is_base_type (tree);
3726 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3727 static int decl_quals (const_tree);
3728 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3729 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3730 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3731 static int type_is_enum (const_tree);
3732 static unsigned int dbx_reg_number (const_rtx);
3733 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3734 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3735 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3736 enum var_init_status);
3737 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3738 enum var_init_status);
3739 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3740 enum var_init_status);
3741 static int is_based_loc (const_rtx);
3742 static bool resolve_one_addr (rtx *);
3743 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3744 enum var_init_status);
3745 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3746 enum var_init_status);
3747 struct loc_descr_context;
3748 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3749 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3750 static dw_loc_list_ref loc_list_from_tree (tree, int,
3751 struct loc_descr_context *);
3752 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3753 struct loc_descr_context *);
3754 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3755 static tree field_type (const_tree);
3756 static unsigned int simple_type_align_in_bits (const_tree);
3757 static unsigned int simple_decl_align_in_bits (const_tree);
3758 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3759 struct vlr_context;
3760 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3761 HOST_WIDE_INT *);
3762 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3763 dw_loc_list_ref);
3764 static void add_data_member_location_attribute (dw_die_ref, tree,
3765 struct vlr_context *);
3766 static bool add_const_value_attribute (dw_die_ref, rtx);
3767 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3768 static void insert_wide_int (const wide_int &, unsigned char *, int);
3769 static void insert_float (const_rtx, unsigned char *);
3770 static rtx rtl_for_decl_location (tree);
3771 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3772 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3773 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3774 static void add_name_attribute (dw_die_ref, const char *);
3775 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3776 static void add_comp_dir_attribute (dw_die_ref);
3777 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3778 struct loc_descr_context *);
3779 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3780 struct loc_descr_context *);
3781 static void add_subscript_info (dw_die_ref, tree, bool);
3782 static void add_byte_size_attribute (dw_die_ref, tree);
3783 static void add_alignment_attribute (dw_die_ref, tree);
3784 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3785 struct vlr_context *);
3786 static void add_bit_size_attribute (dw_die_ref, tree);
3787 static void add_prototyped_attribute (dw_die_ref, tree);
3788 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3789 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3790 static void add_src_coords_attributes (dw_die_ref, tree);
3791 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3792 static void add_discr_value (dw_die_ref, dw_discr_value *);
3793 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3794 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3795 static void push_decl_scope (tree);
3796 static void pop_decl_scope (void);
3797 static dw_die_ref scope_die_for (tree, dw_die_ref);
3798 static inline int local_scope_p (dw_die_ref);
3799 static inline int class_scope_p (dw_die_ref);
3800 static inline int class_or_namespace_scope_p (dw_die_ref);
3801 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3802 static void add_calling_convention_attribute (dw_die_ref, tree);
3803 static const char *type_tag (const_tree);
3804 static tree member_declared_type (const_tree);
3805 #if 0
3806 static const char *decl_start_label (tree);
3807 #endif
3808 static void gen_array_type_die (tree, dw_die_ref);
3809 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3810 #if 0
3811 static void gen_entry_point_die (tree, dw_die_ref);
3812 #endif
3813 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3814 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3815 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3816 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3817 static void gen_formal_types_die (tree, dw_die_ref);
3818 static void gen_subprogram_die (tree, dw_die_ref);
3819 static void gen_variable_die (tree, tree, dw_die_ref);
3820 static void gen_const_die (tree, dw_die_ref);
3821 static void gen_label_die (tree, dw_die_ref);
3822 static void gen_lexical_block_die (tree, dw_die_ref);
3823 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3824 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3825 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3826 static dw_die_ref gen_compile_unit_die (const char *);
3827 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3828 static void gen_member_die (tree, dw_die_ref);
3829 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3830 enum debug_info_usage);
3831 static void gen_subroutine_type_die (tree, dw_die_ref);
3832 static void gen_typedef_die (tree, dw_die_ref);
3833 static void gen_type_die (tree, dw_die_ref);
3834 static void gen_block_die (tree, dw_die_ref);
3835 static void decls_for_scope (tree, dw_die_ref);
3836 static bool is_naming_typedef_decl (const_tree);
3837 static inline dw_die_ref get_context_die (tree);
3838 static void gen_namespace_die (tree, dw_die_ref);
3839 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3840 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3841 static dw_die_ref force_decl_die (tree);
3842 static dw_die_ref force_type_die (tree);
3843 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3844 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3845 static struct dwarf_file_data * lookup_filename (const char *);
3846 static void retry_incomplete_types (void);
3847 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3848 static void gen_generic_params_dies (tree);
3849 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3850 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3851 static void splice_child_die (dw_die_ref, dw_die_ref);
3852 static int file_info_cmp (const void *, const void *);
3853 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3854 const char *, var_loc_view, const char *);
3855 static void output_loc_list (dw_loc_list_ref);
3856 static char *gen_internal_sym (const char *);
3857 static bool want_pubnames (void);
3858
3859 static void prune_unmark_dies (dw_die_ref);
3860 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3861 static void prune_unused_types_mark (dw_die_ref, int);
3862 static void prune_unused_types_walk (dw_die_ref);
3863 static void prune_unused_types_walk_attribs (dw_die_ref);
3864 static void prune_unused_types_prune (dw_die_ref);
3865 static void prune_unused_types (void);
3866 static int maybe_emit_file (struct dwarf_file_data *fd);
3867 static inline const char *AT_vms_delta1 (dw_attr_node *);
3868 static inline const char *AT_vms_delta2 (dw_attr_node *);
3869 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3870 const char *, const char *);
3871 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3872 static void gen_remaining_tmpl_value_param_die_attribute (void);
3873 static bool generic_type_p (tree);
3874 static void schedule_generic_params_dies_gen (tree t);
3875 static void gen_scheduled_generic_parms_dies (void);
3876 static void resolve_variable_values (void);
3877
3878 static const char *comp_dir_string (void);
3879
3880 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3881
3882 /* enum for tracking thread-local variables whose address is really an offset
3883 relative to the TLS pointer, which will need link-time relocation, but will
3884 not need relocation by the DWARF consumer. */
3885
3886 enum dtprel_bool
3887 {
3888 dtprel_false = 0,
3889 dtprel_true = 1
3890 };
3891
3892 /* Return the operator to use for an address of a variable. For dtprel_true, we
3893 use DW_OP_const*. For regular variables, which need both link-time
3894 relocation and consumer-level relocation (e.g., to account for shared objects
3895 loaded at a random address), we use DW_OP_addr*. */
3896
3897 static inline enum dwarf_location_atom
3898 dw_addr_op (enum dtprel_bool dtprel)
3899 {
3900 if (dtprel == dtprel_true)
3901 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3902 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3903 else
3904 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3905 }
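/* For reference, the mapping above works out as follows (assuming the
   usual 4- or 8-byte address size):

     dtprel_true,  split debug info  -> DW_OP_GNU_const_index
     dtprel_true,  no split          -> DW_OP_const4u or DW_OP_const8u
     dtprel_false, split debug info  -> DW_OP_GNU_addr_index
     dtprel_false, no split          -> DW_OP_addr  */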
3906
3907 /* Return a pointer to a newly allocated address location description. If
3908 dwarf_split_debug_info is true, then record the address with the appropriate
3909 relocation. */
3910 static inline dw_loc_descr_ref
3911 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3912 {
3913 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3914
3915 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3916 ref->dw_loc_oprnd1.v.val_addr = addr;
3917 ref->dtprel = dtprel;
3918 if (dwarf_split_debug_info)
3919 ref->dw_loc_oprnd1.val_entry
3920 = add_addr_table_entry (addr,
3921 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3922 else
3923 ref->dw_loc_oprnd1.val_entry = NULL;
3924
3925 return ref;
3926 }
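/* A minimal usage sketch, with a made-up symbol name and kept under #if 0:
   wrap a SYMBOL_REF in an address location description.  With split debug
   info the address is also registered in the address table by the
   function above.  */
#if 0
static dw_loc_descr_ref
example_symbol_loc_descr (void)
{
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup ("example_symbol"));
  return new_addr_loc_descr (sym, dtprel_false);
}
#endif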
3927
3928 /* Section names used to hold DWARF debugging information. */
3929
3930 #ifndef DEBUG_INFO_SECTION
3931 #define DEBUG_INFO_SECTION ".debug_info"
3932 #endif
3933 #ifndef DEBUG_DWO_INFO_SECTION
3934 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3935 #endif
3936 #ifndef DEBUG_LTO_INFO_SECTION
3937 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3938 #endif
3939 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3940 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3941 #endif
3942 #ifndef DEBUG_ABBREV_SECTION
3943 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3944 #endif
3945 #ifndef DEBUG_LTO_ABBREV_SECTION
3946 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3947 #endif
3948 #ifndef DEBUG_DWO_ABBREV_SECTION
3949 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3950 #endif
3951 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3952 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3953 #endif
3954 #ifndef DEBUG_ARANGES_SECTION
3955 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3956 #endif
3957 #ifndef DEBUG_ADDR_SECTION
3958 #define DEBUG_ADDR_SECTION ".debug_addr"
3959 #endif
3960 #ifndef DEBUG_MACINFO_SECTION
3961 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3962 #endif
3963 #ifndef DEBUG_LTO_MACINFO_SECTION
3964 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3965 #endif
3966 #ifndef DEBUG_DWO_MACINFO_SECTION
3967 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3968 #endif
3969 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3970 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3971 #endif
3972 #ifndef DEBUG_MACRO_SECTION
3973 #define DEBUG_MACRO_SECTION ".debug_macro"
3974 #endif
3975 #ifndef DEBUG_LTO_MACRO_SECTION
3976 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3977 #endif
3978 #ifndef DEBUG_DWO_MACRO_SECTION
3979 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3980 #endif
3981 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3982 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3983 #endif
3984 #ifndef DEBUG_LINE_SECTION
3985 #define DEBUG_LINE_SECTION ".debug_line"
3986 #endif
3987 #ifndef DEBUG_LTO_LINE_SECTION
3988 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3989 #endif
3990 #ifndef DEBUG_DWO_LINE_SECTION
3991 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3992 #endif
3993 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3994 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3995 #endif
3996 #ifndef DEBUG_LOC_SECTION
3997 #define DEBUG_LOC_SECTION ".debug_loc"
3998 #endif
3999 #ifndef DEBUG_DWO_LOC_SECTION
4000 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4001 #endif
4002 #ifndef DEBUG_LOCLISTS_SECTION
4003 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4004 #endif
4005 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4006 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4007 #endif
4008 #ifndef DEBUG_PUBNAMES_SECTION
4009 #define DEBUG_PUBNAMES_SECTION \
4010 ((debug_generate_pub_sections == 2) \
4011 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4012 #endif
4013 #ifndef DEBUG_PUBTYPES_SECTION
4014 #define DEBUG_PUBTYPES_SECTION \
4015 ((debug_generate_pub_sections == 2) \
4016 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4017 #endif
4018 #ifndef DEBUG_STR_OFFSETS_SECTION
4019 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4020 #endif
4021 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4022 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4023 #endif
4024 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4025 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4026 #endif
4027 #ifndef DEBUG_STR_SECTION
4028 #define DEBUG_STR_SECTION ".debug_str"
4029 #endif
4030 #ifndef DEBUG_LTO_STR_SECTION
4031 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4032 #endif
4033 #ifndef DEBUG_STR_DWO_SECTION
4034 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4035 #endif
4036 #ifndef DEBUG_LTO_STR_DWO_SECTION
4037 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4038 #endif
4039 #ifndef DEBUG_RANGES_SECTION
4040 #define DEBUG_RANGES_SECTION ".debug_ranges"
4041 #endif
4042 #ifndef DEBUG_RNGLISTS_SECTION
4043 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4044 #endif
4045 #ifndef DEBUG_LINE_STR_SECTION
4046 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4047 #endif
4048 #ifndef DEBUG_LTO_LINE_STR_SECTION
4049 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4050 #endif
4051
4052 /* Standard ELF section names for compiled code and data. */
4053 #ifndef TEXT_SECTION_NAME
4054 #define TEXT_SECTION_NAME ".text"
4055 #endif
4056
4057 /* Section flags for .debug_str section. */
4058 #define DEBUG_STR_SECTION_FLAGS \
4059 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4060 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4061 : SECTION_DEBUG)
4062
4063 /* Section flags for .debug_str.dwo section. */
4064 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4065
4066 /* Attribute used to refer to the macro section. */
4067 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4068 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4069
4070 /* Labels we insert at the beginning of sections, so that we can reference
4071 them instead of the section names themselves. */
4072
4073 #ifndef TEXT_SECTION_LABEL
4074 #define TEXT_SECTION_LABEL "Ltext"
4075 #endif
4076 #ifndef COLD_TEXT_SECTION_LABEL
4077 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4078 #endif
4079 #ifndef DEBUG_LINE_SECTION_LABEL
4080 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4081 #endif
4082 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4083 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4084 #endif
4085 #ifndef DEBUG_INFO_SECTION_LABEL
4086 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4087 #endif
4088 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4089 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4090 #endif
4091 #ifndef DEBUG_ABBREV_SECTION_LABEL
4092 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4093 #endif
4094 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4095 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4096 #endif
4097 #ifndef DEBUG_ADDR_SECTION_LABEL
4098 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4099 #endif
4100 #ifndef DEBUG_LOC_SECTION_LABEL
4101 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4102 #endif
4103 #ifndef DEBUG_RANGES_SECTION_LABEL
4104 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4105 #endif
4106 #ifndef DEBUG_MACINFO_SECTION_LABEL
4107 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4108 #endif
4109 #ifndef DEBUG_MACRO_SECTION_LABEL
4110 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4111 #endif
4112 #define SKELETON_COMP_DIE_ABBREV 1
4113 #define SKELETON_TYPE_DIE_ABBREV 2
4114
4115 /* Definitions of defaults for formats and names of various special
4116 (artificial) labels which may be generated within this file (when the -g
4117 option is used and DWARF2_DEBUGGING_INFO is in effect).
4118 If necessary, these may be overridden from within the tm.h file, but
4119 typically, overriding these defaults is unnecessary. */
4120
4121 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4122 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4123 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4124 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4125 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4126 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4127 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4128 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4129 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4130 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4131 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4132 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4133 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4134 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4135 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4136
4137 #ifndef TEXT_END_LABEL
4138 #define TEXT_END_LABEL "Letext"
4139 #endif
4140 #ifndef COLD_END_LABEL
4141 #define COLD_END_LABEL "Letext_cold"
4142 #endif
4143 #ifndef BLOCK_BEGIN_LABEL
4144 #define BLOCK_BEGIN_LABEL "LBB"
4145 #endif
4146 #ifndef BLOCK_INLINE_ENTRY_LABEL
4147 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4148 #endif
4149 #ifndef BLOCK_END_LABEL
4150 #define BLOCK_END_LABEL "LBE"
4151 #endif
4152 #ifndef LINE_CODE_LABEL
4153 #define LINE_CODE_LABEL "LM"
4154 #endif
4155
4156 \f
4157 /* Return the root of the DIEs built for the current compilation unit. */
4158 static dw_die_ref
4159 comp_unit_die (void)
4160 {
4161 if (!single_comp_unit_die)
4162 single_comp_unit_die = gen_compile_unit_die (NULL);
4163 return single_comp_unit_die;
4164 }
4165
4166 /* We allow a language front-end to designate a function that is to be
4167 called to "demangle" any name before it is put into a DIE. */
4168
4169 static const char *(*demangle_name_func) (const char *);
4170
4171 void
4172 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4173 {
4174 demangle_name_func = func;
4175 }
4176
4177 /* Test if rtl node points to a pseudo register. */
4178
4179 static inline int
4180 is_pseudo_reg (const_rtx rtl)
4181 {
4182 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4183 || (GET_CODE (rtl) == SUBREG
4184 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4185 }
4186
4187 /* Return a reference to a type, with its const and volatile qualifiers
4188 removed. */
4189
4190 static inline tree
4191 type_main_variant (tree type)
4192 {
4193 type = TYPE_MAIN_VARIANT (type);
4194
4195 /* ??? There really should be only one main variant among any group of
4196 variants of a given type (and all of the MAIN_VARIANT values for all
4197 members of the group should point to that one type) but sometimes the C
4198 front-end messes this up for array types, so we work around that bug
4199 here. */
4200 if (TREE_CODE (type) == ARRAY_TYPE)
4201 while (type != TYPE_MAIN_VARIANT (type))
4202 type = TYPE_MAIN_VARIANT (type);
4203
4204 return type;
4205 }
4206
4207 /* Return nonzero if the given type node represents a tagged type. */
4208
4209 static inline int
4210 is_tagged_type (const_tree type)
4211 {
4212 enum tree_code code = TREE_CODE (type);
4213
4214 return (code == RECORD_TYPE || code == UNION_TYPE
4215 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4216 }
4217
4218 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4219
4220 static void
4221 get_ref_die_offset_label (char *label, dw_die_ref ref)
4222 {
4223 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4224 }
4225
4226 /* Return die_offset of a DIE reference to a base type. */
4227
4228 static unsigned long int
4229 get_base_type_offset (dw_die_ref ref)
4230 {
4231 if (ref->die_offset)
4232 return ref->die_offset;
4233 if (comp_unit_die ()->die_abbrev)
4234 {
4235 calc_base_type_die_sizes ();
4236 gcc_assert (ref->die_offset);
4237 }
4238 return ref->die_offset;
4239 }
4240
4241 /* Return die_offset of a DIE reference other than base type. */
4242
4243 static unsigned long int
4244 get_ref_die_offset (dw_die_ref ref)
4245 {
4246 gcc_assert (ref->die_offset);
4247 return ref->die_offset;
4248 }
4249
4250 /* Convert a DIE tag into its string name. */
4251
4252 static const char *
4253 dwarf_tag_name (unsigned int tag)
4254 {
4255 const char *name = get_DW_TAG_name (tag);
4256
4257 if (name != NULL)
4258 return name;
4259
4260 return "DW_TAG_<unknown>";
4261 }
4262
4263 /* Convert a DWARF attribute code into its string name. */
4264
4265 static const char *
4266 dwarf_attr_name (unsigned int attr)
4267 {
4268 const char *name;
4269
4270 switch (attr)
4271 {
4272 #if VMS_DEBUGGING_INFO
4273 case DW_AT_HP_prologue:
4274 return "DW_AT_HP_prologue";
4275 #else
4276 case DW_AT_MIPS_loop_unroll_factor:
4277 return "DW_AT_MIPS_loop_unroll_factor";
4278 #endif
4279
4280 #if VMS_DEBUGGING_INFO
4281 case DW_AT_HP_epilogue:
4282 return "DW_AT_HP_epilogue";
4283 #else
4284 case DW_AT_MIPS_stride:
4285 return "DW_AT_MIPS_stride";
4286 #endif
4287 }
4288
4289 name = get_DW_AT_name (attr);
4290
4291 if (name != NULL)
4292 return name;
4293
4294 return "DW_AT_<unknown>";
4295 }
4296
4297 /* Convert a DWARF value form code into its string name. */
4298
4299 static const char *
4300 dwarf_form_name (unsigned int form)
4301 {
4302 const char *name = get_DW_FORM_name (form);
4303
4304 if (name != NULL)
4305 return name;
4306
4307 return "DW_FORM_<unknown>";
4308 }
4309 \f
4310 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4311 instance of an inlined instance of a decl which is local to an inline
4312 function, so we have to trace all of the way back through the origin chain
4313 to find out what sort of node actually served as the original seed for the
4314 given block. */
4315
4316 static tree
4317 decl_ultimate_origin (const_tree decl)
4318 {
4319 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4320 return NULL_TREE;
4321
4322 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4323 we're trying to output the abstract instance of this function. */
4324 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4325 return NULL_TREE;
4326
4327 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4328 most distant ancestor, this should never happen. */
4329 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4330
4331 return DECL_ABSTRACT_ORIGIN (decl);
4332 }
4333
4334 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4335 of a virtual function may refer to a base class, so we check the 'this'
4336 parameter. */
4337
4338 static tree
4339 decl_class_context (tree decl)
4340 {
4341 tree context = NULL_TREE;
4342
4343 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4344 context = DECL_CONTEXT (decl);
4345 else
4346 context = TYPE_MAIN_VARIANT
4347 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4348
4349 if (context && !TYPE_P (context))
4350 context = NULL_TREE;
4351
4352 return context;
4353 }
4354 \f
4355 /* Add an attribute/value pair to a DIE. */
4356
4357 static inline void
4358 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4359 {
4360 /* Maybe this should be an assert? */
4361 if (die == NULL)
4362 return;
4363
4364 if (flag_checking)
4365 {
4366 /* Check we do not add duplicate attrs. Can't use get_AT here
4367 because that recurses to the specification/abstract origin DIE. */
4368 dw_attr_node *a;
4369 unsigned ix;
4370 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4371 gcc_assert (a->dw_attr != attr->dw_attr);
4372 }
4373
4374 vec_safe_reserve (die->die_attr, 1);
4375 vec_safe_push (die->die_attr, *attr);
4376 }
4377
4378 static inline enum dw_val_class
4379 AT_class (dw_attr_node *a)
4380 {
4381 return a->dw_attr_val.val_class;
4382 }
4383
4384 /* Return the index for any attribute that will be referenced with a
4385 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4386 are stored in dw_attr_val.v.val_str for reference counting
4387 pruning. */
4388
4389 static inline unsigned int
4390 AT_index (dw_attr_node *a)
4391 {
4392 if (AT_class (a) == dw_val_class_str)
4393 return a->dw_attr_val.v.val_str->index;
4394 else if (a->dw_attr_val.val_entry != NULL)
4395 return a->dw_attr_val.val_entry->index;
4396 return NOT_INDEXED;
4397 }
4398
4399 /* Add a flag value attribute to a DIE. */
4400
4401 static inline void
4402 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4403 {
4404 dw_attr_node attr;
4405
4406 attr.dw_attr = attr_kind;
4407 attr.dw_attr_val.val_class = dw_val_class_flag;
4408 attr.dw_attr_val.val_entry = NULL;
4409 attr.dw_attr_val.v.val_flag = flag;
4410 add_dwarf_attr (die, &attr);
4411 }
4412
4413 static inline unsigned
4414 AT_flag (dw_attr_node *a)
4415 {
4416 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4417 return a->dw_attr_val.v.val_flag;
4418 }
4419
4420 /* Add a signed integer attribute value to a DIE. */
4421
4422 static inline void
4423 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4424 {
4425 dw_attr_node attr;
4426
4427 attr.dw_attr = attr_kind;
4428 attr.dw_attr_val.val_class = dw_val_class_const;
4429 attr.dw_attr_val.val_entry = NULL;
4430 attr.dw_attr_val.v.val_int = int_val;
4431 add_dwarf_attr (die, &attr);
4432 }
4433
4434 static inline HOST_WIDE_INT
4435 AT_int (dw_attr_node *a)
4436 {
4437 gcc_assert (a && (AT_class (a) == dw_val_class_const
4438 || AT_class (a) == dw_val_class_const_implicit));
4439 return a->dw_attr_val.v.val_int;
4440 }
4441
4442 /* Add an unsigned integer attribute value to a DIE. */
4443
4444 static inline void
4445 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4446 unsigned HOST_WIDE_INT unsigned_val)
4447 {
4448 dw_attr_node attr;
4449
4450 attr.dw_attr = attr_kind;
4451 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4452 attr.dw_attr_val.val_entry = NULL;
4453 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4454 add_dwarf_attr (die, &attr);
4455 }
4456
4457 static inline unsigned HOST_WIDE_INT
4458 AT_unsigned (dw_attr_node *a)
4459 {
4460 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4461 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4462 return a->dw_attr_val.v.val_unsigned;
4463 }
4464
4465 /* Add an unsigned wide integer attribute value to a DIE. */
4466
4467 static inline void
4468 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4469 const wide_int& w)
4470 {
4471 dw_attr_node attr;
4472
4473 attr.dw_attr = attr_kind;
4474 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4475 attr.dw_attr_val.val_entry = NULL;
4476 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4477 *attr.dw_attr_val.v.val_wide = w;
4478 add_dwarf_attr (die, &attr);
4479 }
4480
4481 /* Add an unsigned double integer attribute value to a DIE. */
4482
4483 static inline void
4484 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4485 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4486 {
4487 dw_attr_node attr;
4488
4489 attr.dw_attr = attr_kind;
4490 attr.dw_attr_val.val_class = dw_val_class_const_double;
4491 attr.dw_attr_val.val_entry = NULL;
4492 attr.dw_attr_val.v.val_double.high = high;
4493 attr.dw_attr_val.v.val_double.low = low;
4494 add_dwarf_attr (die, &attr);
4495 }
4496
4497 /* Add a vector of byte data, e.g. a floating point constant, to a DIE. */
4498
4499 static inline void
4500 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4501 unsigned int length, unsigned int elt_size, unsigned char *array)
4502 {
4503 dw_attr_node attr;
4504
4505 attr.dw_attr = attr_kind;
4506 attr.dw_attr_val.val_class = dw_val_class_vec;
4507 attr.dw_attr_val.val_entry = NULL;
4508 attr.dw_attr_val.v.val_vec.length = length;
4509 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4510 attr.dw_attr_val.v.val_vec.array = array;
4511 add_dwarf_attr (die, &attr);
4512 }
4513
4514 /* Add an 8-byte data attribute value to a DIE. */
4515
4516 static inline void
4517 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4518 unsigned char data8[8])
4519 {
4520 dw_attr_node attr;
4521
4522 attr.dw_attr = attr_kind;
4523 attr.dw_attr_val.val_class = dw_val_class_data8;
4524 attr.dw_attr_val.val_entry = NULL;
4525 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4526 add_dwarf_attr (die, &attr);
4527 }
4528
4529 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4530 dwarf_split_debug_info, address attributes in dies destined for the
4531 final executable have force_direct set to avoid using indexed
4532 references. */
4533
4534 static inline void
4535 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4536 bool force_direct)
4537 {
4538 dw_attr_node attr;
4539 char * lbl_id;
4540
4541 lbl_id = xstrdup (lbl_low);
4542 attr.dw_attr = DW_AT_low_pc;
4543 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4544 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4545 if (dwarf_split_debug_info && !force_direct)
4546 attr.dw_attr_val.val_entry
4547 = add_addr_table_entry (lbl_id, ate_kind_label);
4548 else
4549 attr.dw_attr_val.val_entry = NULL;
4550 add_dwarf_attr (die, &attr);
4551
4552 attr.dw_attr = DW_AT_high_pc;
4553 if (dwarf_version < 4)
4554 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4555 else
4556 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4557 lbl_id = xstrdup (lbl_high);
4558 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4559 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4560 && dwarf_split_debug_info && !force_direct)
4561 attr.dw_attr_val.val_entry
4562 = add_addr_table_entry (lbl_id, ate_kind_label);
4563 else
4564 attr.dw_attr_val.val_entry = NULL;
4565 add_dwarf_attr (die, &attr);
4566 }
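/* Sketch of a typical call, kept under #if 0 and roughly mirroring how the
   text section bounds are attached at finish time: the section start and
   end labels become the DW_AT_low_pc/DW_AT_high_pc pair, with force_direct
   set so no indexed (.debug_addr) references are used.  */
#if 0
static void
example_add_text_range (dw_die_ref unit_die)
{
  add_AT_low_high_pc (unit_die, text_section_label, text_end_label,
                      /*force_direct=*/true);
}
#endif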
4567
4568 /* Hash and equality functions for debug_str_hash. */
4569
4570 hashval_t
4571 indirect_string_hasher::hash (indirect_string_node *x)
4572 {
4573 return htab_hash_string (x->str);
4574 }
4575
4576 bool
4577 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4578 {
4579 return strcmp (x1->str, x2) == 0;
4580 }
4581
4582 /* Add STR to the given string hash table. */
4583
4584 static struct indirect_string_node *
4585 find_AT_string_in_table (const char *str,
4586 hash_table<indirect_string_hasher> *table)
4587 {
4588 struct indirect_string_node *node;
4589
4590 indirect_string_node **slot
4591 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4592 if (*slot == NULL)
4593 {
4594 node = ggc_cleared_alloc<indirect_string_node> ();
4595 node->str = ggc_strdup (str);
4596 *slot = node;
4597 }
4598 else
4599 node = *slot;
4600
4601 node->refcount++;
4602 return node;
4603 }
4604
4605 /* Add STR to the indirect string hash table. */
4606
4607 static struct indirect_string_node *
4608 find_AT_string (const char *str)
4609 {
4610 if (! debug_str_hash)
4611 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4612
4613 return find_AT_string_in_table (str, debug_str_hash);
4614 }
4615
4616 /* Add a string attribute value to a DIE. */
4617
4618 static inline void
4619 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4620 {
4621 dw_attr_node attr;
4622 struct indirect_string_node *node;
4623
4624 node = find_AT_string (str);
4625
4626 attr.dw_attr = attr_kind;
4627 attr.dw_attr_val.val_class = dw_val_class_str;
4628 attr.dw_attr_val.val_entry = NULL;
4629 attr.dw_attr_val.v.val_str = node;
4630 add_dwarf_attr (die, &attr);
4631 }
4632
4633 static inline const char *
4634 AT_string (dw_attr_node *a)
4635 {
4636 gcc_assert (a && AT_class (a) == dw_val_class_str);
4637 return a->dw_attr_val.v.val_str->str;
4638 }
4639
4640 /* Call this function directly to bypass AT_string_form's logic to put
4641 the string inline in the die. */
4642
4643 static void
4644 set_indirect_string (struct indirect_string_node *node)
4645 {
4646 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4647 /* Already indirect is a no op. */
4648 if (node->form == DW_FORM_strp
4649 || node->form == DW_FORM_line_strp
4650 || node->form == DW_FORM_GNU_str_index)
4651 {
4652 gcc_assert (node->label);
4653 return;
4654 }
4655 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4656 ++dw2_string_counter;
4657 node->label = xstrdup (label);
4658
4659 if (!dwarf_split_debug_info)
4660 {
4661 node->form = DW_FORM_strp;
4662 node->index = NOT_INDEXED;
4663 }
4664 else
4665 {
4666 node->form = DW_FORM_GNU_str_index;
4667 node->index = NO_INDEX_ASSIGNED;
4668 }
4669 }
4670
4671 /* A helper function for dwarf2out_finish, called to reset indirect
4672 string decisions done for early LTO dwarf output before fat object
4673 dwarf output. */
4674
4675 int
4676 reset_indirect_string (indirect_string_node **h, void *)
4677 {
4678 struct indirect_string_node *node = *h;
4679 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4680 {
4681 free (node->label);
4682 node->label = NULL;
4683 node->form = (dwarf_form) 0;
4684 node->index = 0;
4685 }
4686 return 1;
4687 }
4688
4689 /* Find out whether a string should be output inline in DIE
4690 or out-of-line in .debug_str section. */
4691
4692 static enum dwarf_form
4693 find_string_form (struct indirect_string_node *node)
4694 {
4695 unsigned int len;
4696
4697 if (node->form)
4698 return node->form;
4699
4700 len = strlen (node->str) + 1;
4701
4702 /* If the string is no longer than the size of the reference, it is
4703 always better to put it inline. */
4704 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4705 return node->form = DW_FORM_string;
4706
4707 /* If we cannot expect the linker to merge strings in .debug_str
4708 section, only put it into .debug_str if it is worth even in this
4709 single module. */
4710 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4711 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4712 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4713 return node->form = DW_FORM_string;
4714
4715 set_indirect_string (node);
4716
4717 return node->form;
4718 }
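/* Worked example of the heuristic above, assuming DWARF_OFFSET_SIZE == 4
   and no mergeable string section: a string whose length including the
   terminating NUL is at most 4 bytes always stays inline as
   DW_FORM_string.  A 21-byte string referenced three times costs
   3 * 21 = 63 bytes inline but only 3 * 4 + 21 = 33 bytes via .debug_str;
   since (21 - 4) * 3 = 51 > 21, the code above switches it to an
   indirect form.  */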
4719
4720 /* Find out whether the string referenced from the attribute should be
4721 output inline in DIE or out-of-line in .debug_str section. */
4722
4723 static enum dwarf_form
4724 AT_string_form (dw_attr_node *a)
4725 {
4726 gcc_assert (a && AT_class (a) == dw_val_class_str);
4727 return find_string_form (a->dw_attr_val.v.val_str);
4728 }
4729
4730 /* Add a DIE reference attribute value to a DIE. */
4731
4732 static inline void
4733 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4734 {
4735 dw_attr_node attr;
4736 gcc_checking_assert (targ_die != NULL);
4737
4738 /* With LTO we can end up trying to reference something we didn't create
4739 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4740 if (targ_die == NULL)
4741 return;
4742
4743 attr.dw_attr = attr_kind;
4744 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4745 attr.dw_attr_val.val_entry = NULL;
4746 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4747 attr.dw_attr_val.v.val_die_ref.external = 0;
4748 add_dwarf_attr (die, &attr);
4749 }
4750
4751 /* Change DIE reference REF to point to NEW_DIE instead. */
4752
4753 static inline void
4754 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4755 {
4756 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4757 ref->dw_attr_val.v.val_die_ref.die = new_die;
4758 ref->dw_attr_val.v.val_die_ref.external = 0;
4759 }
4760
4761 /* Add an AT_specification attribute to a DIE, and also make the back
4762 pointer from the specification to the definition. */
4763
4764 static inline void
4765 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4766 {
4767 add_AT_die_ref (die, DW_AT_specification, targ_die);
4768 gcc_assert (!targ_die->die_definition);
4769 targ_die->die_definition = die;
4770 }
4771
4772 static inline dw_die_ref
4773 AT_ref (dw_attr_node *a)
4774 {
4775 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4776 return a->dw_attr_val.v.val_die_ref.die;
4777 }
4778
4779 static inline int
4780 AT_ref_external (dw_attr_node *a)
4781 {
4782 if (a && AT_class (a) == dw_val_class_die_ref)
4783 return a->dw_attr_val.v.val_die_ref.external;
4784
4785 return 0;
4786 }
4787
4788 static inline void
4789 set_AT_ref_external (dw_attr_node *a, int i)
4790 {
4791 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4792 a->dw_attr_val.v.val_die_ref.external = i;
4793 }
4794
4795 /* Add an FDE reference attribute value to a DIE. */
4796
4797 static inline void
4798 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4799 {
4800 dw_attr_node attr;
4801
4802 attr.dw_attr = attr_kind;
4803 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4804 attr.dw_attr_val.val_entry = NULL;
4805 attr.dw_attr_val.v.val_fde_index = targ_fde;
4806 add_dwarf_attr (die, &attr);
4807 }
4808
4809 /* Add a location description attribute value to a DIE. */
4810
4811 static inline void
4812 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4813 {
4814 dw_attr_node attr;
4815
4816 attr.dw_attr = attr_kind;
4817 attr.dw_attr_val.val_class = dw_val_class_loc;
4818 attr.dw_attr_val.val_entry = NULL;
4819 attr.dw_attr_val.v.val_loc = loc;
4820 add_dwarf_attr (die, &attr);
4821 }
4822
4823 static inline dw_loc_descr_ref
4824 AT_loc (dw_attr_node *a)
4825 {
4826 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4827 return a->dw_attr_val.v.val_loc;
4828 }
4829
4830 static inline void
4831 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4832 {
4833 dw_attr_node attr;
4834
4835 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4836 return;
4837
4838 attr.dw_attr = attr_kind;
4839 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4840 attr.dw_attr_val.val_entry = NULL;
4841 attr.dw_attr_val.v.val_loc_list = loc_list;
4842 add_dwarf_attr (die, &attr);
4843 have_location_lists = true;
4844 }
4845
4846 static inline dw_loc_list_ref
4847 AT_loc_list (dw_attr_node *a)
4848 {
4849 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4850 return a->dw_attr_val.v.val_loc_list;
4851 }
4852
4853 /* Add a view list attribute to DIE. It must have a DW_AT_location
4854 attribute, because the view list complements the location list. */
4855
4856 static inline void
4857 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4858 {
4859 dw_attr_node attr;
4860
4861 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4862 return;
4863
4864 attr.dw_attr = attr_kind;
4865 attr.dw_attr_val.val_class = dw_val_class_view_list;
4866 attr.dw_attr_val.val_entry = NULL;
4867 attr.dw_attr_val.v.val_view_list = die;
4868 add_dwarf_attr (die, &attr);
4869 gcc_checking_assert (get_AT (die, DW_AT_location));
4870 gcc_assert (have_location_lists);
4871 }
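/* The view list is found through its companion DW_AT_location attribute
   (see AT_loc_list_ptr below, which relies on the two dw_attr_nodes being
   adjacent), so the two must be added back to back.  A minimal sketch,
   with a hypothetical view attribute kind, kept under #if 0:  */
#if 0
static void
example_add_loc_and_views (dw_die_ref die, dw_loc_list_ref list,
                           enum dwarf_attribute view_attr)
{
  add_AT_loc_list (die, DW_AT_location, list);
  /* Must immediately follow the location attribute in DIE.  */
  add_AT_view_list (die, view_attr);
}
#endif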
4872
4873 /* Return a pointer to the location list referenced by the attribute.
4874 If the named attribute is a view list, look up the corresponding
4875 DW_AT_location attribute and return its location list. */
4876
4877 static inline dw_loc_list_ref *
4878 AT_loc_list_ptr (dw_attr_node *a)
4879 {
4880 gcc_assert (a);
4881 switch (AT_class (a))
4882 {
4883 case dw_val_class_loc_list:
4884 return &a->dw_attr_val.v.val_loc_list;
4885 case dw_val_class_view_list:
4886 {
4887 dw_attr_node *l;
4888 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4889 if (!l)
4890 return NULL;
4891 gcc_checking_assert (l + 1 == a);
4892 return AT_loc_list_ptr (l);
4893 }
4894 default:
4895 gcc_unreachable ();
4896 }
4897 }
4898
4899 /* Return the location attribute value associated with a view list
4900 attribute value. */
4901
4902 static inline dw_val_node *
4903 view_list_to_loc_list_val_node (dw_val_node *val)
4904 {
4905 gcc_assert (val->val_class == dw_val_class_view_list);
4906 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4907 if (!loc)
4908 return NULL;
4909 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4910 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4911 return &loc->dw_attr_val;
4912 }
4913
4914 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4915 {
4916 static hashval_t hash (addr_table_entry *);
4917 static bool equal (addr_table_entry *, addr_table_entry *);
4918 };
4919
4920 /* Table of entries into the .debug_addr section. */
4921
4922 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4923
4924 /* Hash an address_table_entry. */
4925
4926 hashval_t
4927 addr_hasher::hash (addr_table_entry *a)
4928 {
4929 inchash::hash hstate;
4930 switch (a->kind)
4931 {
4932 case ate_kind_rtx:
4933 hstate.add_int (0);
4934 break;
4935 case ate_kind_rtx_dtprel:
4936 hstate.add_int (1);
4937 break;
4938 case ate_kind_label:
4939 return htab_hash_string (a->addr.label);
4940 default:
4941 gcc_unreachable ();
4942 }
4943 inchash::add_rtx (a->addr.rtl, hstate);
4944 return hstate.end ();
4945 }
4946
4947 /* Determine equality for two address_table_entries. */
4948
4949 bool
4950 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4951 {
4952 if (a1->kind != a2->kind)
4953 return 0;
4954 switch (a1->kind)
4955 {
4956 case ate_kind_rtx:
4957 case ate_kind_rtx_dtprel:
4958 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4959 case ate_kind_label:
4960 return strcmp (a1->addr.label, a2->addr.label) == 0;
4961 default:
4962 gcc_unreachable ();
4963 }
4964 }
4965
4966 /* Initialize an addr_table_entry. */
4967
4968 void
4969 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4970 {
4971 e->kind = kind;
4972 switch (kind)
4973 {
4974 case ate_kind_rtx:
4975 case ate_kind_rtx_dtprel:
4976 e->addr.rtl = (rtx) addr;
4977 break;
4978 case ate_kind_label:
4979 e->addr.label = (char *) addr;
4980 break;
4981 }
4982 e->refcount = 0;
4983 e->index = NO_INDEX_ASSIGNED;
4984 }
4985
4986 /* Add an entry for ADDR of the given KIND to the address table, or reuse
4987 an existing one. Defer setting an index until output time. */
4988
4989 static addr_table_entry *
4990 add_addr_table_entry (void *addr, enum ate_kind kind)
4991 {
4992 addr_table_entry *node;
4993 addr_table_entry finder;
4994
4995 gcc_assert (dwarf_split_debug_info);
4996 if (! addr_index_table)
4997 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4998 init_addr_table_entry (&finder, kind, addr);
4999 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5000
5001 if (*slot == HTAB_EMPTY_ENTRY)
5002 {
5003 node = ggc_cleared_alloc<addr_table_entry> ();
5004 init_addr_table_entry (node, kind, addr);
5005 *slot = node;
5006 }
5007 else
5008 node = *slot;
5009
5010 node->refcount++;
5011 return node;
5012 }
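/* Sketch of the deduplication behavior, kept under #if 0 and assuming
   split debug info is enabled and the (made-up) label is not already in
   the table: adding the same label twice yields one shared entry whose
   refcount reflects both uses.  */
#if 0
static void
example_addr_table_dedup (void)
{
  addr_table_entry *a
    = add_addr_table_entry ((void *) "Lexample", ate_kind_label);
  addr_table_entry *b
    = add_addr_table_entry ((void *) "Lexample", ate_kind_label);
  gcc_assert (a == b && a->refcount == 2);
}
#endif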
5013
5014 /* Remove an entry from the addr table by decrementing its refcount.
5015 Strictly, decrementing the refcount would be enough, but the
5016 assertion that the entry is actually in the table has found
5017 bugs. */
5018
5019 static void
5020 remove_addr_table_entry (addr_table_entry *entry)
5021 {
5022 gcc_assert (dwarf_split_debug_info && addr_index_table);
5023 /* After an index is assigned, the table is frozen. */
5024 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5025 entry->refcount--;
5026 }
5027
5028 /* Given a location list, remove all addresses it refers to from the
5029 address_table. */
5030
5031 static void
5032 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5033 {
5034 for (; descr; descr = descr->dw_loc_next)
5035 if (descr->dw_loc_oprnd1.val_entry != NULL)
5036 {
5037 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5038 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5039 }
5040 }
5041
5042 /* A helper function for dwarf2out_finish called through
5043 htab_traverse. Assign an addr_table_entry its index. All entries
5044 must be collected into the table when this function is called,
5045 because the indexing code relies on htab_traverse to traverse nodes
5046 in the same order for each run. */
5047
5048 int
5049 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5050 {
5051 addr_table_entry *node = *h;
5052
5053 /* Don't index unreferenced nodes. */
5054 if (node->refcount == 0)
5055 return 1;
5056
5057 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5058 node->index = *index;
5059 *index += 1;
5060
5061 return 1;
5062 }
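/* Editorial sketch (not part of GCC): the life cycle of a .debug_addr
   table entry under -gsplit-dwarf, using only the helpers defined above.
   Entries are interned and reference-counted as attributes are created;
   indices are handed out in a single traversal of the table at output
   time, and unreferenced entries are skipped.  */
#if 0
static void
addr_table_entry_lifecycle_sketch (rtx sym)
{
  /* Intern (or look up) the entry and bump its refcount.  */
  addr_table_entry *e = add_addr_table_entry (sym, ate_kind_rtx);

  /* If the attribute referencing it is later dropped, release the entry
     again; this must happen before any index has been assigned.  */
  remove_addr_table_entry (e);

  /* At output time dwarf2out_finish traverses the frozen table and
     calls index_addr_table_entry on each slot to number the entries
     that are still referenced.  */
}
#endif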
5063
5064 /* Add an address constant attribute value to a DIE. When using
5065 dwarf_split_debug_info, address attributes in dies destined for the
5066 final executable should be direct references--setting the parameter
5067 force_direct ensures this behavior. */
5068
5069 static inline void
5070 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5071 bool force_direct)
5072 {
5073 dw_attr_node attr;
5074
5075 attr.dw_attr = attr_kind;
5076 attr.dw_attr_val.val_class = dw_val_class_addr;
5077 attr.dw_attr_val.v.val_addr = addr;
5078 if (dwarf_split_debug_info && !force_direct)
5079 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5080 else
5081 attr.dw_attr_val.val_entry = NULL;
5082 add_dwarf_attr (die, &attr);
5083 }
5084
5085 /* Get the RTX from an address DIE attribute. */
5086
5087 static inline rtx
5088 AT_addr (dw_attr_node *a)
5089 {
5090 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5091 return a->dw_attr_val.v.val_addr;
5092 }
5093
5094 /* Add a file attribute value to a DIE. */
5095
5096 static inline void
5097 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5098 struct dwarf_file_data *fd)
5099 {
5100 dw_attr_node attr;
5101
5102 attr.dw_attr = attr_kind;
5103 attr.dw_attr_val.val_class = dw_val_class_file;
5104 attr.dw_attr_val.val_entry = NULL;
5105 attr.dw_attr_val.v.val_file = fd;
5106 add_dwarf_attr (die, &attr);
5107 }
5108
5109 /* Get the dwarf_file_data from a file DIE attribute. */
5110
5111 static inline struct dwarf_file_data *
5112 AT_file (dw_attr_node *a)
5113 {
5114 gcc_assert (a && (AT_class (a) == dw_val_class_file
5115 || AT_class (a) == dw_val_class_file_implicit));
5116 return a->dw_attr_val.v.val_file;
5117 }
5118
5119 /* Add a vms delta attribute value to a DIE. */
5120
5121 static inline void
5122 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5123 const char *lbl1, const char *lbl2)
5124 {
5125 dw_attr_node attr;
5126
5127 attr.dw_attr = attr_kind;
5128 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5129 attr.dw_attr_val.val_entry = NULL;
5130 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5131 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5132 add_dwarf_attr (die, &attr);
5133 }
5134
5135 /* Add a symbolic view identifier attribute value to a DIE. */
5136
5137 static inline void
5138 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5139 const char *view_label)
5140 {
5141 dw_attr_node attr;
5142
5143 attr.dw_attr = attr_kind;
5144 attr.dw_attr_val.val_class = dw_val_class_symview;
5145 attr.dw_attr_val.val_entry = NULL;
5146 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5147 add_dwarf_attr (die, &attr);
5148 }
5149
5150 /* Add a label identifier attribute value to a DIE. */
5151
5152 static inline void
5153 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5154 const char *lbl_id)
5155 {
5156 dw_attr_node attr;
5157
5158 attr.dw_attr = attr_kind;
5159 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5160 attr.dw_attr_val.val_entry = NULL;
5161 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5162 if (dwarf_split_debug_info)
5163 attr.dw_attr_val.val_entry
5164 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5165 ate_kind_label);
5166 add_dwarf_attr (die, &attr);
5167 }
5168
5169 /* Add a section offset attribute value to a DIE, an offset into the
5170 debug_line section. */
5171
5172 static inline void
5173 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5174 const char *label)
5175 {
5176 dw_attr_node attr;
5177
5178 attr.dw_attr = attr_kind;
5179 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5180 attr.dw_attr_val.val_entry = NULL;
5181 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5182 add_dwarf_attr (die, &attr);
5183 }
5184
5185 /* Add a section offset attribute value to a DIE, an offset into the
5186 debug_loclists section. */
5187
5188 static inline void
5189 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5190 const char *label)
5191 {
5192 dw_attr_node attr;
5193
5194 attr.dw_attr = attr_kind;
5195 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5196 attr.dw_attr_val.val_entry = NULL;
5197 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5198 add_dwarf_attr (die, &attr);
5199 }
5200
5201 /* Add a section offset attribute value to a DIE, an offset into the
5202 debug_macinfo section. */
5203
5204 static inline void
5205 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5206 const char *label)
5207 {
5208 dw_attr_node attr;
5209
5210 attr.dw_attr = attr_kind;
5211 attr.dw_attr_val.val_class = dw_val_class_macptr;
5212 attr.dw_attr_val.val_entry = NULL;
5213 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5214 add_dwarf_attr (die, &attr);
5215 }
5216
5217 /* Add an offset attribute value to a DIE. */
5218
5219 static inline void
5220 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5221 unsigned HOST_WIDE_INT offset)
5222 {
5223 dw_attr_node attr;
5224
5225 attr.dw_attr = attr_kind;
5226 attr.dw_attr_val.val_class = dw_val_class_offset;
5227 attr.dw_attr_val.val_entry = NULL;
5228 attr.dw_attr_val.v.val_offset = offset;
5229 add_dwarf_attr (die, &attr);
5230 }
5231
5232 /* Add a range_list attribute value to a DIE. When using
5233 dwarf_split_debug_info, address attributes in dies destined for the
5234 final executable should be direct references--setting the parameter
5235 force_direct ensures this behavior. */
5236
5237 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5238 #define RELOCATED_OFFSET (NULL)
5239
5240 static void
5241 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5242 long unsigned int offset, bool force_direct)
5243 {
5244 dw_attr_node attr;
5245
5246 attr.dw_attr = attr_kind;
5247 attr.dw_attr_val.val_class = dw_val_class_range_list;
5248 /* For the range_list attribute, use val_entry to store whether the
5249 offset should follow split-debug-info or normal semantics. This
5250 value is read in output_range_list_offset. */
5251 if (dwarf_split_debug_info && !force_direct)
5252 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5253 else
5254 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5255 attr.dw_attr_val.v.val_offset = offset;
5256 add_dwarf_attr (die, &attr);
5257 }
5258
5259 /* Return the start label of a delta attribute. */
5260
5261 static inline const char *
5262 AT_vms_delta1 (dw_attr_node *a)
5263 {
5264 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5265 return a->dw_attr_val.v.val_vms_delta.lbl1;
5266 }
5267
5268 /* Return the end label of a delta attribute. */
5269
5270 static inline const char *
5271 AT_vms_delta2 (dw_attr_node *a)
5272 {
5273 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5274 return a->dw_attr_val.v.val_vms_delta.lbl2;
5275 }
5276
5277 static inline const char *
5278 AT_lbl (dw_attr_node *a)
5279 {
5280 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5281 || AT_class (a) == dw_val_class_lineptr
5282 || AT_class (a) == dw_val_class_macptr
5283 || AT_class (a) == dw_val_class_loclistsptr
5284 || AT_class (a) == dw_val_class_high_pc));
5285 return a->dw_attr_val.v.val_lbl_id;
5286 }
5287
5288 /* Get the attribute of type attr_kind. */
5289
5290 static dw_attr_node *
5291 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5292 {
5293 dw_attr_node *a;
5294 unsigned ix;
5295 dw_die_ref spec = NULL;
5296
5297 if (! die)
5298 return NULL;
5299
5300 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5301 if (a->dw_attr == attr_kind)
5302 return a;
5303 else if (a->dw_attr == DW_AT_specification
5304 || a->dw_attr == DW_AT_abstract_origin)
5305 spec = AT_ref (a);
5306
5307 if (spec)
5308 return get_AT (spec, attr_kind);
5309
5310 return NULL;
5311 }
5312
5313 /* Returns the parent of the declaration of DIE. */
5314
5315 static dw_die_ref
5316 get_die_parent (dw_die_ref die)
5317 {
5318 dw_die_ref t;
5319
5320 if (!die)
5321 return NULL;
5322
5323 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5324 || (t = get_AT_ref (die, DW_AT_specification)))
5325 die = t;
5326
5327 return die->die_parent;
5328 }
5329
5330 /* Return the "low pc" attribute value, typically associated with a subprogram
5331 DIE. Return null if the "low pc" attribute is either not present, or if it
5332 cannot be represented as an assembler label identifier. */
5333
5334 static inline const char *
5335 get_AT_low_pc (dw_die_ref die)
5336 {
5337 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5338
5339 return a ? AT_lbl (a) : NULL;
5340 }
5341
5342 /* Return the "high pc" attribute value, typically associated with a subprogram
5343 DIE. Return null if the "high pc" attribute is either not present, or if it
5344 cannot be represented as an assembler label identifier. */
5345
5346 static inline const char *
5347 get_AT_hi_pc (dw_die_ref die)
5348 {
5349 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5350
5351 return a ? AT_lbl (a) : NULL;
5352 }
5353
5354 /* Return the value of the string attribute designated by ATTR_KIND, or
5355 NULL if it is not present. */
5356
5357 static inline const char *
5358 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5359 {
5360 dw_attr_node *a = get_AT (die, attr_kind);
5361
5362 return a ? AT_string (a) : NULL;
5363 }
5364
5365 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5366 if it is not present. */
5367
5368 static inline int
5369 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5370 {
5371 dw_attr_node *a = get_AT (die, attr_kind);
5372
5373 return a ? AT_flag (a) : 0;
5374 }
5375
5376 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5377 if it is not present. */
5378
5379 static inline unsigned
5380 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5381 {
5382 dw_attr_node *a = get_AT (die, attr_kind);
5383
5384 return a ? AT_unsigned (a) : 0;
5385 }
5386
5387 static inline dw_die_ref
5388 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5389 {
5390 dw_attr_node *a = get_AT (die, attr_kind);
5391
5392 return a ? AT_ref (a) : NULL;
5393 }
5394
5395 static inline struct dwarf_file_data *
5396 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5397 {
5398 dw_attr_node *a = get_AT (die, attr_kind);
5399
5400 return a ? AT_file (a) : NULL;
5401 }
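/* Editorial sketch (not part of GCC): get_AT transparently follows a
   DW_AT_specification or DW_AT_abstract_origin link, so the get_AT_*
   wrappers above also see attributes that only live on the referenced
   declaration or abstract-instance DIE.  */
#if 0
static void
get_AT_sketch (dw_die_ref die)
{
  /* Yields the name even if DIE itself has no DW_AT_name but its
     DW_AT_specification DIE does.  */
  const char *name = get_AT_string (die, DW_AT_name);

  /* Follow a reference-class attribute directly; NULL if absent.  */
  dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);

  (void) name;
  (void) origin;
}
#endif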
5402
5403 /* Return TRUE if the language is C++. */
5404
5405 static inline bool
5406 is_cxx (void)
5407 {
5408 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5409
5410 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5411 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5412 }
5413
5414 /* Return TRUE if DECL was created by the C++ frontend. */
5415
5416 static bool
5417 is_cxx (const_tree decl)
5418 {
5419 if (in_lto_p)
5420 {
5421 const_tree context = get_ultimate_context (decl);
5422 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5423 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5424 }
5425 return is_cxx ();
5426 }
5427
5428 /* Return TRUE if the language is Fortran. */
5429
5430 static inline bool
5431 is_fortran (void)
5432 {
5433 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5434
5435 return (lang == DW_LANG_Fortran77
5436 || lang == DW_LANG_Fortran90
5437 || lang == DW_LANG_Fortran95
5438 || lang == DW_LANG_Fortran03
5439 || lang == DW_LANG_Fortran08);
5440 }
5441
5442 static inline bool
5443 is_fortran (const_tree decl)
5444 {
5445 if (in_lto_p)
5446 {
5447 const_tree context = get_ultimate_context (decl);
5448 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5449 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5450 "GNU Fortran", 11) == 0
5451 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5452 "GNU F77") == 0);
5453 }
5454 return is_fortran ();
5455 }
5456
5457 /* Return TRUE if the language is Ada. */
5458
5459 static inline bool
5460 is_ada (void)
5461 {
5462 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5463
5464 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5465 }
5466
5467 /* Remove the specified attribute if present. Return TRUE if removal
5468 was successful. */
5469
5470 static bool
5471 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5472 {
5473 dw_attr_node *a;
5474 unsigned ix;
5475
5476 if (! die)
5477 return false;
5478
5479 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5480 if (a->dw_attr == attr_kind)
5481 {
5482 if (AT_class (a) == dw_val_class_str)
5483 if (a->dw_attr_val.v.val_str->refcount)
5484 a->dw_attr_val.v.val_str->refcount--;
5485
5486 /* vec::ordered_remove should help reduce the number of abbrevs
5487 that are needed. */
5488 die->die_attr->ordered_remove (ix);
5489 return true;
5490 }
5491 return false;
5492 }
5493
5494 /* Remove CHILD from its parent. PREV must have the property that
5495 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5496
5497 static void
5498 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5499 {
5500 gcc_assert (child->die_parent == prev->die_parent);
5501 gcc_assert (prev->die_sib == child);
5502 if (prev == child)
5503 {
5504 gcc_assert (child->die_parent->die_child == child);
5505 prev = NULL;
5506 }
5507 else
5508 prev->die_sib = child->die_sib;
5509 if (child->die_parent->die_child == child)
5510 child->die_parent->die_child = prev;
5511 child->die_sib = NULL;
5512 }
5513
5514 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5515 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5516
5517 static void
5518 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5519 {
5520 dw_die_ref parent = old_child->die_parent;
5521
5522 gcc_assert (parent == prev->die_parent);
5523 gcc_assert (prev->die_sib == old_child);
5524
5525 new_child->die_parent = parent;
5526 if (prev == old_child)
5527 {
5528 gcc_assert (parent->die_child == old_child);
5529 new_child->die_sib = new_child;
5530 }
5531 else
5532 {
5533 prev->die_sib = new_child;
5534 new_child->die_sib = old_child->die_sib;
5535 }
5536 if (old_child->die_parent->die_child == old_child)
5537 old_child->die_parent->die_child = new_child;
5538 old_child->die_sib = NULL;
5539 }
5540
5541 /* Move all children from OLD_PARENT to NEW_PARENT. */
5542
5543 static void
5544 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5545 {
5546 dw_die_ref c;
5547 new_parent->die_child = old_parent->die_child;
5548 old_parent->die_child = NULL;
5549 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5550 }
5551
5552 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5553 matches TAG. */
5554
5555 static void
5556 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5557 {
5558 dw_die_ref c;
5559
5560 c = die->die_child;
5561 if (c) do {
5562 dw_die_ref prev = c;
5563 c = c->die_sib;
5564 while (c->die_tag == tag)
5565 {
5566 remove_child_with_prev (c, prev);
5567 c->die_parent = NULL;
5568 /* Might have removed every child. */
5569 if (die->die_child == NULL)
5570 return;
5571 c = prev->die_sib;
5572 }
5573 } while (c != die->die_child);
5574 }
5575
5576 /* Add a CHILD_DIE as the last child of DIE. */
5577
5578 static void
5579 add_child_die (dw_die_ref die, dw_die_ref child_die)
5580 {
5581 /* FIXME this should probably be an assert. */
5582 if (! die || ! child_die)
5583 return;
5584 gcc_assert (die != child_die);
5585
5586 child_die->die_parent = die;
5587 if (die->die_child)
5588 {
5589 child_die->die_sib = die->die_child->die_sib;
5590 die->die_child->die_sib = child_die;
5591 }
5592 else
5593 child_die->die_sib = child_die;
5594 die->die_child = child_die;
5595 }
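/* Editorial note (not part of GCC): as add_child_die above shows, the
   children of a DIE form a circular singly-linked list through die_sib,
   and die->die_child points at the most recently added (i.e. last)
   child, so die->die_child->die_sib is the first child.  That is why
   remove_child_with_prev and replace_child take the predecessor in this
   ring rather than the child alone, and why verify_die further below
   checks that the die_sib chain is cyclic.  */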
5596
5597 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5598
5599 static void
5600 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5601 dw_die_ref after_die)
5602 {
5603 gcc_assert (die
5604 && child_die
5605 && after_die
5606 && die->die_child
5607 && die != child_die);
5608
5609 child_die->die_parent = die;
5610 child_die->die_sib = after_die->die_sib;
5611 after_die->die_sib = child_die;
5612 if (die->die_child == after_die)
5613 die->die_child = child_die;
5614 }
5615
5616 /* Unassociate CHILD from its parent, and make its parent be
5617 NEW_PARENT. */
5618
5619 static void
5620 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5621 {
5622 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5623 if (p->die_sib == child)
5624 {
5625 remove_child_with_prev (child, p);
5626 break;
5627 }
5628 add_child_die (new_parent, child);
5629 }
5630
5631 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5632 is the specification, to the end of PARENT's list of children.
5633 This is done by removing and re-adding it. */
5634
5635 static void
5636 splice_child_die (dw_die_ref parent, dw_die_ref child)
5637 {
5638 /* We want the declaration DIE from inside the class, not the
5639 specification DIE at toplevel. */
5640 if (child->die_parent != parent)
5641 {
5642 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5643
5644 if (tmp)
5645 child = tmp;
5646 }
5647
5648 gcc_assert (child->die_parent == parent
5649 || (child->die_parent
5650 == get_AT_ref (parent, DW_AT_specification)));
5651
5652 reparent_child (child, parent);
5653 }
5654
5655 /* Create and return a new die with TAG_VALUE as tag. */
5656
5657 static inline dw_die_ref
5658 new_die_raw (enum dwarf_tag tag_value)
5659 {
5660 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5661 die->die_tag = tag_value;
5662 return die;
5663 }
5664
5665 /* Create and return a new die with a parent of PARENT_DIE. If
5666 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5667 associated tree T must be supplied to determine parenthood
5668 later. */
5669
5670 static inline dw_die_ref
5671 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5672 {
5673 dw_die_ref die = new_die_raw (tag_value);
5674
5675 if (parent_die != NULL)
5676 add_child_die (parent_die, die);
5677 else
5678 {
5679 limbo_die_node *limbo_node;
5680
5681 /* No DIEs created after early dwarf should end up in limbo,
5682 because the limbo list should not persist past LTO
5683 streaming. */
5684 if (tag_value != DW_TAG_compile_unit
5685 /* These are allowed because they're generated while
5686 breaking out COMDAT units late. */
5687 && tag_value != DW_TAG_type_unit
5688 && tag_value != DW_TAG_skeleton_unit
5689 && !early_dwarf
5690 /* Allow nested functions to live in limbo because they will
5691 only temporarily live there, as decls_for_scope will fix
5692 them up. */
5693 && (TREE_CODE (t) != FUNCTION_DECL
5694 || !decl_function_context (t))
5695 /* Same as nested functions above but for types. Types that
5696 are local to a function will be fixed in
5697 decls_for_scope. */
5698 && (!RECORD_OR_UNION_TYPE_P (t)
5699 || !TYPE_CONTEXT (t)
5700 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5701 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5702 especially in the ltrans stage, but once we implement LTO
5703 dwarf streaming, we should remove this exception. */
5704 && !in_lto_p)
5705 {
5706 fprintf (stderr, "symbol ended up in limbo too late:");
5707 debug_generic_stmt (t);
5708 gcc_unreachable ();
5709 }
5710
5711 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5712 limbo_node->die = die;
5713 limbo_node->created_for = t;
5714 limbo_node->next = limbo_die_list;
5715 limbo_die_list = limbo_node;
5716 }
5717
5718 return die;
5719 }
5720
5721 /* Return the DIE associated with the given type specifier. */
5722
5723 static inline dw_die_ref
5724 lookup_type_die (tree type)
5725 {
5726 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5727 if (die && die->removed)
5728 {
5729 TYPE_SYMTAB_DIE (type) = NULL;
5730 return NULL;
5731 }
5732 return die;
5733 }
5734
5735 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5736 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5737 anonymous type instead of the one of the naming typedef. */
5738
5739 static inline dw_die_ref
5740 strip_naming_typedef (tree type, dw_die_ref type_die)
5741 {
5742 if (type
5743 && TREE_CODE (type) == RECORD_TYPE
5744 && type_die
5745 && type_die->die_tag == DW_TAG_typedef
5746 && is_naming_typedef_decl (TYPE_NAME (type)))
5747 type_die = get_AT_ref (type_die, DW_AT_type);
5748 return type_die;
5749 }
5750
5751 /* Like lookup_type_die, but if type is an anonymous type named by a
5752 typedef[1], return the DIE of the anonymous type instead of the one of
5753 the naming typedef. This is because in gen_typedef_die, we equated
5754 the anonymous struct named by the typedef with the DIE of
5755 the naming typedef. So by default, lookup_type_die on an anonymous
5756 struct yields the DIE of the naming typedef.
5757
5758 [1]: Read the comment of is_naming_typedef_decl to learn about what
5759 a naming typedef is. */
5760
5761 static inline dw_die_ref
5762 lookup_type_die_strip_naming_typedef (tree type)
5763 {
5764 dw_die_ref die = lookup_type_die (type);
5765 return strip_naming_typedef (type, die);
5766 }
5767
5768 /* Equate a DIE to a given type specifier. */
5769
5770 static inline void
5771 equate_type_number_to_die (tree type, dw_die_ref type_die)
5772 {
5773 TYPE_SYMTAB_DIE (type) = type_die;
5774 }
5775
5776 /* Returns a hash value for X (which really is a die_struct). */
5777
5778 inline hashval_t
5779 decl_die_hasher::hash (die_node *x)
5780 {
5781 return (hashval_t) x->decl_id;
5782 }
5783
5784 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5785
5786 inline bool
5787 decl_die_hasher::equal (die_node *x, tree y)
5788 {
5789 return (x->decl_id == DECL_UID (y));
5790 }
5791
5792 /* Return the DIE associated with a given declaration. */
5793
5794 static inline dw_die_ref
5795 lookup_decl_die (tree decl)
5796 {
5797 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5798 NO_INSERT);
5799 if (!die)
5800 return NULL;
5801 if ((*die)->removed)
5802 {
5803 decl_die_table->clear_slot (die);
5804 return NULL;
5805 }
5806 return *die;
5807 }
5808
5809
5810 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5811 style reference. Return true if we found one referring to a DIE for
5812 DECL, otherwise return false. */
5813
5814 static bool
5815 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5816 unsigned HOST_WIDE_INT *off)
5817 {
5818 dw_die_ref die;
5819
5820 if (flag_wpa && !decl_die_table)
5821 return false;
5822
5823 if (TREE_CODE (decl) == BLOCK)
5824 die = BLOCK_DIE (decl);
5825 else
5826 die = lookup_decl_die (decl);
5827 if (!die)
5828 return false;
5829
5830 /* During WPA stage we currently use DIEs to store the
5831 decl <-> label + offset map. That's quite inefficient but it
5832 works for now. */
5833 if (flag_wpa)
5834 {
5835 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5836 if (!ref)
5837 {
5838 gcc_assert (die == comp_unit_die ());
5839 return false;
5840 }
5841 *off = ref->die_offset;
5842 *sym = ref->die_id.die_symbol;
5843 return true;
5844 }
5845
5846 /* Similar to get_ref_die_offset_label, but using the "correct"
5847 label. */
5848 *off = die->die_offset;
5849 while (die->die_parent)
5850 die = die->die_parent;
5851 /* For the containing CU DIE we compute a die_symbol in
5852 compute_comp_unit_symbol. */
5853 gcc_assert (die->die_tag == DW_TAG_compile_unit
5854 && die->die_id.die_symbol != NULL);
5855 *sym = die->die_id.die_symbol;
5856 return true;
5857 }
5858
5859 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5860
5861 static void
5862 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5863 const char *symbol, HOST_WIDE_INT offset)
5864 {
5865 /* Create a fake DIE that contains the reference. Don't use
5866 new_die because we don't want to end up in the limbo list. */
5867 dw_die_ref ref = new_die_raw (die->die_tag);
5868 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5869 ref->die_offset = offset;
5870 ref->with_offset = 1;
5871 add_AT_die_ref (die, attr_kind, ref);
5872 }
5873
5874 /* Create a DIE for DECL if required and add a reference to a DIE
5875 at SYMBOL + OFFSET which contains attributes dumped early. */
5876
5877 static void
5878 dwarf2out_register_external_die (tree decl, const char *sym,
5879 unsigned HOST_WIDE_INT off)
5880 {
5881 if (debug_info_level == DINFO_LEVEL_NONE)
5882 return;
5883
5884 if (flag_wpa && !decl_die_table)
5885 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5886
5887 dw_die_ref die
5888 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5889 gcc_assert (!die);
5890
5891 tree ctx;
5892 dw_die_ref parent = NULL;
5893 /* Need to look up a DIE for the decl's context - the containing
5894 function or translation unit. */
5895 if (TREE_CODE (decl) == BLOCK)
5896 {
5897 ctx = BLOCK_SUPERCONTEXT (decl);
5898 /* ??? We do not output DIEs for all scopes, so skip as
5899 many scopes as needed. */
5900 while (TREE_CODE (ctx) == BLOCK
5901 && !BLOCK_DIE (ctx))
5902 ctx = BLOCK_SUPERCONTEXT (ctx);
5903 }
5904 else
5905 ctx = DECL_CONTEXT (decl);
5906 /* Peel types in the context stack. */
5907 while (ctx && TYPE_P (ctx))
5908 ctx = TYPE_CONTEXT (ctx);
5909 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5910 if (debug_info_level <= DINFO_LEVEL_TERSE)
5911 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5912 ctx = DECL_CONTEXT (ctx);
5913 if (ctx)
5914 {
5915 if (TREE_CODE (ctx) == BLOCK)
5916 parent = BLOCK_DIE (ctx);
5917 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5918 /* Keep the 1:1 association during WPA. */
5919 && !flag_wpa)
5920 /* Otherwise all late annotations go to the main CU which
5921 imports the original CUs. */
5922 parent = comp_unit_die ();
5923 else if (TREE_CODE (ctx) == FUNCTION_DECL
5924 && TREE_CODE (decl) != PARM_DECL
5925 && TREE_CODE (decl) != BLOCK)
5926 /* Leave function local entities parent determination to when
5927 we process scope vars. */
5928 ;
5929 else
5930 parent = lookup_decl_die (ctx);
5931 }
5932 else
5933 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5934 Handle this case gracefully by parenting such DIEs at the CU level. */
5935 parent = comp_unit_die ();
5936 /* Create a DIE "stub". */
5937 switch (TREE_CODE (decl))
5938 {
5939 case TRANSLATION_UNIT_DECL:
5940 if (! flag_wpa)
5941 {
5942 die = comp_unit_die ();
5943 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5944 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5945 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5946 to create a DIE for the original CUs. */
5947 return;
5948 }
5949 /* Keep the 1:1 association during WPA. */
5950 die = new_die (DW_TAG_compile_unit, NULL, decl);
5951 break;
5952 case NAMESPACE_DECL:
5953 if (is_fortran (decl))
5954 die = new_die (DW_TAG_module, parent, decl);
5955 else
5956 die = new_die (DW_TAG_namespace, parent, decl);
5957 break;
5958 case FUNCTION_DECL:
5959 die = new_die (DW_TAG_subprogram, parent, decl);
5960 break;
5961 case VAR_DECL:
5962 die = new_die (DW_TAG_variable, parent, decl);
5963 break;
5964 case RESULT_DECL:
5965 die = new_die (DW_TAG_variable, parent, decl);
5966 break;
5967 case PARM_DECL:
5968 die = new_die (DW_TAG_formal_parameter, parent, decl);
5969 break;
5970 case CONST_DECL:
5971 die = new_die (DW_TAG_constant, parent, decl);
5972 break;
5973 case LABEL_DECL:
5974 die = new_die (DW_TAG_label, parent, decl);
5975 break;
5976 case BLOCK:
5977 die = new_die (DW_TAG_lexical_block, parent, decl);
5978 break;
5979 default:
5980 gcc_unreachable ();
5981 }
5982 if (TREE_CODE (decl) == BLOCK)
5983 BLOCK_DIE (decl) = die;
5984 else
5985 equate_decl_number_to_die (decl, die);
5986
5987 /* Add a reference to the DIE providing early debug at $sym + off. */
5988 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5989 }
5990
5991 /* Returns a hash value for X (which really is a var_loc_list). */
5992
5993 inline hashval_t
5994 decl_loc_hasher::hash (var_loc_list *x)
5995 {
5996 return (hashval_t) x->decl_id;
5997 }
5998
5999 /* Return nonzero if decl_id of var_loc_list X is the same as
6000 UID of decl *Y. */
6001
6002 inline bool
6003 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6004 {
6005 return (x->decl_id == DECL_UID (y));
6006 }
6007
6008 /* Return the var_loc list associated with a given declaration. */
6009
6010 static inline var_loc_list *
6011 lookup_decl_loc (const_tree decl)
6012 {
6013 if (!decl_loc_table)
6014 return NULL;
6015 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6016 }
6017
6018 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6019
6020 inline hashval_t
6021 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6022 {
6023 return (hashval_t) x->decl_id;
6024 }
6025
6026 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6027 UID of decl *Y. */
6028
6029 inline bool
6030 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6031 {
6032 return (x->decl_id == DECL_UID (y));
6033 }
6034
6035 /* Equate a DIE to a particular declaration. */
6036
6037 static void
6038 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6039 {
6040 unsigned int decl_id = DECL_UID (decl);
6041
6042 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6043 decl_die->decl_id = decl_id;
6044 }
6045
6046 /* Return how many bits the PIECE EXPR_LIST covers. */
6047
6048 static HOST_WIDE_INT
6049 decl_piece_bitsize (rtx piece)
6050 {
6051 int ret = (int) GET_MODE (piece);
6052 if (ret)
6053 return ret;
6054 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6055 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6056 return INTVAL (XEXP (XEXP (piece, 0), 0));
6057 }
6058
6059 /* Return a pointer to the location note stored in the PIECE EXPR_LIST. */
6060
6061 static rtx *
6062 decl_piece_varloc_ptr (rtx piece)
6063 {
6064 if ((int) GET_MODE (piece))
6065 return &XEXP (piece, 0);
6066 else
6067 return &XEXP (XEXP (piece, 0), 1);
6068 }
6069
6070 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6071 NEXT is the chain of following piece nodes. */
6072
6073 static rtx_expr_list *
6074 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6075 {
6076 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6077 return alloc_EXPR_LIST (bitsize, loc_note, next);
6078 else
6079 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6080 GEN_INT (bitsize),
6081 loc_note), next);
6082 }
6083
6084 /* Return rtx that should be stored into loc field for
6085 LOC_NOTE and BITPOS/BITSIZE. */
6086
6087 static rtx
6088 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6089 HOST_WIDE_INT bitsize)
6090 {
6091 if (bitsize != -1)
6092 {
6093 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6094 if (bitpos != 0)
6095 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6096 }
6097 return loc_note;
6098 }
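/* Editorial sketch (not part of GCC): the shape of a piece list built
   by the helpers above.  decl_piece_node stores small bit sizes
   directly in the EXPR_LIST's machine-mode field and falls back to a
   (CONCAT bitsize note) pair for larger sizes; construct_piece_list
   prepends an empty padding piece when the value does not start at
   bit 0.  The sketch assumes the sizes fit in the mode field, as they
   do on typical targets.  */
#if 0
static void
piece_list_sketch (rtx loc_note)
{
  /* A 32-bit piece starting at bit offset 32: the result is a 32-bit
     padding piece (NULL_RTX note) followed by a 32-bit piece holding
     LOC_NOTE.  */
  rtx list = construct_piece_list (loc_note, 32, 32);

  /* decl_piece_bitsize recovers the size of the leading (padding)
     piece, and decl_piece_varloc_ptr points at its location note.  */
  gcc_assert (decl_piece_bitsize (list) == 32);
  gcc_assert (*decl_piece_varloc_ptr (list) == NULL_RTX);
}
#endif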
6099
6100 /* This function either modifies the location piece list *DEST in
6101 place (if SRC and INNER are NULL), or copies the location piece list
6102 *SRC to *DEST while modifying it. The piece at BITPOS is changed
6103 to contain LOC_NOTE; any pieces overlapping it are removed (or,
6104 when copying, not copied) and padding is added around it if needed.
6105 When modifying in place, DEST should point to the EXPR_LIST where
6106 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6107 to the start of the whole list and INNER points to the EXPR_LIST
6108 where earlier pieces cover PIECE_BITPOS bits. */
6109
6110 static void
6111 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6112 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6113 HOST_WIDE_INT bitsize, rtx loc_note)
6114 {
6115 HOST_WIDE_INT diff;
6116 bool copy = inner != NULL;
6117
6118 if (copy)
6119 {
6120 /* First copy all nodes preceding the current bitpos. */
6121 while (src != inner)
6122 {
6123 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6124 decl_piece_bitsize (*src), NULL_RTX);
6125 dest = &XEXP (*dest, 1);
6126 src = &XEXP (*src, 1);
6127 }
6128 }
6129 /* Add padding if needed. */
6130 if (bitpos != piece_bitpos)
6131 {
6132 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6133 copy ? NULL_RTX : *dest);
6134 dest = &XEXP (*dest, 1);
6135 }
6136 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6137 {
6138 gcc_assert (!copy);
6139 /* A piece with the correct bitpos and bitsize already exists;
6140 just update its location and return. */
6141 *decl_piece_varloc_ptr (*dest) = loc_note;
6142 return;
6143 }
6144 /* Add the piece that changed. */
6145 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6146 dest = &XEXP (*dest, 1);
6147 /* Skip over pieces that overlap it. */
6148 diff = bitpos - piece_bitpos + bitsize;
6149 if (!copy)
6150 src = dest;
6151 while (diff > 0 && *src)
6152 {
6153 rtx piece = *src;
6154 diff -= decl_piece_bitsize (piece);
6155 if (copy)
6156 src = &XEXP (piece, 1);
6157 else
6158 {
6159 *src = XEXP (piece, 1);
6160 free_EXPR_LIST_node (piece);
6161 }
6162 }
6163 /* Add padding if needed. */
6164 if (diff < 0 && *src)
6165 {
6166 if (!copy)
6167 dest = src;
6168 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6169 dest = &XEXP (*dest, 1);
6170 }
6171 if (!copy)
6172 return;
6173 /* Finally copy all nodes following it. */
6174 while (*src)
6175 {
6176 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6177 decl_piece_bitsize (*src), NULL_RTX);
6178 dest = &XEXP (*dest, 1);
6179 src = &XEXP (*src, 1);
6180 }
6181 }
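/* Editorial note (not part of GCC): a small worked example of the
   in-place mode (SRC and INNER NULL).  Suppose *DEST currently holds
   two pieces, bits 0..31 and bits 32..63, and a new note arrives for
   bits 32..63 at the same label.  add_var_loc_to_decl below walks the
   list until PIECE_BITPOS reaches 32 and calls this function with
   BITPOS == PIECE_BITPOS == 32 and BITSIZE == 32; the existing piece
   matches exactly, so only its location note is overwritten and the
   rest of the list is left untouched.  */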
6182
6183 /* Add a variable location node to the linked list for DECL. */
6184
6185 static struct var_loc_node *
6186 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6187 {
6188 unsigned int decl_id;
6189 var_loc_list *temp;
6190 struct var_loc_node *loc = NULL;
6191 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6192
6193 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6194 {
6195 tree realdecl = DECL_DEBUG_EXPR (decl);
6196 if (handled_component_p (realdecl)
6197 || (TREE_CODE (realdecl) == MEM_REF
6198 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6199 {
6200 bool reverse;
6201 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6202 &bitsize, &reverse);
6203 if (!innerdecl
6204 || !DECL_P (innerdecl)
6205 || DECL_IGNORED_P (innerdecl)
6206 || TREE_STATIC (innerdecl)
6207 || bitsize == 0
6208 || bitpos + bitsize > 256)
6209 return NULL;
6210 decl = innerdecl;
6211 }
6212 }
6213
6214 decl_id = DECL_UID (decl);
6215 var_loc_list **slot
6216 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6217 if (*slot == NULL)
6218 {
6219 temp = ggc_cleared_alloc<var_loc_list> ();
6220 temp->decl_id = decl_id;
6221 *slot = temp;
6222 }
6223 else
6224 temp = *slot;
6225
6226 /* For PARM_DECLs try to keep around the original incoming value,
6227 even if that means we'll emit a zero-range .debug_loc entry. */
6228 if (temp->last
6229 && temp->first == temp->last
6230 && TREE_CODE (decl) == PARM_DECL
6231 && NOTE_P (temp->first->loc)
6232 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6233 && DECL_INCOMING_RTL (decl)
6234 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6235 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6236 == GET_CODE (DECL_INCOMING_RTL (decl))
6237 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6238 && (bitsize != -1
6239 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6240 NOTE_VAR_LOCATION_LOC (loc_note))
6241 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6242 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6243 {
6244 loc = ggc_cleared_alloc<var_loc_node> ();
6245 temp->first->next = loc;
6246 temp->last = loc;
6247 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6248 }
6249 else if (temp->last)
6250 {
6251 struct var_loc_node *last = temp->last, *unused = NULL;
6252 rtx *piece_loc = NULL, last_loc_note;
6253 HOST_WIDE_INT piece_bitpos = 0;
6254 if (last->next)
6255 {
6256 last = last->next;
6257 gcc_assert (last->next == NULL);
6258 }
6259 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6260 {
6261 piece_loc = &last->loc;
6262 do
6263 {
6264 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6265 if (piece_bitpos + cur_bitsize > bitpos)
6266 break;
6267 piece_bitpos += cur_bitsize;
6268 piece_loc = &XEXP (*piece_loc, 1);
6269 }
6270 while (*piece_loc);
6271 }
6272 /* At this point TEMP->LAST points either to the last element of
6273 the chained list or to the one before it; LAST points to the
6274 last element. */
6275 if (label && strcmp (last->label, label) == 0 && last->view == view)
6276 {
6277 /* For SRA-optimized variables, if there weren't any real
6278 insns since the last note, just modify the last node. */
6279 if (piece_loc != NULL)
6280 {
6281 adjust_piece_list (piece_loc, NULL, NULL,
6282 bitpos, piece_bitpos, bitsize, loc_note);
6283 return NULL;
6284 }
6285 /* If the last note doesn't cover any instructions, remove it. */
6286 if (temp->last != last)
6287 {
6288 temp->last->next = NULL;
6289 unused = last;
6290 last = temp->last;
6291 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6292 }
6293 else
6294 {
6295 gcc_assert (temp->first == temp->last
6296 || (temp->first->next == temp->last
6297 && TREE_CODE (decl) == PARM_DECL));
6298 memset (temp->last, '\0', sizeof (*temp->last));
6299 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6300 return temp->last;
6301 }
6302 }
6303 if (bitsize == -1 && NOTE_P (last->loc))
6304 last_loc_note = last->loc;
6305 else if (piece_loc != NULL
6306 && *piece_loc != NULL_RTX
6307 && piece_bitpos == bitpos
6308 && decl_piece_bitsize (*piece_loc) == bitsize)
6309 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6310 else
6311 last_loc_note = NULL_RTX;
6312 /* If the current location is the same as the end of the list,
6313 and either both or neither of the locations is uninitialized,
6314 we have nothing to do. */
6315 if (last_loc_note == NULL_RTX
6316 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6317 NOTE_VAR_LOCATION_LOC (loc_note)))
6318 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6319 != NOTE_VAR_LOCATION_STATUS (loc_note))
6320 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6321 == VAR_INIT_STATUS_UNINITIALIZED)
6322 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6323 == VAR_INIT_STATUS_UNINITIALIZED))))
6324 {
6325 /* Add LOC to the end of list and update LAST. If the last
6326 element of the list has been removed above, reuse its
6327 memory for the new node, otherwise allocate a new one. */
6328 if (unused)
6329 {
6330 loc = unused;
6331 memset (loc, '\0', sizeof (*loc));
6332 }
6333 else
6334 loc = ggc_cleared_alloc<var_loc_node> ();
6335 if (bitsize == -1 || piece_loc == NULL)
6336 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6337 else
6338 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6339 bitpos, piece_bitpos, bitsize, loc_note);
6340 last->next = loc;
6341 /* Ensure TEMP->LAST will point either to the new last but one
6342 element of the chain, or to the last element in it. */
6343 if (last != temp->last)
6344 temp->last = last;
6345 }
6346 else if (unused)
6347 ggc_free (unused);
6348 }
6349 else
6350 {
6351 loc = ggc_cleared_alloc<var_loc_node> ();
6352 temp->first = loc;
6353 temp->last = loc;
6354 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6355 }
6356 return loc;
6357 }
6358 \f
6359 /* Keep track of the number of spaces used to indent the
6360 output of the debugging routines that print the structure of
6361 the DIE internal representation. */
6362 static int print_indent;
6363
6364 /* Indent the line the number of spaces given by print_indent. */
6365
6366 static inline void
6367 print_spaces (FILE *outfile)
6368 {
6369 fprintf (outfile, "%*s", print_indent, "");
6370 }
6371
6372 /* Print a type signature in hex. */
6373
6374 static inline void
6375 print_signature (FILE *outfile, char *sig)
6376 {
6377 int i;
6378
6379 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6380 fprintf (outfile, "%02x", sig[i] & 0xff);
6381 }
6382
6383 static inline void
6384 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6385 {
6386 if (discr_value->pos)
6387 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6388 else
6389 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6390 }
6391
6392 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6393
6394 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6395 RECURSE, output location descriptor operations. */
6396
6397 static void
6398 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6399 {
6400 switch (val->val_class)
6401 {
6402 case dw_val_class_addr:
6403 fprintf (outfile, "address");
6404 break;
6405 case dw_val_class_offset:
6406 fprintf (outfile, "offset");
6407 break;
6408 case dw_val_class_loc:
6409 fprintf (outfile, "location descriptor");
6410 if (val->v.val_loc == NULL)
6411 fprintf (outfile, " -> <null>\n");
6412 else if (recurse)
6413 {
6414 fprintf (outfile, ":\n");
6415 print_indent += 4;
6416 print_loc_descr (val->v.val_loc, outfile);
6417 print_indent -= 4;
6418 }
6419 else
6420 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6421 break;
6422 case dw_val_class_loc_list:
6423 fprintf (outfile, "location list -> label:%s",
6424 val->v.val_loc_list->ll_symbol);
6425 break;
6426 case dw_val_class_view_list:
6427 val = view_list_to_loc_list_val_node (val);
6428 fprintf (outfile, "location list with views -> labels:%s and %s",
6429 val->v.val_loc_list->ll_symbol,
6430 val->v.val_loc_list->vl_symbol);
6431 break;
6432 case dw_val_class_range_list:
6433 fprintf (outfile, "range list");
6434 break;
6435 case dw_val_class_const:
6436 case dw_val_class_const_implicit:
6437 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6438 break;
6439 case dw_val_class_unsigned_const:
6440 case dw_val_class_unsigned_const_implicit:
6441 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6442 break;
6443 case dw_val_class_const_double:
6444 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6445 HOST_WIDE_INT_PRINT_UNSIGNED")",
6446 val->v.val_double.high,
6447 val->v.val_double.low);
6448 break;
6449 case dw_val_class_wide_int:
6450 {
6451 int i = val->v.val_wide->get_len ();
6452 fprintf (outfile, "constant (");
6453 gcc_assert (i > 0);
6454 if (val->v.val_wide->elt (i - 1) == 0)
6455 fprintf (outfile, "0x");
6456 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6457 val->v.val_wide->elt (--i));
6458 while (--i >= 0)
6459 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6460 val->v.val_wide->elt (i));
6461 fprintf (outfile, ")");
6462 break;
6463 }
6464 case dw_val_class_vec:
6465 fprintf (outfile, "floating-point or vector constant");
6466 break;
6467 case dw_val_class_flag:
6468 fprintf (outfile, "%u", val->v.val_flag);
6469 break;
6470 case dw_val_class_die_ref:
6471 if (val->v.val_die_ref.die != NULL)
6472 {
6473 dw_die_ref die = val->v.val_die_ref.die;
6474
6475 if (die->comdat_type_p)
6476 {
6477 fprintf (outfile, "die -> signature: ");
6478 print_signature (outfile,
6479 die->die_id.die_type_node->signature);
6480 }
6481 else if (die->die_id.die_symbol)
6482 {
6483 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6484 if (die->with_offset)
6485 fprintf (outfile, " + %ld", die->die_offset);
6486 }
6487 else
6488 fprintf (outfile, "die -> %ld", die->die_offset);
6489 fprintf (outfile, " (%p)", (void *) die);
6490 }
6491 else
6492 fprintf (outfile, "die -> <null>");
6493 break;
6494 case dw_val_class_vms_delta:
6495 fprintf (outfile, "delta: @slotcount(%s-%s)",
6496 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6497 break;
6498 case dw_val_class_symview:
6499 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6500 break;
6501 case dw_val_class_lbl_id:
6502 case dw_val_class_lineptr:
6503 case dw_val_class_macptr:
6504 case dw_val_class_loclistsptr:
6505 case dw_val_class_high_pc:
6506 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6507 break;
6508 case dw_val_class_str:
6509 if (val->v.val_str->str != NULL)
6510 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6511 else
6512 fprintf (outfile, "<null>");
6513 break;
6514 case dw_val_class_file:
6515 case dw_val_class_file_implicit:
6516 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6517 val->v.val_file->emitted_number);
6518 break;
6519 case dw_val_class_data8:
6520 {
6521 int i;
6522
6523 for (i = 0; i < 8; i++)
6524 fprintf (outfile, "%02x", val->v.val_data8[i]);
6525 break;
6526 }
6527 case dw_val_class_discr_value:
6528 print_discr_value (outfile, &val->v.val_discr_value);
6529 break;
6530 case dw_val_class_discr_list:
6531 for (dw_discr_list_ref node = val->v.val_discr_list;
6532 node != NULL;
6533 node = node->dw_discr_next)
6534 {
6535 if (node->dw_discr_range)
6536 {
6537 fprintf (outfile, " .. ");
6538 print_discr_value (outfile, &node->dw_discr_lower_bound);
6539 print_discr_value (outfile, &node->dw_discr_upper_bound);
6540 }
6541 else
6542 print_discr_value (outfile, &node->dw_discr_lower_bound);
6543
6544 if (node->dw_discr_next != NULL)
6545 fprintf (outfile, " | ");
6546 }
6547 default:
6548 break;
6549 }
6550 }
6551
6552 /* Likewise, for a DIE attribute. */
6553
6554 static void
6555 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6556 {
6557 print_dw_val (&a->dw_attr_val, recurse, outfile);
6558 }
6559
6560
6561 /* Print the list of operands in the LOC location description to OUTFILE. This
6562 routine is a debugging aid only. */
6563
6564 static void
6565 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6566 {
6567 dw_loc_descr_ref l = loc;
6568
6569 if (loc == NULL)
6570 {
6571 print_spaces (outfile);
6572 fprintf (outfile, "<null>\n");
6573 return;
6574 }
6575
6576 for (l = loc; l != NULL; l = l->dw_loc_next)
6577 {
6578 print_spaces (outfile);
6579 fprintf (outfile, "(%p) %s",
6580 (void *) l,
6581 dwarf_stack_op_name (l->dw_loc_opc));
6582 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6583 {
6584 fprintf (outfile, " ");
6585 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6586 }
6587 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6588 {
6589 fprintf (outfile, ", ");
6590 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6591 }
6592 fprintf (outfile, "\n");
6593 }
6594 }
6595
6596 /* Print the information associated with a given DIE, and its children.
6597 This routine is a debugging aid only. */
6598
6599 static void
6600 print_die (dw_die_ref die, FILE *outfile)
6601 {
6602 dw_attr_node *a;
6603 dw_die_ref c;
6604 unsigned ix;
6605
6606 print_spaces (outfile);
6607 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6608 die->die_offset, dwarf_tag_name (die->die_tag),
6609 (void*) die);
6610 print_spaces (outfile);
6611 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6612 fprintf (outfile, " offset: %ld", die->die_offset);
6613 fprintf (outfile, " mark: %d\n", die->die_mark);
6614
6615 if (die->comdat_type_p)
6616 {
6617 print_spaces (outfile);
6618 fprintf (outfile, " signature: ");
6619 print_signature (outfile, die->die_id.die_type_node->signature);
6620 fprintf (outfile, "\n");
6621 }
6622
6623 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6624 {
6625 print_spaces (outfile);
6626 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6627
6628 print_attribute (a, true, outfile);
6629 fprintf (outfile, "\n");
6630 }
6631
6632 if (die->die_child != NULL)
6633 {
6634 print_indent += 4;
6635 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6636 print_indent -= 4;
6637 }
6638 if (print_indent == 0)
6639 fprintf (outfile, "\n");
6640 }
6641
6642 /* Print the list of operations in the LOC location description. */
6643
6644 DEBUG_FUNCTION void
6645 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6646 {
6647 print_loc_descr (loc, stderr);
6648 }
6649
6650 /* Print the information collected for a given DIE. */
6651
6652 DEBUG_FUNCTION void
6653 debug_dwarf_die (dw_die_ref die)
6654 {
6655 print_die (die, stderr);
6656 }
6657
6658 DEBUG_FUNCTION void
6659 debug (die_struct &ref)
6660 {
6661 print_die (&ref, stderr);
6662 }
6663
6664 DEBUG_FUNCTION void
6665 debug (die_struct *ptr)
6666 {
6667 if (ptr)
6668 debug (*ptr);
6669 else
6670 fprintf (stderr, "<nil>\n");
6671 }
6672
6673
6674 /* Print all DWARF information collected for the compilation unit.
6675 This routine is a debugging aid only. */
6676
6677 DEBUG_FUNCTION void
6678 debug_dwarf (void)
6679 {
6680 print_indent = 0;
6681 print_die (comp_unit_die (), stderr);
6682 }
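/* Editorial note (not part of GCC): the DEBUG_FUNCTION routines above
   are meant to be invoked by hand from a debugger rather than from the
   compiler itself, e.g. while cc1 is stopped under gdb:

     (gdb) call debug_dwarf ()
     (gdb) call debug_dwarf_die (die)
     (gdb) call debug_dwarf_loc_descr (loc)

   to dump, respectively, the whole comp_unit_die () tree, a single DIE,
   or a location expression, assuming `die' and `loc' are visible in the
   selected frame.  */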
6683
6684 /* Verify the DIE tree structure. */
6685
6686 DEBUG_FUNCTION void
6687 verify_die (dw_die_ref die)
6688 {
6689 gcc_assert (!die->die_mark);
6690 if (die->die_parent == NULL
6691 && die->die_sib == NULL)
6692 return;
6693 /* Verify the die_sib list is cyclic. */
6694 dw_die_ref x = die;
6695 do
6696 {
6697 x->die_mark = 1;
6698 x = x->die_sib;
6699 }
6700 while (x && !x->die_mark);
6701 gcc_assert (x == die);
6702 x = die;
6703 do
6704 {
6705 /* Verify all dies have the same parent. */
6706 gcc_assert (x->die_parent == die->die_parent);
6707 if (x->die_child)
6708 {
6709 /* Verify the child has the proper parent and recurse. */
6710 gcc_assert (x->die_child->die_parent == x);
6711 verify_die (x->die_child);
6712 }
6713 x->die_mark = 0;
6714 x = x->die_sib;
6715 }
6716 while (x && x->die_mark);
6717 }
6718
6719 /* Sanity checks on DIEs. */
6720
6721 static void
6722 check_die (dw_die_ref die)
6723 {
6724 unsigned ix;
6725 dw_attr_node *a;
6726 bool inline_found = false;
6727 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6728 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6729 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6730 {
6731 switch (a->dw_attr)
6732 {
6733 case DW_AT_inline:
6734 if (a->dw_attr_val.v.val_unsigned)
6735 inline_found = true;
6736 break;
6737 case DW_AT_location:
6738 ++n_location;
6739 break;
6740 case DW_AT_low_pc:
6741 ++n_low_pc;
6742 break;
6743 case DW_AT_high_pc:
6744 ++n_high_pc;
6745 break;
6746 case DW_AT_artificial:
6747 ++n_artificial;
6748 break;
6749 case DW_AT_decl_column:
6750 ++n_decl_column;
6751 break;
6752 case DW_AT_decl_line:
6753 ++n_decl_line;
6754 break;
6755 case DW_AT_decl_file:
6756 ++n_decl_file;
6757 break;
6758 default:
6759 break;
6760 }
6761 }
6762 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6763 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6764 {
6765 fprintf (stderr, "Duplicate attributes in DIE:\n");
6766 debug_dwarf_die (die);
6767 gcc_unreachable ();
6768 }
6769 if (inline_found)
6770 {
6771 /* A debugging information entry that is a member of an abstract
6772 instance tree [that has DW_AT_inline] should not contain any
6773 attributes which describe aspects of the subroutine which vary
6774 between distinct inlined expansions or distinct out-of-line
6775 expansions. */
6776 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6777 gcc_assert (a->dw_attr != DW_AT_low_pc
6778 && a->dw_attr != DW_AT_high_pc
6779 && a->dw_attr != DW_AT_location
6780 && a->dw_attr != DW_AT_frame_base
6781 && a->dw_attr != DW_AT_call_all_calls
6782 && a->dw_attr != DW_AT_GNU_all_call_sites);
6783 }
6784 }
6785 \f
6786 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6787 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6788 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6789
6790 /* Calculate the checksum of a location expression. */
6791
6792 static inline void
6793 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6794 {
6795 int tem;
6796 inchash::hash hstate;
6797 hashval_t hash;
6798
6799 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6800 CHECKSUM (tem);
6801 hash_loc_operands (loc, hstate);
6802 hash = hstate.end();
6803 CHECKSUM (hash);
6804 }
6805
6806 /* Calculate the checksum of an attribute. */
6807
6808 static void
6809 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6810 {
6811 dw_loc_descr_ref loc;
6812 rtx r;
6813
6814 CHECKSUM (at->dw_attr);
6815
6816 /* We don't care that this was compiled with a different compiler
6817 snapshot; if the output is the same, that's what matters. */
6818 if (at->dw_attr == DW_AT_producer)
6819 return;
6820
6821 switch (AT_class (at))
6822 {
6823 case dw_val_class_const:
6824 case dw_val_class_const_implicit:
6825 CHECKSUM (at->dw_attr_val.v.val_int);
6826 break;
6827 case dw_val_class_unsigned_const:
6828 case dw_val_class_unsigned_const_implicit:
6829 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6830 break;
6831 case dw_val_class_const_double:
6832 CHECKSUM (at->dw_attr_val.v.val_double);
6833 break;
6834 case dw_val_class_wide_int:
6835 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6836 get_full_len (*at->dw_attr_val.v.val_wide)
6837 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6838 break;
6839 case dw_val_class_vec:
6840 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6841 (at->dw_attr_val.v.val_vec.length
6842 * at->dw_attr_val.v.val_vec.elt_size));
6843 break;
6844 case dw_val_class_flag:
6845 CHECKSUM (at->dw_attr_val.v.val_flag);
6846 break;
6847 case dw_val_class_str:
6848 CHECKSUM_STRING (AT_string (at));
6849 break;
6850
6851 case dw_val_class_addr:
6852 r = AT_addr (at);
6853 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6854 CHECKSUM_STRING (XSTR (r, 0));
6855 break;
6856
6857 case dw_val_class_offset:
6858 CHECKSUM (at->dw_attr_val.v.val_offset);
6859 break;
6860
6861 case dw_val_class_loc:
6862 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6863 loc_checksum (loc, ctx);
6864 break;
6865
6866 case dw_val_class_die_ref:
6867 die_checksum (AT_ref (at), ctx, mark);
6868 break;
6869
6870 case dw_val_class_fde_ref:
6871 case dw_val_class_vms_delta:
6872 case dw_val_class_symview:
6873 case dw_val_class_lbl_id:
6874 case dw_val_class_lineptr:
6875 case dw_val_class_macptr:
6876 case dw_val_class_loclistsptr:
6877 case dw_val_class_high_pc:
6878 break;
6879
6880 case dw_val_class_file:
6881 case dw_val_class_file_implicit:
6882 CHECKSUM_STRING (AT_file (at)->filename);
6883 break;
6884
6885 case dw_val_class_data8:
6886 CHECKSUM (at->dw_attr_val.v.val_data8);
6887 break;
6888
6889 default:
6890 break;
6891 }
6892 }
6893
6894 /* Calculate the checksum of a DIE. */
6895
6896 static void
6897 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6898 {
6899 dw_die_ref c;
6900 dw_attr_node *a;
6901 unsigned ix;
6902
6903 /* To avoid infinite recursion. */
6904 if (die->die_mark)
6905 {
6906 CHECKSUM (die->die_mark);
6907 return;
6908 }
6909 die->die_mark = ++(*mark);
6910
6911 CHECKSUM (die->die_tag);
6912
6913 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6914 attr_checksum (a, ctx, mark);
6915
6916 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6917 }
6918
6919 #undef CHECKSUM
6920 #undef CHECKSUM_BLOCK
6921 #undef CHECKSUM_STRING
6922
6923 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6924 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6925 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6926 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6927 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6928 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6929 #define CHECKSUM_ATTR(FOO) \
6930 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6931
6932 /* Calculate the checksum of a number in signed LEB128 format. */
6933
6934 static void
6935 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6936 {
6937 unsigned char byte;
6938 bool more;
6939
6940 while (1)
6941 {
6942 byte = (value & 0x7f);
6943 value >>= 7;
6944 more = !((value == 0 && (byte & 0x40) == 0)
6945 || (value == -1 && (byte & 0x40) != 0));
6946 if (more)
6947 byte |= 0x80;
6948 CHECKSUM (byte);
6949 if (!more)
6950 break;
6951 }
6952 }
6953
6954 /* Calculate the checksum of a number in unsigned LEB128 format. */
6955
6956 static void
6957 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6958 {
6959 while (1)
6960 {
6961 unsigned char byte = (value & 0x7f);
6962 value >>= 7;
6963 if (value != 0)
6964 /* More bytes to follow. */
6965 byte |= 0x80;
6966 CHECKSUM (byte);
6967 if (value == 0)
6968 break;
6969 }
6970 }
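
/* For illustration: checksum_uleb128 feeds the usual DWARF ULEB128 encoding
   into the hash.  E.g. values below 128 contribute a single byte, while
   624485 contributes the classic three-byte sequence 0xe5 0x8e 0x26.  */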
6971
6972 /* Checksum the context of the DIE. This adds the names of any
6973 surrounding namespaces or structures to the checksum. */
6974
6975 static void
6976 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6977 {
6978 const char *name;
6979 dw_die_ref spec;
6980 int tag = die->die_tag;
6981
6982 if (tag != DW_TAG_namespace
6983 && tag != DW_TAG_structure_type
6984 && tag != DW_TAG_class_type)
6985 return;
6986
6987 name = get_AT_string (die, DW_AT_name);
6988
6989 spec = get_AT_ref (die, DW_AT_specification);
6990 if (spec != NULL)
6991 die = spec;
6992
6993 if (die->die_parent != NULL)
6994 checksum_die_context (die->die_parent, ctx);
6995
6996 CHECKSUM_ULEB128 ('C');
6997 CHECKSUM_ULEB128 (tag);
6998 if (name != NULL)
6999 CHECKSUM_STRING (name);
7000 }
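
/* For example (roughly): when signing a type T declared as
   namespace N { struct S { struct T { ... }; }; }, the walk above adds
   'C' DW_TAG_namespace "N" and then 'C' DW_TAG_structure_type "S" to the
   hash, so T's signature depends on its enclosing scopes but not on
   anything else in the compilation unit.  */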
7001
7002 /* Calculate the checksum of a location expression. */
7003
7004 static inline void
7005 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7006 {
7007 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7008 were emitted as a DW_FORM_sdata instead of a location expression. */
7009 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7010 {
7011 CHECKSUM_ULEB128 (DW_FORM_sdata);
7012 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7013 return;
7014 }
7015
7016 /* Otherwise, just checksum the raw location expression. */
7017 while (loc != NULL)
7018 {
7019 inchash::hash hstate;
7020 hashval_t hash;
7021
7022 CHECKSUM_ULEB128 (loc->dtprel);
7023 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7024 hash_loc_operands (loc, hstate);
7025 hash = hstate.end ();
7026 CHECKSUM (hash);
7027 loc = loc->dw_loc_next;
7028 }
7029 }
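
/* For example: a DW_AT_data_member_location that consists of the single
   operation DW_OP_plus_uconst 8 hashes just as if the attribute had been
   the constant 8 in DW_FORM_sdata, so a type's signature should not change
   if the offset is emitted one way in one object file and the other way in
   another.  */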
7030
7031 /* Calculate the checksum of an attribute for DWARF-4 type signatures. */
7032
7033 static void
7034 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7035 struct md5_ctx *ctx, int *mark)
7036 {
7037 dw_loc_descr_ref loc;
7038 rtx r;
7039
7040 if (AT_class (at) == dw_val_class_die_ref)
7041 {
7042 dw_die_ref target_die = AT_ref (at);
7043
7044 /* For pointer and reference types, we checksum only the (qualified)
7045 name of the target type (if there is a name). For friend entries,
7046 we checksum only the (qualified) name of the target type or function.
7047 This allows the checksum to remain the same whether the target type
7048 is complete or not. */
7049 if ((at->dw_attr == DW_AT_type
7050 && (tag == DW_TAG_pointer_type
7051 || tag == DW_TAG_reference_type
7052 || tag == DW_TAG_rvalue_reference_type
7053 || tag == DW_TAG_ptr_to_member_type))
7054 || (at->dw_attr == DW_AT_friend
7055 && tag == DW_TAG_friend))
7056 {
7057 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7058
7059 if (name_attr != NULL)
7060 {
7061 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7062
7063 if (decl == NULL)
7064 decl = target_die;
7065 CHECKSUM_ULEB128 ('N');
7066 CHECKSUM_ULEB128 (at->dw_attr);
7067 if (decl->die_parent != NULL)
7068 checksum_die_context (decl->die_parent, ctx);
7069 CHECKSUM_ULEB128 ('E');
7070 CHECKSUM_STRING (AT_string (name_attr));
7071 return;
7072 }
7073 }
7074
7075 /* For all other references to another DIE, we check to see if the
7076 target DIE has already been visited. If it has, we emit a
7077 backward reference; if not, we descend recursively. */
7078 if (target_die->die_mark > 0)
7079 {
7080 CHECKSUM_ULEB128 ('R');
7081 CHECKSUM_ULEB128 (at->dw_attr);
7082 CHECKSUM_ULEB128 (target_die->die_mark);
7083 }
7084 else
7085 {
7086 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7087
7088 if (decl == NULL)
7089 decl = target_die;
7090 target_die->die_mark = ++(*mark);
7091 CHECKSUM_ULEB128 ('T');
7092 CHECKSUM_ULEB128 (at->dw_attr);
7093 if (decl->die_parent != NULL)
7094 checksum_die_context (decl->die_parent, ctx);
7095 die_checksum_ordered (target_die, ctx, mark);
7096 }
7097 return;
7098 }
7099
7100 CHECKSUM_ULEB128 ('A');
7101 CHECKSUM_ULEB128 (at->dw_attr);
7102
7103 switch (AT_class (at))
7104 {
7105 case dw_val_class_const:
7106 case dw_val_class_const_implicit:
7107 CHECKSUM_ULEB128 (DW_FORM_sdata);
7108 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7109 break;
7110
7111 case dw_val_class_unsigned_const:
7112 case dw_val_class_unsigned_const_implicit:
7113 CHECKSUM_ULEB128 (DW_FORM_sdata);
7114 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7115 break;
7116
7117 case dw_val_class_const_double:
7118 CHECKSUM_ULEB128 (DW_FORM_block);
7119 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7120 CHECKSUM (at->dw_attr_val.v.val_double);
7121 break;
7122
7123 case dw_val_class_wide_int:
7124 CHECKSUM_ULEB128 (DW_FORM_block);
7125 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7126 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7127 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7128 get_full_len (*at->dw_attr_val.v.val_wide)
7129 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7130 break;
7131
7132 case dw_val_class_vec:
7133 CHECKSUM_ULEB128 (DW_FORM_block);
7134 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7135 * at->dw_attr_val.v.val_vec.elt_size);
7136 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7137 (at->dw_attr_val.v.val_vec.length
7138 * at->dw_attr_val.v.val_vec.elt_size));
7139 break;
7140
7141 case dw_val_class_flag:
7142 CHECKSUM_ULEB128 (DW_FORM_flag);
7143 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7144 break;
7145
7146 case dw_val_class_str:
7147 CHECKSUM_ULEB128 (DW_FORM_string);
7148 CHECKSUM_STRING (AT_string (at));
7149 break;
7150
7151 case dw_val_class_addr:
7152 r = AT_addr (at);
7153 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7154 CHECKSUM_ULEB128 (DW_FORM_string);
7155 CHECKSUM_STRING (XSTR (r, 0));
7156 break;
7157
7158 case dw_val_class_offset:
7159 CHECKSUM_ULEB128 (DW_FORM_sdata);
7160 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7161 break;
7162
7163 case dw_val_class_loc:
7164 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7165 loc_checksum_ordered (loc, ctx);
7166 break;
7167
7168 case dw_val_class_fde_ref:
7169 case dw_val_class_symview:
7170 case dw_val_class_lbl_id:
7171 case dw_val_class_lineptr:
7172 case dw_val_class_macptr:
7173 case dw_val_class_loclistsptr:
7174 case dw_val_class_high_pc:
7175 break;
7176
7177 case dw_val_class_file:
7178 case dw_val_class_file_implicit:
7179 CHECKSUM_ULEB128 (DW_FORM_string);
7180 CHECKSUM_STRING (AT_file (at)->filename);
7181 break;
7182
7183 case dw_val_class_data8:
7184 CHECKSUM (at->dw_attr_val.v.val_data8);
7185 break;
7186
7187 default:
7188 break;
7189 }
7190 }
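
/* For illustration: given "struct S;" inside namespace N, a DIE for "S *"
   that refers to the incomplete struct hashes its DW_AT_type roughly as
   'N' DW_AT_type 'C' DW_TAG_namespace "N" 'E' "S", and the same bytes are
   produced once S is complete, which is what keeps the pointer type's
   signature stable across translation units.  */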
7191
7192 struct checksum_attributes
7193 {
7194 dw_attr_node *at_name;
7195 dw_attr_node *at_type;
7196 dw_attr_node *at_friend;
7197 dw_attr_node *at_accessibility;
7198 dw_attr_node *at_address_class;
7199 dw_attr_node *at_alignment;
7200 dw_attr_node *at_allocated;
7201 dw_attr_node *at_artificial;
7202 dw_attr_node *at_associated;
7203 dw_attr_node *at_binary_scale;
7204 dw_attr_node *at_bit_offset;
7205 dw_attr_node *at_bit_size;
7206 dw_attr_node *at_bit_stride;
7207 dw_attr_node *at_byte_size;
7208 dw_attr_node *at_byte_stride;
7209 dw_attr_node *at_const_value;
7210 dw_attr_node *at_containing_type;
7211 dw_attr_node *at_count;
7212 dw_attr_node *at_data_location;
7213 dw_attr_node *at_data_member_location;
7214 dw_attr_node *at_decimal_scale;
7215 dw_attr_node *at_decimal_sign;
7216 dw_attr_node *at_default_value;
7217 dw_attr_node *at_digit_count;
7218 dw_attr_node *at_discr;
7219 dw_attr_node *at_discr_list;
7220 dw_attr_node *at_discr_value;
7221 dw_attr_node *at_encoding;
7222 dw_attr_node *at_endianity;
7223 dw_attr_node *at_explicit;
7224 dw_attr_node *at_is_optional;
7225 dw_attr_node *at_location;
7226 dw_attr_node *at_lower_bound;
7227 dw_attr_node *at_mutable;
7228 dw_attr_node *at_ordering;
7229 dw_attr_node *at_picture_string;
7230 dw_attr_node *at_prototyped;
7231 dw_attr_node *at_small;
7232 dw_attr_node *at_segment;
7233 dw_attr_node *at_string_length;
7234 dw_attr_node *at_string_length_bit_size;
7235 dw_attr_node *at_string_length_byte_size;
7236 dw_attr_node *at_threads_scaled;
7237 dw_attr_node *at_upper_bound;
7238 dw_attr_node *at_use_location;
7239 dw_attr_node *at_use_UTF8;
7240 dw_attr_node *at_variable_parameter;
7241 dw_attr_node *at_virtuality;
7242 dw_attr_node *at_visibility;
7243 dw_attr_node *at_vtable_elem_location;
7244 };
7245
7246 /* Collect the attributes that we will want to use for the checksum. */
7247
7248 static void
7249 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7250 {
7251 dw_attr_node *a;
7252 unsigned ix;
7253
7254 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7255 {
7256 switch (a->dw_attr)
7257 {
7258 case DW_AT_name:
7259 attrs->at_name = a;
7260 break;
7261 case DW_AT_type:
7262 attrs->at_type = a;
7263 break;
7264 case DW_AT_friend:
7265 attrs->at_friend = a;
7266 break;
7267 case DW_AT_accessibility:
7268 attrs->at_accessibility = a;
7269 break;
7270 case DW_AT_address_class:
7271 attrs->at_address_class = a;
7272 break;
7273 case DW_AT_alignment:
7274 attrs->at_alignment = a;
7275 break;
7276 case DW_AT_allocated:
7277 attrs->at_allocated = a;
7278 break;
7279 case DW_AT_artificial:
7280 attrs->at_artificial = a;
7281 break;
7282 case DW_AT_associated:
7283 attrs->at_associated = a;
7284 break;
7285 case DW_AT_binary_scale:
7286 attrs->at_binary_scale = a;
7287 break;
7288 case DW_AT_bit_offset:
7289 attrs->at_bit_offset = a;
7290 break;
7291 case DW_AT_bit_size:
7292 attrs->at_bit_size = a;
7293 break;
7294 case DW_AT_bit_stride:
7295 attrs->at_bit_stride = a;
7296 break;
7297 case DW_AT_byte_size:
7298 attrs->at_byte_size = a;
7299 break;
7300 case DW_AT_byte_stride:
7301 attrs->at_byte_stride = a;
7302 break;
7303 case DW_AT_const_value:
7304 attrs->at_const_value = a;
7305 break;
7306 case DW_AT_containing_type:
7307 attrs->at_containing_type = a;
7308 break;
7309 case DW_AT_count:
7310 attrs->at_count = a;
7311 break;
7312 case DW_AT_data_location:
7313 attrs->at_data_location = a;
7314 break;
7315 case DW_AT_data_member_location:
7316 attrs->at_data_member_location = a;
7317 break;
7318 case DW_AT_decimal_scale:
7319 attrs->at_decimal_scale = a;
7320 break;
7321 case DW_AT_decimal_sign:
7322 attrs->at_decimal_sign = a;
7323 break;
7324 case DW_AT_default_value:
7325 attrs->at_default_value = a;
7326 break;
7327 case DW_AT_digit_count:
7328 attrs->at_digit_count = a;
7329 break;
7330 case DW_AT_discr:
7331 attrs->at_discr = a;
7332 break;
7333 case DW_AT_discr_list:
7334 attrs->at_discr_list = a;
7335 break;
7336 case DW_AT_discr_value:
7337 attrs->at_discr_value = a;
7338 break;
7339 case DW_AT_encoding:
7340 attrs->at_encoding = a;
7341 break;
7342 case DW_AT_endianity:
7343 attrs->at_endianity = a;
7344 break;
7345 case DW_AT_explicit:
7346 attrs->at_explicit = a;
7347 break;
7348 case DW_AT_is_optional:
7349 attrs->at_is_optional = a;
7350 break;
7351 case DW_AT_location:
7352 attrs->at_location = a;
7353 break;
7354 case DW_AT_lower_bound:
7355 attrs->at_lower_bound = a;
7356 break;
7357 case DW_AT_mutable:
7358 attrs->at_mutable = a;
7359 break;
7360 case DW_AT_ordering:
7361 attrs->at_ordering = a;
7362 break;
7363 case DW_AT_picture_string:
7364 attrs->at_picture_string = a;
7365 break;
7366 case DW_AT_prototyped:
7367 attrs->at_prototyped = a;
7368 break;
7369 case DW_AT_small:
7370 attrs->at_small = a;
7371 break;
7372 case DW_AT_segment:
7373 attrs->at_segment = a;
7374 break;
7375 case DW_AT_string_length:
7376 attrs->at_string_length = a;
7377 break;
7378 case DW_AT_string_length_bit_size:
7379 attrs->at_string_length_bit_size = a;
7380 break;
7381 case DW_AT_string_length_byte_size:
7382 attrs->at_string_length_byte_size = a;
7383 break;
7384 case DW_AT_threads_scaled:
7385 attrs->at_threads_scaled = a;
7386 break;
7387 case DW_AT_upper_bound:
7388 attrs->at_upper_bound = a;
7389 break;
7390 case DW_AT_use_location:
7391 attrs->at_use_location = a;
7392 break;
7393 case DW_AT_use_UTF8:
7394 attrs->at_use_UTF8 = a;
7395 break;
7396 case DW_AT_variable_parameter:
7397 attrs->at_variable_parameter = a;
7398 break;
7399 case DW_AT_virtuality:
7400 attrs->at_virtuality = a;
7401 break;
7402 case DW_AT_visibility:
7403 attrs->at_visibility = a;
7404 break;
7405 case DW_AT_vtable_elem_location:
7406 attrs->at_vtable_elem_location = a;
7407 break;
7408 default:
7409 break;
7410 }
7411 }
7412 }
7413
7414 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7415
7416 static void
7417 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7418 {
7419 dw_die_ref c;
7420 dw_die_ref decl;
7421 struct checksum_attributes attrs;
7422
7423 CHECKSUM_ULEB128 ('D');
7424 CHECKSUM_ULEB128 (die->die_tag);
7425
7426 memset (&attrs, 0, sizeof (attrs));
7427
7428 decl = get_AT_ref (die, DW_AT_specification);
7429 if (decl != NULL)
7430 collect_checksum_attributes (&attrs, decl);
7431 collect_checksum_attributes (&attrs, die);
7432
7433 CHECKSUM_ATTR (attrs.at_name);
7434 CHECKSUM_ATTR (attrs.at_accessibility);
7435 CHECKSUM_ATTR (attrs.at_address_class);
7436 CHECKSUM_ATTR (attrs.at_allocated);
7437 CHECKSUM_ATTR (attrs.at_artificial);
7438 CHECKSUM_ATTR (attrs.at_associated);
7439 CHECKSUM_ATTR (attrs.at_binary_scale);
7440 CHECKSUM_ATTR (attrs.at_bit_offset);
7441 CHECKSUM_ATTR (attrs.at_bit_size);
7442 CHECKSUM_ATTR (attrs.at_bit_stride);
7443 CHECKSUM_ATTR (attrs.at_byte_size);
7444 CHECKSUM_ATTR (attrs.at_byte_stride);
7445 CHECKSUM_ATTR (attrs.at_const_value);
7446 CHECKSUM_ATTR (attrs.at_containing_type);
7447 CHECKSUM_ATTR (attrs.at_count);
7448 CHECKSUM_ATTR (attrs.at_data_location);
7449 CHECKSUM_ATTR (attrs.at_data_member_location);
7450 CHECKSUM_ATTR (attrs.at_decimal_scale);
7451 CHECKSUM_ATTR (attrs.at_decimal_sign);
7452 CHECKSUM_ATTR (attrs.at_default_value);
7453 CHECKSUM_ATTR (attrs.at_digit_count);
7454 CHECKSUM_ATTR (attrs.at_discr);
7455 CHECKSUM_ATTR (attrs.at_discr_list);
7456 CHECKSUM_ATTR (attrs.at_discr_value);
7457 CHECKSUM_ATTR (attrs.at_encoding);
7458 CHECKSUM_ATTR (attrs.at_endianity);
7459 CHECKSUM_ATTR (attrs.at_explicit);
7460 CHECKSUM_ATTR (attrs.at_is_optional);
7461 CHECKSUM_ATTR (attrs.at_location);
7462 CHECKSUM_ATTR (attrs.at_lower_bound);
7463 CHECKSUM_ATTR (attrs.at_mutable);
7464 CHECKSUM_ATTR (attrs.at_ordering);
7465 CHECKSUM_ATTR (attrs.at_picture_string);
7466 CHECKSUM_ATTR (attrs.at_prototyped);
7467 CHECKSUM_ATTR (attrs.at_small);
7468 CHECKSUM_ATTR (attrs.at_segment);
7469 CHECKSUM_ATTR (attrs.at_string_length);
7470 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7471 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7472 CHECKSUM_ATTR (attrs.at_threads_scaled);
7473 CHECKSUM_ATTR (attrs.at_upper_bound);
7474 CHECKSUM_ATTR (attrs.at_use_location);
7475 CHECKSUM_ATTR (attrs.at_use_UTF8);
7476 CHECKSUM_ATTR (attrs.at_variable_parameter);
7477 CHECKSUM_ATTR (attrs.at_virtuality);
7478 CHECKSUM_ATTR (attrs.at_visibility);
7479 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7480 CHECKSUM_ATTR (attrs.at_type);
7481 CHECKSUM_ATTR (attrs.at_friend);
7482 CHECKSUM_ATTR (attrs.at_alignment);
7483
7484 /* Checksum the child DIEs. */
7485 c = die->die_child;
7486 if (c) do {
7487 dw_attr_node *name_attr;
7488
7489 c = c->die_sib;
7490 name_attr = get_AT (c, DW_AT_name);
7491 if (is_template_instantiation (c))
7492 {
7493 /* Ignore instantiations of member type and function templates. */
7494 }
7495 else if (name_attr != NULL
7496 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7497 {
7498 /* Use a shallow checksum for named nested types and member
7499 functions. */
7500 CHECKSUM_ULEB128 ('S');
7501 CHECKSUM_ULEB128 (c->die_tag);
7502 CHECKSUM_STRING (AT_string (name_attr));
7503 }
7504 else
7505 {
7506 /* Use a deep checksum for other children. */
7507 /* Mark this DIE so it gets processed when unmarking. */
7508 if (c->die_mark == 0)
7509 c->die_mark = -1;
7510 die_checksum_ordered (c, ctx, mark);
7511 }
7512 } while (c != die->die_child);
7513
7514 CHECKSUM_ULEB128 (0);
7515 }
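
/* For illustration: for "struct S { int i; void f (); struct T { }; };"
   the member "i" gets the full recursive treatment above, while "f" and
   "T" are checksummed only shallowly as 'S' <tag> <name>, so the signature
   of S does not depend on the body of f or on the contents of T.  */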
7516
7517 /* Add a type name and tag to a hash. */
7518 static void
7519 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7520 {
7521 CHECKSUM_ULEB128 (tag);
7522 CHECKSUM_STRING (name);
7523 }
7524
7525 #undef CHECKSUM
7526 #undef CHECKSUM_STRING
7527 #undef CHECKSUM_ATTR
7528 #undef CHECKSUM_SLEB128
7529 #undef CHECKSUM_ULEB128
7530
7531 /* Generate the type signature for DIE. This is computed by generating an
7532 MD5 checksum over the DIE's tag, its relevant attributes, and its
7533 children. Attributes that are references to other DIEs are processed
7534 by recursion, using the MARK field to prevent infinite recursion.
7535 If the DIE is nested inside a namespace or another type, we also
7536 need to include that context in the signature. The lower 64 bits
7537 of the resulting MD5 checksum comprise the signature. */
7538
7539 static void
7540 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7541 {
7542 int mark;
7543 const char *name;
7544 unsigned char checksum[16];
7545 struct md5_ctx ctx;
7546 dw_die_ref decl;
7547 dw_die_ref parent;
7548
7549 name = get_AT_string (die, DW_AT_name);
7550 decl = get_AT_ref (die, DW_AT_specification);
7551 parent = get_die_parent (die);
7552
7553 /* First, compute a signature for just the type name (and its surrounding
7554 context, if any). This is stored in the type unit DIE for link-time
7555 ODR (one-definition rule) checking. */
7556
7557 if (is_cxx () && name != NULL)
7558 {
7559 md5_init_ctx (&ctx);
7560
7561 /* Checksum the names of surrounding namespaces and structures. */
7562 if (parent != NULL)
7563 checksum_die_context (parent, &ctx);
7564
7565 /* Checksum the current DIE. */
7566 die_odr_checksum (die->die_tag, name, &ctx);
7567 md5_finish_ctx (&ctx, checksum);
7568
7569 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7570 }
7571
7572 /* Next, compute the complete type signature. */
7573
7574 md5_init_ctx (&ctx);
7575 mark = 1;
7576 die->die_mark = mark;
7577
7578 /* Checksum the names of surrounding namespaces and structures. */
7579 if (parent != NULL)
7580 checksum_die_context (parent, &ctx);
7581
7582 /* Checksum the DIE and its children. */
7583 die_checksum_ordered (die, &ctx, &mark);
7584 unmark_all_dies (die);
7585 md5_finish_ctx (&ctx, checksum);
7586
7587 /* Store the signature in the type node and link the type DIE and the
7588 type node together. */
7589 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7590 DWARF_TYPE_SIGNATURE_SIZE);
7591 die->comdat_type_p = true;
7592 die->die_id.die_type_node = type_node;
7593 type_node->type_die = die;
7594
7595 /* If the DIE is a specification, link its declaration to the type node
7596 as well. */
7597 if (decl != NULL)
7598 {
7599 decl->comdat_type_p = true;
7600 decl->die_id.die_type_node = type_node;
7601 }
7602 }
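
/* For illustration: with the usual 8-byte DWARF_TYPE_SIGNATURE_SIZE this
   takes the last eight bytes of the MD5 digest, which is what later ends
   up in the type unit header and in DW_FORM_ref_sig8 references to the
   type.  The separate DW_AT_GNU_odr_signature computed above covers only
   the qualified name, so a link-time check can pair up same-named types
   whose full signatures differ.  */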
7603
7604 /* Do the location expressions look the same? */
7605 static inline int
7606 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7607 {
7608 return loc1->dw_loc_opc == loc2->dw_loc_opc
7609 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7610 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7611 }
7612
7613 /* Do the values look the same? */
7614 static int
7615 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7616 {
7617 dw_loc_descr_ref loc1, loc2;
7618 rtx r1, r2;
7619
7620 if (v1->val_class != v2->val_class)
7621 return 0;
7622
7623 switch (v1->val_class)
7624 {
7625 case dw_val_class_const:
7626 case dw_val_class_const_implicit:
7627 return v1->v.val_int == v2->v.val_int;
7628 case dw_val_class_unsigned_const:
7629 case dw_val_class_unsigned_const_implicit:
7630 return v1->v.val_unsigned == v2->v.val_unsigned;
7631 case dw_val_class_const_double:
7632 return v1->v.val_double.high == v2->v.val_double.high
7633 && v1->v.val_double.low == v2->v.val_double.low;
7634 case dw_val_class_wide_int:
7635 return *v1->v.val_wide == *v2->v.val_wide;
7636 case dw_val_class_vec:
7637 if (v1->v.val_vec.length != v2->v.val_vec.length
7638 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7639 return 0;
7640 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7641 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7642 return 0;
7643 return 1;
7644 case dw_val_class_flag:
7645 return v1->v.val_flag == v2->v.val_flag;
7646 case dw_val_class_str:
7647 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7648
7649 case dw_val_class_addr:
7650 r1 = v1->v.val_addr;
7651 r2 = v2->v.val_addr;
7652 if (GET_CODE (r1) != GET_CODE (r2))
7653 return 0;
7654 return rtx_equal_p (r1, r2);
7655
7656 case dw_val_class_offset:
7657 return v1->v.val_offset == v2->v.val_offset;
7658
7659 case dw_val_class_loc:
7660 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7661 loc1 && loc2;
7662 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7663 if (!same_loc_p (loc1, loc2, mark))
7664 return 0;
7665 return !loc1 && !loc2;
7666
7667 case dw_val_class_die_ref:
7668 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7669
7670 case dw_val_class_symview:
7671 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7672
7673 case dw_val_class_fde_ref:
7674 case dw_val_class_vms_delta:
7675 case dw_val_class_lbl_id:
7676 case dw_val_class_lineptr:
7677 case dw_val_class_macptr:
7678 case dw_val_class_loclistsptr:
7679 case dw_val_class_high_pc:
7680 return 1;
7681
7682 case dw_val_class_file:
7683 case dw_val_class_file_implicit:
7684 return v1->v.val_file == v2->v.val_file;
7685
7686 case dw_val_class_data8:
7687 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7688
7689 default:
7690 return 1;
7691 }
7692 }
7693
7694 /* Do the attributes look the same? */
7695
7696 static int
7697 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7698 {
7699 if (at1->dw_attr != at2->dw_attr)
7700 return 0;
7701
7702 /* We don't care that this was compiled with a different compiler
7703 snapshot; if the output is the same, that's what matters. */
7704 if (at1->dw_attr == DW_AT_producer)
7705 return 1;
7706
7707 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7708 }
7709
7710 /* Do the dies look the same? */
7711
7712 static int
7713 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7714 {
7715 dw_die_ref c1, c2;
7716 dw_attr_node *a1;
7717 unsigned ix;
7718
7719 /* To avoid infinite recursion. */
7720 if (die1->die_mark)
7721 return die1->die_mark == die2->die_mark;
7722 die1->die_mark = die2->die_mark = ++(*mark);
7723
7724 if (die1->die_tag != die2->die_tag)
7725 return 0;
7726
7727 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7728 return 0;
7729
7730 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7731 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7732 return 0;
7733
7734 c1 = die1->die_child;
7735 c2 = die2->die_child;
7736 if (! c1)
7737 {
7738 if (c2)
7739 return 0;
7740 }
7741 else
7742 for (;;)
7743 {
7744 if (!same_die_p (c1, c2, mark))
7745 return 0;
7746 c1 = c1->die_sib;
7747 c2 = c2->die_sib;
7748 if (c1 == die1->die_child)
7749 {
7750 if (c2 == die2->die_child)
7751 break;
7752 else
7753 return 0;
7754 }
7755 }
7756
7757 return 1;
7758 }
7759
7760 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7761 children, and set die_symbol. */
7762
7763 static void
7764 compute_comp_unit_symbol (dw_die_ref unit_die)
7765 {
7766 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7767 const char *base = die_name ? lbasename (die_name) : "anonymous";
7768 char *name = XALLOCAVEC (char, strlen (base) + 64);
7769 char *p;
7770 int i, mark;
7771 unsigned char checksum[16];
7772 struct md5_ctx ctx;
7773
7774 /* Compute the checksum of the DIE, then append part of it as hex digits to
7775 the name of the unit. */
7776
7777 md5_init_ctx (&ctx);
7778 mark = 0;
7779 die_checksum (unit_die, &ctx, &mark);
7780 unmark_all_dies (unit_die);
7781 md5_finish_ctx (&ctx, checksum);
7782
7783 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7784 not start with a letter but with anything valid for filenames and
7785 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7786 character is not a letter. */
7787 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7788 clean_symbol_name (name);
7789
7790 p = name + strlen (name);
7791 for (i = 0; i < 4; i++)
7792 {
7793 sprintf (p, "%.2x", checksum[i]);
7794 p += 2;
7795 }
7796
7797 unit_die->die_id.die_symbol = xstrdup (name);
7798 }
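
/* For illustration: a unit whose DW_AT_name is "src/foo.c" would get a
   die_symbol along the lines of "foo_c_1a2b3c4d" -- the basename, run
   through clean_symbol_name, followed by the first four checksum bytes in
   hex (the exact separator characters depend on what clean_symbol_name
   rewrites).  */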
7799
7800 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7801
7802 static int
7803 is_type_die (dw_die_ref die)
7804 {
7805 switch (die->die_tag)
7806 {
7807 case DW_TAG_array_type:
7808 case DW_TAG_class_type:
7809 case DW_TAG_interface_type:
7810 case DW_TAG_enumeration_type:
7811 case DW_TAG_pointer_type:
7812 case DW_TAG_reference_type:
7813 case DW_TAG_rvalue_reference_type:
7814 case DW_TAG_string_type:
7815 case DW_TAG_structure_type:
7816 case DW_TAG_subroutine_type:
7817 case DW_TAG_union_type:
7818 case DW_TAG_ptr_to_member_type:
7819 case DW_TAG_set_type:
7820 case DW_TAG_subrange_type:
7821 case DW_TAG_base_type:
7822 case DW_TAG_const_type:
7823 case DW_TAG_file_type:
7824 case DW_TAG_packed_type:
7825 case DW_TAG_volatile_type:
7826 case DW_TAG_typedef:
7827 return 1;
7828 default:
7829 return 0;
7830 }
7831 }
7832
7833 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7834 Basically, we want to choose the bits that are likely to be shared between
7835 compilations (types) and leave out the bits that are specific to individual
7836 compilations (functions). */
7837
7838 static int
7839 is_comdat_die (dw_die_ref c)
7840 {
7841 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7842 we do for stabs. The advantage is a greater likelihood of sharing between
7843 objects that don't include headers in the same order (and therefore would
7844 put the base types in a different comdat). jason 8/28/00 */
7845
7846 if (c->die_tag == DW_TAG_base_type)
7847 return 0;
7848
7849 if (c->die_tag == DW_TAG_pointer_type
7850 || c->die_tag == DW_TAG_reference_type
7851 || c->die_tag == DW_TAG_rvalue_reference_type
7852 || c->die_tag == DW_TAG_const_type
7853 || c->die_tag == DW_TAG_volatile_type)
7854 {
7855 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7856
7857 return t ? is_comdat_die (t) : 0;
7858 }
7859
7860 return is_type_die (c);
7861 }
7862
7863 /* Returns true iff C is a compile-unit DIE. */
7864
7865 static inline bool
7866 is_cu_die (dw_die_ref c)
7867 {
7868 return c && (c->die_tag == DW_TAG_compile_unit
7869 || c->die_tag == DW_TAG_skeleton_unit);
7870 }
7871
7872 /* Returns true iff C is a unit DIE of some sort. */
7873
7874 static inline bool
7875 is_unit_die (dw_die_ref c)
7876 {
7877 return c && (c->die_tag == DW_TAG_compile_unit
7878 || c->die_tag == DW_TAG_partial_unit
7879 || c->die_tag == DW_TAG_type_unit
7880 || c->die_tag == DW_TAG_skeleton_unit);
7881 }
7882
7883 /* Returns true iff C is a namespace DIE. */
7884
7885 static inline bool
7886 is_namespace_die (dw_die_ref c)
7887 {
7888 return c && c->die_tag == DW_TAG_namespace;
7889 }
7890
7891 /* Returns true iff C is a class or structure DIE. */
7892
7893 static inline bool
7894 is_class_die (dw_die_ref c)
7895 {
7896 return c && (c->die_tag == DW_TAG_class_type
7897 || c->die_tag == DW_TAG_structure_type);
7898 }
7899
7900 /* Return non-zero if this DIE is a template parameter. */
7901
7902 static inline bool
7903 is_template_parameter (dw_die_ref die)
7904 {
7905 switch (die->die_tag)
7906 {
7907 case DW_TAG_template_type_param:
7908 case DW_TAG_template_value_param:
7909 case DW_TAG_GNU_template_template_param:
7910 case DW_TAG_GNU_template_parameter_pack:
7911 return true;
7912 default:
7913 return false;
7914 }
7915 }
7916
7917 /* Return non-zero if this DIE represents a template instantiation. */
7918
7919 static inline bool
7920 is_template_instantiation (dw_die_ref die)
7921 {
7922 dw_die_ref c;
7923
7924 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7925 return false;
7926 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7927 return false;
7928 }
7929
7930 static char *
7931 gen_internal_sym (const char *prefix)
7932 {
7933 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7934
7935 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7936 return xstrdup (buf);
7937 }
7938
7939 /* Return non-zero if this DIE is a declaration. */
7940
7941 static int
7942 is_declaration_die (dw_die_ref die)
7943 {
7944 dw_attr_node *a;
7945 unsigned ix;
7946
7947 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7948 if (a->dw_attr == DW_AT_declaration)
7949 return 1;
7950
7951 return 0;
7952 }
7953
7954 /* Return non-zero if this DIE is nested inside a subprogram. */
7955
7956 static int
7957 is_nested_in_subprogram (dw_die_ref die)
7958 {
7959 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7960
7961 if (decl == NULL)
7962 decl = die;
7963 return local_scope_p (decl);
7964 }
7965
7966 /* Return non-zero if this DIE contains a defining declaration of a
7967 subprogram. */
7968
7969 static int
7970 contains_subprogram_definition (dw_die_ref die)
7971 {
7972 dw_die_ref c;
7973
7974 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7975 return 1;
7976 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7977 return 0;
7978 }
7979
7980 /* Return non-zero if this is a type DIE that should be moved to a
7981 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7982 unit type. */
7983
7984 static int
7985 should_move_die_to_comdat (dw_die_ref die)
7986 {
7987 switch (die->die_tag)
7988 {
7989 case DW_TAG_class_type:
7990 case DW_TAG_structure_type:
7991 case DW_TAG_enumeration_type:
7992 case DW_TAG_union_type:
7993 /* Don't move declarations, inlined instances, types nested in a
7994 subprogram, or types that contain subprogram definitions. */
7995 if (is_declaration_die (die)
7996 || get_AT (die, DW_AT_abstract_origin)
7997 || is_nested_in_subprogram (die)
7998 || contains_subprogram_definition (die))
7999 return 0;
8000 return 1;
8001 case DW_TAG_array_type:
8002 case DW_TAG_interface_type:
8003 case DW_TAG_pointer_type:
8004 case DW_TAG_reference_type:
8005 case DW_TAG_rvalue_reference_type:
8006 case DW_TAG_string_type:
8007 case DW_TAG_subroutine_type:
8008 case DW_TAG_ptr_to_member_type:
8009 case DW_TAG_set_type:
8010 case DW_TAG_subrange_type:
8011 case DW_TAG_base_type:
8012 case DW_TAG_const_type:
8013 case DW_TAG_file_type:
8014 case DW_TAG_packed_type:
8015 case DW_TAG_volatile_type:
8016 case DW_TAG_typedef:
8017 default:
8018 return 0;
8019 }
8020 }
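
/* For illustration: a class defined at namespace scope normally moves to a
   type unit, but the same class stays in the main CU if its DIE is only a
   declaration, if it has a DW_AT_abstract_origin (an inlined instance), if
   it is local to a function, or if any of its descendant DIEs is a
   subprogram definition rather than a declaration.  */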
8021
8022 /* Make a clone of DIE. */
8023
8024 static dw_die_ref
8025 clone_die (dw_die_ref die)
8026 {
8027 dw_die_ref clone = new_die_raw (die->die_tag);
8028 dw_attr_node *a;
8029 unsigned ix;
8030
8031 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8032 add_dwarf_attr (clone, a);
8033
8034 return clone;
8035 }
8036
8037 /* Make a clone of the tree rooted at DIE. */
8038
8039 static dw_die_ref
8040 clone_tree (dw_die_ref die)
8041 {
8042 dw_die_ref c;
8043 dw_die_ref clone = clone_die (die);
8044
8045 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8046
8047 return clone;
8048 }
8049
8050 /* Make a clone of DIE as a declaration. */
8051
8052 static dw_die_ref
8053 clone_as_declaration (dw_die_ref die)
8054 {
8055 dw_die_ref clone;
8056 dw_die_ref decl;
8057 dw_attr_node *a;
8058 unsigned ix;
8059
8060 /* If the DIE is already a declaration, just clone it. */
8061 if (is_declaration_die (die))
8062 return clone_die (die);
8063
8064 /* If the DIE is a specification, just clone its declaration DIE. */
8065 decl = get_AT_ref (die, DW_AT_specification);
8066 if (decl != NULL)
8067 {
8068 clone = clone_die (decl);
8069 if (die->comdat_type_p)
8070 add_AT_die_ref (clone, DW_AT_signature, die);
8071 return clone;
8072 }
8073
8074 clone = new_die_raw (die->die_tag);
8075
8076 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8077 {
8078 /* We don't want to copy over all attributes.
8079 For example we don't want DW_AT_byte_size because otherwise we will no
8080 longer have a declaration and GDB will treat it as a definition. */
8081
8082 switch (a->dw_attr)
8083 {
8084 case DW_AT_abstract_origin:
8085 case DW_AT_artificial:
8086 case DW_AT_containing_type:
8087 case DW_AT_external:
8088 case DW_AT_name:
8089 case DW_AT_type:
8090 case DW_AT_virtuality:
8091 case DW_AT_linkage_name:
8092 case DW_AT_MIPS_linkage_name:
8093 add_dwarf_attr (clone, a);
8094 break;
8095 case DW_AT_byte_size:
8096 case DW_AT_alignment:
8097 default:
8098 break;
8099 }
8100 }
8101
8102 if (die->comdat_type_p)
8103 add_AT_die_ref (clone, DW_AT_signature, die);
8104
8105 add_AT_flag (clone, DW_AT_declaration, 1);
8106 return clone;
8107 }
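
/* For illustration: cloning a complete "struct S" this way keeps attributes
   such as DW_AT_name and DW_AT_type but deliberately drops DW_AT_byte_size
   and DW_AT_alignment and adds DW_AT_declaration, so a debugger sees the
   clone as an incomplete type and goes looking for the full definition
   (e.g. via DW_AT_signature when the original lives in a type unit).  */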
8108
8109
8110 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8111
8112 struct decl_table_entry
8113 {
8114 dw_die_ref orig;
8115 dw_die_ref copy;
8116 };
8117
8118 /* Helpers to manipulate hash table of copied declarations. */
8119
8120 /* Hashtable helpers. */
8121
8122 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8123 {
8124 typedef die_struct *compare_type;
8125 static inline hashval_t hash (const decl_table_entry *);
8126 static inline bool equal (const decl_table_entry *, const die_struct *);
8127 };
8128
8129 inline hashval_t
8130 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8131 {
8132 return htab_hash_pointer (entry->orig);
8133 }
8134
8135 inline bool
8136 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8137 const die_struct *entry2)
8138 {
8139 return entry1->orig == entry2;
8140 }
8141
8142 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8143
8144 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8145 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8146 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8147 to check if the ancestor has already been copied into UNIT. */
8148
8149 static dw_die_ref
8150 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8151 decl_hash_type *decl_table)
8152 {
8153 dw_die_ref parent = die->die_parent;
8154 dw_die_ref new_parent = unit;
8155 dw_die_ref copy;
8156 decl_table_entry **slot = NULL;
8157 struct decl_table_entry *entry = NULL;
8158
8159 if (decl_table)
8160 {
8161 /* Check if the entry has already been copied to UNIT. */
8162 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8163 INSERT);
8164 if (*slot != HTAB_EMPTY_ENTRY)
8165 {
8166 entry = *slot;
8167 return entry->copy;
8168 }
8169
8170 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8171 entry = XCNEW (struct decl_table_entry);
8172 entry->orig = die;
8173 entry->copy = NULL;
8174 *slot = entry;
8175 }
8176
8177 if (parent != NULL)
8178 {
8179 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8180 if (spec != NULL)
8181 parent = spec;
8182 if (!is_unit_die (parent))
8183 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8184 }
8185
8186 copy = clone_as_declaration (die);
8187 add_child_die (new_parent, copy);
8188
8189 if (decl_table)
8190 {
8191 /* Record the pointer to the copy. */
8192 entry->copy = copy;
8193 }
8194
8195 return copy;
8196 }

8197 /* Copy the declaration context to the new type unit DIE. This includes
8198 any surrounding namespace or type declarations. If the DIE has a
8199 DW_AT_specification attribute, the attributes and children attached to
8200 the specification are copied as well, and a pointer to the original
8201 parent of the declaration DIE is returned. Returns NULL otherwise. */
8202
8203 static dw_die_ref
8204 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8205 {
8206 dw_die_ref decl;
8207 dw_die_ref new_decl;
8208 dw_die_ref orig_parent = NULL;
8209
8210 decl = get_AT_ref (die, DW_AT_specification);
8211 if (decl == NULL)
8212 decl = die;
8213 else
8214 {
8215 unsigned ix;
8216 dw_die_ref c;
8217 dw_attr_node *a;
8218
8219 /* The original DIE will be changed to a declaration, and must
8220 be moved to be a child of the original declaration DIE. */
8221 orig_parent = decl->die_parent;
8222
8223 /* Copy the type node pointer from the new DIE to the original
8224 declaration DIE so we can forward references later. */
8225 decl->comdat_type_p = true;
8226 decl->die_id.die_type_node = die->die_id.die_type_node;
8227
8228 remove_AT (die, DW_AT_specification);
8229
8230 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8231 {
8232 if (a->dw_attr != DW_AT_name
8233 && a->dw_attr != DW_AT_declaration
8234 && a->dw_attr != DW_AT_external)
8235 add_dwarf_attr (die, a);
8236 }
8237
8238 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8239 }
8240
8241 if (decl->die_parent != NULL
8242 && !is_unit_die (decl->die_parent))
8243 {
8244 new_decl = copy_ancestor_tree (unit, decl, NULL);
8245 if (new_decl != NULL)
8246 {
8247 remove_AT (new_decl, DW_AT_signature);
8248 add_AT_specification (die, new_decl);
8249 }
8250 }
8251
8252 return orig_parent;
8253 }
8254
8255 /* Generate the skeleton ancestor tree for the given NODE, then clone
8256 the DIE and add the clone into the tree. */
8257
8258 static void
8259 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8260 {
8261 if (node->new_die != NULL)
8262 return;
8263
8264 node->new_die = clone_as_declaration (node->old_die);
8265
8266 if (node->parent != NULL)
8267 {
8268 generate_skeleton_ancestor_tree (node->parent);
8269 add_child_die (node->parent->new_die, node->new_die);
8270 }
8271 }
8272
8273 /* Generate a skeleton tree of DIEs containing any declarations that are
8274 found in the original tree. We traverse the tree looking for declaration
8275 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8276
8277 static void
8278 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8279 {
8280 skeleton_chain_node node;
8281 dw_die_ref c;
8282 dw_die_ref first;
8283 dw_die_ref prev = NULL;
8284 dw_die_ref next = NULL;
8285
8286 node.parent = parent;
8287
8288 first = c = parent->old_die->die_child;
8289 if (c)
8290 next = c->die_sib;
8291 if (c) do {
8292 if (prev == NULL || prev->die_sib == c)
8293 prev = c;
8294 c = next;
8295 next = (c == first ? NULL : c->die_sib);
8296 node.old_die = c;
8297 node.new_die = NULL;
8298 if (is_declaration_die (c))
8299 {
8300 if (is_template_instantiation (c))
8301 {
8302 /* Instantiated templates do not need to be cloned into the
8303 type unit. Just move the DIE and its children back to
8304 the skeleton tree (in the main CU). */
8305 remove_child_with_prev (c, prev);
8306 add_child_die (parent->new_die, c);
8307 c = prev;
8308 }
8309 else if (c->comdat_type_p)
8310 {
8311 /* This is the skeleton of a type broken out by an earlier
8312 break_out_comdat_types pass. Clone the existing DIE, but keep the children
8313 under the original (which is in the main CU). */
8314 dw_die_ref clone = clone_die (c);
8315
8316 replace_child (c, clone, prev);
8317 generate_skeleton_ancestor_tree (parent);
8318 add_child_die (parent->new_die, c);
8319 c = clone;
8320 continue;
8321 }
8322 else
8323 {
8324 /* Clone the existing DIE, move the original to the skeleton
8325 tree (which is in the main CU), and put the clone, with
8326 all the original's children, where the original came from
8327 (which is about to be moved to the type unit). */
8328 dw_die_ref clone = clone_die (c);
8329 move_all_children (c, clone);
8330
8331 /* If the original has a DW_AT_object_pointer attribute,
8332 it would now point to a child DIE just moved to the
8333 cloned tree, so we need to remove that attribute from
8334 the original. */
8335 remove_AT (c, DW_AT_object_pointer);
8336
8337 replace_child (c, clone, prev);
8338 generate_skeleton_ancestor_tree (parent);
8339 add_child_die (parent->new_die, c);
8340 node.old_die = clone;
8341 node.new_die = c;
8342 c = clone;
8343 }
8344 }
8345 generate_skeleton_bottom_up (&node);
8346 } while (next != NULL);
8347 }
8348
8349 /* Wrapper function for generate_skeleton_bottom_up. */
8350
8351 static dw_die_ref
8352 generate_skeleton (dw_die_ref die)
8353 {
8354 skeleton_chain_node node;
8355
8356 node.old_die = die;
8357 node.new_die = NULL;
8358 node.parent = NULL;
8359
8360 /* If this type definition is nested inside another type,
8361 and is not an instantiation of a template, always leave
8362 at least a declaration in its place. */
8363 if (die->die_parent != NULL
8364 && is_type_die (die->die_parent)
8365 && !is_template_instantiation (die))
8366 node.new_die = clone_as_declaration (die);
8367
8368 generate_skeleton_bottom_up (&node);
8369 return node.new_die;
8370 }
8371
8372 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8373 declaration. The original DIE is moved to a new compile unit so that
8374 existing references to it follow it to the new location. If any of the
8375 original DIE's descendants is a declaration, we need to replace the
8376 original DIE with a skeleton tree and move the declarations back into the
8377 skeleton tree. */
8378
8379 static dw_die_ref
8380 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8381 dw_die_ref prev)
8382 {
8383 dw_die_ref skeleton, orig_parent;
8384
8385 /* Copy the declaration context to the type unit DIE. If the returned
8386 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8387 that DIE. */
8388 orig_parent = copy_declaration_context (unit, child);
8389
8390 skeleton = generate_skeleton (child);
8391 if (skeleton == NULL)
8392 remove_child_with_prev (child, prev);
8393 else
8394 {
8395 skeleton->comdat_type_p = true;
8396 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8397
8398 /* If the original DIE was a specification, we need to put
8399 the skeleton under the parent DIE of the declaration.
8400 This leaves the original declaration in the tree, but
8401 it will be pruned later since there are no longer any
8402 references to it. */
8403 if (orig_parent != NULL)
8404 {
8405 remove_child_with_prev (child, prev);
8406 add_child_die (orig_parent, skeleton);
8407 }
8408 else
8409 replace_child (child, skeleton, prev);
8410 }
8411
8412 return skeleton;
8413 }
8414
8415 static void
8416 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8417 comdat_type_node *type_node,
8418 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8419
8420 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8421 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8422 DWARF procedure references in the DW_AT_location attribute. */
8423
8424 static dw_die_ref
8425 copy_dwarf_procedure (dw_die_ref die,
8426 comdat_type_node *type_node,
8427 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8428 {
8429 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8430
8431 /* DWARF procedures are not supposed to have children... */
8432 gcc_assert (die->die_child == NULL);
8433
8434 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8435 gcc_assert (vec_safe_length (die->die_attr) == 1
8436 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8437
8438 /* Do not copy DWARF procedures more than once. */
8439 bool existed;
8440 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8441 if (existed)
8442 return die_copy;
8443
8444 die_copy = clone_die (die);
8445 add_child_die (type_node->root_die, die_copy);
8446 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8447 return die_copy;
8448 }
8449
8450 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8451 procedures in DIE's attributes. */
8452
8453 static void
8454 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8455 comdat_type_node *type_node,
8456 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8457 {
8458 dw_attr_node *a;
8459 unsigned i;
8460
8461 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8462 {
8463 dw_loc_descr_ref loc;
8464
8465 if (a->dw_attr_val.val_class != dw_val_class_loc)
8466 continue;
8467
8468 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8469 {
8470 switch (loc->dw_loc_opc)
8471 {
8472 case DW_OP_call2:
8473 case DW_OP_call4:
8474 case DW_OP_call_ref:
8475 gcc_assert (loc->dw_loc_oprnd1.val_class
8476 == dw_val_class_die_ref);
8477 loc->dw_loc_oprnd1.v.val_die_ref.die
8478 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8479 type_node,
8480 copied_dwarf_procs);
8481
8482 default:
8483 break;
8484 }
8485 }
8486 }
8487 }
8488
8489 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8490 rewrite references to point to the copies.
8491
8492 References are looked for in DIE's attributes and, recursively, in the
8493 location-description attributes of all its children. COPIED_DWARF_PROCS is
8494 a mapping from original DWARF procedures to their copies; it ensures the
8495 same DWARF procedure is not copied twice under TYPE_NODE. */
8496
8497 static void
8498 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8499 comdat_type_node *type_node,
8500 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8501 {
8502 dw_die_ref c;
8503
8504 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8505 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8506 type_node,
8507 copied_dwarf_procs));
8508 }
8509
8510 /* Traverse the DIE and set up additional .debug_types or .debug_info
8511 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8512 section. */
8513
8514 static void
8515 break_out_comdat_types (dw_die_ref die)
8516 {
8517 dw_die_ref c;
8518 dw_die_ref first;
8519 dw_die_ref prev = NULL;
8520 dw_die_ref next = NULL;
8521 dw_die_ref unit = NULL;
8522
8523 first = c = die->die_child;
8524 if (c)
8525 next = c->die_sib;
8526 if (c) do {
8527 if (prev == NULL || prev->die_sib == c)
8528 prev = c;
8529 c = next;
8530 next = (c == first ? NULL : c->die_sib);
8531 if (should_move_die_to_comdat (c))
8532 {
8533 dw_die_ref replacement;
8534 comdat_type_node *type_node;
8535
8536 /* Break out nested types into their own type units. */
8537 break_out_comdat_types (c);
8538
8539 /* Create a new type unit DIE as the root for the new tree, and
8540 add it to the list of comdat types. */
8541 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8542 add_AT_unsigned (unit, DW_AT_language,
8543 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8544 type_node = ggc_cleared_alloc<comdat_type_node> ();
8545 type_node->root_die = unit;
8546 type_node->next = comdat_type_list;
8547 comdat_type_list = type_node;
8548
8549 /* Generate the type signature. */
8550 generate_type_signature (c, type_node);
8551
8552 /* Copy the declaration context, attributes, and children of the
8553 declaration into the new type unit DIE, then remove this DIE
8554 from the main CU (or replace it with a skeleton if necessary). */
8555 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8556 type_node->skeleton_die = replacement;
8557
8558 /* Add the DIE to the new compunit. */
8559 add_child_die (unit, c);
8560
8561 /* Types can reference DWARF procedures for type size or data location
8562 expressions. Calls in DWARF expressions cannot target procedures
8563 that are not in the same section. So we must copy DWARF procedures
8564 along with this type and then rewrite references to them. */
8565 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8566 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8567
8568 if (replacement != NULL)
8569 c = replacement;
8570 }
8571 else if (c->die_tag == DW_TAG_namespace
8572 || c->die_tag == DW_TAG_class_type
8573 || c->die_tag == DW_TAG_structure_type
8574 || c->die_tag == DW_TAG_union_type)
8575 {
8576 /* Look for nested types that can be broken out. */
8577 break_out_comdat_types (c);
8578 }
8579 } while (next != NULL);
8580 }
8581
8582 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8583 Enter all the cloned children into the hash table decl_table. */
8584
8585 static dw_die_ref
8586 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8587 {
8588 dw_die_ref c;
8589 dw_die_ref clone;
8590 struct decl_table_entry *entry;
8591 decl_table_entry **slot;
8592
8593 if (die->die_tag == DW_TAG_subprogram)
8594 clone = clone_as_declaration (die);
8595 else
8596 clone = clone_die (die);
8597
8598 slot = decl_table->find_slot_with_hash (die,
8599 htab_hash_pointer (die), INSERT);
8600
8601 /* Assert that DIE isn't in the hash table yet. If it were already there,
8602 its ancestors would necessarily be there as well, and clone_tree_partial
8603 wouldn't have been called. */
8604 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8605
8606 entry = XCNEW (struct decl_table_entry);
8607 entry->orig = die;
8608 entry->copy = clone;
8609 *slot = entry;
8610
8611 if (die->die_tag != DW_TAG_subprogram)
8612 FOR_EACH_CHILD (die, c,
8613 add_child_die (clone, clone_tree_partial (c, decl_table)));
8614
8615 return clone;
8616 }
8617
8618 /* Walk the DIE and its children, looking for references to incomplete
8619 or trivial types that are unmarked (i.e., that are not in the current
8620 type_unit). */
8621
8622 static void
8623 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8624 {
8625 dw_die_ref c;
8626 dw_attr_node *a;
8627 unsigned ix;
8628
8629 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8630 {
8631 if (AT_class (a) == dw_val_class_die_ref)
8632 {
8633 dw_die_ref targ = AT_ref (a);
8634 decl_table_entry **slot;
8635 struct decl_table_entry *entry;
8636
8637 if (targ->die_mark != 0 || targ->comdat_type_p)
8638 continue;
8639
8640 slot = decl_table->find_slot_with_hash (targ,
8641 htab_hash_pointer (targ),
8642 INSERT);
8643
8644 if (*slot != HTAB_EMPTY_ENTRY)
8645 {
8646 /* TARG has already been copied, so we just need to
8647 modify the reference to point to the copy. */
8648 entry = *slot;
8649 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8650 }
8651 else
8652 {
8653 dw_die_ref parent = unit;
8654 dw_die_ref copy = clone_die (targ);
8655
8656 /* Record in DECL_TABLE that TARG has been copied.
8657 Need to do this now, before the recursive call,
8658 because DECL_TABLE may be expanded and SLOT
8659 would no longer be a valid pointer. */
8660 entry = XCNEW (struct decl_table_entry);
8661 entry->orig = targ;
8662 entry->copy = copy;
8663 *slot = entry;
8664
8665 /* If TARG is not a declaration DIE, we need to copy its
8666 children. */
8667 if (!is_declaration_die (targ))
8668 {
8669 FOR_EACH_CHILD (
8670 targ, c,
8671 add_child_die (copy,
8672 clone_tree_partial (c, decl_table)));
8673 }
8674
8675 /* Make sure the cloned tree is marked as part of the
8676 type unit. */
8677 mark_dies (copy);
8678
8679 /* If TARG has surrounding context, copy its ancestor tree
8680 into the new type unit. */
8681 if (targ->die_parent != NULL
8682 && !is_unit_die (targ->die_parent))
8683 parent = copy_ancestor_tree (unit, targ->die_parent,
8684 decl_table);
8685
8686 add_child_die (parent, copy);
8687 a->dw_attr_val.v.val_die_ref.die = copy;
8688
8689 /* Make sure the newly-copied DIE is walked. If it was
8690 installed in a previously-added context, it won't
8691 get visited otherwise. */
8692 if (parent != unit)
8693 {
8694 /* Find the highest point of the newly-added tree,
8695 mark each node along the way, and walk from there. */
8696 parent->die_mark = 1;
8697 while (parent->die_parent
8698 && parent->die_parent->die_mark == 0)
8699 {
8700 parent = parent->die_parent;
8701 parent->die_mark = 1;
8702 }
8703 copy_decls_walk (unit, parent, decl_table);
8704 }
8705 }
8706 }
8707 }
8708
8709 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8710 }
8711
8712 /* Copy declarations for "unworthy" types into the new comdat section.
8713 Incomplete types, modified types, and certain other types aren't broken
8714 out into comdat sections of their own, so they don't have a signature,
8715 and we need to copy the declaration into the same section so that we
8716 don't have an external reference. */
8717
8718 static void
8719 copy_decls_for_unworthy_types (dw_die_ref unit)
8720 {
8721 mark_dies (unit);
8722 decl_hash_type decl_table (10);
8723 copy_decls_walk (unit, unit, &decl_table);
8724 unmark_dies (unit);
8725 }
8726
8727 /* Traverse the DIE and add a sibling attribute if it may have the
8728 effect of speeding up access to siblings. To save some space,
8729 avoid generating sibling attributes for DIEs without children. */
8730
8731 static void
8732 add_sibling_attributes (dw_die_ref die)
8733 {
8734 dw_die_ref c;
8735
8736 if (! die->die_child)
8737 return;
8738
8739 if (die->die_parent && die != die->die_parent->die_child)
8740 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8741
8742 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8743 }
8744
8745 /* Output all location lists for the DIE and its children. */
8746
8747 static void
8748 output_location_lists (dw_die_ref die)
8749 {
8750 dw_die_ref c;
8751 dw_attr_node *a;
8752 unsigned ix;
8753
8754 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8755 if (AT_class (a) == dw_val_class_loc_list)
8756 output_loc_list (AT_loc_list (a));
8757
8758 FOR_EACH_CHILD (die, c, output_location_lists (c));
8759 }
8760
8761 /* During assign_location_list_indexes and output_loclists_offsets this is
8762 the current index; afterwards it is the number of assigned indexes (i.e.
8763 how large the .debug_loclists* offset table should be). */
8764 static unsigned int loc_list_idx;
8765
8766 /* Output all location list offsets for the DIE and its children. */
8767
8768 static void
8769 output_loclists_offsets (dw_die_ref die)
8770 {
8771 dw_die_ref c;
8772 dw_attr_node *a;
8773 unsigned ix;
8774
8775 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8776 if (AT_class (a) == dw_val_class_loc_list)
8777 {
8778 dw_loc_list_ref l = AT_loc_list (a);
8779 if (l->offset_emitted)
8780 continue;
8781 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8782 loc_section_label, NULL);
8783 gcc_assert (l->hash == loc_list_idx);
8784 loc_list_idx++;
8785 l->offset_emitted = true;
8786 }
8787
8788 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8789 }
8790
8791 /* Recursively set indexes of location lists. */
8792
8793 static void
8794 assign_location_list_indexes (dw_die_ref die)
8795 {
8796 dw_die_ref c;
8797 dw_attr_node *a;
8798 unsigned ix;
8799
8800 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8801 if (AT_class (a) == dw_val_class_loc_list)
8802 {
8803 dw_loc_list_ref list = AT_loc_list (a);
8804 if (!list->num_assigned)
8805 {
8806 list->num_assigned = true;
8807 list->hash = loc_list_idx++;
8808 }
8809 }
8810
8811 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8812 }
8813
8814 /* We want to limit the number of external references, because they are
8815 larger than local references: a relocation takes multiple words, and
8816 even a sig8 reference is always eight bytes, whereas a local reference
8817 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8818 So if we encounter multiple external references to the same type DIE, we
8819 make a local typedef stub for it and redirect all references there.
8820
8821 This is the element of the hash table for keeping track of these
8822 references. */
8823
8824 struct external_ref
8825 {
8826 dw_die_ref type;
8827 dw_die_ref stub;
8828 unsigned n_refs;
8829 };
8830
8831 /* Hashtable helpers. */
8832
8833 struct external_ref_hasher : free_ptr_hash <external_ref>
8834 {
8835 static inline hashval_t hash (const external_ref *);
8836 static inline bool equal (const external_ref *, const external_ref *);
8837 };
8838
8839 inline hashval_t
8840 external_ref_hasher::hash (const external_ref *r)
8841 {
8842 dw_die_ref die = r->type;
8843 hashval_t h = 0;
8844
8845 /* We can't use the address of the DIE for hashing, because
8846 that will make the order of the stub DIEs non-deterministic. */
8847 if (! die->comdat_type_p)
8848 /* We have a symbol; use it to compute a hash. */
8849 h = htab_hash_string (die->die_id.die_symbol);
8850 else
8851 {
8852 /* We have a type signature; use a subset of the bits as the hash.
8853 The 8-byte signature is at least as large as hashval_t. */
8854 comdat_type_node *type_node = die->die_id.die_type_node;
8855 memcpy (&h, type_node->signature, sizeof (h));
8856 }
8857 return h;
8858 }
8859
8860 inline bool
8861 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8862 {
8863 return r1->type == r2->type;
8864 }
8865
8866 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8867
8868 /* Return a pointer to the external_ref for references to DIE. */
8869
8870 static struct external_ref *
8871 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8872 {
8873 struct external_ref ref, *ref_p;
8874 external_ref **slot;
8875
8876 ref.type = die;
8877 slot = map->find_slot (&ref, INSERT);
8878 if (*slot != HTAB_EMPTY_ENTRY)
8879 return *slot;
8880
8881 ref_p = XCNEW (struct external_ref);
8882 ref_p->type = die;
8883 *slot = ref_p;
8884 return ref_p;
8885 }
8886
8887 /* Subroutine of optimize_external_refs, below.
8888
8889 If we see a type skeleton, record it as our stub. If we see external
8890 references, remember how many we've seen. */
8891
8892 static void
8893 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8894 {
8895 dw_die_ref c;
8896 dw_attr_node *a;
8897 unsigned ix;
8898 struct external_ref *ref_p;
8899
8900 if (is_type_die (die)
8901 && (c = get_AT_ref (die, DW_AT_signature)))
8902 {
8903 /* This is a local skeleton; use it for local references. */
8904 ref_p = lookup_external_ref (map, c);
8905 ref_p->stub = die;
8906 }
8907
8908 /* Scan the DIE references, and remember any that refer to DIEs from
8909 other CUs (i.e. those which are not marked). */
8910 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8911 if (AT_class (a) == dw_val_class_die_ref
8912 && (c = AT_ref (a))->die_mark == 0
8913 && is_type_die (c))
8914 {
8915 ref_p = lookup_external_ref (map, c);
8916 ref_p->n_refs++;
8917 }
8918
8919 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8920 }
8921
8922 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8923 points to an external_ref, DATA is the CU we're processing. If we don't
8924 already have a local stub, and we have multiple refs, build a stub. */
8925
8926 int
8927 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8928 {
8929 struct external_ref *ref_p = *slot;
8930
8931 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8932 {
8933 /* We have multiple references to this type, so build a small stub.
8934 Both of these forms are a bit dodgy from the perspective of the
8935 DWARF standard, since technically they should have names. */
8936 dw_die_ref cu = data;
8937 dw_die_ref type = ref_p->type;
8938 dw_die_ref stub = NULL;
8939
8940 if (type->comdat_type_p)
8941 {
8942 /* If we refer to this type via sig8, use AT_signature. */
8943 stub = new_die (type->die_tag, cu, NULL_TREE);
8944 add_AT_die_ref (stub, DW_AT_signature, type);
8945 }
8946 else
8947 {
8948 /* Otherwise, use a typedef with no name. */
8949 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8950 add_AT_die_ref (stub, DW_AT_type, type);
8951 }
8952
8953 stub->die_mark++;
8954 ref_p->stub = stub;
8955 }
8956 return 1;
8957 }
8958
8959 /* DIE is a unit; look through all the DIE references to see if there are
8960 any external references to types, and if so, create local stubs for
8961 them which will be applied in build_abbrev_table. This is useful because
8962 references to local DIEs are smaller. */
8963
8964 static external_ref_hash_type *
8965 optimize_external_refs (dw_die_ref die)
8966 {
8967 external_ref_hash_type *map = new external_ref_hash_type (10);
8968 optimize_external_refs_1 (die, map);
8969 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8970 return map;
8971 }
8972
8973 /* The following 3 variables are temporaries that are computed only during the
8974 build_abbrev_table call and used and released during the following
8975 optimize_abbrev_table call. */
8976
8977 /* First abbrev_id that can be optimized based on usage. */
8978 static unsigned int abbrev_opt_start;
8979
8980 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8981 abbrev_id smaller than this, because they must be already sized
8982 during build_abbrev_table). */
8983 static unsigned int abbrev_opt_base_type_end;
8984
8985 /* Vector of usage counts during build_abbrev_table. Indexed by
8986 abbrev_id - abbrev_opt_start. */
8987 static vec<unsigned int> abbrev_usage_count;
8988
8989 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8990 static vec<dw_die_ref> sorted_abbrev_dies;
8991
8992 /* The format of each DIE (and its attribute value pairs) is encoded in an
8993 abbreviation table. This routine builds the abbreviation table and assigns
8994 a unique abbreviation id for each abbreviation entry. The children of each
8995 die are visited recursively. */
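/* For example (purely illustrative): two childless DW_TAG_variable DIEs that
   both carry DW_AT_name as DW_FORM_strp and DW_AT_type with the same form end
   up sharing a single abbreviation entry, and each DIE then records only the
   abbrev code plus its attribute values; a DIE whose attribute list or forms
   differ in any way gets a new entry.  */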
8996
8997 static void
8998 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8999 {
9000 unsigned int abbrev_id = 0;
9001 dw_die_ref c;
9002 dw_attr_node *a;
9003 unsigned ix;
9004 dw_die_ref abbrev;
9005
9006 /* Scan the DIE references, and replace any that refer to
9007 DIEs from other CUs (i.e. those which are not marked) with
9008 the local stubs we built in optimize_external_refs. */
9009 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9010 if (AT_class (a) == dw_val_class_die_ref
9011 && (c = AT_ref (a))->die_mark == 0)
9012 {
9013 struct external_ref *ref_p;
9014 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9015
9016 ref_p = lookup_external_ref (extern_map, c);
9017 if (ref_p->stub && ref_p->stub != die)
9018 change_AT_die_ref (a, ref_p->stub);
9019 else
9020 /* We aren't changing this reference, so mark it external. */
9021 set_AT_ref_external (a, 1);
9022 }
9023
9024 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9025 {
9026 dw_attr_node *die_a, *abbrev_a;
9027 unsigned ix;
9028 bool ok = true;
9029
9030 if (abbrev_id == 0)
9031 continue;
9032 if (abbrev->die_tag != die->die_tag)
9033 continue;
9034 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9035 continue;
9036
9037 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9038 continue;
9039
9040 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9041 {
9042 abbrev_a = &(*abbrev->die_attr)[ix];
9043 if ((abbrev_a->dw_attr != die_a->dw_attr)
9044 || (value_format (abbrev_a) != value_format (die_a)))
9045 {
9046 ok = false;
9047 break;
9048 }
9049 }
9050 if (ok)
9051 break;
9052 }
9053
9054 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9055 {
9056 vec_safe_push (abbrev_die_table, die);
9057 if (abbrev_opt_start)
9058 abbrev_usage_count.safe_push (0);
9059 }
9060 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9061 {
9062 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9063 sorted_abbrev_dies.safe_push (die);
9064 }
9065
9066 die->die_abbrev = abbrev_id;
9067 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9068 }
9069
9070 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9071 by die_abbrev's usage count, from the most commonly used
9072 abbreviation to the least. */
9073
9074 static int
9075 die_abbrev_cmp (const void *p1, const void *p2)
9076 {
9077 dw_die_ref die1 = *(const dw_die_ref *) p1;
9078 dw_die_ref die2 = *(const dw_die_ref *) p2;
9079
9080 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9081 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9082
9083 if (die1->die_abbrev >= abbrev_opt_base_type_end
9084 && die2->die_abbrev >= abbrev_opt_base_type_end)
9085 {
9086 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9087 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9088 return -1;
9089 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9090 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9091 return 1;
9092 }
9093
9094 /* Stabilize the sort. */
9095 if (die1->die_abbrev < die2->die_abbrev)
9096 return -1;
9097 if (die1->die_abbrev > die2->die_abbrev)
9098 return 1;
9099
9100 return 0;
9101 }
9102
9103 /* Convert dw_val_class_const, dw_val_class_unsigned_const and dw_val_class_file
9104 class attributes of the DIEs between sorted_abbrev_dies[first_id] and
9105 sorted_abbrev_dies[end - 1] into the corresponding implicit classes
9106 (dw_val_class_const_implicit etc.).  */
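/* A worked example of the size tradeoff (hypothetical numbers): if three DIEs
   share an abbreviation whose DW_AT_byte_size is the unsigned constant 7, the
   constant costs constant_size (7) * 3 = 3 bytes in .debug_info, while storing
   it once as DW_FORM_implicit_const costs size_of_sleb128 (7) = 1 byte in
   .debug_abbrev, so the conversion below saves 2 bytes.  */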
9107
9108 static void
9109 optimize_implicit_const (unsigned int first_id, unsigned int end,
9110 vec<bool> &implicit_consts)
9111 {
9112 /* It never makes sense if there is just one DIE using the abbreviation. */
9113 if (end < first_id + 2)
9114 return;
9115
9116 dw_attr_node *a;
9117 unsigned ix, i;
9118 dw_die_ref die = sorted_abbrev_dies[first_id];
9119 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9120 if (implicit_consts[ix])
9121 {
9122 enum dw_val_class new_class = dw_val_class_none;
9123 switch (AT_class (a))
9124 {
9125 case dw_val_class_unsigned_const:
9126 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9127 continue;
9128
9129 /* The .debug_abbrev section will grow by
9130 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9131 in all the DIEs using that abbreviation. */
9132 if (constant_size (AT_unsigned (a)) * (end - first_id)
9133 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9134 continue;
9135
9136 new_class = dw_val_class_unsigned_const_implicit;
9137 break;
9138
9139 case dw_val_class_const:
9140 new_class = dw_val_class_const_implicit;
9141 break;
9142
9143 case dw_val_class_file:
9144 new_class = dw_val_class_file_implicit;
9145 break;
9146
9147 default:
9148 continue;
9149 }
9150 for (i = first_id; i < end; i++)
9151 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9152 = new_class;
9153 }
9154 }
9155
9156 /* Attempt to optimize the abbreviation table for abbreviations numbered
9157 abbrev_opt_start and above.  */
9158
9159 static void
9160 optimize_abbrev_table (void)
9161 {
9162 if (abbrev_opt_start
9163 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9164 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9165 {
9166 auto_vec<bool, 32> implicit_consts;
9167 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9168
9169 unsigned int abbrev_id = abbrev_opt_start - 1;
9170 unsigned int first_id = ~0U;
9171 unsigned int last_abbrev_id = 0;
9172 unsigned int i;
9173 dw_die_ref die;
9174 if (abbrev_opt_base_type_end > abbrev_opt_start)
9175 abbrev_id = abbrev_opt_base_type_end - 1;
9176 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9177 most commonly used abbreviations come first. */
9178 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9179 {
9180 dw_attr_node *a;
9181 unsigned ix;
9182
9183 /* If calc_base_type_die_sizes has been called, the CU and
9184 base types after it can't be optimized, because we've already
9185 calculated their DIE offsets. We've sorted them first. */
9186 if (die->die_abbrev < abbrev_opt_base_type_end)
9187 continue;
9188 if (die->die_abbrev != last_abbrev_id)
9189 {
9190 last_abbrev_id = die->die_abbrev;
9191 if (dwarf_version >= 5 && first_id != ~0U)
9192 optimize_implicit_const (first_id, i, implicit_consts);
9193 abbrev_id++;
9194 (*abbrev_die_table)[abbrev_id] = die;
9195 if (dwarf_version >= 5)
9196 {
9197 first_id = i;
9198 implicit_consts.truncate (0);
9199
9200 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9201 switch (AT_class (a))
9202 {
9203 case dw_val_class_const:
9204 case dw_val_class_unsigned_const:
9205 case dw_val_class_file:
9206 implicit_consts.safe_push (true);
9207 break;
9208 default:
9209 implicit_consts.safe_push (false);
9210 break;
9211 }
9212 }
9213 }
9214 else if (dwarf_version >= 5)
9215 {
9216 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9217 if (!implicit_consts[ix])
9218 continue;
9219 else
9220 {
9221 dw_attr_node *other_a
9222 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9223 if (!dw_val_equal_p (&a->dw_attr_val,
9224 &other_a->dw_attr_val))
9225 implicit_consts[ix] = false;
9226 }
9227 }
9228 die->die_abbrev = abbrev_id;
9229 }
9230 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9231 if (dwarf_version >= 5 && first_id != ~0U)
9232 optimize_implicit_const (first_id, i, implicit_consts);
9233 }
9234
9235 abbrev_opt_start = 0;
9236 abbrev_opt_base_type_end = 0;
9237 abbrev_usage_count.release ();
9238 sorted_abbrev_dies.release ();
9239 }
9240 \f
9241 /* Return the power-of-two number of bytes necessary to represent VALUE. */
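/* As a worked example (relying on GCC's floor_log2 (0) returning -1): values
   0 .. 0xff map to 1, 0x100 .. 0xffff to 2, 0x10000 .. 0xffffffff to 4, and
   anything larger to 8.  */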
9242
9243 static int
9244 constant_size (unsigned HOST_WIDE_INT value)
9245 {
9246 int log;
9247
9248 if (value == 0)
9249 log = 0;
9250 else
9251 log = floor_log2 (value);
9252
9253 log = log / 8;
9254 log = 1 << (floor_log2 (log) + 1);
9255
9256 return log;
9257 }
9258
9259 /* Return the size of a DIE as it is represented in the
9260 .debug_info section. */
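/* As a rough illustration (hypothetical DIE, 32-bit DWARF): a DW_TAG_variable
   DIE with DW_AT_name as DW_FORM_strp, DW_AT_type as a 4-byte local reference
   and DW_AT_location as a 2-byte DW_FORM_exprloc expression would total
   1 (abbrev code uleb128) + 4 + 4 + 1 (uleb128 length) + 2 = 12 bytes.  */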
9261
9262 static unsigned long
9263 size_of_die (dw_die_ref die)
9264 {
9265 unsigned long size = 0;
9266 dw_attr_node *a;
9267 unsigned ix;
9268 enum dwarf_form form;
9269
9270 size += size_of_uleb128 (die->die_abbrev);
9271 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9272 {
9273 switch (AT_class (a))
9274 {
9275 case dw_val_class_addr:
9276 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9277 {
9278 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9279 size += size_of_uleb128 (AT_index (a));
9280 }
9281 else
9282 size += DWARF2_ADDR_SIZE;
9283 break;
9284 case dw_val_class_offset:
9285 size += DWARF_OFFSET_SIZE;
9286 break;
9287 case dw_val_class_loc:
9288 {
9289 unsigned long lsize = size_of_locs (AT_loc (a));
9290
9291 /* Block length. */
9292 if (dwarf_version >= 4)
9293 size += size_of_uleb128 (lsize);
9294 else
9295 size += constant_size (lsize);
9296 size += lsize;
9297 }
9298 break;
9299 case dw_val_class_loc_list:
9300 case dw_val_class_view_list:
9301 if (dwarf_split_debug_info && dwarf_version >= 5)
9302 {
9303 gcc_assert (AT_loc_list (a)->num_assigned);
9304 size += size_of_uleb128 (AT_loc_list (a)->hash);
9305 }
9306 else
9307 size += DWARF_OFFSET_SIZE;
9308 break;
9309 case dw_val_class_range_list:
9310 if (value_format (a) == DW_FORM_rnglistx)
9311 {
9312 gcc_assert (rnglist_idx);
9313 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9314 size += size_of_uleb128 (r->idx);
9315 }
9316 else
9317 size += DWARF_OFFSET_SIZE;
9318 break;
9319 case dw_val_class_const:
9320 size += size_of_sleb128 (AT_int (a));
9321 break;
9322 case dw_val_class_unsigned_const:
9323 {
9324 int csize = constant_size (AT_unsigned (a));
9325 if (dwarf_version == 3
9326 && a->dw_attr == DW_AT_data_member_location
9327 && csize >= 4)
9328 size += size_of_uleb128 (AT_unsigned (a));
9329 else
9330 size += csize;
9331 }
9332 break;
9333 case dw_val_class_symview:
9334 if (symview_upper_bound <= 0xff)
9335 size += 1;
9336 else if (symview_upper_bound <= 0xffff)
9337 size += 2;
9338 else if (symview_upper_bound <= 0xffffffff)
9339 size += 4;
9340 else
9341 size += 8;
9342 break;
9343 case dw_val_class_const_implicit:
9344 case dw_val_class_unsigned_const_implicit:
9345 case dw_val_class_file_implicit:
9346 /* These occupy no size in the DIE, just an extra sleb128 in
9347 .debug_abbrev. */
9348 break;
9349 case dw_val_class_const_double:
9350 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9351 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9352 size++; /* block */
9353 break;
9354 case dw_val_class_wide_int:
9355 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9356 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9357 if (get_full_len (*a->dw_attr_val.v.val_wide)
9358 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9359 size++; /* block */
9360 break;
9361 case dw_val_class_vec:
9362 size += constant_size (a->dw_attr_val.v.val_vec.length
9363 * a->dw_attr_val.v.val_vec.elt_size)
9364 + a->dw_attr_val.v.val_vec.length
9365 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9366 break;
9367 case dw_val_class_flag:
9368 if (dwarf_version >= 4)
9369 /* Currently all add_AT_flag calls pass in 1 as last argument,
9370 so DW_FORM_flag_present can be used. If that ever changes,
9371 we'll need to use DW_FORM_flag and have some optimization
9372 in build_abbrev_table that will change those to
9373 DW_FORM_flag_present if it is set to 1 in all DIEs using
9374 the same abbrev entry. */
9375 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9376 else
9377 size += 1;
9378 break;
9379 case dw_val_class_die_ref:
9380 if (AT_ref_external (a))
9381 {
9382 /* When emitting type units we use DW_FORM_ref_sig8;
9383 otherwise we use DW_FORM_ref_addr.  In DWARF2, DW_FORM_ref_addr
9384 is sized by target address length, whereas in DWARF3
9385 and later it's always sized as an offset.  */
9386 if (use_debug_types)
9387 size += DWARF_TYPE_SIGNATURE_SIZE;
9388 else if (dwarf_version == 2)
9389 size += DWARF2_ADDR_SIZE;
9390 else
9391 size += DWARF_OFFSET_SIZE;
9392 }
9393 else
9394 size += DWARF_OFFSET_SIZE;
9395 break;
9396 case dw_val_class_fde_ref:
9397 size += DWARF_OFFSET_SIZE;
9398 break;
9399 case dw_val_class_lbl_id:
9400 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9401 {
9402 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9403 size += size_of_uleb128 (AT_index (a));
9404 }
9405 else
9406 size += DWARF2_ADDR_SIZE;
9407 break;
9408 case dw_val_class_lineptr:
9409 case dw_val_class_macptr:
9410 case dw_val_class_loclistsptr:
9411 size += DWARF_OFFSET_SIZE;
9412 break;
9413 case dw_val_class_str:
9414 form = AT_string_form (a);
9415 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9416 size += DWARF_OFFSET_SIZE;
9417 else if (form == DW_FORM_GNU_str_index)
9418 size += size_of_uleb128 (AT_index (a));
9419 else
9420 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9421 break;
9422 case dw_val_class_file:
9423 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9424 break;
9425 case dw_val_class_data8:
9426 size += 8;
9427 break;
9428 case dw_val_class_vms_delta:
9429 size += DWARF_OFFSET_SIZE;
9430 break;
9431 case dw_val_class_high_pc:
9432 size += DWARF2_ADDR_SIZE;
9433 break;
9434 case dw_val_class_discr_value:
9435 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9436 break;
9437 case dw_val_class_discr_list:
9438 {
9439 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9440
9441 /* This is a block, so we have the block length and then its
9442 data. */
9443 size += constant_size (block_size) + block_size;
9444 }
9445 break;
9446 default:
9447 gcc_unreachable ();
9448 }
9449 }
9450
9451 return size;
9452 }
9453
9454 /* Size the debugging information associated with a given DIE.  Visits the
9455 DIE's children recursively.  Updates the global variable next_die_offset
9456 on each visit, and uses its current value to set the die_offset field in
9457 each DIE.  */
9458
9459 static void
9460 calc_die_sizes (dw_die_ref die)
9461 {
9462 dw_die_ref c;
9463
9464 gcc_assert (die->die_offset == 0
9465 || (unsigned long int) die->die_offset == next_die_offset);
9466 die->die_offset = next_die_offset;
9467 next_die_offset += size_of_die (die);
9468
9469 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9470
9471 if (die->die_child != NULL)
9472 /* Count the null byte used to terminate sibling lists. */
9473 next_die_offset += 1;
9474 }
9475
9476 /* Size just the base type children at the start of the CU.
9477 This is needed because build_abbrev_table needs to size location
9478 expressions, and sizing of type-based stack ops needs to know the
9479 die_offset values of the base types.  */
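/* For instance, a location expression using a typed stack operation such as
   DW_OP_convert encodes the uleb128 die_offset of a DW_TAG_base_type DIE as
   its operand, so size_of_locs cannot be final until those offsets are
   known.  */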
9480
9481 static void
9482 calc_base_type_die_sizes (void)
9483 {
9484 unsigned long die_offset = (dwarf_split_debug_info
9485 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9486 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9487 unsigned int i;
9488 dw_die_ref base_type;
9489 #if ENABLE_ASSERT_CHECKING
9490 dw_die_ref prev = comp_unit_die ()->die_child;
9491 #endif
9492
9493 die_offset += size_of_die (comp_unit_die ());
9494 for (i = 0; base_types.iterate (i, &base_type); i++)
9495 {
9496 #if ENABLE_ASSERT_CHECKING
9497 gcc_assert (base_type->die_offset == 0
9498 && prev->die_sib == base_type
9499 && base_type->die_child == NULL
9500 && base_type->die_abbrev);
9501 prev = base_type;
9502 #endif
9503 if (abbrev_opt_start
9504 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9505 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9506 base_type->die_offset = die_offset;
9507 die_offset += size_of_die (base_type);
9508 }
9509 }
9510
9511 /* Set the marks for a die and its children. We do this so
9512 that we know whether or not a reference needs to use FORM_ref_addr; only
9513 DIEs in the same CU will be marked. We used to clear out the offset
9514 and use that as the flag, but ran into ordering problems. */
9515
9516 static void
9517 mark_dies (dw_die_ref die)
9518 {
9519 dw_die_ref c;
9520
9521 gcc_assert (!die->die_mark);
9522
9523 die->die_mark = 1;
9524 FOR_EACH_CHILD (die, c, mark_dies (c));
9525 }
9526
9527 /* Clear the marks for a die and its children. */
9528
9529 static void
9530 unmark_dies (dw_die_ref die)
9531 {
9532 dw_die_ref c;
9533
9534 if (! use_debug_types)
9535 gcc_assert (die->die_mark);
9536
9537 die->die_mark = 0;
9538 FOR_EACH_CHILD (die, c, unmark_dies (c));
9539 }
9540
9541 /* Clear the marks for a die, its children and referred dies. */
9542
9543 static void
9544 unmark_all_dies (dw_die_ref die)
9545 {
9546 dw_die_ref c;
9547 dw_attr_node *a;
9548 unsigned ix;
9549
9550 if (!die->die_mark)
9551 return;
9552 die->die_mark = 0;
9553
9554 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9555
9556 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9557 if (AT_class (a) == dw_val_class_die_ref)
9558 unmark_all_dies (AT_ref (a));
9559 }
9560
9561 /* Calculate if the entry should appear in the final output file.  It may be
9562 from a pruned type.  */
9563
9564 static bool
9565 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9566 {
9567 /* By limiting gnu pubnames to definitions only, gold can generate a
9568 gdb index without entries for declarations, which don't include
9569 enough information to be useful. */
9570 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9571 return false;
9572
9573 if (table == pubname_table)
9574 {
9575 /* Enumerator names are part of the pubname table, but the
9576 parent DW_TAG_enumeration_type die may have been pruned.
9577 Don't output them if that is the case. */
9578 if (p->die->die_tag == DW_TAG_enumerator &&
9579 (p->die->die_parent == NULL
9580 || !p->die->die_parent->die_perennial_p))
9581 return false;
9582
9583 /* Everything else in the pubname table is included. */
9584 return true;
9585 }
9586
9587 /* The pubtypes table shouldn't include types that have been
9588 pruned. */
9589 return (p->die->die_offset != 0
9590 || !flag_eliminate_unused_debug_types);
9591 }
9592
9593 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9594 generated for the compilation unit. */
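/* Each emitted entry is a DWARF_OFFSET_SIZE DIE offset, an optional one-byte
   flag (only when emitting GNU pubnames), and the NUL-terminated name; the
   table ends with a single zero offset word, which is the trailing
   DWARF_OFFSET_SIZE accounted for below.  */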
9595
9596 static unsigned long
9597 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9598 {
9599 unsigned long size;
9600 unsigned i;
9601 pubname_entry *p;
9602 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9603
9604 size = DWARF_PUBNAMES_HEADER_SIZE;
9605 FOR_EACH_VEC_ELT (*names, i, p)
9606 if (include_pubname_in_output (names, p))
9607 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9608
9609 size += DWARF_OFFSET_SIZE;
9610 return size;
9611 }
9612
9613 /* Return the size of the information in the .debug_aranges section. */
9614
9615 static unsigned long
9616 size_of_aranges (void)
9617 {
9618 unsigned long size;
9619
9620 size = DWARF_ARANGES_HEADER_SIZE;
9621
9622 /* Count the address/length pair for this compilation unit. */
9623 if (text_section_used)
9624 size += 2 * DWARF2_ADDR_SIZE;
9625 if (cold_text_section_used)
9626 size += 2 * DWARF2_ADDR_SIZE;
9627 if (have_multiple_function_sections)
9628 {
9629 unsigned fde_idx;
9630 dw_fde_ref fde;
9631
9632 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9633 {
9634 if (DECL_IGNORED_P (fde->decl))
9635 continue;
9636 if (!fde->in_std_section)
9637 size += 2 * DWARF2_ADDR_SIZE;
9638 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9639 size += 2 * DWARF2_ADDR_SIZE;
9640 }
9641 }
9642
9643 /* Count the two zero words used to terminate the address range table.  */
9644 size += 2 * DWARF2_ADDR_SIZE;
9645 return size;
9646 }
9647 \f
9648 /* Select the encoding of an attribute value. */
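/* For instance, an unsigned constant attribute is normally given the smallest
   fitting data form below: 0x42 gets DW_FORM_data1, 0x1234 DW_FORM_data2 and
   0x12345678 DW_FORM_data4, based on constant_size; most other classes map to
   a fixed form.  */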
9649
9650 static enum dwarf_form
9651 value_format (dw_attr_node *a)
9652 {
9653 switch (AT_class (a))
9654 {
9655 case dw_val_class_addr:
9656 /* Only very few attributes allow DW_FORM_addr. */
9657 switch (a->dw_attr)
9658 {
9659 case DW_AT_low_pc:
9660 case DW_AT_high_pc:
9661 case DW_AT_entry_pc:
9662 case DW_AT_trampoline:
9663 return (AT_index (a) == NOT_INDEXED
9664 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9665 default:
9666 break;
9667 }
9668 switch (DWARF2_ADDR_SIZE)
9669 {
9670 case 1:
9671 return DW_FORM_data1;
9672 case 2:
9673 return DW_FORM_data2;
9674 case 4:
9675 return DW_FORM_data4;
9676 case 8:
9677 return DW_FORM_data8;
9678 default:
9679 gcc_unreachable ();
9680 }
9681 case dw_val_class_loc_list:
9682 case dw_val_class_view_list:
9683 if (dwarf_split_debug_info
9684 && dwarf_version >= 5
9685 && AT_loc_list (a)->num_assigned)
9686 return DW_FORM_loclistx;
9687 /* FALLTHRU */
9688 case dw_val_class_range_list:
9689 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9690 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9691 care about sizes of .debug* sections in shared libraries and
9692 executables and don't take into account relocations that affect just
9693 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9694 table in the .debug_rnglists section. */
9695 if (dwarf_split_debug_info
9696 && dwarf_version >= 5
9697 && AT_class (a) == dw_val_class_range_list
9698 && rnglist_idx
9699 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9700 return DW_FORM_rnglistx;
9701 if (dwarf_version >= 4)
9702 return DW_FORM_sec_offset;
9703 /* FALLTHRU */
9704 case dw_val_class_vms_delta:
9705 case dw_val_class_offset:
9706 switch (DWARF_OFFSET_SIZE)
9707 {
9708 case 4:
9709 return DW_FORM_data4;
9710 case 8:
9711 return DW_FORM_data8;
9712 default:
9713 gcc_unreachable ();
9714 }
9715 case dw_val_class_loc:
9716 if (dwarf_version >= 4)
9717 return DW_FORM_exprloc;
9718 switch (constant_size (size_of_locs (AT_loc (a))))
9719 {
9720 case 1:
9721 return DW_FORM_block1;
9722 case 2:
9723 return DW_FORM_block2;
9724 case 4:
9725 return DW_FORM_block4;
9726 default:
9727 gcc_unreachable ();
9728 }
9729 case dw_val_class_const:
9730 return DW_FORM_sdata;
9731 case dw_val_class_unsigned_const:
9732 switch (constant_size (AT_unsigned (a)))
9733 {
9734 case 1:
9735 return DW_FORM_data1;
9736 case 2:
9737 return DW_FORM_data2;
9738 case 4:
9739 /* In DWARF3 DW_AT_data_member_location with
9740 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9741 constant, so we need to use DW_FORM_udata if we need
9742 a large constant. */
9743 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9744 return DW_FORM_udata;
9745 return DW_FORM_data4;
9746 case 8:
9747 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9748 return DW_FORM_udata;
9749 return DW_FORM_data8;
9750 default:
9751 gcc_unreachable ();
9752 }
9753 case dw_val_class_const_implicit:
9754 case dw_val_class_unsigned_const_implicit:
9755 case dw_val_class_file_implicit:
9756 return DW_FORM_implicit_const;
9757 case dw_val_class_const_double:
9758 switch (HOST_BITS_PER_WIDE_INT)
9759 {
9760 case 8:
9761 return DW_FORM_data2;
9762 case 16:
9763 return DW_FORM_data4;
9764 case 32:
9765 return DW_FORM_data8;
9766 case 64:
9767 if (dwarf_version >= 5)
9768 return DW_FORM_data16;
9769 /* FALLTHRU */
9770 default:
9771 return DW_FORM_block1;
9772 }
9773 case dw_val_class_wide_int:
9774 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9775 {
9776 case 8:
9777 return DW_FORM_data1;
9778 case 16:
9779 return DW_FORM_data2;
9780 case 32:
9781 return DW_FORM_data4;
9782 case 64:
9783 return DW_FORM_data8;
9784 case 128:
9785 if (dwarf_version >= 5)
9786 return DW_FORM_data16;
9787 /* FALLTHRU */
9788 default:
9789 return DW_FORM_block1;
9790 }
9791 case dw_val_class_symview:
9792 /* ??? We might use uleb128, but then we'd have to compute
9793 .debug_info offsets in the assembler. */
9794 if (symview_upper_bound <= 0xff)
9795 return DW_FORM_data1;
9796 else if (symview_upper_bound <= 0xffff)
9797 return DW_FORM_data2;
9798 else if (symview_upper_bound <= 0xffffffff)
9799 return DW_FORM_data4;
9800 else
9801 return DW_FORM_data8;
9802 case dw_val_class_vec:
9803 switch (constant_size (a->dw_attr_val.v.val_vec.length
9804 * a->dw_attr_val.v.val_vec.elt_size))
9805 {
9806 case 1:
9807 return DW_FORM_block1;
9808 case 2:
9809 return DW_FORM_block2;
9810 case 4:
9811 return DW_FORM_block4;
9812 default:
9813 gcc_unreachable ();
9814 }
9815 case dw_val_class_flag:
9816 if (dwarf_version >= 4)
9817 {
9818 /* Currently all add_AT_flag calls pass in 1 as last argument,
9819 so DW_FORM_flag_present can be used. If that ever changes,
9820 we'll need to use DW_FORM_flag and have some optimization
9821 in build_abbrev_table that will change those to
9822 DW_FORM_flag_present if it is set to 1 in all DIEs using
9823 the same abbrev entry. */
9824 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9825 return DW_FORM_flag_present;
9826 }
9827 return DW_FORM_flag;
9828 case dw_val_class_die_ref:
9829 if (AT_ref_external (a))
9830 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9831 else
9832 return DW_FORM_ref;
9833 case dw_val_class_fde_ref:
9834 return DW_FORM_data;
9835 case dw_val_class_lbl_id:
9836 return (AT_index (a) == NOT_INDEXED
9837 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9838 case dw_val_class_lineptr:
9839 case dw_val_class_macptr:
9840 case dw_val_class_loclistsptr:
9841 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9842 case dw_val_class_str:
9843 return AT_string_form (a);
9844 case dw_val_class_file:
9845 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9846 {
9847 case 1:
9848 return DW_FORM_data1;
9849 case 2:
9850 return DW_FORM_data2;
9851 case 4:
9852 return DW_FORM_data4;
9853 default:
9854 gcc_unreachable ();
9855 }
9856
9857 case dw_val_class_data8:
9858 return DW_FORM_data8;
9859
9860 case dw_val_class_high_pc:
9861 switch (DWARF2_ADDR_SIZE)
9862 {
9863 case 1:
9864 return DW_FORM_data1;
9865 case 2:
9866 return DW_FORM_data2;
9867 case 4:
9868 return DW_FORM_data4;
9869 case 8:
9870 return DW_FORM_data8;
9871 default:
9872 gcc_unreachable ();
9873 }
9874
9875 case dw_val_class_discr_value:
9876 return (a->dw_attr_val.v.val_discr_value.pos
9877 ? DW_FORM_udata
9878 : DW_FORM_sdata);
9879 case dw_val_class_discr_list:
9880 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9881 {
9882 case 1:
9883 return DW_FORM_block1;
9884 case 2:
9885 return DW_FORM_block2;
9886 case 4:
9887 return DW_FORM_block4;
9888 default:
9889 gcc_unreachable ();
9890 }
9891
9892 default:
9893 gcc_unreachable ();
9894 }
9895 }
9896
9897 /* Output the encoding of an attribute value. */
9898
9899 static void
9900 output_value_format (dw_attr_node *a)
9901 {
9902 enum dwarf_form form = value_format (a);
9903
9904 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9905 }
9906
9907 /* Given a die and id, produce the appropriate abbreviations. */
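/* Illustrative output for a hypothetical childless DW_TAG_variable abbrev:
   a uleb128 abbrev code, uleb128 DW_TAG_variable, a DW_children_no byte, then
   uleb128 attribute/form pairs such as DW_AT_name/DW_FORM_strp and DW_AT_type
   with a reference form, any DW_FORM_implicit_const values as sleb128, and
   finally the two terminating zero bytes emitted below.  */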
9908
9909 static void
9910 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9911 {
9912 unsigned ix;
9913 dw_attr_node *a_attr;
9914
9915 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9916 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9917 dwarf_tag_name (abbrev->die_tag));
9918
9919 if (abbrev->die_child != NULL)
9920 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9921 else
9922 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9923
9924 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9925 {
9926 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9927 dwarf_attr_name (a_attr->dw_attr));
9928 output_value_format (a_attr);
9929 if (value_format (a_attr) == DW_FORM_implicit_const)
9930 {
9931 if (AT_class (a_attr) == dw_val_class_file_implicit)
9932 {
9933 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9934 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9935 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9936 }
9937 else
9938 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9939 }
9940 }
9941
9942 dw2_asm_output_data (1, 0, NULL);
9943 dw2_asm_output_data (1, 0, NULL);
9944 }
9945
9946
9947 /* Output the .debug_abbrev section which defines the DIE abbreviation
9948 table. */
9949
9950 static void
9951 output_abbrev_section (void)
9952 {
9953 unsigned int abbrev_id;
9954 dw_die_ref abbrev;
9955
9956 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9957 if (abbrev_id != 0)
9958 output_die_abbrevs (abbrev_id, abbrev);
9959
9960 /* Terminate the table. */
9961 dw2_asm_output_data (1, 0, NULL);
9962 }
9963
9964 /* Return a new location list, given the begin and end range, and the
9965 expression. */
9966
9967 static inline dw_loc_list_ref
9968 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9969 const char *end, var_loc_view vend,
9970 const char *section)
9971 {
9972 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9973
9974 retlist->begin = begin;
9975 retlist->begin_entry = NULL;
9976 retlist->end = end;
9977 retlist->expr = expr;
9978 retlist->section = section;
9979 retlist->vbegin = vbegin;
9980 retlist->vend = vend;
9981
9982 return retlist;
9983 }
9984
9985 /* Return true iff there's any nonzero view number in the loc list. */
9986
9987 static bool
9988 loc_list_has_views (dw_loc_list_ref list)
9989 {
9990 if (!debug_variable_location_views)
9991 return false;
9992
9993 for (dw_loc_list_ref loc = list;
9994 loc != NULL; loc = loc->dw_loc_next)
9995 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
9996 return true;
9997
9998 return false;
9999 }
10000
10001 /* Generate a new internal symbol for this location list node, if it
10002 hasn't got one yet. */
10003
10004 static inline void
10005 gen_llsym (dw_loc_list_ref list)
10006 {
10007 gcc_assert (!list->ll_symbol);
10008 list->ll_symbol = gen_internal_sym ("LLST");
10009
10010 if (!loc_list_has_views (list))
10011 return;
10012
10013 if (dwarf2out_locviews_in_attribute ())
10014 {
10015 /* Use the same label_num for the view list. */
10016 label_num--;
10017 list->vl_symbol = gen_internal_sym ("LVUS");
10018 }
10019 else
10020 list->vl_symbol = list->ll_symbol;
10021 }
10022
10023 /* Generate a symbol for the list, but only if we really want to emit
10024 it as a list. */
10025
10026 static inline void
10027 maybe_gen_llsym (dw_loc_list_ref list)
10028 {
10029 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10030 return;
10031
10032 gen_llsym (list);
10033 }
10034
10035 /* Determine whether or not to skip loc_list entry CURR. If we're not
10036 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10037 representation in *SIZEP. */
10038
10039 static bool
10040 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = 0)
10041 {
10042 /* Don't output an entry that starts and ends at the same address. */
10043 if (strcmp (curr->begin, curr->end) == 0
10044 && curr->vbegin == curr->vend && !curr->force)
10045 return true;
10046
10047 unsigned long size = size_of_locs (curr->expr);
10048
10049 /* If the expression is too large, drop it on the floor.  We could
10050 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10051 in the expression, but expressions of >= 64KB for a single value
10052 in a single range are unlikely to be very useful.  */
10053 if (dwarf_version < 5 && size > 0xffff)
10054 return true;
10055
10056 if (sizep)
10057 *sizep = size;
10058
10059 return false;
10060 }
10061
10062 /* Output a view pair loclist entry for CURR, if it requires one. */
10063
10064 static void
10065 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10066 {
10067 if (!dwarf2out_locviews_in_loclist ())
10068 return;
10069
10070 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10071 return;
10072
10073 #ifdef DW_LLE_view_pair
10074 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10075
10076 if (dwarf2out_as_locview_support)
10077 {
10078 if (ZERO_VIEW_P (curr->vbegin))
10079 dw2_asm_output_data_uleb128 (0, "Location view begin");
10080 else
10081 {
10082 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10083 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10084 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10085 }
10086
10087 if (ZERO_VIEW_P (curr->vend))
10088 dw2_asm_output_data_uleb128 (0, "Location view end");
10089 else
10090 {
10091 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10092 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10093 dw2_asm_output_symname_uleb128 (label, "Location view end");
10094 }
10095 }
10096 else
10097 {
10098 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10099 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10100 }
10101 #endif /* DW_LLE_view_pair */
10102
10103 return;
10104 }
10105
10106 /* Output the location list given to us. */
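/* Sketch of what one DWARF 5 list looks like when all code is in .text and
   the assembler supports .uleb128 (an illustration, not a dump): for each
   live range a DW_LLE_offset_pair byte, two uleb128 offsets from the section
   base, a uleb128 expression size and the DWARF expression bytes, and then a
   single DW_LLE_end_of_list byte terminating the list.  */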
10107
10108 static void
10109 output_loc_list (dw_loc_list_ref list_head)
10110 {
10111 int vcount = 0, lcount = 0;
10112
10113 if (list_head->emitted)
10114 return;
10115 list_head->emitted = true;
10116
10117 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10118 {
10119 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10120
10121 for (dw_loc_list_ref curr = list_head; curr != NULL;
10122 curr = curr->dw_loc_next)
10123 {
10124 if (skip_loc_list_entry (curr))
10125 continue;
10126
10127 vcount++;
10128
10129 /* ?? dwarf_split_debug_info? */
10130 if (dwarf2out_as_locview_support)
10131 {
10132 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10133
10134 if (!ZERO_VIEW_P (curr->vbegin))
10135 {
10136 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10137 dw2_asm_output_symname_uleb128 (label,
10138 "View list begin (%s)",
10139 list_head->vl_symbol);
10140 }
10141 else
10142 dw2_asm_output_data_uleb128 (0,
10143 "View list begin (%s)",
10144 list_head->vl_symbol);
10145
10146 if (!ZERO_VIEW_P (curr->vend))
10147 {
10148 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10149 dw2_asm_output_symname_uleb128 (label,
10150 "View list end (%s)",
10151 list_head->vl_symbol);
10152 }
10153 else
10154 dw2_asm_output_data_uleb128 (0,
10155 "View list end (%s)",
10156 list_head->vl_symbol);
10157 }
10158 else
10159 {
10160 dw2_asm_output_data_uleb128 (curr->vbegin,
10161 "View list begin (%s)",
10162 list_head->vl_symbol);
10163 dw2_asm_output_data_uleb128 (curr->vend,
10164 "View list end (%s)",
10165 list_head->vl_symbol);
10166 }
10167 }
10168 }
10169
10170 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10171
10172 const char *last_section = NULL;
10173 const char *base_label = NULL;
10174
10175 /* Walk the location list, and output each range + expression. */
10176 for (dw_loc_list_ref curr = list_head; curr != NULL;
10177 curr = curr->dw_loc_next)
10178 {
10179 unsigned long size;
10180
10181 /* Skip this entry? If we skip it here, we must skip it in the
10182 view list above as well. */
10183 if (skip_loc_list_entry (curr, &size))
10184 continue;
10185
10186 lcount++;
10187
10188 if (dwarf_version >= 5)
10189 {
10190 if (dwarf_split_debug_info)
10191 {
10192 dwarf2out_maybe_output_loclist_view_pair (curr);
10193 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10194 a uleb128 index into .debug_addr and a uleb128 length.  */
10195 dw2_asm_output_data (1, DW_LLE_startx_length,
10196 "DW_LLE_startx_length (%s)",
10197 list_head->ll_symbol);
10198 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10199 "Location list range start index "
10200 "(%s)", curr->begin);
10201 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10202 For that case we probably need to emit DW_LLE_startx_endx,
10203 but we'd need 2 .debug_addr entries rather than just one. */
10204 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10205 "Location list length (%s)",
10206 list_head->ll_symbol);
10207 }
10208 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10209 {
10210 dwarf2out_maybe_output_loclist_view_pair (curr);
10211 /* If all code is in .text section, the base address is
10212 already provided by the CU attributes. Use
10213 DW_LLE_offset_pair where both addresses are uleb128 encoded
10214 offsets against that base. */
10215 dw2_asm_output_data (1, DW_LLE_offset_pair,
10216 "DW_LLE_offset_pair (%s)",
10217 list_head->ll_symbol);
10218 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10219 "Location list begin address (%s)",
10220 list_head->ll_symbol);
10221 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10222 "Location list end address (%s)",
10223 list_head->ll_symbol);
10224 }
10225 else if (HAVE_AS_LEB128)
10226 {
10227 /* Otherwise, find out how many consecutive entries could share
10228 the same base entry. If just one, emit DW_LLE_start_length,
10229 otherwise emit DW_LLE_base_address for the base address
10230 followed by a series of DW_LLE_offset_pair. */
10231 if (last_section == NULL || curr->section != last_section)
10232 {
10233 dw_loc_list_ref curr2;
10234 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10235 curr2 = curr2->dw_loc_next)
10236 {
10237 if (strcmp (curr2->begin, curr2->end) == 0
10238 && !curr2->force)
10239 continue;
10240 break;
10241 }
10242 if (curr2 == NULL || curr->section != curr2->section)
10243 last_section = NULL;
10244 else
10245 {
10246 last_section = curr->section;
10247 base_label = curr->begin;
10248 dw2_asm_output_data (1, DW_LLE_base_address,
10249 "DW_LLE_base_address (%s)",
10250 list_head->ll_symbol);
10251 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10252 "Base address (%s)",
10253 list_head->ll_symbol);
10254 }
10255 }
10256 /* Only one entry with the same base address. Use
10257 DW_LLE_start_length with absolute address and uleb128
10258 length. */
10259 if (last_section == NULL)
10260 {
10261 dwarf2out_maybe_output_loclist_view_pair (curr);
10262 dw2_asm_output_data (1, DW_LLE_start_length,
10263 "DW_LLE_start_length (%s)",
10264 list_head->ll_symbol);
10265 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10266 "Location list begin address (%s)",
10267 list_head->ll_symbol);
10268 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10269 "Location list length "
10270 "(%s)", list_head->ll_symbol);
10271 }
10272 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10273 DW_LLE_base_address. */
10274 else
10275 {
10276 dwarf2out_maybe_output_loclist_view_pair (curr);
10277 dw2_asm_output_data (1, DW_LLE_offset_pair,
10278 "DW_LLE_offset_pair (%s)",
10279 list_head->ll_symbol);
10280 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10281 "Location list begin address "
10282 "(%s)", list_head->ll_symbol);
10283 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10284 "Location list end address "
10285 "(%s)", list_head->ll_symbol);
10286 }
10287 }
10288 /* The assembler does not support the .uleb128 directive.  Emit
10289 DW_LLE_start_end with a pair of absolute addresses.  */
10290 else
10291 {
10292 dwarf2out_maybe_output_loclist_view_pair (curr);
10293 dw2_asm_output_data (1, DW_LLE_start_end,
10294 "DW_LLE_start_end (%s)",
10295 list_head->ll_symbol);
10296 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10297 "Location list begin address (%s)",
10298 list_head->ll_symbol);
10299 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10300 "Location list end address (%s)",
10301 list_head->ll_symbol);
10302 }
10303 }
10304 else if (dwarf_split_debug_info)
10305 {
10306 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit an index into .debug_addr
10307 and a 4-byte length.  */
10308 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10309 "Location list start/length entry (%s)",
10310 list_head->ll_symbol);
10311 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10312 "Location list range start index (%s)",
10313 curr->begin);
10314 /* The length field is 4 bytes. If we ever need to support
10315 an 8-byte length, we can add a new DW_LLE code or fall back
10316 to DW_LLE_GNU_start_end_entry. */
10317 dw2_asm_output_delta (4, curr->end, curr->begin,
10318 "Location list range length (%s)",
10319 list_head->ll_symbol);
10320 }
10321 else if (!have_multiple_function_sections)
10322 {
10323 /* Pair of relative addresses against start of text section. */
10324 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10325 "Location list begin address (%s)",
10326 list_head->ll_symbol);
10327 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10328 "Location list end address (%s)",
10329 list_head->ll_symbol);
10330 }
10331 else
10332 {
10333 /* Pair of absolute addresses. */
10334 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10335 "Location list begin address (%s)",
10336 list_head->ll_symbol);
10337 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10338 "Location list end address (%s)",
10339 list_head->ll_symbol);
10340 }
10341
10342 /* Output the block length for this list of location operations. */
10343 if (dwarf_version >= 5)
10344 dw2_asm_output_data_uleb128 (size, "Location expression size");
10345 else
10346 {
10347 gcc_assert (size <= 0xffff);
10348 dw2_asm_output_data (2, size, "Location expression size");
10349 }
10350
10351 output_loc_sequence (curr->expr, -1);
10352 }
10353
10354 /* And finally list termination. */
10355 if (dwarf_version >= 5)
10356 dw2_asm_output_data (1, DW_LLE_end_of_list,
10357 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10358 else if (dwarf_split_debug_info)
10359 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10360 "Location list terminator (%s)",
10361 list_head->ll_symbol);
10362 else
10363 {
10364 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10365 "Location list terminator begin (%s)",
10366 list_head->ll_symbol);
10367 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10368 "Location list terminator end (%s)",
10369 list_head->ll_symbol);
10370 }
10371
10372 gcc_assert (!list_head->vl_symbol
10373 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10374 }
10375
10376 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10377 section. Emit a relocated reference if val_entry is NULL, otherwise,
10378 emit an indirect reference. */
10379
10380 static void
10381 output_range_list_offset (dw_attr_node *a)
10382 {
10383 const char *name = dwarf_attr_name (a->dw_attr);
10384
10385 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10386 {
10387 if (dwarf_version >= 5)
10388 {
10389 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10390 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10391 debug_ranges_section, "%s", name);
10392 }
10393 else
10394 {
10395 char *p = strchr (ranges_section_label, '\0');
10396 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10397 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10398 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10399 debug_ranges_section, "%s", name);
10400 *p = '\0';
10401 }
10402 }
10403 else if (dwarf_version >= 5)
10404 {
10405 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10406 gcc_assert (rnglist_idx);
10407 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10408 }
10409 else
10410 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10411 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10412 "%s (offset from %s)", name, ranges_section_label);
10413 }
10414
10415 /* Output the offset into the debug_loc section. */
10416
10417 static void
10418 output_loc_list_offset (dw_attr_node *a)
10419 {
10420 char *sym = AT_loc_list (a)->ll_symbol;
10421
10422 gcc_assert (sym);
10423 if (!dwarf_split_debug_info)
10424 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10425 "%s", dwarf_attr_name (a->dw_attr));
10426 else if (dwarf_version >= 5)
10427 {
10428 gcc_assert (AT_loc_list (a)->num_assigned);
10429 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10430 dwarf_attr_name (a->dw_attr),
10431 sym);
10432 }
10433 else
10434 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10435 "%s", dwarf_attr_name (a->dw_attr));
10436 }
10437
10438 /* Output the offset into the debug_loc section. */
10439
10440 static void
10441 output_view_list_offset (dw_attr_node *a)
10442 {
10443 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10444
10445 gcc_assert (sym);
10446 if (dwarf_split_debug_info)
10447 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10448 "%s", dwarf_attr_name (a->dw_attr));
10449 else
10450 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10451 "%s", dwarf_attr_name (a->dw_attr));
10452 }
10453
10454 /* Output an attribute's index or value appropriately. */
10455
10456 static void
10457 output_attr_index_or_value (dw_attr_node *a)
10458 {
10459 const char *name = dwarf_attr_name (a->dw_attr);
10460
10461 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10462 {
10463 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10464 return;
10465 }
10466 switch (AT_class (a))
10467 {
10468 case dw_val_class_addr:
10469 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10470 break;
10471 case dw_val_class_high_pc:
10472 case dw_val_class_lbl_id:
10473 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10474 break;
10475 default:
10476 gcc_unreachable ();
10477 }
10478 }
10479
10480 /* Output a type signature. */
10481
10482 static inline void
10483 output_signature (const char *sig, const char *name)
10484 {
10485 int i;
10486
10487 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10488 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10489 }
10490
10491 /* Output a discriminant value. */
10492
10493 static inline void
10494 output_discr_value (dw_discr_value *discr_value, const char *name)
10495 {
10496 if (discr_value->pos)
10497 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10498 else
10499 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10500 }
10501
10502 /* Output the DIE and its attributes. Called recursively to generate
10503 the definitions of each child DIE. */
10504
10505 static void
10506 output_die (dw_die_ref die)
10507 {
10508 dw_attr_node *a;
10509 dw_die_ref c;
10510 unsigned long size;
10511 unsigned ix;
10512
10513 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10514 (unsigned long)die->die_offset,
10515 dwarf_tag_name (die->die_tag));
10516
10517 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10518 {
10519 const char *name = dwarf_attr_name (a->dw_attr);
10520
10521 switch (AT_class (a))
10522 {
10523 case dw_val_class_addr:
10524 output_attr_index_or_value (a);
10525 break;
10526
10527 case dw_val_class_offset:
10528 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10529 "%s", name);
10530 break;
10531
10532 case dw_val_class_range_list:
10533 output_range_list_offset (a);
10534 break;
10535
10536 case dw_val_class_loc:
10537 size = size_of_locs (AT_loc (a));
10538
10539 /* Output the block length for this list of location operations. */
10540 if (dwarf_version >= 4)
10541 dw2_asm_output_data_uleb128 (size, "%s", name);
10542 else
10543 dw2_asm_output_data (constant_size (size), size, "%s", name);
10544
10545 output_loc_sequence (AT_loc (a), -1);
10546 break;
10547
10548 case dw_val_class_const:
10549 /* ??? It would be slightly more efficient to use a scheme like the one
10550 used for unsigned constants below, but gdb 4.x does not sign
10551 extend.  Gdb 5.x does sign extend.  */
10552 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10553 break;
10554
10555 case dw_val_class_unsigned_const:
10556 {
10557 int csize = constant_size (AT_unsigned (a));
10558 if (dwarf_version == 3
10559 && a->dw_attr == DW_AT_data_member_location
10560 && csize >= 4)
10561 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10562 else
10563 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10564 }
10565 break;
10566
10567 case dw_val_class_symview:
10568 {
10569 int vsize;
10570 if (symview_upper_bound <= 0xff)
10571 vsize = 1;
10572 else if (symview_upper_bound <= 0xffff)
10573 vsize = 2;
10574 else if (symview_upper_bound <= 0xffffffff)
10575 vsize = 4;
10576 else
10577 vsize = 8;
10578 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10579 "%s", name);
10580 }
10581 break;
10582
10583 case dw_val_class_const_implicit:
10584 if (flag_debug_asm)
10585 fprintf (asm_out_file, "\t\t\t%s %s ("
10586 HOST_WIDE_INT_PRINT_DEC ")\n",
10587 ASM_COMMENT_START, name, AT_int (a));
10588 break;
10589
10590 case dw_val_class_unsigned_const_implicit:
10591 if (flag_debug_asm)
10592 fprintf (asm_out_file, "\t\t\t%s %s ("
10593 HOST_WIDE_INT_PRINT_HEX ")\n",
10594 ASM_COMMENT_START, name, AT_unsigned (a));
10595 break;
10596
10597 case dw_val_class_const_double:
10598 {
10599 unsigned HOST_WIDE_INT first, second;
10600
10601 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10602 dw2_asm_output_data (1,
10603 HOST_BITS_PER_DOUBLE_INT
10604 / HOST_BITS_PER_CHAR,
10605 NULL);
10606
10607 if (WORDS_BIG_ENDIAN)
10608 {
10609 first = a->dw_attr_val.v.val_double.high;
10610 second = a->dw_attr_val.v.val_double.low;
10611 }
10612 else
10613 {
10614 first = a->dw_attr_val.v.val_double.low;
10615 second = a->dw_attr_val.v.val_double.high;
10616 }
10617
10618 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10619 first, "%s", name);
10620 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10621 second, NULL);
10622 }
10623 break;
10624
10625 case dw_val_class_wide_int:
10626 {
10627 int i;
10628 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10629 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10630 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10631 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10632 * l, NULL);
10633
10634 if (WORDS_BIG_ENDIAN)
10635 for (i = len - 1; i >= 0; --i)
10636 {
10637 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10638 "%s", name);
10639 name = "";
10640 }
10641 else
10642 for (i = 0; i < len; ++i)
10643 {
10644 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10645 "%s", name);
10646 name = "";
10647 }
10648 }
10649 break;
10650
10651 case dw_val_class_vec:
10652 {
10653 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10654 unsigned int len = a->dw_attr_val.v.val_vec.length;
10655 unsigned int i;
10656 unsigned char *p;
10657
10658 dw2_asm_output_data (constant_size (len * elt_size),
10659 len * elt_size, "%s", name);
10660 if (elt_size > sizeof (HOST_WIDE_INT))
10661 {
10662 elt_size /= 2;
10663 len *= 2;
10664 }
10665 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10666 i < len;
10667 i++, p += elt_size)
10668 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10669 "fp or vector constant word %u", i);
10670 break;
10671 }
10672
10673 case dw_val_class_flag:
10674 if (dwarf_version >= 4)
10675 {
10676 /* Currently all add_AT_flag calls pass in 1 as last argument,
10677 so DW_FORM_flag_present can be used. If that ever changes,
10678 we'll need to use DW_FORM_flag and have some optimization
10679 in build_abbrev_table that will change those to
10680 DW_FORM_flag_present if it is set to 1 in all DIEs using
10681 the same abbrev entry. */
10682 gcc_assert (AT_flag (a) == 1);
10683 if (flag_debug_asm)
10684 fprintf (asm_out_file, "\t\t\t%s %s\n",
10685 ASM_COMMENT_START, name);
10686 break;
10687 }
10688 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10689 break;
10690
10691 case dw_val_class_loc_list:
10692 output_loc_list_offset (a);
10693 break;
10694
10695 case dw_val_class_view_list:
10696 output_view_list_offset (a);
10697 break;
10698
10699 case dw_val_class_die_ref:
10700 if (AT_ref_external (a))
10701 {
10702 if (AT_ref (a)->comdat_type_p)
10703 {
10704 comdat_type_node *type_node
10705 = AT_ref (a)->die_id.die_type_node;
10706
10707 gcc_assert (type_node);
10708 output_signature (type_node->signature, name);
10709 }
10710 else
10711 {
10712 const char *sym = AT_ref (a)->die_id.die_symbol;
10713 int size;
10714
10715 gcc_assert (sym);
10716 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10717 length, whereas in DWARF3 it's always sized as an
10718 offset. */
10719 if (dwarf_version == 2)
10720 size = DWARF2_ADDR_SIZE;
10721 else
10722 size = DWARF_OFFSET_SIZE;
10723 /* ??? We cannot unconditionally output die_offset if
10724 non-zero - others might create references to those
10725 DIEs via symbols.
10726 And we do not clear its DIE offset after outputting it
10727 (and the label refers to the actual DIE, not to the
10728 DWARF CU unit header, which is the case in which using
10729 label + offset would be the correct thing to do).
10730 ??? This is the reason for the with_offset flag. */
10731 if (AT_ref (a)->with_offset)
10732 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10733 debug_info_section, "%s", name);
10734 else
10735 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10736 name);
10737 }
10738 }
10739 else
10740 {
10741 gcc_assert (AT_ref (a)->die_offset);
10742 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10743 "%s", name);
10744 }
10745 break;
10746
10747 case dw_val_class_fde_ref:
10748 {
10749 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10750
10751 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10752 a->dw_attr_val.v.val_fde_index * 2);
10753 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10754 "%s", name);
10755 }
10756 break;
10757
10758 case dw_val_class_vms_delta:
10759 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10760 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10761 AT_vms_delta2 (a), AT_vms_delta1 (a),
10762 "%s", name);
10763 #else
10764 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10765 AT_vms_delta2 (a), AT_vms_delta1 (a),
10766 "%s", name);
10767 #endif
10768 break;
10769
10770 case dw_val_class_lbl_id:
10771 output_attr_index_or_value (a);
10772 break;
10773
10774 case dw_val_class_lineptr:
10775 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10776 debug_line_section, "%s", name);
10777 break;
10778
10779 case dw_val_class_macptr:
10780 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10781 debug_macinfo_section, "%s", name);
10782 break;
10783
10784 case dw_val_class_loclistsptr:
10785 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10786 debug_loc_section, "%s", name);
10787 break;
10788
10789 case dw_val_class_str:
10790 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10791 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10792 a->dw_attr_val.v.val_str->label,
10793 debug_str_section,
10794 "%s: \"%s\"", name, AT_string (a));
10795 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10796 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10797 a->dw_attr_val.v.val_str->label,
10798 debug_line_str_section,
10799 "%s: \"%s\"", name, AT_string (a));
10800 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10801 dw2_asm_output_data_uleb128 (AT_index (a),
10802 "%s: \"%s\"", name, AT_string (a));
10803 else
10804 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10805 break;
10806
10807 case dw_val_class_file:
10808 {
10809 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10810
10811 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10812 a->dw_attr_val.v.val_file->filename);
10813 break;
10814 }
10815
10816 case dw_val_class_file_implicit:
10817 if (flag_debug_asm)
10818 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10819 ASM_COMMENT_START, name,
10820 maybe_emit_file (a->dw_attr_val.v.val_file),
10821 a->dw_attr_val.v.val_file->filename);
10822 break;
10823
10824 case dw_val_class_data8:
10825 {
10826 int i;
10827
10828 for (i = 0; i < 8; i++)
10829 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10830 i == 0 ? "%s" : NULL, name);
10831 break;
10832 }
10833
10834 case dw_val_class_high_pc:
10835 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10836 get_AT_low_pc (die), "DW_AT_high_pc");
10837 break;
10838
10839 case dw_val_class_discr_value:
10840 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10841 break;
10842
10843 case dw_val_class_discr_list:
10844 {
10845 dw_discr_list_ref list = AT_discr_list (a);
10846 const int size = size_of_discr_list (list);
10847
10848 /* This is a block, so output its length first. */
10849 dw2_asm_output_data (constant_size (size), size,
10850 "%s: block size", name);
10851
10852 for (; list != NULL; list = list->dw_discr_next)
10853 {
10854 /* One byte for the discriminant value descriptor, and then as
10855 many LEB128 numbers as required. */
10856 if (list->dw_discr_range)
10857 dw2_asm_output_data (1, DW_DSC_range,
10858 "%s: DW_DSC_range", name);
10859 else
10860 dw2_asm_output_data (1, DW_DSC_label,
10861 "%s: DW_DSC_label", name);
10862
10863 output_discr_value (&list->dw_discr_lower_bound, name);
10864 if (list->dw_discr_range)
10865 output_discr_value (&list->dw_discr_upper_bound, name);
10866 }
10867 break;
10868 }
10869
10870 default:
10871 gcc_unreachable ();
10872 }
10873 }
10874
10875 FOR_EACH_CHILD (die, c, output_die (c));
10876
10877 /* Add null byte to terminate sibling list. */
10878 if (die->die_child != NULL)
10879 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10880 (unsigned long) die->die_offset);
10881 }
10882
10883 /* Output the dwarf version number. */
10884
10885 static void
10886 output_dwarf_version ()
10887 {
10888 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10889 views in loclist. That will change eventually. */
10890 if (dwarf_version == 6)
10891 {
10892 static bool once;
10893 if (!once)
10894 {
10895 warning (0,
10896 "-gdwarf-6 is output as version 5 with incompatibilities");
10897 once = true;
10898 }
10899 dw2_asm_output_data (2, 5, "DWARF version number");
10900 }
10901 else
10902 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10903 }
10904
10905 /* Output the compilation unit that appears at the beginning of the
10906 .debug_info section, and precedes the DIE descriptions. */
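/* For illustration only: with 32-bit DWARF on x86_64, say, the DWARF 5
   header emitted below is 12 bytes, roughly

	.long	0x4a			# Length of Compilation Unit Info
	.value	0x5			# DWARF version number
	.byte	0x1			# DW_UT_compile
	.byte	0x8			# Pointer Size (in bytes)
	.long	.Ldebug_abbrev0		# Offset Into Abbrev. Section

   (the length value and the abbrev label are made up here).  For DWARF
   2-4 the unit type byte is absent and the pointer size follows the
   abbrev offset, giving an 11 byte header.  */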
10907
10908 static void
10909 output_compilation_unit_header (enum dwarf_unit_type ut)
10910 {
10911 if (!XCOFF_DEBUGGING_INFO)
10912 {
10913 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10914 dw2_asm_output_data (4, 0xffffffff,
10915 "Initial length escape value indicating 64-bit DWARF extension");
10916 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10917 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10918 "Length of Compilation Unit Info");
10919 }
10920
10921 output_dwarf_version ();
10922 if (dwarf_version >= 5)
10923 {
10924 const char *name;
10925 switch (ut)
10926 {
10927 case DW_UT_compile: name = "DW_UT_compile"; break;
10928 case DW_UT_type: name = "DW_UT_type"; break;
10929 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10930 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10931 default: gcc_unreachable ();
10932 }
10933 dw2_asm_output_data (1, ut, "%s", name);
10934 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10935 }
10936 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10937 debug_abbrev_section,
10938 "Offset Into Abbrev. Section");
10939 if (dwarf_version < 5)
10940 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10941 }
10942
10943 /* Output the compilation unit DIE and its children. */
10944
10945 static void
10946 output_comp_unit (dw_die_ref die, int output_if_empty,
10947 const unsigned char *dwo_id)
10948 {
10949 const char *secname, *oldsym;
10950 char *tmp;
10951
10952 /* Unless we are outputting the main CU, we may throw away empty ones.  */
10953 if (!output_if_empty && die->die_child == NULL)
10954 return;
10955
10956 /* Even if there are no children of this DIE, we must output the information
10957 about the compilation unit. Otherwise, on an empty translation unit, we
10958 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10959 will then complain when examining the file. First mark all the DIEs in
10960 this CU so we know which get local refs. */
10961 mark_dies (die);
10962
10963 external_ref_hash_type *extern_map = optimize_external_refs (die);
10964
10965 /* For now, optimize only the main CU, in order to optimize the rest
10966 we'd need to see all of them earlier. Leave the rest for post-linking
10967 tools like DWZ. */
10968 if (die == comp_unit_die ())
10969 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10970
10971 build_abbrev_table (die, extern_map);
10972
10973 optimize_abbrev_table ();
10974
10975 delete extern_map;
10976
10977 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10978 next_die_offset = (dwo_id
10979 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10980 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10981 calc_die_sizes (die);
10982
10983 oldsym = die->die_id.die_symbol;
10984 if (oldsym && die->comdat_type_p)
10985 {
10986 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10987
10988 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10989 secname = tmp;
10990 die->die_id.die_symbol = NULL;
10991 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10992 }
10993 else
10994 {
10995 switch_to_section (debug_info_section);
10996 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10997 info_section_emitted = true;
10998 }
10999
11000 /* For LTO cross unit DIE refs we want a symbol on the start of the
11001 debuginfo section, not on the CU DIE. */
11002 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11003 {
11004 /* ??? No way to get visibility assembled without a decl. */
11005 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11006 get_identifier (oldsym), char_type_node);
11007 TREE_PUBLIC (decl) = true;
11008 TREE_STATIC (decl) = true;
11009 DECL_ARTIFICIAL (decl) = true;
11010 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11011 DECL_VISIBILITY_SPECIFIED (decl) = true;
11012 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11013 #ifdef ASM_WEAKEN_LABEL
11014 /* We prefer a .weak because that handles duplicates from duplicate
11015 archive members in a graceful way. */
11016 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11017 #else
11018 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11019 #endif
11020 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11021 }
11022
11023 /* Output debugging information. */
11024 output_compilation_unit_header (dwo_id
11025 ? DW_UT_split_compile : DW_UT_compile);
11026 if (dwarf_version >= 5)
11027 {
11028 if (dwo_id != NULL)
11029 for (int i = 0; i < 8; i++)
11030 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11031 }
11032 output_die (die);
11033
11034 /* Leave the marks on the main CU, so we can check them in
11035 output_pubnames. */
11036 if (oldsym)
11037 {
11038 unmark_dies (die);
11039 die->die_id.die_symbol = oldsym;
11040 }
11041 }
11042
11043 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11044 and .debug_pubtypes. This is configured per-target, but can be
11045 overridden by the -gpubnames or -gno-pubnames options. */
11046
11047 static inline bool
11048 want_pubnames (void)
11049 {
11050 if (debug_info_level <= DINFO_LEVEL_TERSE)
11051 return false;
11052 if (debug_generate_pub_sections != -1)
11053 return debug_generate_pub_sections;
11054 return targetm.want_debug_pub_sections;
11055 }
11056
11057 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11058
11059 static void
11060 add_AT_pubnames (dw_die_ref die)
11061 {
11062 if (want_pubnames ())
11063 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11064 }
11065
11066 /* Add a string attribute value to a skeleton DIE. */
11067
11068 static inline void
11069 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11070 const char *str)
11071 {
11072 dw_attr_node attr;
11073 struct indirect_string_node *node;
11074
11075 if (! skeleton_debug_str_hash)
11076 skeleton_debug_str_hash
11077 = hash_table<indirect_string_hasher>::create_ggc (10);
11078
11079 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11080 find_string_form (node);
11081 if (node->form == DW_FORM_GNU_str_index)
11082 node->form = DW_FORM_strp;
11083
11084 attr.dw_attr = attr_kind;
11085 attr.dw_attr_val.val_class = dw_val_class_str;
11086 attr.dw_attr_val.val_entry = NULL;
11087 attr.dw_attr_val.v.val_str = node;
11088 add_dwarf_attr (die, &attr);
11089 }
11090
11091 /* Helper function to generate top-level dies for skeleton debug_info and
11092 debug_types. */
11093
11094 static void
11095 add_top_level_skeleton_die_attrs (dw_die_ref die)
11096 {
11097 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11098 const char *comp_dir = comp_dir_string ();
11099
11100 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11101 if (comp_dir != NULL)
11102 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11103 add_AT_pubnames (die);
11104 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
11105 }
11106
11107 /* Output skeleton debug sections that point to the dwo file. */
11108
11109 static void
11110 output_skeleton_debug_sections (dw_die_ref comp_unit,
11111 const unsigned char *dwo_id)
11112 {
11113 /* These attributes will be found in the full debug_info section. */
11114 remove_AT (comp_unit, DW_AT_producer);
11115 remove_AT (comp_unit, DW_AT_language);
11116
11117 switch_to_section (debug_skeleton_info_section);
11118 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11119
11120 /* Produce the skeleton compilation-unit header. This one differs enough from
11121 a normal CU header that it's better not to call
11122 output_compilation_unit_header.  */
11123 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11124 dw2_asm_output_data (4, 0xffffffff,
11125 "Initial length escape value indicating 64-bit "
11126 "DWARF extension");
11127
11128 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11129 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11130 - DWARF_INITIAL_LENGTH_SIZE
11131 + size_of_die (comp_unit),
11132 "Length of Compilation Unit Info");
11133 output_dwarf_version ();
11134 if (dwarf_version >= 5)
11135 {
11136 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11137 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11138 }
11139 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11140 debug_skeleton_abbrev_section,
11141 "Offset Into Abbrev. Section");
11142 if (dwarf_version < 5)
11143 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11144 else
11145 for (int i = 0; i < 8; i++)
11146 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11147
11148 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11149 output_die (comp_unit);
11150
11151 /* Build the skeleton debug_abbrev section. */
11152 switch_to_section (debug_skeleton_abbrev_section);
11153 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11154
11155 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11156
11157 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11158 }
11159
11160 /* Output a comdat type unit DIE and its children. */
11161
11162 static void
11163 output_comdat_type_unit (comdat_type_node *node)
11164 {
11165 const char *secname;
11166 char *tmp;
11167 int i;
11168 #if defined (OBJECT_FORMAT_ELF)
11169 tree comdat_key;
11170 #endif
11171
11172 /* First mark all the DIEs in this CU so we know which get local refs. */
11173 mark_dies (node->root_die);
11174
11175 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11176
11177 build_abbrev_table (node->root_die, extern_map);
11178
11179 delete extern_map;
11180 extern_map = NULL;
11181
11182 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11183 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11184 calc_die_sizes (node->root_die);
11185
11186 #if defined (OBJECT_FORMAT_ELF)
11187 if (dwarf_version >= 5)
11188 {
11189 if (!dwarf_split_debug_info)
11190 secname = ".debug_info";
11191 else
11192 secname = ".debug_info.dwo";
11193 }
11194 else if (!dwarf_split_debug_info)
11195 secname = ".debug_types";
11196 else
11197 secname = ".debug_types.dwo";
11198
11199 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11200 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11201 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11202 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11203 comdat_key = get_identifier (tmp);
11204 targetm.asm_out.named_section (secname,
11205 SECTION_DEBUG | SECTION_LINKONCE,
11206 comdat_key);
11207 #else
11208 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11209 sprintf (tmp, (dwarf_version >= 5
11210 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11211 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11212 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11213 secname = tmp;
11214 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11215 #endif
11216
11217 /* Output debugging information. */
11218 output_compilation_unit_header (dwarf_split_debug_info
11219 ? DW_UT_split_type : DW_UT_type);
11220 output_signature (node->signature, "Type Signature");
11221 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11222 "Offset to Type DIE");
11223 output_die (node->root_die);
11224
11225 unmark_dies (node->root_die);
11226 }
11227
11228 /* Return the DWARF2/3 pubname associated with a decl. */
11229
11230 static const char *
11231 dwarf2_name (tree decl, int scope)
11232 {
11233 if (DECL_NAMELESS (decl))
11234 return NULL;
11235 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11236 }
11237
11238 /* Add a new entry to .debug_pubnames if appropriate. */
11239
11240 static void
11241 add_pubname_string (const char *str, dw_die_ref die)
11242 {
11243 pubname_entry e;
11244
11245 e.die = die;
11246 e.name = xstrdup (str);
11247 vec_safe_push (pubname_table, e);
11248 }
11249
11250 static void
11251 add_pubname (tree decl, dw_die_ref die)
11252 {
11253 if (!want_pubnames ())
11254 return;
11255
11256 /* Don't add items to the table when we expect that the consumer will have
11257 just read the enclosing die. For example, if the consumer is looking at a
11258 class_member, it will either be inside the class already, or will have just
11259 looked up the class to find the member. Either way, searching the class is
11260 faster than searching the index. */
11261 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11262 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11263 {
11264 const char *name = dwarf2_name (decl, 1);
11265
11266 if (name)
11267 add_pubname_string (name, die);
11268 }
11269 }
11270
11271 /* Add an enumerator to the pubnames section. */
11272
11273 static void
11274 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11275 {
11276 pubname_entry e;
11277
11278 gcc_assert (scope_name);
11279 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11280 e.die = die;
11281 vec_safe_push (pubname_table, e);
11282 }
11283
11284 /* Add a new entry to .debug_pubtypes if appropriate. */
11285
11286 static void
11287 add_pubtype (tree decl, dw_die_ref die)
11288 {
11289 pubname_entry e;
11290
11291 if (!want_pubnames ())
11292 return;
11293
11294 if ((TREE_PUBLIC (decl)
11295 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11296 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11297 {
11298 tree scope = NULL;
11299 const char *scope_name = "";
11300 const char *sep = is_cxx () ? "::" : ".";
11301 const char *name;
11302
11303 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11304 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11305 {
11306 scope_name = lang_hooks.dwarf_name (scope, 1);
11307 if (scope_name != NULL && scope_name[0] != '\0')
11308 scope_name = concat (scope_name, sep, NULL);
11309 else
11310 scope_name = "";
11311 }
11312
11313 if (TYPE_P (decl))
11314 name = type_tag (decl);
11315 else
11316 name = lang_hooks.dwarf_name (decl, 1);
11317
11318 /* If we don't have a name for the type, there's no point in adding
11319 it to the table. */
11320 if (name != NULL && name[0] != '\0')
11321 {
11322 e.die = die;
11323 e.name = concat (scope_name, name, NULL);
11324 vec_safe_push (pubtype_table, e);
11325 }
11326
11327 /* Although it might be more consistent to add the pubinfo for the
11328 enumerators as their dies are created, they should only be added if the
11329 enum type meets the criteria above. So rather than re-check the parent
11330 enum type whenever an enumerator die is created, just output them all
11331 here. This isn't protected by the name conditional because anonymous
11332 enums don't have names. */
11333 if (die->die_tag == DW_TAG_enumeration_type)
11334 {
11335 dw_die_ref c;
11336
11337 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11338 }
11339 }
11340 }
11341
11342 /* Output a single entry in the pubnames table. */
11343
11344 static void
11345 output_pubname (dw_offset die_offset, pubname_entry *entry)
11346 {
11347 dw_die_ref die = entry->die;
11348 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11349
11350 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11351
11352 if (debug_generate_pub_sections == 2)
11353 {
11354 /* This logic follows gdb's method for determining the value of the flag
11355 byte. */
11356 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11357 switch (die->die_tag)
11358 {
11359 case DW_TAG_typedef:
11360 case DW_TAG_base_type:
11361 case DW_TAG_subrange_type:
11362 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11363 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11364 break;
11365 case DW_TAG_enumerator:
11366 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11367 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11368 if (!is_cxx ())
11369 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11370 break;
11371 case DW_TAG_subprogram:
11372 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11373 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11374 if (!is_ada ())
11375 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11376 break;
11377 case DW_TAG_constant:
11378 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11379 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11380 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11381 break;
11382 case DW_TAG_variable:
11383 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11384 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11385 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11386 break;
11387 case DW_TAG_namespace:
11388 case DW_TAG_imported_declaration:
11389 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11390 break;
11391 case DW_TAG_class_type:
11392 case DW_TAG_interface_type:
11393 case DW_TAG_structure_type:
11394 case DW_TAG_union_type:
11395 case DW_TAG_enumeration_type:
11396 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11397 if (!is_cxx ())
11398 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11399 break;
11400 default:
11401 /* An unusual tag. Leave the flag-byte empty. */
11402 break;
11403 }
11404 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11405 "GDB-index flags");
11406 }
11407
11408 dw2_asm_output_nstring (entry->name, -1, "external name");
11409 }
11410
11411
11412 /* Output the public names table used to speed up access to externally
11413 visible names; or the public types table used to find type definitions. */
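/* A sketch of the resulting layout: a header consisting of the table
   length, a 2-byte version (2) and the offset and length of the
   corresponding CU, then one record per name - the referenced DIE
   offset, a GDB-index flag byte when debug_generate_pub_sections == 2,
   and the NUL-terminated name - closed by a zero DIE offset.  */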
11414
11415 static void
11416 output_pubnames (vec<pubname_entry, va_gc> *names)
11417 {
11418 unsigned i;
11419 unsigned long pubnames_length = size_of_pubnames (names);
11420 pubname_entry *pub;
11421
11422 if (!XCOFF_DEBUGGING_INFO)
11423 {
11424 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11425 dw2_asm_output_data (4, 0xffffffff,
11426 "Initial length escape value indicating 64-bit DWARF extension");
11427 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11428 "Pub Info Length");
11429 }
11430
11431 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11432 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11433
11434 if (dwarf_split_debug_info)
11435 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11436 debug_skeleton_info_section,
11437 "Offset of Compilation Unit Info");
11438 else
11439 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11440 debug_info_section,
11441 "Offset of Compilation Unit Info");
11442 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11443 "Compilation Unit Length");
11444
11445 FOR_EACH_VEC_ELT (*names, i, pub)
11446 {
11447 if (include_pubname_in_output (names, pub))
11448 {
11449 dw_offset die_offset = pub->die->die_offset;
11450
11451 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11452 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11453 gcc_assert (pub->die->die_mark);
11454
11455 /* If we're putting types in their own .debug_types sections,
11456 the .debug_pubtypes table will still point to the compile
11457 unit (not the type unit), so we want to use the offset of
11458 the skeleton DIE (if there is one). */
11459 if (pub->die->comdat_type_p && names == pubtype_table)
11460 {
11461 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11462
11463 if (type_node != NULL)
11464 die_offset = (type_node->skeleton_die != NULL
11465 ? type_node->skeleton_die->die_offset
11466 : comp_unit_die ()->die_offset);
11467 }
11468
11469 output_pubname (die_offset, pub);
11470 }
11471 }
11472
11473 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11474 }
11475
11476 /* Output public names and types tables if necessary. */
11477
11478 static void
11479 output_pubtables (void)
11480 {
11481 if (!want_pubnames () || !info_section_emitted)
11482 return;
11483
11484 switch_to_section (debug_pubnames_section);
11485 output_pubnames (pubname_table);
11486 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11487 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11488 simply won't look for the section. */
11489 switch_to_section (debug_pubtypes_section);
11490 output_pubnames (pubtype_table);
11491 }
11492
11493
11494 /* Output the information that goes into the .debug_aranges table.
11495 Namely, define the beginning and ending address range of the
11496 text section generated for this compilation unit. */
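/* For illustration only: after the header (length, 2-byte version 2, CU
   offset, address size, segment size 0 and padding to twice the address
   size) each used range is a plain (address, length) pair, e.g. on a
   64-bit target something like

	.quad	.Ltext0			# Address
	.quad	.Letext0-.Ltext0	# Length

   and the table is closed by an all-zero pair (the labels above are just
   the usual text section labels, shown as an example).  */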
11497
11498 static void
11499 output_aranges (void)
11500 {
11501 unsigned i;
11502 unsigned long aranges_length = size_of_aranges ();
11503
11504 if (!XCOFF_DEBUGGING_INFO)
11505 {
11506 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11507 dw2_asm_output_data (4, 0xffffffff,
11508 "Initial length escape value indicating 64-bit DWARF extension");
11509 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11510 "Length of Address Ranges Info");
11511 }
11512
11513 /* Version number for aranges is still 2, even up to DWARF5. */
11514 dw2_asm_output_data (2, 2, "DWARF aranges version");
11515 if (dwarf_split_debug_info)
11516 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11517 debug_skeleton_info_section,
11518 "Offset of Compilation Unit Info");
11519 else
11520 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11521 debug_info_section,
11522 "Offset of Compilation Unit Info");
11523 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11524 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11525
11526 /* We need to align to twice the pointer size here. */
11527 if (DWARF_ARANGES_PAD_SIZE)
11528 {
11529 /* Pad using 2-byte words so that the padding is correct for any
11530 pointer size.  */
11531 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11532 2 * DWARF2_ADDR_SIZE);
11533 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11534 dw2_asm_output_data (2, 0, NULL);
11535 }
11536
11537 /* It is necessary not to output these entries if the sections were
11538 not used; otherwise the length will be 0 and the address may end
11539 up as 0 if the section is discarded by ld --gc-sections, leaving
11540 an invalid (0, 0) entry that can be confused with the
11541 terminator.  */
11542 if (text_section_used)
11543 {
11544 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11545 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11546 text_section_label, "Length");
11547 }
11548 if (cold_text_section_used)
11549 {
11550 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11551 "Address");
11552 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11553 cold_text_section_label, "Length");
11554 }
11555
11556 if (have_multiple_function_sections)
11557 {
11558 unsigned fde_idx;
11559 dw_fde_ref fde;
11560
11561 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11562 {
11563 if (DECL_IGNORED_P (fde->decl))
11564 continue;
11565 if (!fde->in_std_section)
11566 {
11567 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11568 "Address");
11569 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11570 fde->dw_fde_begin, "Length");
11571 }
11572 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11573 {
11574 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11575 "Address");
11576 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11577 fde->dw_fde_second_begin, "Length");
11578 }
11579 }
11580 }
11581
11582 /* Output the terminator words. */
11583 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11584 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11585 }
11586
11587 /* Add a new entry to .debug_ranges. Return its index into
11588 ranges_table vector. */
11589
11590 static unsigned int
11591 add_ranges_num (int num, bool maybe_new_sec)
11592 {
11593 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11594 vec_safe_push (ranges_table, r);
11595 return vec_safe_length (ranges_table) - 1;
11596 }
11597
11598 /* Add a new entry to .debug_ranges corresponding to a block, or a
11599 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11600 this entry might be in a different section from previous range. */
11601
11602 static unsigned int
11603 add_ranges (const_tree block, bool maybe_new_sec)
11604 {
11605 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11606 }
11607
11608 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11609 chain, or a middle entry of a chain that will be directly referred to.  */
11610
11611 static void
11612 note_rnglist_head (unsigned int offset)
11613 {
11614 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11615 return;
11616 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11617 }
11618
11619 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11620 When using dwarf_split_debug_info, address attributes in dies destined
11621 for the final executable should be direct references--setting the
11622 parameter force_direct ensures this behavior. */
11623
11624 static void
11625 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11626 bool *added, bool force_direct)
11627 {
11628 unsigned int in_use = vec_safe_length (ranges_by_label);
11629 unsigned int offset;
11630 dw_ranges_by_label rbl = { begin, end };
11631 vec_safe_push (ranges_by_label, rbl);
11632 offset = add_ranges_num (-(int)in_use - 1, true);
11633 if (!*added)
11634 {
11635 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11636 *added = true;
11637 note_rnglist_head (offset);
11638 }
11639 }
11640
11641 /* Emit .debug_ranges section. */
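/* Each block with a positive number contributes one begin/end pair.
   With a single text section the pair is emitted relative to the CU base
   address (DW_AT_low_pc, i.e. the text section label), roughly

	.quad	.LBB5-.Ltext0		# Offset ...
	.quad	.LBE5-.Ltext0

   while with multiple function sections absolute addresses are used
   instead; each list is terminated by an all-zero pair.  (The labels
   above are only illustrative.)  */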
11642
11643 static void
11644 output_ranges (void)
11645 {
11646 unsigned i;
11647 static const char *const start_fmt = "Offset %#x";
11648 const char *fmt = start_fmt;
11649 dw_ranges *r;
11650
11651 switch_to_section (debug_ranges_section);
11652 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11653 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11654 {
11655 int block_num = r->num;
11656
11657 if (block_num > 0)
11658 {
11659 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11660 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11661
11662 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11663 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11664
11665 /* If all code is in the text section, then the compilation
11666 unit base address defaults to DW_AT_low_pc, which is the
11667 base of the text section. */
11668 if (!have_multiple_function_sections)
11669 {
11670 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11671 text_section_label,
11672 fmt, i * 2 * DWARF2_ADDR_SIZE);
11673 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11674 text_section_label, NULL);
11675 }
11676
11677 /* Otherwise, the compilation unit base address is zero,
11678 which allows us to use absolute addresses, and not worry
11679 about whether the target supports cross-section
11680 arithmetic. */
11681 else
11682 {
11683 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11684 fmt, i * 2 * DWARF2_ADDR_SIZE);
11685 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11686 }
11687
11688 fmt = NULL;
11689 }
11690
11691 /* Negative block_num stands for an index into ranges_by_label. */
11692 else if (block_num < 0)
11693 {
11694 int lab_idx = - block_num - 1;
11695
11696 if (!have_multiple_function_sections)
11697 {
11698 gcc_unreachable ();
11699 #if 0
11700 /* If we ever use add_ranges_by_labels () for a single
11701 function section, all we have to do is to take out
11702 the #if 0 above. */
11703 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11704 (*ranges_by_label)[lab_idx].begin,
11705 text_section_label,
11706 fmt, i * 2 * DWARF2_ADDR_SIZE);
11707 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11708 (*ranges_by_label)[lab_idx].end,
11709 text_section_label, NULL);
11710 #endif
11711 }
11712 else
11713 {
11714 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11715 (*ranges_by_label)[lab_idx].begin,
11716 fmt, i * 2 * DWARF2_ADDR_SIZE);
11717 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11718 (*ranges_by_label)[lab_idx].end,
11719 NULL);
11720 }
11721 }
11722 else
11723 {
11724 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11725 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11726 fmt = start_fmt;
11727 }
11728 }
11729 }
11730
11731 /* Non-zero if .debug_line_str should be used for .debug_line section
11732 strings or strings that are likely shareable with those. */
11733 #define DWARF5_USE_DEBUG_LINE_STR \
11734 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11735 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11736 /* FIXME: there is no .debug_line_str.dwo section, \
11737 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11738 && !dwarf_split_debug_info)
11739
11740 /* Assign .debug_rnglists indexes. */
11741
11742 static void
11743 index_rnglists (void)
11744 {
11745 unsigned i;
11746 dw_ranges *r;
11747
11748 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11749 if (r->label)
11750 r->idx = rnglist_idx++;
11751 }
11752
11753 /* Emit .debug_rnglists section. */
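/* A rough sketch of the entry kinds used below: within a single text
   section a block becomes DW_RLE_offset_pair with two uleb128 offsets
   relative to the text section label; with multiple function sections a
   DW_RLE_base_address entry may establish a base so that following
   blocks can still use offset pairs, and otherwise DW_RLE_start_length
   (address plus uleb128 length, if the assembler supports .uleb128) or
   DW_RLE_start_end (two addresses) is used.  Every list ends with
   DW_RLE_end_of_list.  */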
11754
11755 static void
11756 output_rnglists (unsigned generation)
11757 {
11758 unsigned i;
11759 dw_ranges *r;
11760 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11761 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11762 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11763
11764 switch_to_section (debug_ranges_section);
11765 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11766 /* There are up to 4 unique ranges labels per generation.
11767 See also init_sections_and_labels. */
11768 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11769 2 + generation * 4);
11770 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11771 3 + generation * 4);
11772 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11773 dw2_asm_output_data (4, 0xffffffff,
11774 "Initial length escape value indicating "
11775 "64-bit DWARF extension");
11776 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11777 "Length of Range Lists");
11778 ASM_OUTPUT_LABEL (asm_out_file, l1);
11779 output_dwarf_version ();
11780 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11781 dw2_asm_output_data (1, 0, "Segment Size");
11782 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11783 about relocation sizes and primarily care about the size of .debug*
11784 sections in linked shared libraries and executables, then
11785 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11786 into it are usually larger than just DW_FORM_sec_offset offsets
11787 into the .debug_rnglists section. */
11788 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11789 "Offset Entry Count");
11790 if (dwarf_split_debug_info)
11791 {
11792 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11793 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11794 if (r->label)
11795 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11796 ranges_base_label, NULL);
11797 }
11798
11799 const char *lab = "";
11800 unsigned int len = vec_safe_length (ranges_table);
11801 const char *base = NULL;
11802 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11803 {
11804 int block_num = r->num;
11805
11806 if (r->label)
11807 {
11808 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11809 lab = r->label;
11810 }
11811 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11812 base = NULL;
11813 if (block_num > 0)
11814 {
11815 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11816 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11817
11818 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11819 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11820
11821 if (HAVE_AS_LEB128)
11822 {
11823 /* If all code is in the text section, then the compilation
11824 unit base address defaults to DW_AT_low_pc, which is the
11825 base of the text section. */
11826 if (!have_multiple_function_sections)
11827 {
11828 dw2_asm_output_data (1, DW_RLE_offset_pair,
11829 "DW_RLE_offset_pair (%s)", lab);
11830 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11831 "Range begin address (%s)", lab);
11832 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11833 "Range end address (%s)", lab);
11834 continue;
11835 }
11836 if (base == NULL)
11837 {
11838 dw_ranges *r2 = NULL;
11839 if (i < len - 1)
11840 r2 = &(*ranges_table)[i + 1];
11841 if (r2
11842 && r2->num != 0
11843 && r2->label == NULL
11844 && !r2->maybe_new_sec)
11845 {
11846 dw2_asm_output_data (1, DW_RLE_base_address,
11847 "DW_RLE_base_address (%s)", lab);
11848 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11849 "Base address (%s)", lab);
11850 strcpy (basebuf, blabel);
11851 base = basebuf;
11852 }
11853 }
11854 if (base)
11855 {
11856 dw2_asm_output_data (1, DW_RLE_offset_pair,
11857 "DW_RLE_offset_pair (%s)", lab);
11858 dw2_asm_output_delta_uleb128 (blabel, base,
11859 "Range begin address (%s)", lab);
11860 dw2_asm_output_delta_uleb128 (elabel, base,
11861 "Range end address (%s)", lab);
11862 continue;
11863 }
11864 dw2_asm_output_data (1, DW_RLE_start_length,
11865 "DW_RLE_start_length (%s)", lab);
11866 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11867 "Range begin address (%s)", lab);
11868 dw2_asm_output_delta_uleb128 (elabel, blabel,
11869 "Range length (%s)", lab);
11870 }
11871 else
11872 {
11873 dw2_asm_output_data (1, DW_RLE_start_end,
11874 "DW_RLE_start_end (%s)", lab);
11875 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11876 "Range begin address (%s)", lab);
11877 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11878 "Range end address (%s)", lab);
11879 }
11880 }
11881
11882 /* Negative block_num stands for an index into ranges_by_label. */
11883 else if (block_num < 0)
11884 {
11885 int lab_idx = - block_num - 1;
11886 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11887 const char *elabel = (*ranges_by_label)[lab_idx].end;
11888
11889 if (!have_multiple_function_sections)
11890 gcc_unreachable ();
11891 if (HAVE_AS_LEB128)
11892 {
11893 dw2_asm_output_data (1, DW_RLE_start_length,
11894 "DW_RLE_start_length (%s)", lab);
11895 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11896 "Range begin address (%s)", lab);
11897 dw2_asm_output_delta_uleb128 (elabel, blabel,
11898 "Range length (%s)", lab);
11899 }
11900 else
11901 {
11902 dw2_asm_output_data (1, DW_RLE_start_end,
11903 "DW_RLE_start_end (%s)", lab);
11904 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11905 "Range begin address (%s)", lab);
11906 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11907 "Range end address (%s)", lab);
11908 }
11909 }
11910 else
11911 dw2_asm_output_data (1, DW_RLE_end_of_list,
11912 "DW_RLE_end_of_list (%s)", lab);
11913 }
11914 ASM_OUTPUT_LABEL (asm_out_file, l2);
11915 }
11916
11917 /* Data structure containing information about input files. */
11918 struct file_info
11919 {
11920 const char *path; /* Complete file name. */
11921 const char *fname; /* File name part. */
11922 int length; /* Length of entire string. */
11923 struct dwarf_file_data * file_idx; /* Index in input file table. */
11924 int dir_idx; /* Index in directory table. */
11925 };
11926
11927 /* Data structure containing information about directories with source
11928 files. */
11929 struct dir_info
11930 {
11931 const char *path; /* Path including directory name. */
11932 int length; /* Path length. */
11933 int prefix; /* Index of directory entry which is a prefix. */
11934 int count; /* Number of files in this directory. */
11935 int dir_idx; /* Index of directory used as base. */
11936 };
11937
11938 /* Callback function for file_info comparison. We sort by looking at
11939 the directories in the path. */
11940
11941 static int
11942 file_info_cmp (const void *p1, const void *p2)
11943 {
11944 const struct file_info *const s1 = (const struct file_info *) p1;
11945 const struct file_info *const s2 = (const struct file_info *) p2;
11946 const unsigned char *cp1;
11947 const unsigned char *cp2;
11948
11949 /* Take care of file names without directories.  We need to make sure that
11950 we return consistent values to qsort since some implementations will get
11951 confused if we return the same value when identical operands are passed
11952 in opposite orders.  So if neither has a directory, return 0 and otherwise
11953 return 1 or -1 depending on which one has the directory.  */
11954 if ((s1->path == s1->fname || s2->path == s2->fname))
11955 return (s2->path == s2->fname) - (s1->path == s1->fname);
11956
11957 cp1 = (const unsigned char *) s1->path;
11958 cp2 = (const unsigned char *) s2->path;
11959
11960 while (1)
11961 {
11962 ++cp1;
11963 ++cp2;
11964 /* Reached the end of the first path? If so, handle like above. */
11965 if ((cp1 == (const unsigned char *) s1->fname)
11966 || (cp2 == (const unsigned char *) s2->fname))
11967 return ((cp2 == (const unsigned char *) s2->fname)
11968 - (cp1 == (const unsigned char *) s1->fname));
11969
11970 /* Character of current path component the same? */
11971 else if (*cp1 != *cp2)
11972 return *cp1 - *cp2;
11973 }
11974 }
11975
11976 struct file_name_acquire_data
11977 {
11978 struct file_info *files;
11979 int used_files;
11980 int max_files;
11981 };
11982
11983 /* Traversal function for the hash table. */
11984
11985 int
11986 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11987 {
11988 struct dwarf_file_data *d = *slot;
11989 struct file_info *fi;
11990 const char *f;
11991
11992 gcc_assert (fnad->max_files >= d->emitted_number);
11993
11994 if (! d->emitted_number)
11995 return 1;
11996
11997 gcc_assert (fnad->max_files != fnad->used_files);
11998
11999 fi = fnad->files + fnad->used_files++;
12000
12001 /* Skip all leading "./". */
12002 f = d->filename;
12003 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12004 f += 2;
12005
12006 /* Create a new array entry. */
12007 fi->path = f;
12008 fi->length = strlen (f);
12009 fi->file_idx = d;
12010
12011 /* Search for the file name part. */
12012 f = strrchr (f, DIR_SEPARATOR);
12013 #if defined (DIR_SEPARATOR_2)
12014 {
12015 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12016
12017 if (g != NULL)
12018 {
12019 if (f == NULL || f < g)
12020 f = g;
12021 }
12022 }
12023 #endif
12024
12025 fi->fname = f == NULL ? fi->path : f + 1;
12026 return 1;
12027 }
12028
12029 /* Helper function for output_file_names. Emit a FORM encoded
12030 string STR, with assembly comment start ENTRY_KIND and
12031 index IDX.  */
12032
12033 static void
12034 output_line_string (enum dwarf_form form, const char *str,
12035 const char *entry_kind, unsigned int idx)
12036 {
12037 switch (form)
12038 {
12039 case DW_FORM_string:
12040 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12041 break;
12042 case DW_FORM_line_strp:
12043 if (!debug_line_str_hash)
12044 debug_line_str_hash
12045 = hash_table<indirect_string_hasher>::create_ggc (10);
12046
12047 struct indirect_string_node *node;
12048 node = find_AT_string_in_table (str, debug_line_str_hash);
12049 set_indirect_string (node);
12050 node->form = form;
12051 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12052 debug_line_str_section, "%s: %#x: \"%s\"",
12053 entry_kind, 0, node->str);
12054 break;
12055 default:
12056 gcc_unreachable ();
12057 }
12058 }
12059
12060 /* Output the directory table and the file name table. We try to minimize
12061 the total amount of memory needed. A heuristic is used to avoid large
12062 slowdowns with many input files. */
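/* For DWARF 2-4 the output is simply the directory strings followed by a
   terminating 0 byte, then for each file a NUL-terminated name, a
   uleb128 directory index and (outside of VMS) zero uleb128 entries for
   the modification time and file length, again closed by a 0 byte.  For
   DWARF 5 both tables are instead preceded by entry-format descriptors
   (DW_LNCT_path, DW_LNCT_directory_index, ...) and explicit counts, and
   entry 0 holds the compilation directory and the primary source file.  */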
12063
12064 static void
12065 output_file_names (void)
12066 {
12067 struct file_name_acquire_data fnad;
12068 int numfiles;
12069 struct file_info *files;
12070 struct dir_info *dirs;
12071 int *saved;
12072 int *savehere;
12073 int *backmap;
12074 int ndirs;
12075 int idx_offset;
12076 int i;
12077
12078 if (!last_emitted_file)
12079 {
12080 if (dwarf_version >= 5)
12081 {
12082 dw2_asm_output_data (1, 0, "Directory entry format count");
12083 dw2_asm_output_data_uleb128 (0, "Directories count");
12084 dw2_asm_output_data (1, 0, "File name entry format count");
12085 dw2_asm_output_data_uleb128 (0, "File names count");
12086 }
12087 else
12088 {
12089 dw2_asm_output_data (1, 0, "End directory table");
12090 dw2_asm_output_data (1, 0, "End file name table");
12091 }
12092 return;
12093 }
12094
12095 numfiles = last_emitted_file->emitted_number;
12096
12097 /* Allocate the various arrays we need. */
12098 files = XALLOCAVEC (struct file_info, numfiles);
12099 dirs = XALLOCAVEC (struct dir_info, numfiles);
12100
12101 fnad.files = files;
12102 fnad.used_files = 0;
12103 fnad.max_files = numfiles;
12104 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12105 gcc_assert (fnad.used_files == fnad.max_files);
12106
12107 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12108
12109 /* Find all the different directories used. */
12110 dirs[0].path = files[0].path;
12111 dirs[0].length = files[0].fname - files[0].path;
12112 dirs[0].prefix = -1;
12113 dirs[0].count = 1;
12114 dirs[0].dir_idx = 0;
12115 files[0].dir_idx = 0;
12116 ndirs = 1;
12117
12118 for (i = 1; i < numfiles; i++)
12119 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12120 && memcmp (dirs[ndirs - 1].path, files[i].path,
12121 dirs[ndirs - 1].length) == 0)
12122 {
12123 /* Same directory as last entry. */
12124 files[i].dir_idx = ndirs - 1;
12125 ++dirs[ndirs - 1].count;
12126 }
12127 else
12128 {
12129 int j;
12130
12131 /* This is a new directory. */
12132 dirs[ndirs].path = files[i].path;
12133 dirs[ndirs].length = files[i].fname - files[i].path;
12134 dirs[ndirs].count = 1;
12135 dirs[ndirs].dir_idx = ndirs;
12136 files[i].dir_idx = ndirs;
12137
12138 /* Search for a prefix. */
12139 dirs[ndirs].prefix = -1;
12140 for (j = 0; j < ndirs; j++)
12141 if (dirs[j].length < dirs[ndirs].length
12142 && dirs[j].length > 1
12143 && (dirs[ndirs].prefix == -1
12144 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12145 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12146 dirs[ndirs].prefix = j;
12147
12148 ++ndirs;
12149 }
12150
12151 /* Now to the actual work.  We have to find a subset of the directories which
12152 allows expressing the file names using references to the directory table
12153 with the fewest characters.  We do not do an exhaustive search where we
12154 would have to check every combination of every single possible prefix.
12155 Instead we use a heuristic which provides nearly optimal results in most
12156 cases and is never far off.  */
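/* A made-up example of the computation below: with 30 files in
   "/usr/include/" (length 13) and 5 more in "/usr/include/sys/", making
   "/usr/include/" a base directory saves 13 characters for each of those
   35 files, i.e. total == 455, well above the 14 bytes the directory
   entry itself costs, so it is recorded as the dir_idx of both
   directories.  */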
12157 saved = XALLOCAVEC (int, ndirs);
12158 savehere = XALLOCAVEC (int, ndirs);
12159
12160 memset (saved, '\0', ndirs * sizeof (saved[0]));
12161 for (i = 0; i < ndirs; i++)
12162 {
12163 int j;
12164 int total;
12165
12166 /* We can always save some space for the current directory. But this
12167 does not mean it will be enough to justify adding the directory. */
12168 savehere[i] = dirs[i].length;
12169 total = (savehere[i] - saved[i]) * dirs[i].count;
12170
12171 for (j = i + 1; j < ndirs; j++)
12172 {
12173 savehere[j] = 0;
12174 if (saved[j] < dirs[i].length)
12175 {
12176 /* Determine whether the dirs[i] path is a prefix of the
12177 dirs[j] path. */
12178 int k;
12179
12180 k = dirs[j].prefix;
12181 while (k != -1 && k != (int) i)
12182 k = dirs[k].prefix;
12183
12184 if (k == (int) i)
12185 {
12186 /* Yes it is. We can possibly save some memory by
12187 writing the filenames in dirs[j] relative to
12188 dirs[i]. */
12189 savehere[j] = dirs[i].length;
12190 total += (savehere[j] - saved[j]) * dirs[j].count;
12191 }
12192 }
12193 }
12194
12195 /* Check whether we can save enough to justify adding the dirs[i]
12196 directory. */
12197 if (total > dirs[i].length + 1)
12198 {
12199 /* It's worthwhile adding. */
12200 for (j = i; j < ndirs; j++)
12201 if (savehere[j] > 0)
12202 {
12203 /* Remember how much we saved for this directory so far. */
12204 saved[j] = savehere[j];
12205
12206 /* Remember the prefix directory. */
12207 dirs[j].dir_idx = i;
12208 }
12209 }
12210 }
12211
12212 /* Emit the directory name table. */
12213 idx_offset = dirs[0].length > 0 ? 1 : 0;
12214 enum dwarf_form str_form = DW_FORM_string;
12215 enum dwarf_form idx_form = DW_FORM_udata;
12216 if (dwarf_version >= 5)
12217 {
12218 const char *comp_dir = comp_dir_string ();
12219 if (comp_dir == NULL)
12220 comp_dir = "";
12221 dw2_asm_output_data (1, 1, "Directory entry format count");
12222 if (DWARF5_USE_DEBUG_LINE_STR)
12223 str_form = DW_FORM_line_strp;
12224 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12225 dw2_asm_output_data_uleb128 (str_form, "%s",
12226 get_DW_FORM_name (str_form));
12227 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12228 if (str_form == DW_FORM_string)
12229 {
12230 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12231 for (i = 1 - idx_offset; i < ndirs; i++)
12232 dw2_asm_output_nstring (dirs[i].path,
12233 dirs[i].length
12234 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12235 "Directory Entry: %#x", i + idx_offset);
12236 }
12237 else
12238 {
12239 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12240 for (i = 1 - idx_offset; i < ndirs; i++)
12241 {
12242 const char *str
12243 = ggc_alloc_string (dirs[i].path,
12244 dirs[i].length
12245 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12246 output_line_string (str_form, str, "Directory Entry",
12247 (unsigned) i + idx_offset);
12248 }
12249 }
12250 }
12251 else
12252 {
12253 for (i = 1 - idx_offset; i < ndirs; i++)
12254 dw2_asm_output_nstring (dirs[i].path,
12255 dirs[i].length
12256 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12257 "Directory Entry: %#x", i + idx_offset);
12258
12259 dw2_asm_output_data (1, 0, "End directory table");
12260 }
12261
12262 /* We have to emit them in the order of emitted_number since that's
12263 used in the debug info generation. To do this efficiently we
12264 generate a back-mapping of the indices first. */
12265 backmap = XALLOCAVEC (int, numfiles);
12266 for (i = 0; i < numfiles; i++)
12267 backmap[files[i].file_idx->emitted_number - 1] = i;
12268
12269 if (dwarf_version >= 5)
12270 {
12271 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12272 if (filename0 == NULL)
12273 filename0 = "";
12274 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12275 DW_FORM_data2.  Choose one based on the number of directories
12276 and how much space they would occupy in each encoding.
12277 If we have at most 256 directories, all indexes fit into
12278 a single byte, so DW_FORM_data1 is most compact (with at most
12279 128 directories DW_FORM_udata would be just as compact, but
12280 no shorter and slower to decode).  */
12281 if (ndirs + idx_offset <= 256)
12282 idx_form = DW_FORM_data1;
12283 /* If there are more than 65536 directories, we have to use
12284 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12285 Otherwise, compute how much space the indexes would occupy
12286 if they all used DW_FORM_udata - sum - compare that to the size
12287 of the DW_FORM_data2 encoding, and pick the more efficient one.  */
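/* A made-up example: with 300 directories and 1000 files DW_FORM_data2
   always costs 2 bytes per index, 2002 bytes including entry 0, whereas
   uleb128 indexes cost 1 byte below 128 and 2 bytes from 128 on, so
   DW_FORM_data2 is chosen as soon as the uleb128 total reaches 2002.  */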
12288 else if (ndirs + idx_offset <= 65536)
12289 {
12290 unsigned HOST_WIDE_INT sum = 1;
12291 for (i = 0; i < numfiles; i++)
12292 {
12293 int file_idx = backmap[i];
12294 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12295 sum += size_of_uleb128 (dir_idx);
12296 }
12297 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12298 idx_form = DW_FORM_data2;
12299 }
12300 #ifdef VMS_DEBUGGING_INFO
12301 dw2_asm_output_data (1, 4, "File name entry format count");
12302 #else
12303 dw2_asm_output_data (1, 2, "File name entry format count");
12304 #endif
12305 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12306 dw2_asm_output_data_uleb128 (str_form, "%s",
12307 get_DW_FORM_name (str_form));
12308 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12309 "DW_LNCT_directory_index");
12310 dw2_asm_output_data_uleb128 (idx_form, "%s",
12311 get_DW_FORM_name (idx_form));
12312 #ifdef VMS_DEBUGGING_INFO
12313 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12314 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12315 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12316 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12317 #endif
12318 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12319
12320 output_line_string (str_form, filename0, "File Entry", 0);
12321
12322 /* Include directory index. */
12323 if (idx_form != DW_FORM_udata)
12324 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12325 0, NULL);
12326 else
12327 dw2_asm_output_data_uleb128 (0, NULL);
12328
12329 #ifdef VMS_DEBUGGING_INFO
12330 dw2_asm_output_data_uleb128 (0, NULL);
12331 dw2_asm_output_data_uleb128 (0, NULL);
12332 #endif
12333 }
12334
12335 /* Now write all the file names. */
12336 for (i = 0; i < numfiles; i++)
12337 {
12338 int file_idx = backmap[i];
12339 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12340
12341 #ifdef VMS_DEBUGGING_INFO
12342 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12343
12344 /* Setting these fields can lead to debugger miscomparisons,
12345 but VMS Debug requires them to be set correctly. */
12346
12347 int ver;
12348 long long cdt;
12349 long siz;
12350 int maxfilelen = (strlen (files[file_idx].path)
12351 + dirs[dir_idx].length
12352 + MAX_VMS_VERSION_LEN + 1);
12353 char *filebuf = XALLOCAVEC (char, maxfilelen);
12354
12355 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12356 snprintf (filebuf, maxfilelen, "%s;%d",
12357 files[file_idx].path + dirs[dir_idx].length, ver);
12358
12359 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12360
12361 /* Include directory index. */
12362 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12363 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12364 dir_idx + idx_offset, NULL);
12365 else
12366 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12367
12368 /* Modification time. */
12369 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12370 &cdt, 0, 0, 0) == 0)
12371 ? cdt : 0, NULL);
12372
12373 /* File length in bytes. */
12374 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12375 0, &siz, 0, 0) == 0)
12376 ? siz : 0, NULL);
12377 #else
12378 output_line_string (str_form,
12379 files[file_idx].path + dirs[dir_idx].length,
12380 "File Entry", (unsigned) i + 1);
12381
12382 /* Include directory index. */
12383 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12384 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12385 dir_idx + idx_offset, NULL);
12386 else
12387 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12388
12389 if (dwarf_version >= 5)
12390 continue;
12391
12392 /* Modification time. */
12393 dw2_asm_output_data_uleb128 (0, NULL);
12394
12395 /* File length in bytes. */
12396 dw2_asm_output_data_uleb128 (0, NULL);
12397 #endif /* VMS_DEBUGGING_INFO */
12398 }
12399
12400 if (dwarf_version < 5)
12401 dw2_asm_output_data (1, 0, "End file name table");
12402 }
12403
12404
12405 /* Output one line number table into the .debug_line section. */
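/* Schematically, the emitted line-number program for a small function
   might look like (labels are illustrative only)

	DW_LNE_set_address .LM1		<- LI_set_address
	special opcode / DW_LNS_copy	<- LI_set_line starts a row
	DW_LNS_fixed_advance_pc		<- LI_adv_address
	...
	DW_LNE_set_address (end label)
	DW_LNE_end_sequence

   with DW_LNS_set_file, DW_LNS_set_column and DW_LNS_negate_stmt
   interspersed as needed.  */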
12406
12407 static void
12408 output_one_line_info_table (dw_line_info_table *table)
12409 {
12410 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12411 unsigned int current_line = 1;
12412 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12413 dw_line_info_entry *ent, *prev_addr;
12414 size_t i;
12415 unsigned int view;
12416
12417 view = 0;
12418
12419 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12420 {
12421 switch (ent->opcode)
12422 {
12423 case LI_set_address:
12424 /* ??? Unfortunately, we have little choice here currently, and
12425 must always use the most general form. GCC does not know the
12426 address delta itself, so we can't use DW_LNS_advance_pc. Many
12427 ports do have length attributes which will give an upper bound
12428 on the address range. We could perhaps use length attributes
12429 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12430 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12431
12432 view = 0;
12433
12434 /* This can handle any delta.  This takes
12435 3 + DWARF2_ADDR_SIZE bytes.  */
12436 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12437 debug_variable_location_views
12438 ? ", reset view to 0" : "");
12439 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12440 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12441 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12442
12443 prev_addr = ent;
12444 break;
12445
12446 case LI_adv_address:
12447 {
12448 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12449 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12450 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12451
12452 view++;
12453
12454 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12455 dw2_asm_output_delta (2, line_label, prev_label,
12456 "from %s to %s", prev_label, line_label);
12457
12458 prev_addr = ent;
12459 break;
12460 }
12461
12462 case LI_set_line:
12463 if (ent->val == current_line)
12464 {
12465 /* We still need to start a new row, so output a copy insn. */
12466 dw2_asm_output_data (1, DW_LNS_copy,
12467 "copy line %u", current_line);
12468 }
12469 else
12470 {
12471 int line_offset = ent->val - current_line;
12472 int line_delta = line_offset - DWARF_LINE_BASE;
12473
12474 current_line = ent->val;
12475 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12476 {
12477 /* This can handle deltas from -10 to 234, using the current
12478 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12479 This takes 1 byte. */
12480 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12481 "line %u", current_line);
12482 }
12483 else
12484 {
12485 /* This can handle any delta. This takes at least 4 bytes,
12486 depending on the value being encoded. */
12487 dw2_asm_output_data (1, DW_LNS_advance_line,
12488 "advance to line %u", current_line);
12489 dw2_asm_output_data_sleb128 (line_offset, NULL);
12490 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12491 }
12492 }
12493 break;
12494
12495 case LI_set_file:
12496 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12497 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12498 break;
12499
12500 case LI_set_column:
12501 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12502 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12503 break;
12504
12505 case LI_negate_stmt:
12506 current_is_stmt = !current_is_stmt;
12507 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12508 "is_stmt %d", current_is_stmt);
12509 break;
12510
12511 case LI_set_prologue_end:
12512 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12513 "set prologue end");
12514 break;
12515
12516 case LI_set_epilogue_begin:
12517 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12518 "set epilogue begin");
12519 break;
12520
12521 case LI_set_discriminator:
12522 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12523 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12524 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12525 dw2_asm_output_data_uleb128 (ent->val, NULL);
12526 break;
12527 }
12528 }
12529
12530 /* Emit debug info for the address of the end of the table. */
12531 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12532 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12533 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12534 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12535
12536 dw2_asm_output_data (1, 0, "end sequence");
12537 dw2_asm_output_data_uleb128 (1, NULL);
12538 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12539 }
12540
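/* Editor's illustrative sketch, not part of dwarf2out.c: how the
   LI_set_line case above chooses a one-byte special opcode.  GCC always
   advances the address separately (DW_LNE_set_address or
   DW_LNS_fixed_advance_pc), so a special opcode only needs to encode the
   line delta.  The parameters stand in for the target's DWARF_LINE_BASE,
   DWARF_LINE_RANGE and DWARF_LINE_OPCODE_BASE values.  */

static int
special_opcode_for_line_delta (int line_offset, int line_base,
                               int line_range, int opcode_base)
{
  int line_delta = line_offset - line_base;
  if (line_delta >= 0 && line_delta < line_range - 1)
    return opcode_base + line_delta;  /* one-byte special opcode */
  return -1;  /* caller falls back to DW_LNS_advance_line + DW_LNS_copy */
}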
12541 /* Output the source line number correspondence information. This
12542 information goes into the .debug_line section. */
12543
12544 static void
12545 output_line_info (bool prologue_only)
12546 {
12547 static unsigned int generation;
12548 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12549 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12550 bool saw_one = false;
12551 int opc;
12552
12553 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12554 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12555 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12556 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12557
12558 if (!XCOFF_DEBUGGING_INFO)
12559 {
12560 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12561 dw2_asm_output_data (4, 0xffffffff,
12562 "Initial length escape value indicating 64-bit DWARF extension");
12563 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12564 "Length of Source Line Info");
12565 }
12566
12567 ASM_OUTPUT_LABEL (asm_out_file, l1);
12568
12569 output_dwarf_version ();
12570 if (dwarf_version >= 5)
12571 {
12572 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12573 dw2_asm_output_data (1, 0, "Segment Size");
12574 }
12575 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12576 ASM_OUTPUT_LABEL (asm_out_file, p1);
12577
12578 /* Define the architecture-dependent minimum instruction length (in bytes).
12579 In this implementation of DWARF, this field is used for information
12580 purposes only. Since GCC generates assembly language, we have no
12581 a priori knowledge of how many instruction bytes are generated for each
12582 source line, and therefore can use only the DW_LNE_set_address and
12583 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12584 this as '1', which is "correct enough" for all architectures,
12585 and don't let the target override. */
12586 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12587
12588 if (dwarf_version >= 4)
12589 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12590 "Maximum Operations Per Instruction");
12591 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12592 "Default is_stmt_start flag");
12593 dw2_asm_output_data (1, DWARF_LINE_BASE,
12594 "Line Base Value (Special Opcodes)");
12595 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12596 "Line Range Value (Special Opcodes)");
12597 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12598 "Special Opcode Base");
12599
12600 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12601 {
12602 int n_op_args;
12603 switch (opc)
12604 {
12605 case DW_LNS_advance_pc:
12606 case DW_LNS_advance_line:
12607 case DW_LNS_set_file:
12608 case DW_LNS_set_column:
12609 case DW_LNS_fixed_advance_pc:
12610 case DW_LNS_set_isa:
12611 n_op_args = 1;
12612 break;
12613 default:
12614 n_op_args = 0;
12615 break;
12616 }
12617
12618 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12619 opc, n_op_args);
12620 }
12621
12622 /* Write out the information about the files we use. */
12623 output_file_names ();
12624 ASM_OUTPUT_LABEL (asm_out_file, p2);
12625 if (prologue_only)
12626 {
12627 /* Output the marker for the end of the line number info. */
12628 ASM_OUTPUT_LABEL (asm_out_file, l2);
12629 return;
12630 }
12631
12632 if (separate_line_info)
12633 {
12634 dw_line_info_table *table;
12635 size_t i;
12636
12637 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12638 if (table->in_use)
12639 {
12640 output_one_line_info_table (table);
12641 saw_one = true;
12642 }
12643 }
12644 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12645 {
12646 output_one_line_info_table (cold_text_section_line_info);
12647 saw_one = true;
12648 }
12649
12650 /* ??? Some Darwin linkers crash on a .debug_line section with no
12651 sequences. Further, merely a DW_LNE_end_sequence entry is not
12652 sufficient -- the address column must also be initialized.
12653 Make sure to output at least one set_address/end_sequence pair,
12654 choosing .text since that section is always present. */
12655 if (text_section_line_info->in_use || !saw_one)
12656 output_one_line_info_table (text_section_line_info);
12657
12658 /* Output the marker for the end of the line number info. */
12659 ASM_OUTPUT_LABEL (asm_out_file, l2);
12660 }
12661 \f
12662 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12663
12664 static inline bool
12665 need_endianity_attribute_p (bool reverse)
12666 {
12667 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12668 }
12669
12670 /* Given a pointer to a tree node for some base type, return a pointer to
12671 a DIE that describes the given type. REVERSE is true if the type is
12672 to be interpreted in the reverse storage order wrt the target order.
12673
12674 This routine must only be called for GCC type nodes that correspond to
12675 Dwarf base (fundamental) types. */
12676
12677 static dw_die_ref
12678 base_type_die (tree type, bool reverse)
12679 {
12680 dw_die_ref base_type_result;
12681 enum dwarf_type encoding;
12682 bool fpt_used = false;
12683 struct fixed_point_type_info fpt_info;
12684 tree type_bias = NULL_TREE;
12685
12686 /* If this is a subtype that should not be emitted as a subrange type,
12687 use the base type. See subrange_type_for_debug_p. */
12688 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12689 type = TREE_TYPE (type);
12690
12691 switch (TREE_CODE (type))
12692 {
12693 case INTEGER_TYPE:
12694 if ((dwarf_version >= 4 || !dwarf_strict)
12695 && TYPE_NAME (type)
12696 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12697 && DECL_IS_BUILTIN (TYPE_NAME (type))
12698 && DECL_NAME (TYPE_NAME (type)))
12699 {
12700 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12701 if (strcmp (name, "char16_t") == 0
12702 || strcmp (name, "char32_t") == 0)
12703 {
12704 encoding = DW_ATE_UTF;
12705 break;
12706 }
12707 }
12708 if ((dwarf_version >= 3 || !dwarf_strict)
12709 && lang_hooks.types.get_fixed_point_type_info)
12710 {
12711 memset (&fpt_info, 0, sizeof (fpt_info));
12712 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12713 {
12714 fpt_used = true;
12715 encoding = ((TYPE_UNSIGNED (type))
12716 ? DW_ATE_unsigned_fixed
12717 : DW_ATE_signed_fixed);
12718 break;
12719 }
12720 }
12721 if (TYPE_STRING_FLAG (type))
12722 {
12723 if (TYPE_UNSIGNED (type))
12724 encoding = DW_ATE_unsigned_char;
12725 else
12726 encoding = DW_ATE_signed_char;
12727 }
12728 else if (TYPE_UNSIGNED (type))
12729 encoding = DW_ATE_unsigned;
12730 else
12731 encoding = DW_ATE_signed;
12732
12733 if (!dwarf_strict
12734 && lang_hooks.types.get_type_bias)
12735 type_bias = lang_hooks.types.get_type_bias (type);
12736 break;
12737
12738 case REAL_TYPE:
12739 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12740 {
12741 if (dwarf_version >= 3 || !dwarf_strict)
12742 encoding = DW_ATE_decimal_float;
12743 else
12744 encoding = DW_ATE_lo_user;
12745 }
12746 else
12747 encoding = DW_ATE_float;
12748 break;
12749
12750 case FIXED_POINT_TYPE:
12751 if (!(dwarf_version >= 3 || !dwarf_strict))
12752 encoding = DW_ATE_lo_user;
12753 else if (TYPE_UNSIGNED (type))
12754 encoding = DW_ATE_unsigned_fixed;
12755 else
12756 encoding = DW_ATE_signed_fixed;
12757 break;
12758
12759 /* Dwarf2 doesn't know anything about complex ints, so use
12760 a user-defined type for them. */
12761 case COMPLEX_TYPE:
12762 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12763 encoding = DW_ATE_complex_float;
12764 else
12765 encoding = DW_ATE_lo_user;
12766 break;
12767
12768 case BOOLEAN_TYPE:
12769 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12770 encoding = DW_ATE_boolean;
12771 break;
12772
12773 default:
12774 /* No other TREE_CODEs are Dwarf fundamental types. */
12775 gcc_unreachable ();
12776 }
12777
12778 base_type_result = new_die_raw (DW_TAG_base_type);
12779
12780 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12781 int_size_in_bytes (type));
12782 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12783
12784 if (need_endianity_attribute_p (reverse))
12785 add_AT_unsigned (base_type_result, DW_AT_endianity,
12786 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12787
12788 add_alignment_attribute (base_type_result, type);
12789
12790 if (fpt_used)
12791 {
12792 switch (fpt_info.scale_factor_kind)
12793 {
12794 case fixed_point_scale_factor_binary:
12795 add_AT_int (base_type_result, DW_AT_binary_scale,
12796 fpt_info.scale_factor.binary);
12797 break;
12798
12799 case fixed_point_scale_factor_decimal:
12800 add_AT_int (base_type_result, DW_AT_decimal_scale,
12801 fpt_info.scale_factor.decimal);
12802 break;
12803
12804 case fixed_point_scale_factor_arbitrary:
12805 /* Arbitrary scale factors cannot be described in standard DWARF,
12806 yet. */
12807 if (!dwarf_strict)
12808 {
12809 /* Describe the scale factor as a rational constant. */
12810 const dw_die_ref scale_factor
12811 = new_die (DW_TAG_constant, comp_unit_die (), type);
12812
12813 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12814 fpt_info.scale_factor.arbitrary.numerator);
12815 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12816 fpt_info.scale_factor.arbitrary.denominator);
12817
12818 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12819 }
12820 break;
12821
12822 default:
12823 gcc_unreachable ();
12824 }
12825 }
12826
12827 if (type_bias)
12828 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12829 dw_scalar_form_constant
12830 | dw_scalar_form_exprloc
12831 | dw_scalar_form_reference,
12832 NULL);
12833
12834 return base_type_result;
12835 }
12836
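/* Editor's illustrative note, not part of dwarf2out.c: for the C type
   "unsigned char", base_type_die above is expected to produce a
   DW_TAG_base_type DIE with DW_AT_byte_size 1 and DW_AT_encoding
   DW_ATE_unsigned_char; the DW_AT_name is attached later, by
   modified_type_die.  */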
12837 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12838 named 'auto' in its type: return true for it, false otherwise. */
12839
12840 static inline bool
12841 is_cxx_auto (tree type)
12842 {
12843 if (is_cxx ())
12844 {
12845 tree name = TYPE_IDENTIFIER (type);
12846 if (name == get_identifier ("auto")
12847 || name == get_identifier ("decltype(auto)"))
12848 return true;
12849 }
12850 return false;
12851 }
12852
12853 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12854 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12855
12856 static inline int
12857 is_base_type (tree type)
12858 {
12859 switch (TREE_CODE (type))
12860 {
12861 case INTEGER_TYPE:
12862 case REAL_TYPE:
12863 case FIXED_POINT_TYPE:
12864 case COMPLEX_TYPE:
12865 case BOOLEAN_TYPE:
12866 case POINTER_BOUNDS_TYPE:
12867 return 1;
12868
12869 case VOID_TYPE:
12870 case ARRAY_TYPE:
12871 case RECORD_TYPE:
12872 case UNION_TYPE:
12873 case QUAL_UNION_TYPE:
12874 case ENUMERAL_TYPE:
12875 case FUNCTION_TYPE:
12876 case METHOD_TYPE:
12877 case POINTER_TYPE:
12878 case REFERENCE_TYPE:
12879 case NULLPTR_TYPE:
12880 case OFFSET_TYPE:
12881 case LANG_TYPE:
12882 case VECTOR_TYPE:
12883 return 0;
12884
12885 default:
12886 if (is_cxx_auto (type))
12887 return 0;
12888 gcc_unreachable ();
12889 }
12890
12891 return 0;
12892 }
12893
12894 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12895 node, return the size in bits for the type if it is a constant, or else
12896 return the alignment for the type if the type's size is not constant, or
12897 else return BITS_PER_WORD if the type actually turns out to be an
12898 ERROR_MARK node. */
12899
12900 static inline unsigned HOST_WIDE_INT
12901 simple_type_size_in_bits (const_tree type)
12902 {
12903 if (TREE_CODE (type) == ERROR_MARK)
12904 return BITS_PER_WORD;
12905 else if (TYPE_SIZE (type) == NULL_TREE)
12906 return 0;
12907 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12908 return tree_to_uhwi (TYPE_SIZE (type));
12909 else
12910 return TYPE_ALIGN (type);
12911 }
12912
12913 /* Similarly, but return an offset_int instead of UHWI. */
12914
12915 static inline offset_int
12916 offset_int_type_size_in_bits (const_tree type)
12917 {
12918 if (TREE_CODE (type) == ERROR_MARK)
12919 return BITS_PER_WORD;
12920 else if (TYPE_SIZE (type) == NULL_TREE)
12921 return 0;
12922 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12923 return wi::to_offset (TYPE_SIZE (type));
12924 else
12925 return TYPE_ALIGN (type);
12926 }
12927
12928 /* Given a pointer to a tree node for a subrange type, return a pointer
12929 to a DIE that describes the given type. */
12930
12931 static dw_die_ref
12932 subrange_type_die (tree type, tree low, tree high, tree bias,
12933 dw_die_ref context_die)
12934 {
12935 dw_die_ref subrange_die;
12936 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12937
12938 if (context_die == NULL)
12939 context_die = comp_unit_die ();
12940
12941 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12942
12943 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12944 {
12945 /* The size of the subrange type and its base type do not match,
12946 so we need to generate a size attribute for the subrange type. */
12947 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12948 }
12949
12950 add_alignment_attribute (subrange_die, type);
12951
12952 if (low)
12953 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12954 if (high)
12955 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12956 if (bias && !dwarf_strict)
12957 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12958 dw_scalar_form_constant
12959 | dw_scalar_form_exprloc
12960 | dw_scalar_form_reference,
12961 NULL);
12962
12963 return subrange_die;
12964 }
12965
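/* Editor's illustrative note, not part of dwarf2out.c: for an Ada
   declaration such as "subtype Dozen is Integer range 1 .. 12", the
   routine above is expected to produce a DW_TAG_subrange_type DIE with
   DW_AT_lower_bound 1 and DW_AT_upper_bound 12; the DW_AT_type link to
   the Integer base type is added by the caller, and DW_AT_byte_size is
   only emitted when the subrange's size differs from its base type's.  */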
12966 /* Returns the (const and/or volatile) cv_qualifiers associated with
12967 the decl node. This will normally be augmented with the
12968 cv_qualifiers of the underlying type in add_type_attribute. */
12969
12970 static int
12971 decl_quals (const_tree decl)
12972 {
12973 return ((TREE_READONLY (decl)
12974 /* The C++ front-end correctly marks reference-typed
12975 variables as readonly, but from a language (and debug
12976 info) standpoint they are not const-qualified. */
12977 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12978 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12979 | (TREE_THIS_VOLATILE (decl)
12980 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12981 }
12982
12983 /* Determine the TYPE whose qualifiers match the largest strict subset
12984 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12985 qualifiers outside QUAL_MASK. */
12986
12987 static int
12988 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12989 {
12990 tree t;
12991 int best_rank = 0, best_qual = 0, max_rank;
12992
12993 type_quals &= qual_mask;
12994 max_rank = popcount_hwi (type_quals) - 1;
12995
12996 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12997 t = TYPE_NEXT_VARIANT (t))
12998 {
12999 int q = TYPE_QUALS (t) & qual_mask;
13000
13001 if ((q & type_quals) == q && q != type_quals
13002 && check_base_type (t, type))
13003 {
13004 int rank = popcount_hwi (q);
13005
13006 if (rank > best_rank)
13007 {
13008 best_rank = rank;
13009 best_qual = q;
13010 }
13011 }
13012 }
13013
13014 return best_qual;
13015 }
13016
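/* Editor's illustrative sketch, not part of dwarf2out.c: the selection
   rule of get_nearest_type_subqualifiers above, restated on plain
   qualifier bitmasks.  Among the candidates, pick the largest strict
   subset of WANT; e.g. for WANT = const|volatile and candidates
   { const, volatile, const|volatile }, either single-qualifier mask can
   win, while const|volatile is rejected for not being a strict subset.  */

static int
nearest_strict_subset (int want, const int *candidates, int n)
{
  int best = 0, best_rank = 0;
  for (int i = 0; i < n; i++)
    {
      int q = candidates[i] & want;
      /* Keep Q only if it is a strict subset of WANT.  */
      if (q == candidates[i] && q != want)
        {
          int rank = popcount_hwi (q);
          if (rank > best_rank)
            {
              best_rank = rank;
              best = q;
            }
        }
    }
  return best;
}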
13017 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13018 static const dwarf_qual_info_t dwarf_qual_info[] =
13019 {
13020 { TYPE_QUAL_CONST, DW_TAG_const_type },
13021 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13022 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13023 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13024 };
13025 static const unsigned int dwarf_qual_info_size
13026 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13027
13028 /* If DIE is a qualified DIE of some base DIE with the same parent,
13029 return the base DIE, otherwise return NULL. Set MASK to the
13030 qualifiers added compared to the returned DIE. */
13031
13032 static dw_die_ref
13033 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13034 {
13035 unsigned int i;
13036 for (i = 0; i < dwarf_qual_info_size; i++)
13037 if (die->die_tag == dwarf_qual_info[i].t)
13038 break;
13039 if (i == dwarf_qual_info_size)
13040 return NULL;
13041 if (vec_safe_length (die->die_attr) != 1)
13042 return NULL;
13043 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13044 if (type == NULL || type->die_parent != die->die_parent)
13045 return NULL;
13046 *mask |= dwarf_qual_info[i].q;
13047 if (depth)
13048 {
13049 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13050 if (ret)
13051 return ret;
13052 }
13053 return type;
13054 }
13055
13056 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13057 entry that chains the modifiers specified by CV_QUALS in front of the
13058 given type. REVERSE is true if the type is to be interpreted in the
13059 reverse storage order wrt the target order. */
13060
13061 static dw_die_ref
13062 modified_type_die (tree type, int cv_quals, bool reverse,
13063 dw_die_ref context_die)
13064 {
13065 enum tree_code code = TREE_CODE (type);
13066 dw_die_ref mod_type_die;
13067 dw_die_ref sub_die = NULL;
13068 tree item_type = NULL;
13069 tree qualified_type;
13070 tree name, low, high;
13071 dw_die_ref mod_scope;
13072 /* Only these cv-qualifiers are currently handled. */
13073 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13074 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13075 ENCODE_QUAL_ADDR_SPACE(~0U));
13076 const bool reverse_base_type
13077 = need_endianity_attribute_p (reverse) && is_base_type (type);
13078
13079 if (code == ERROR_MARK)
13080 return NULL;
13081
13082 if (lang_hooks.types.get_debug_type)
13083 {
13084 tree debug_type = lang_hooks.types.get_debug_type (type);
13085
13086 if (debug_type != NULL_TREE && debug_type != type)
13087 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13088 }
13089
13090 cv_quals &= cv_qual_mask;
13091
13092 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13093 tag modifier (and not an attribute), old consumers won't be able
13094 to handle it. */
13095 if (dwarf_version < 3)
13096 cv_quals &= ~TYPE_QUAL_RESTRICT;
13097
13098 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13099 if (dwarf_version < 5)
13100 cv_quals &= ~TYPE_QUAL_ATOMIC;
13101
13102 /* See if we already have the appropriately qualified variant of
13103 this type. */
13104 qualified_type = get_qualified_type (type, cv_quals);
13105
13106 if (qualified_type == sizetype)
13107 {
13108 /* Try not to expose the internal sizetype type's name. */
13109 if (TYPE_NAME (qualified_type)
13110 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13111 {
13112 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13113
13114 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13115 && (TYPE_PRECISION (t)
13116 == TYPE_PRECISION (qualified_type))
13117 && (TYPE_UNSIGNED (t)
13118 == TYPE_UNSIGNED (qualified_type)));
13119 qualified_type = t;
13120 }
13121 else if (qualified_type == sizetype
13122 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13123 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13124 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13125 qualified_type = size_type_node;
13126 }
13127
13128 /* If we do, then we can just use its DIE, if it exists. */
13129 if (qualified_type)
13130 {
13131 mod_type_die = lookup_type_die (qualified_type);
13132
13133 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13134 dealt with specially: the DIE with the attribute, if it exists, is
13135 placed immediately after the regular DIE for the same base type. */
13136 if (mod_type_die
13137 && (!reverse_base_type
13138 || ((mod_type_die = mod_type_die->die_sib) != NULL
13139 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13140 return mod_type_die;
13141 }
13142
13143 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13144
13145 /* Handle C typedef types. */
13146 if (name
13147 && TREE_CODE (name) == TYPE_DECL
13148 && DECL_ORIGINAL_TYPE (name)
13149 && !DECL_ARTIFICIAL (name))
13150 {
13151 tree dtype = TREE_TYPE (name);
13152
13153 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13154 if (qualified_type == dtype && !reverse_base_type)
13155 {
13156 tree origin = decl_ultimate_origin (name);
13157
13158 /* Typedef variants that have an abstract origin don't get their own
13159 type DIE (see gen_typedef_die), so fall back on the ultimate
13160 abstract origin instead. */
13161 if (origin != NULL && origin != name)
13162 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13163 context_die);
13164
13165 /* For a named type, use the typedef. */
13166 gen_type_die (qualified_type, context_die);
13167 return lookup_type_die (qualified_type);
13168 }
13169 else
13170 {
13171 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13172 dquals &= cv_qual_mask;
13173 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13174 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13175 /* cv-unqualified version of named type. Just use
13176 the unnamed type to which it refers. */
13177 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13178 reverse, context_die);
13179 /* Else cv-qualified version of named type; fall through. */
13180 }
13181 }
13182
13183 mod_scope = scope_die_for (type, context_die);
13184
13185 if (cv_quals)
13186 {
13187 int sub_quals = 0, first_quals = 0;
13188 unsigned i;
13189 dw_die_ref first = NULL, last = NULL;
13190
13191 /* Determine a lesser qualified type that most closely matches
13192 this one. Then generate DW_TAG_* entries for the remaining
13193 qualifiers. */
13194 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13195 cv_qual_mask);
13196 if (sub_quals && use_debug_types)
13197 {
13198 bool needed = false;
13199 /* If emitting type units, make sure the order of qualifiers
13200 is canonical. Thus, start from unqualified type if
13201 an earlier qualifier is missing in sub_quals, but some later
13202 one is present there. */
13203 for (i = 0; i < dwarf_qual_info_size; i++)
13204 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13205 needed = true;
13206 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13207 {
13208 sub_quals = 0;
13209 break;
13210 }
13211 }
13212 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13213 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13214 {
13215 /* As not all intermediate qualified DIEs have corresponding
13216 tree types, ensure that qualified DIEs in the same scope
13217 as their DW_AT_type are emitted after their DW_AT_type,
13218 only with other qualified DIEs for the same type possibly
13219 in between them. Determine the range of such qualified
13220 DIEs now (first being the base type, last being corresponding
13221 last qualified DIE for it). */
13222 unsigned int count = 0;
13223 first = qualified_die_p (mod_type_die, &first_quals,
13224 dwarf_qual_info_size);
13225 if (first == NULL)
13226 first = mod_type_die;
13227 gcc_assert ((first_quals & ~sub_quals) == 0);
13228 for (count = 0, last = first;
13229 count < (1U << dwarf_qual_info_size);
13230 count++, last = last->die_sib)
13231 {
13232 int quals = 0;
13233 if (last == mod_scope->die_child)
13234 break;
13235 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13236 != first)
13237 break;
13238 }
13239 }
13240
13241 for (i = 0; i < dwarf_qual_info_size; i++)
13242 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13243 {
13244 dw_die_ref d;
13245 if (first && first != last)
13246 {
13247 for (d = first->die_sib; ; d = d->die_sib)
13248 {
13249 int quals = 0;
13250 qualified_die_p (d, &quals, dwarf_qual_info_size);
13251 if (quals == (first_quals | dwarf_qual_info[i].q))
13252 break;
13253 if (d == last)
13254 {
13255 d = NULL;
13256 break;
13257 }
13258 }
13259 if (d)
13260 {
13261 mod_type_die = d;
13262 continue;
13263 }
13264 }
13265 if (first)
13266 {
13267 d = new_die_raw (dwarf_qual_info[i].t);
13268 add_child_die_after (mod_scope, d, last);
13269 last = d;
13270 }
13271 else
13272 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13273 if (mod_type_die)
13274 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13275 mod_type_die = d;
13276 first_quals |= dwarf_qual_info[i].q;
13277 }
13278 }
13279 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13280 {
13281 dwarf_tag tag = DW_TAG_pointer_type;
13282 if (code == REFERENCE_TYPE)
13283 {
13284 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13285 tag = DW_TAG_rvalue_reference_type;
13286 else
13287 tag = DW_TAG_reference_type;
13288 }
13289 mod_type_die = new_die (tag, mod_scope, type);
13290
13291 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13292 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13293 add_alignment_attribute (mod_type_die, type);
13294 item_type = TREE_TYPE (type);
13295
13296 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13297 if (!ADDR_SPACE_GENERIC_P (as))
13298 {
13299 int action = targetm.addr_space.debug (as);
13300 if (action >= 0)
13301 {
13302 /* Positive values indicate an address_class. */
13303 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13304 }
13305 else
13306 {
13307 /* Negative values indicate an (inverted) segment base reg. */
13308 dw_loc_descr_ref d
13309 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13310 add_AT_loc (mod_type_die, DW_AT_segment, d);
13311 }
13312 }
13313 }
13314 else if (code == INTEGER_TYPE
13315 && TREE_TYPE (type) != NULL_TREE
13316 && subrange_type_for_debug_p (type, &low, &high))
13317 {
13318 tree bias = NULL_TREE;
13319 if (lang_hooks.types.get_type_bias)
13320 bias = lang_hooks.types.get_type_bias (type);
13321 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13322 item_type = TREE_TYPE (type);
13323 }
13324 else if (is_base_type (type))
13325 {
13326 mod_type_die = base_type_die (type, reverse);
13327
13328 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13329 if (reverse_base_type)
13330 {
13331 dw_die_ref after_die
13332 = modified_type_die (type, cv_quals, false, context_die);
13333 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13334 }
13335 else
13336 add_child_die (comp_unit_die (), mod_type_die);
13337
13338 add_pubtype (type, mod_type_die);
13339 }
13340 else
13341 {
13342 gen_type_die (type, context_die);
13343
13344 /* We have to get the type_main_variant here (and pass that to the
13345 `lookup_type_die' routine) because the ..._TYPE node we have
13346 might simply be a *copy* of some original type node (where the
13347 copy was created to help us keep track of typedef names) and
13348 that copy might have a different TYPE_UID from the original
13349 ..._TYPE node. */
13350 if (TREE_CODE (type) == FUNCTION_TYPE
13351 || TREE_CODE (type) == METHOD_TYPE)
13352 {
13353 /* For function/method types, can't just use type_main_variant here,
13354 because that can have different ref-qualifiers for C++,
13355 but try to canonicalize. */
13356 tree main = TYPE_MAIN_VARIANT (type);
13357 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13358 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13359 && check_base_type (t, main)
13360 && check_lang_type (t, type))
13361 return lookup_type_die (t);
13362 return lookup_type_die (type);
13363 }
13364 else if (TREE_CODE (type) != VECTOR_TYPE
13365 && TREE_CODE (type) != ARRAY_TYPE)
13366 return lookup_type_die (type_main_variant (type));
13367 else
13368 /* Vectors have the debugging information in the type,
13369 not the main variant. */
13370 return lookup_type_die (type);
13371 }
13372
13373 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13374 don't output a DW_TAG_typedef, since there isn't one in the
13375 user's program; just attach a DW_AT_name to the type.
13376 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13377 if the base type already has the same name. */
13378 if (name
13379 && ((TREE_CODE (name) != TYPE_DECL
13380 && (qualified_type == TYPE_MAIN_VARIANT (type)
13381 || (cv_quals == TYPE_UNQUALIFIED)))
13382 || (TREE_CODE (name) == TYPE_DECL
13383 && TREE_TYPE (name) == qualified_type
13384 && DECL_NAME (name))))
13385 {
13386 if (TREE_CODE (name) == TYPE_DECL)
13387 /* Could just call add_name_and_src_coords_attributes here,
13388 but since this is a builtin type it doesn't have any
13389 useful source coordinates anyway. */
13390 name = DECL_NAME (name);
13391 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13392 }
13393 /* This probably indicates a bug. */
13394 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13395 {
13396 name = TYPE_IDENTIFIER (type);
13397 add_name_attribute (mod_type_die,
13398 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13399 }
13400
13401 if (qualified_type && !reverse_base_type)
13402 equate_type_number_to_die (qualified_type, mod_type_die);
13403
13404 if (item_type)
13405 /* We must do this after the equate_type_number_to_die call, in case
13406 this is a recursive type. This ensures that the modified_type_die
13407 recursion will terminate even if the type is recursive. Recursive
13408 types are possible in Ada. */
13409 sub_die = modified_type_die (item_type,
13410 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13411 reverse,
13412 context_die);
13413
13414 if (sub_die != NULL)
13415 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13416
13417 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13418 if (TYPE_ARTIFICIAL (type))
13419 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13420
13421 return mod_type_die;
13422 }
13423
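/* Editor's illustrative sketch, not part of dwarf2out.c: the core of the
   qualifier handling in modified_type_die above, with the reuse of
   already-emitted qualified DIEs and the type-unit canonical ordering
   left out.  Each qualifier present in CV_QUALS but not in SUB_QUALS
   gets its own wrapper DIE, chained through DW_AT_type in the fixed
   dwarf_qual_info[] order; e.g. when no lesser-qualified variant is
   reused, "const volatile int" becomes a DW_TAG_volatile_type DIE whose
   DW_AT_type is a DW_TAG_const_type DIE referring to the int base type.
   The function name is the editor's.  */

static dw_die_ref
wrap_missing_qualifiers_sketch (int cv_quals, int sub_quals,
                                dw_die_ref inner, dw_die_ref scope, tree type)
{
  for (unsigned int i = 0; i < dwarf_qual_info_size; i++)
    if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
      {
        dw_die_ref d = new_die (dwarf_qual_info[i].t, scope, type);
        add_AT_die_ref (d, DW_AT_type, inner);
        inner = d;
      }
  return inner;
}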
13424 /* Generate DIEs for the generic parameters of T.
13425 T must be either a generic type or a generic function.
13426 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13427
13428 static void
13429 gen_generic_params_dies (tree t)
13430 {
13431 tree parms, args;
13432 int parms_num, i;
13433 dw_die_ref die = NULL;
13434 int non_default;
13435
13436 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13437 return;
13438
13439 if (TYPE_P (t))
13440 die = lookup_type_die (t);
13441 else if (DECL_P (t))
13442 die = lookup_decl_die (t);
13443
13444 gcc_assert (die);
13445
13446 parms = lang_hooks.get_innermost_generic_parms (t);
13447 if (!parms)
13448 /* T has no generic parameter. It means T is neither a generic type
13449 nor a generic function. End of story. */
13450 return;
13451
13452 parms_num = TREE_VEC_LENGTH (parms);
13453 args = lang_hooks.get_innermost_generic_args (t);
13454 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13455 non_default = int_cst_value (TREE_CHAIN (args));
13456 else
13457 non_default = TREE_VEC_LENGTH (args);
13458 for (i = 0; i < parms_num; i++)
13459 {
13460 tree parm, arg, arg_pack_elems;
13461 dw_die_ref parm_die;
13462
13463 parm = TREE_VEC_ELT (parms, i);
13464 arg = TREE_VEC_ELT (args, i);
13465 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13466 gcc_assert (parm && TREE_VALUE (parm) && arg);
13467
13468 if (parm && TREE_VALUE (parm) && arg)
13469 {
13470 /* If PARM represents a template parameter pack,
13471 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13472 by DW_TAG_template_*_parameter DIEs for the argument
13473 pack elements of ARG. Note that ARG would then be
13474 an argument pack. */
13475 if (arg_pack_elems)
13476 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13477 arg_pack_elems,
13478 die);
13479 else
13480 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13481 true /* emit name */, die);
13482 if (i >= non_default)
13483 add_AT_flag (parm_die, DW_AT_default_value, 1);
13484 }
13485 }
13486 }
13487
13488 /* Create and return a DIE for PARM which should be
13489 the representation of a generic type parameter.
13490 For instance, in the C++ front end, PARM would be a template parameter.
13491 ARG is the argument to PARM.
13492 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to the
13493 name of PARM.
13494 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13495 as a child node. */
13496
13497 static dw_die_ref
13498 generic_parameter_die (tree parm, tree arg,
13499 bool emit_name_p,
13500 dw_die_ref parent_die)
13501 {
13502 dw_die_ref tmpl_die = NULL;
13503 const char *name = NULL;
13504
13505 if (!parm || !DECL_NAME (parm) || !arg)
13506 return NULL;
13507
13508 /* We support non-type generic parameters and arguments,
13509 type generic parameters and arguments, as well as
13510 generic generic parameters (a.k.a. template template parameters in C++)
13511 and arguments. */
13512 if (TREE_CODE (parm) == PARM_DECL)
13513 /* PARM is a nontype generic parameter */
13514 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13515 else if (TREE_CODE (parm) == TYPE_DECL)
13516 /* PARM is a type generic parameter. */
13517 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13518 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13519 /* PARM is a generic generic parameter.
13520 Its DIE is a GNU extension. It shall have a
13521 DW_AT_name attribute to represent the name of the template template
13522 parameter, and a DW_AT_GNU_template_name attribute to represent the
13523 name of the template template argument. */
13524 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13525 parent_die, parm);
13526 else
13527 gcc_unreachable ();
13528
13529 if (tmpl_die)
13530 {
13531 tree tmpl_type;
13532
13533 /* If PARM is a generic parameter pack, it means we are
13534 emitting debug info for a template argument pack element.
13535 In other terms, ARG is a template argument pack element.
13536 In that case, we don't emit any DW_AT_name attribute for
13537 the die. */
13538 if (emit_name_p)
13539 {
13540 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13541 gcc_assert (name);
13542 add_AT_string (tmpl_die, DW_AT_name, name);
13543 }
13544
13545 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13546 {
13547 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13548 TMPL_DIE should have a child DW_AT_type attribute that is set
13549 to the type of the argument to PARM, which is ARG.
13550 If PARM is a type generic parameter, TMPL_DIE should have a
13551 child DW_AT_type that is set to ARG. */
13552 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13553 add_type_attribute (tmpl_die, tmpl_type,
13554 (TREE_THIS_VOLATILE (tmpl_type)
13555 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13556 false, parent_die);
13557 }
13558 else
13559 {
13560 /* So TMPL_DIE is a DIE representing
13561 a generic generic template parameter, a.k.a. a template template
13562 parameter in C++, and ARG is a template. */
13563
13564 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13565 to the name of the argument. */
13566 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13567 if (name)
13568 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13569 }
13570
13571 if (TREE_CODE (parm) == PARM_DECL)
13572 /* So PARM is a non-type generic parameter.
13573 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13574 attribute of TMPL_DIE whose value represents the value
13575 of ARG.
13576 We must be careful here:
13577 the value of ARG might reference some function decls.
13578 We might currently be emitting debug info for a generic
13579 type, and since types are emitted before function decls, we
13580 don't know whether the function decls referenced by ARG will
13581 actually be emitted after the cgraph computations.
13582 So we must defer the generation of the DW_AT_const_value
13583 until after cgraph is ready. */
13584 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13585 }
13586
13587 return tmpl_die;
13588 }
13589
13590 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13591 PARM_PACK, which must be a template parameter pack. The returned DIE
13592 will be a child DIE of PARENT_DIE. */
13593
13594 static dw_die_ref
13595 template_parameter_pack_die (tree parm_pack,
13596 tree parm_pack_args,
13597 dw_die_ref parent_die)
13598 {
13599 dw_die_ref die;
13600 int j;
13601
13602 gcc_assert (parent_die && parm_pack);
13603
13604 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13605 add_name_and_src_coords_attributes (die, parm_pack);
13606 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13607 generic_parameter_die (parm_pack,
13608 TREE_VEC_ELT (parm_pack_args, j),
13609 false /* Don't emit DW_AT_name */,
13610 die);
13611 return die;
13612 }
13613
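/* Editor's illustrative note, not part of dwarf2out.c: for a C++
   instantiation such as

       template <typename T, int N> struct S { };
       S<int, 3> s;

   the routines above are expected to emit, under the DIE for S<int, 3>,
   a DW_TAG_template_type_param with DW_AT_name "T" and DW_AT_type
   referring to "int", and a DW_TAG_template_value_param with DW_AT_name
   "N", DW_AT_type "int" and a DW_AT_const_value of 3 (the constant is
   filled in only after cgraph has run, as described above).  */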
13614 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13615 an enumerated type. */
13616
13617 static inline int
13618 type_is_enum (const_tree type)
13619 {
13620 return TREE_CODE (type) == ENUMERAL_TYPE;
13621 }
13622
13623 /* Return the DBX register number described by a given RTL node. */
13624
13625 static unsigned int
13626 dbx_reg_number (const_rtx rtl)
13627 {
13628 unsigned regno = REGNO (rtl);
13629
13630 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13631
13632 #ifdef LEAF_REG_REMAP
13633 if (crtl->uses_only_leaf_regs)
13634 {
13635 int leaf_reg = LEAF_REG_REMAP (regno);
13636 if (leaf_reg != -1)
13637 regno = (unsigned) leaf_reg;
13638 }
13639 #endif
13640
13641 regno = DBX_REGISTER_NUMBER (regno);
13642 gcc_assert (regno != INVALID_REGNUM);
13643 return regno;
13644 }
13645
13646 /* Optionally add a DW_OP_piece term to a location description expression.
13647 DW_OP_piece is only added if the location description expression does not
13648 already end with DW_OP_piece. */
13649
13650 static void
13651 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13652 {
13653 dw_loc_descr_ref loc;
13654
13655 if (*list_head != NULL)
13656 {
13657 /* Find the end of the chain. */
13658 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13659 ;
13660
13661 if (loc->dw_loc_opc != DW_OP_piece)
13662 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13663 }
13664 }
13665
13666 /* Return a location descriptor that designates a machine register or
13667 zero if there is none. */
13668
13669 static dw_loc_descr_ref
13670 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13671 {
13672 rtx regs;
13673
13674 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13675 return 0;
13676
13677 /* We only use "frame base" when we're sure we're talking about the
13678 post-prologue local stack frame. We do this by *not* running
13679 register elimination until this point, and recognizing the special
13680 argument pointer and soft frame pointer rtx's.
13681 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13682 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13683 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13684 {
13685 dw_loc_descr_ref result = NULL;
13686
13687 if (dwarf_version >= 4 || !dwarf_strict)
13688 {
13689 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13690 initialized);
13691 if (result)
13692 add_loc_descr (&result,
13693 new_loc_descr (DW_OP_stack_value, 0, 0));
13694 }
13695 return result;
13696 }
13697
13698 regs = targetm.dwarf_register_span (rtl);
13699
13700 if (REG_NREGS (rtl) > 1 || regs)
13701 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13702 else
13703 {
13704 unsigned int dbx_regnum = dbx_reg_number (rtl);
13705 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13706 return 0;
13707 return one_reg_loc_descriptor (dbx_regnum, initialized);
13708 }
13709 }
13710
13711 /* Return a location descriptor that designates a machine register for
13712 a given hard register number. */
13713
13714 static dw_loc_descr_ref
13715 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13716 {
13717 dw_loc_descr_ref reg_loc_descr;
13718
13719 if (regno <= 31)
13720 reg_loc_descr
13721 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13722 else
13723 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13724
13725 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13726 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13727
13728 return reg_loc_descr;
13729 }
13730
13731 /* Given an RTL of a register, return a location descriptor that
13732 designates a value that spans more than one register. */
13733
13734 static dw_loc_descr_ref
13735 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13736 enum var_init_status initialized)
13737 {
13738 int size, i;
13739 dw_loc_descr_ref loc_result = NULL;
13740
13741 /* Simple, contiguous registers. */
13742 if (regs == NULL_RTX)
13743 {
13744 unsigned reg = REGNO (rtl);
13745 int nregs;
13746
13747 #ifdef LEAF_REG_REMAP
13748 if (crtl->uses_only_leaf_regs)
13749 {
13750 int leaf_reg = LEAF_REG_REMAP (reg);
13751 if (leaf_reg != -1)
13752 reg = (unsigned) leaf_reg;
13753 }
13754 #endif
13755
13756 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13757 nregs = REG_NREGS (rtl);
13758
13759 /* At present we only track constant-sized pieces. */
13760 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13761 return NULL;
13762 size /= nregs;
13763
13764 loc_result = NULL;
13765 while (nregs--)
13766 {
13767 dw_loc_descr_ref t;
13768
13769 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13770 VAR_INIT_STATUS_INITIALIZED);
13771 add_loc_descr (&loc_result, t);
13772 add_loc_descr_op_piece (&loc_result, size);
13773 ++reg;
13774 }
13775 return loc_result;
13776 }
13777
13778 /* Now onto stupid register sets in non-contiguous locations. */
13779
13780 gcc_assert (GET_CODE (regs) == PARALLEL);
13781
13782 /* At present we only track constant-sized pieces. */
13783 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13784 return NULL;
13785 loc_result = NULL;
13786
13787 for (i = 0; i < XVECLEN (regs, 0); ++i)
13788 {
13789 dw_loc_descr_ref t;
13790
13791 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13792 VAR_INIT_STATUS_INITIALIZED);
13793 add_loc_descr (&loc_result, t);
13794 add_loc_descr_op_piece (&loc_result, size);
13795 }
13796
13797 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13798 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13799 return loc_result;
13800 }
13801
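/* Editor's illustrative sketch, not part of dwarf2out.c: the piece list
   built by multiple_reg_loc_descriptor above in the simple contiguous
   case.  For an 8-byte value held in two 4-byte hard registers with
   DWARF numbers 0 and 1, the resulting expression is
   DW_OP_reg0 DW_OP_piece 4 DW_OP_reg1 DW_OP_piece 4.  The loop below
   mirrors that construction using helpers defined earlier in this file;
   the function name is the editor's.  */

static dw_loc_descr_ref
contiguous_pieces_sketch (unsigned int first_dwarf_regno, int nregs,
                          int piece_size)
{
  dw_loc_descr_ref loc = NULL;
  for (int i = 0; i < nregs; i++)
    {
      add_loc_descr (&loc,
                     one_reg_loc_descriptor (first_dwarf_regno + i,
                                             VAR_INIT_STATUS_INITIALIZED));
      add_loc_descr_op_piece (&loc, piece_size);
    }
  return loc;
}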
13802 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13803
13804 /* Return a location descriptor that designates a constant i,
13805 as a compound operation from constant (i >> shift), constant shift
13806 and DW_OP_shl. */
13807
13808 static dw_loc_descr_ref
13809 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13810 {
13811 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13812 add_loc_descr (&ret, int_loc_descriptor (shift));
13813 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13814 return ret;
13815 }
13816
13817 /* Return a location descriptor that designates constant POLY_I. */
13818
13819 static dw_loc_descr_ref
13820 int_loc_descriptor (poly_int64 poly_i)
13821 {
13822 enum dwarf_location_atom op;
13823
13824 HOST_WIDE_INT i;
13825 if (!poly_i.is_constant (&i))
13826 {
13827 /* Create location descriptions for the non-constant part and
13828 add any constant offset at the end. */
13829 dw_loc_descr_ref ret = NULL;
13830 HOST_WIDE_INT constant = poly_i.coeffs[0];
13831 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13832 {
13833 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13834 if (coeff != 0)
13835 {
13836 dw_loc_descr_ref start = ret;
13837 unsigned int factor;
13838 int bias;
13839 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13840 (j, &factor, &bias);
13841
13842 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13843 add COEFF * (REGNO / FACTOR) now and subtract
13844 COEFF * BIAS from the final constant part. */
13845 constant -= coeff * bias;
13846 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13847 if (coeff % factor == 0)
13848 coeff /= factor;
13849 else
13850 {
13851 int amount = exact_log2 (factor);
13852 gcc_assert (amount >= 0);
13853 add_loc_descr (&ret, int_loc_descriptor (amount));
13854 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13855 }
13856 if (coeff != 1)
13857 {
13858 add_loc_descr (&ret, int_loc_descriptor (coeff));
13859 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13860 }
13861 if (start)
13862 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13863 }
13864 }
13865 loc_descr_plus_const (&ret, constant);
13866 return ret;
13867 }
13868
13869 /* Pick the smallest representation of a constant, rather than just
13870 defaulting to the LEB encoding. */
13871 if (i >= 0)
13872 {
13873 int clz = clz_hwi (i);
13874 int ctz = ctz_hwi (i);
13875 if (i <= 31)
13876 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13877 else if (i <= 0xff)
13878 op = DW_OP_const1u;
13879 else if (i <= 0xffff)
13880 op = DW_OP_const2u;
13881 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13882 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13883 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13884 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13885 while DW_OP_const4u is 5 bytes. */
13886 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13887 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13888 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13889 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13890 while DW_OP_const4u is 5 bytes. */
13891 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13892
13893 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13894 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13895 <= 4)
13896 {
13897 /* As i >= 2**31, the double cast above will yield a negative number.
13898 Since wrapping is defined in DWARF expressions we can output big
13899 positive integers as small negative ones, regardless of the size
13900 of host wide ints.
13901
13902 Here, since the evaluator will handle 32-bit values and since i >=
13903 2**31, we know it's going to be interpreted as a negative literal:
13904 store it this way if we can do better than 5 bytes. */
13905 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13906 }
13907 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13908 op = DW_OP_const4u;
13909
13910 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13911 least 6 bytes: see if we can do better before falling back to it. */
13912 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13913 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13914 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13915 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13916 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13917 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13918 >= HOST_BITS_PER_WIDE_INT)
13919 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13920 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13921 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13922 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13923 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13924 && size_of_uleb128 (i) > 6)
13925 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13926 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13927 else
13928 op = DW_OP_constu;
13929 }
13930 else
13931 {
13932 if (i >= -0x80)
13933 op = DW_OP_const1s;
13934 else if (i >= -0x8000)
13935 op = DW_OP_const2s;
13936 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13937 {
13938 if (size_of_int_loc_descriptor (i) < 5)
13939 {
13940 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13941 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13942 return ret;
13943 }
13944 op = DW_OP_const4s;
13945 }
13946 else
13947 {
13948 if (size_of_int_loc_descriptor (i)
13949 < (unsigned long) 1 + size_of_sleb128 (i))
13950 {
13951 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13952 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13953 return ret;
13954 }
13955 op = DW_OP_consts;
13956 }
13957 }
13958
13959 return new_loc_descr (op, i, 0);
13960 }
13961
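/* Editor's illustrative check, not part of dwarf2out.c: the shift trick
   chosen by int_loc_descriptor above for sparse constants.  Assuming a
   64-bit HOST_WIDE_INT, the value 0x30000000 has clz = 34 and ctz = 28,
   so the code picks shift = 64 - clz - 5 = 25 and emits
   DW_OP_lit24 DW_OP_lit25 DW_OP_shl (3 bytes) instead of
   DW_OP_const4u 0x30000000 (5 bytes).  Standalone check:  */

#include <assert.h>
int
main (void)
{
  long long i = 0x30000000LL;
  int shift = 25;
  assert ((i >> shift) == 24 && ((i >> shift) << shift) == i);
  return 0;
}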
13962 /* Likewise, for unsigned constants. */
13963
13964 static dw_loc_descr_ref
13965 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13966 {
13967 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13968 const unsigned HOST_WIDE_INT max_uint
13969 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13970
13971 /* If possible, use the clever signed constants handling. */
13972 if (i <= max_int)
13973 return int_loc_descriptor ((HOST_WIDE_INT) i);
13974
13975 /* Here, we are left with positive numbers that cannot be represented as
13976 HOST_WIDE_INT, i.e.:
13977 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13978
13979 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
13980 bytes, whereas it may be better to output a negative integer: thanks to
13981 integer wrapping, we know that:
13982 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13983 = x - 2 * (max (HOST_WIDE_INT) + 1)
13984 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13985 small negative integers. Let's try that in cases where it will clearly
13986 improve the encoding: there is no gain turning DW_OP_const4u into
13987 DW_OP_const4s. */
13988 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13989 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13990 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13991 {
13992 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13993
13994 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13995 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13996 const HOST_WIDE_INT second_shift
13997 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13998
13999 /* So we finally have:
14000 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14001 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14002 return int_loc_descriptor (second_shift);
14003 }
14004
14005 /* Last chance: fallback to a simple constant operation. */
14006 return new_loc_descr
14007 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14008 ? DW_OP_const4u
14009 : DW_OP_const8u,
14010 i, 0);
14011 }
14012
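/* Editor's illustrative check, not part of dwarf2out.c: the wrap-around
   argument used by uint_loc_descriptor above, assuming a 32-bit DWARF
   address size.  Because the DWARF stack wraps modulo 2**32 there, the
   unsigned value 0xfffffff0 and the signed constant -16 denote the same
   stack entry, so a 2-byte DW_OP_const1s can replace a 5-byte
   DW_OP_const4u.  Standalone check:  */

#include <assert.h>
#include <stdint.h>
int
main (void)
{
  assert ((uint32_t) (int32_t) -16 == 0xfffffff0u);
  return 0;
}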
14013 /* Generate and return a location description that computes the unsigned
14014 comparison of the two stack top entries (a OP b where b is the top-most
14015 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14016 LE_EXPR, GT_EXPR or GE_EXPR. */
14017
14018 static dw_loc_descr_ref
14019 uint_comparison_loc_list (enum tree_code kind)
14020 {
14021 enum dwarf_location_atom op, flip_op;
14022 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14023
14024 switch (kind)
14025 {
14026 case LT_EXPR:
14027 op = DW_OP_lt;
14028 break;
14029 case LE_EXPR:
14030 op = DW_OP_le;
14031 break;
14032 case GT_EXPR:
14033 op = DW_OP_gt;
14034 break;
14035 case GE_EXPR:
14036 op = DW_OP_ge;
14037 break;
14038 default:
14039 gcc_unreachable ();
14040 }
14041
14042 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14043 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14044
14045 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14046 possible to perform unsigned comparisons: we just have to distinguish
14047 two cases:
14048
14049 1. when a and b have the same sign (as signed integers); then we should
14050 return: a OP(signed) b;
14051
14052 2. when a is a negative signed integer while b is a positive one, then a
14053 is a greater unsigned integer than b; likewise when a and b's roles
14054 are flipped.
14055
14056 So first, compare the sign of the two operands. */
14057 ret = new_loc_descr (DW_OP_over, 0, 0);
14058 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14059 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14060 /* If they have different signs (i.e. they have different sign bits), then
14061 the stack top value has now the sign bit set and thus it's smaller than
14062 zero. */
14063 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14064 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14065 add_loc_descr (&ret, bra_node);
14066
14067 /* We are in case 1. At this point, we know both operands have the same
14068 sign, so it's safe to use the built-in signed comparison. */
14069 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14070 add_loc_descr (&ret, jmp_node);
14071
14072 /* We are in case 2. Here, we know both operands do not have the same sign,
14073 so we have to flip the signed comparison. */
14074 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14075 tmp = new_loc_descr (flip_op, 0, 0);
14076 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14077 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14078 add_loc_descr (&ret, tmp);
14079
14080 /* This dummy operation is necessary to make the two branches join. */
14081 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14082 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14083 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14084 add_loc_descr (&ret, tmp);
14085
14086 return ret;
14087 }
14088
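/* Editor's illustrative sketch, not part of dwarf2out.c: the signed-only
   trick of uint_comparison_loc_list above, written as plain C for the
   LT_EXPR case (LE/GT/GE follow the same pattern).  When the sign bits
   differ, the operand that is negative as a signed integer is the larger
   unsigned one, so the signed comparison is simply flipped.  */

static int
unsigned_less_via_signed (long a, long b)
{
  if ((a ^ b) < 0)  /* different sign bits */
    return a > b;   /* flipped signed comparison */
  return a < b;     /* same sign: signed and unsigned orders agree */
}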
14089 /* Likewise, but takes the location description lists (might be destructive on
14090 them). Return NULL if either is NULL or if concatenation fails. */
14091
14092 static dw_loc_list_ref
14093 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14094 enum tree_code kind)
14095 {
14096 if (left == NULL || right == NULL)
14097 return NULL;
14098
14099 add_loc_list (&left, right);
14100 if (left == NULL)
14101 return NULL;
14102
14103 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14104 return left;
14105 }
14106
14107 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14108 without actually allocating it. */
14109
14110 static unsigned long
14111 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14112 {
14113 return size_of_int_loc_descriptor (i >> shift)
14114 + size_of_int_loc_descriptor (shift)
14115 + 1;
14116 }
14117
14118 /* Return size_of_locs (int_loc_descriptor (i)) without
14119 actually allocating it. */
14120
14121 static unsigned long
14122 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14123 {
14124 unsigned long s;
14125
14126 if (i >= 0)
14127 {
14128 int clz, ctz;
14129 if (i <= 31)
14130 return 1;
14131 else if (i <= 0xff)
14132 return 2;
14133 else if (i <= 0xffff)
14134 return 3;
14135 clz = clz_hwi (i);
14136 ctz = ctz_hwi (i);
14137 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14138 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14139 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14140 - clz - 5);
14141 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14142 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14143 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14144 - clz - 8);
14145 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14146 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14147 <= 4)
14148 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14149 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14150 return 5;
14151 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14152 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14153 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14154 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14155 - clz - 8);
14156 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14157 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14158 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14159 - clz - 16);
14160 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14161 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14162 && s > 6)
14163 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14164 - clz - 32);
14165 else
14166 return 1 + s;
14167 }
14168 else
14169 {
14170 if (i >= -0x80)
14171 return 2;
14172 else if (i >= -0x8000)
14173 return 3;
14174 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14175 {
14176 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14177 {
14178 s = size_of_int_loc_descriptor (-i) + 1;
14179 if (s < 5)
14180 return s;
14181 }
14182 return 5;
14183 }
14184 else
14185 {
14186 unsigned long r = 1 + size_of_sleb128 (i);
14187 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14188 {
14189 s = size_of_int_loc_descriptor (-i) + 1;
14190 if (s < r)
14191 return s;
14192 }
14193 return r;
14194 }
14195 }
14196 }
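
/* A few worked sizes, assuming a 64-bit HOST_WIDE_INT (the figures follow
   directly from the cases above and are illustrative only):

     i = 25                      -> 1  (DW_OP_lit25)
     i = 0x90                    -> 2  (DW_OP_const1u 0x90)
     i = 0x1234                  -> 3  (DW_OP_const2u 0x1234)
     i = (HOST_WIDE_INT) 1 << 36 -> 4  (DW_OP_lit16 DW_OP_const1u 32
                                        DW_OP_shl), which beats both
                                        DW_OP_constu (7 bytes) and
                                        DW_OP_const8u (9 bytes) here.  */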
14197
14198 /* Return a location description representing the "address" of an integer
14199 value. This can appear only as a toplevel expression. */
14200
14201 static dw_loc_descr_ref
14202 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14203 {
14204 int litsize;
14205 dw_loc_descr_ref loc_result = NULL;
14206
14207 if (!(dwarf_version >= 4 || !dwarf_strict))
14208 return NULL;
14209
14210 litsize = size_of_int_loc_descriptor (i);
14211 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14212 is more compact. For DW_OP_stack_value we need:
14213 litsize + 1 (DW_OP_stack_value)
14214 and for DW_OP_implicit_value:
14215 1 (DW_OP_implicit_value) + 1 (length) + size. */
14216 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14217 {
14218 loc_result = int_loc_descriptor (i);
14219 add_loc_descr (&loc_result,
14220 new_loc_descr (DW_OP_stack_value, 0, 0));
14221 return loc_result;
14222 }
14223
14224 loc_result = new_loc_descr (DW_OP_implicit_value,
14225 size, 0);
14226 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14227 loc_result->dw_loc_oprnd2.v.val_int = i;
14228 return loc_result;
14229 }
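
/* Two illustrative cases of the size comparison above (assuming
   DWARF2_ADDR_SIZE of at least 4): for SIZE == 4 and I == 5, the
   DW_OP_stack_value form costs 1 + 1 = 2 bytes against 1 + 1 + 4 = 6 for
   DW_OP_implicit_value, so the stack-value form is used; for SIZE == 1 and
   I == -0x90 (litsize 3), the stack-value form would cost 4 bytes against
   3, so DW_OP_implicit_value wins.  */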
14230
14231 /* Return a location descriptor that designates a base+offset location. */
14232
14233 static dw_loc_descr_ref
14234 based_loc_descr (rtx reg, poly_int64 offset,
14235 enum var_init_status initialized)
14236 {
14237 unsigned int regno;
14238 dw_loc_descr_ref result;
14239 dw_fde_ref fde = cfun->fde;
14240
14241 /* We only use "frame base" when we're sure we're talking about the
14242 post-prologue local stack frame. We do this by *not* running
14243 register elimination until this point, and recognizing the special
14244 argument pointer and soft frame pointer rtx's. */
14245 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14246 {
14247 rtx elim = (ira_use_lra_p
14248 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14249 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14250
14251 if (elim != reg)
14252 {
14253 elim = strip_offset_and_add (elim, &offset);
14254 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14255 && (elim == hard_frame_pointer_rtx
14256 || elim == stack_pointer_rtx))
14257 || elim == (frame_pointer_needed
14258 ? hard_frame_pointer_rtx
14259 : stack_pointer_rtx));
14260
14261 /* If drap register is used to align stack, use frame
14262 pointer + offset to access stack variables. If stack
14263 is aligned without drap, use stack pointer + offset to
14264 access stack variables. */
14265 if (crtl->stack_realign_tried
14266 && reg == frame_pointer_rtx)
14267 {
14268 int base_reg
14269 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14270 ? HARD_FRAME_POINTER_REGNUM
14271 : REGNO (elim));
14272 return new_reg_loc_descr (base_reg, offset);
14273 }
14274
14275 gcc_assert (frame_pointer_fb_offset_valid);
14276 offset += frame_pointer_fb_offset;
14277 HOST_WIDE_INT const_offset;
14278 if (offset.is_constant (&const_offset))
14279 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14280 else
14281 {
14282 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14283 loc_descr_plus_const (&ret, offset);
14284 return ret;
14285 }
14286 }
14287 }
14288
14289 regno = REGNO (reg);
14290 #ifdef LEAF_REG_REMAP
14291 if (crtl->uses_only_leaf_regs)
14292 {
14293 int leaf_reg = LEAF_REG_REMAP (regno);
14294 if (leaf_reg != -1)
14295 regno = (unsigned) leaf_reg;
14296 }
14297 #endif
14298 regno = DWARF_FRAME_REGNUM (regno);
14299
14300 HOST_WIDE_INT const_offset;
14301 if (!optimize && fde
14302 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14303 && offset.is_constant (&const_offset))
14304 {
14305 /* Use cfa+offset to represent the location of arguments passed
14306 on the stack when drap is used to align stack.
14307 Only do this when not optimizing; for optimized code, var-tracking
14308 is supposed to track where the arguments live, and the register
14309 used as vdrap or drap in some spot might be used for something
14310 else in another part of the routine. */
14311 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14312 }
14313
14314 result = new_reg_loc_descr (regno, offset);
14315
14316 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14317 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14318
14319 return result;
14320 }
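
/* Rough examples of the descriptors produced above (the numbers are
   hypothetical): a local addressed as frame_pointer_rtx + 8, with a
   frame_pointer_fb_offset of -16, comes out as DW_OP_fbreg -8; a location
   based on a hard register that maps to DWARF register 5, with offset 8,
   comes out as a register-relative descriptor, DW_OP_breg5 8, via
   new_reg_loc_descr.  */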
14321
14322 /* Return true if this RTL expression describes a base+offset calculation. */
14323
14324 static inline int
14325 is_based_loc (const_rtx rtl)
14326 {
14327 return (GET_CODE (rtl) == PLUS
14328 && ((REG_P (XEXP (rtl, 0))
14329 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14330 && CONST_INT_P (XEXP (rtl, 1)))));
14331 }
14332
14333 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14334 failed. */
14335
14336 static dw_loc_descr_ref
14337 tls_mem_loc_descriptor (rtx mem)
14338 {
14339 tree base;
14340 dw_loc_descr_ref loc_result;
14341
14342 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14343 return NULL;
14344
14345 base = get_base_address (MEM_EXPR (mem));
14346 if (base == NULL
14347 || !VAR_P (base)
14348 || !DECL_THREAD_LOCAL_P (base))
14349 return NULL;
14350
14351 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14352 if (loc_result == NULL)
14353 return NULL;
14354
14355 if (maybe_ne (MEM_OFFSET (mem), 0))
14356 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14357
14358 return loc_result;
14359 }
14360
14361 /* Output debug info about reason why we failed to expand expression as dwarf
14362 expression. */
14363
14364 static void
14365 expansion_failed (tree expr, rtx rtl, char const *reason)
14366 {
14367 if (dump_file && (dump_flags & TDF_DETAILS))
14368 {
14369 fprintf (dump_file, "Failed to expand as dwarf: ");
14370 if (expr)
14371 print_generic_expr (dump_file, expr, dump_flags);
14372 if (rtl)
14373 {
14374 fprintf (dump_file, "\n");
14375 print_rtl (dump_file, rtl);
14376 }
14377 fprintf (dump_file, "\nReason: %s\n", reason);
14378 }
14379 }
14380
14381 /* Helper function for const_ok_for_output. */
14382
14383 static bool
14384 const_ok_for_output_1 (rtx rtl)
14385 {
14386 if (targetm.const_not_ok_for_debug_p (rtl))
14387 {
14388 if (GET_CODE (rtl) != UNSPEC)
14389 {
14390 expansion_failed (NULL_TREE, rtl,
14391 "Expression rejected for debug by the backend.\n");
14392 return false;
14393 }
14394
14395 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14396 the target hook doesn't explicitly allow it in debug info, assume
14397 we can't express it in the debug info. */
14398 /* Don't complain about TLS UNSPECs, those are just too hard to
14399 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14400 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14401 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14402 if (flag_checking
14403 && (XVECLEN (rtl, 0) == 0
14404 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14405 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14406 inform (current_function_decl
14407 ? DECL_SOURCE_LOCATION (current_function_decl)
14408 : UNKNOWN_LOCATION,
14409 #if NUM_UNSPEC_VALUES > 0
14410 "non-delegitimized UNSPEC %s (%d) found in variable location",
14411 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14412 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14413 XINT (rtl, 1));
14414 #else
14415 "non-delegitimized UNSPEC %d found in variable location",
14416 XINT (rtl, 1));
14417 #endif
14418 expansion_failed (NULL_TREE, rtl,
14419 "UNSPEC hasn't been delegitimized.\n");
14420 return false;
14421 }
14422
14423 if (CONST_POLY_INT_P (rtl))
14424 return false;
14425
14426 if (targetm.const_not_ok_for_debug_p (rtl))
14427 {
14428 expansion_failed (NULL_TREE, rtl,
14429 "Expression rejected for debug by the backend.\n");
14430 return false;
14431 }
14432
14433 /* FIXME: Refer to PR60655. It is possible for simplification
14434 of rtl expressions in var tracking to produce such expressions.
14435 We should really identify / validate expressions
14436 enclosed in CONST that can be handled by assemblers on various
14437 targets and only handle legitimate cases here. */
14438 switch (GET_CODE (rtl))
14439 {
14440 case SYMBOL_REF:
14441 break;
14442 case NOT:
14443 case NEG:
14444 return false;
14445 default:
14446 return true;
14447 }
14448
14449 if (CONSTANT_POOL_ADDRESS_P (rtl))
14450 {
14451 bool marked;
14452 get_pool_constant_mark (rtl, &marked);
14453 /* If all references to this pool constant were optimized away,
14454 it was not output and thus we can't represent it. */
14455 if (!marked)
14456 {
14457 expansion_failed (NULL_TREE, rtl,
14458 "Constant was removed from constant pool.\n");
14459 return false;
14460 }
14461 }
14462
14463 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14464 return false;
14465
14466 /* Avoid references to external symbols in debug info: on several targets
14467 the linker might even refuse to link when linking a shared library,
14468 and in many other cases the relocations for .debug_info/.debug_loc are
14469 dropped, so the address becomes zero anyway. Hidden symbols, which are
14470 guaranteed to be defined within the same shared library or executable, are fine. */
14471 if (SYMBOL_REF_EXTERNAL_P (rtl))
14472 {
14473 tree decl = SYMBOL_REF_DECL (rtl);
14474
14475 if (decl == NULL || !targetm.binds_local_p (decl))
14476 {
14477 expansion_failed (NULL_TREE, rtl,
14478 "Symbol not defined in current TU.\n");
14479 return false;
14480 }
14481 }
14482
14483 return true;
14484 }
14485
14486 /* Return true if constant RTL can be emitted in DW_OP_addr or
14487 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14488 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14489
14490 static bool
14491 const_ok_for_output (rtx rtl)
14492 {
14493 if (GET_CODE (rtl) == SYMBOL_REF)
14494 return const_ok_for_output_1 (rtl);
14495
14496 if (GET_CODE (rtl) == CONST)
14497 {
14498 subrtx_var_iterator::array_type array;
14499 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14500 if (!const_ok_for_output_1 (*iter))
14501 return false;
14502 return true;
14503 }
14504
14505 return true;
14506 }
14507
14508 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14509 if possible, NULL otherwise. */
14510
14511 static dw_die_ref
14512 base_type_for_mode (machine_mode mode, bool unsignedp)
14513 {
14514 dw_die_ref type_die;
14515 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14516
14517 if (type == NULL)
14518 return NULL;
14519 switch (TREE_CODE (type))
14520 {
14521 case INTEGER_TYPE:
14522 case REAL_TYPE:
14523 break;
14524 default:
14525 return NULL;
14526 }
14527 type_die = lookup_type_die (type);
14528 if (!type_die)
14529 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14530 comp_unit_die ());
14531 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14532 return NULL;
14533 return type_die;
14534 }
14535
14536 /* For descriptor OP, assumed to compute an unsigned value of mode MODE,
14537 convert it to an unsigned type matching MODE, or, if MODE is narrower
14538 than or as wide as DWARF2_ADDR_SIZE, to untyped. Return NULL if the
14539 conversion is not possible. */
14540
14541 static dw_loc_descr_ref
14542 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14543 {
14544 machine_mode outer_mode = mode;
14545 dw_die_ref type_die;
14546 dw_loc_descr_ref cvt;
14547
14548 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14549 {
14550 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14551 return op;
14552 }
14553 type_die = base_type_for_mode (outer_mode, 1);
14554 if (type_die == NULL)
14555 return NULL;
14556 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14557 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14558 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14559 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14560 add_loc_descr (&op, cvt);
14561 return op;
14562 }
14563
14564 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14565
14566 static dw_loc_descr_ref
14567 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14568 dw_loc_descr_ref op1)
14569 {
14570 dw_loc_descr_ref ret = op0;
14571 add_loc_descr (&ret, op1);
14572 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14573 if (STORE_FLAG_VALUE != 1)
14574 {
14575 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14576 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14577 }
14578 return ret;
14579 }
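
/* For instance, on a (hypothetical) target where STORE_FLAG_VALUE is -1,
   an equality test comes out roughly as
     <op0> <op1> DW_OP_eq DW_OP_const1s -1 DW_OP_mul
   so that the 0/1 produced by DW_OP_eq is rescaled to the 0/-1 flag value
   the compiler expects.  */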
14580
14581 /* Subroutine of scompare_loc_descriptor for the case in which we're
14582 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14583 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14584
14585 static dw_loc_descr_ref
14586 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14587 scalar_int_mode op_mode,
14588 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14589 {
14590 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14591 dw_loc_descr_ref cvt;
14592
14593 if (type_die == NULL)
14594 return NULL;
14595 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14596 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14597 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14598 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14599 add_loc_descr (&op0, cvt);
14600 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14601 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14602 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14603 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14604 add_loc_descr (&op1, cvt);
14605 return compare_loc_descriptor (op, op0, op1);
14606 }
14607
14608 /* Subroutine of scompare_loc_descriptor for the case in which we're
14609 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14610 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14611
14612 static dw_loc_descr_ref
14613 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14614 scalar_int_mode op_mode,
14615 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14616 {
14617 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14618 /* For eq/ne, if the operands are known to be zero-extended,
14619 there is no need to do the fancy shifting up. */
14620 if (op == DW_OP_eq || op == DW_OP_ne)
14621 {
14622 dw_loc_descr_ref last0, last1;
14623 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14624 ;
14625 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14626 ;
14627 /* deref_size zero extends, and for constants we can check
14628 whether they are zero extended or not. */
14629 if (((last0->dw_loc_opc == DW_OP_deref_size
14630 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14631 || (CONST_INT_P (XEXP (rtl, 0))
14632 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14633 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14634 && ((last1->dw_loc_opc == DW_OP_deref_size
14635 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14636 || (CONST_INT_P (XEXP (rtl, 1))
14637 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14638 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14639 return compare_loc_descriptor (op, op0, op1);
14640
14641 /* EQ/NE comparison against constant in narrower type than
14642 DWARF2_ADDR_SIZE can be performed either as
14643 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14644 DW_OP_{eq,ne}
14645 or
14646 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14647 DW_OP_{eq,ne}. Pick whatever is shorter. */
14648 if (CONST_INT_P (XEXP (rtl, 1))
14649 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14650 && (size_of_int_loc_descriptor (shift) + 1
14651 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14652 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14653 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14654 & GET_MODE_MASK (op_mode))))
14655 {
14656 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14657 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14658 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14659 & GET_MODE_MASK (op_mode));
14660 return compare_loc_descriptor (op, op0, op1);
14661 }
14662 }
14663 add_loc_descr (&op0, int_loc_descriptor (shift));
14664 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14665 if (CONST_INT_P (XEXP (rtl, 1)))
14666 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14667 else
14668 {
14669 add_loc_descr (&op1, int_loc_descriptor (shift));
14670 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14671 }
14672 return compare_loc_descriptor (op, op0, op1);
14673 }
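
/* The generic narrow path above is, in rough C, equivalent to the following
   for a 16-bit OP_MODE on a target with DWARF2_ADDR_SIZE == 4 (illustration
   only; the helper name is invented):

     static int
     sketch_narrow_signed_lt (int32_t a, int32_t b)
     {
       return (int32_t) ((uint32_t) a << 16)
              < (int32_t) ((uint32_t) b << 16);
     }

   Shifting both operands up by 16 puts the 16-bit sign bit into the 32-bit
   sign bit, after which the address-sized signed comparison gives the
   right answer regardless of what the upper bits held.  */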
14674
14675 /* Return location descriptor for signed comparison OP RTL. */
14676
14677 static dw_loc_descr_ref
14678 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14679 machine_mode mem_mode)
14680 {
14681 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14682 dw_loc_descr_ref op0, op1;
14683
14684 if (op_mode == VOIDmode)
14685 op_mode = GET_MODE (XEXP (rtl, 1));
14686 if (op_mode == VOIDmode)
14687 return NULL;
14688
14689 scalar_int_mode int_op_mode;
14690 if (dwarf_strict
14691 && dwarf_version < 5
14692 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14693 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14694 return NULL;
14695
14696 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14697 VAR_INIT_STATUS_INITIALIZED);
14698 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14699 VAR_INIT_STATUS_INITIALIZED);
14700
14701 if (op0 == NULL || op1 == NULL)
14702 return NULL;
14703
14704 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14705 {
14706 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14707 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14708
14709 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14710 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14711 }
14712 return compare_loc_descriptor (op, op0, op1);
14713 }
14714
14715 /* Return location descriptor for unsigned comparison OP RTL. */
14716
14717 static dw_loc_descr_ref
14718 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14719 machine_mode mem_mode)
14720 {
14721 dw_loc_descr_ref op0, op1;
14722
14723 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14724 if (test_op_mode == VOIDmode)
14725 test_op_mode = GET_MODE (XEXP (rtl, 1));
14726
14727 scalar_int_mode op_mode;
14728 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14729 return NULL;
14730
14731 if (dwarf_strict
14732 && dwarf_version < 5
14733 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14734 return NULL;
14735
14736 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14737 VAR_INIT_STATUS_INITIALIZED);
14738 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14739 VAR_INIT_STATUS_INITIALIZED);
14740
14741 if (op0 == NULL || op1 == NULL)
14742 return NULL;
14743
14744 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14745 {
14746 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14747 dw_loc_descr_ref last0, last1;
14748 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14749 ;
14750 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14751 ;
14752 if (CONST_INT_P (XEXP (rtl, 0)))
14753 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14754 /* deref_size zero extends, so no need to mask it again. */
14755 else if (last0->dw_loc_opc != DW_OP_deref_size
14756 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14757 {
14758 add_loc_descr (&op0, int_loc_descriptor (mask));
14759 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14760 }
14761 if (CONST_INT_P (XEXP (rtl, 1)))
14762 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14763 /* deref_size zero extends, so no need to mask it again. */
14764 else if (last1->dw_loc_opc != DW_OP_deref_size
14765 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14766 {
14767 add_loc_descr (&op1, int_loc_descriptor (mask));
14768 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14769 }
14770 }
14771 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14772 {
14773 HOST_WIDE_INT bias = 1;
14774 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14775 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14776 if (CONST_INT_P (XEXP (rtl, 1)))
14777 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14778 + INTVAL (XEXP (rtl, 1)));
14779 else
14780 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14781 bias, 0));
14782 }
14783 return compare_loc_descriptor (op, op0, op1);
14784 }
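
/* The DWARF2_ADDR_SIZE-sized path above relies on the classic bias trick:
   adding 2**(N-1) to both operands (modulo 2**N) maps unsigned order onto
   signed order.  In rough C, for a 32-bit address size (illustration only;
   the helper name is invented):

     static int
     sketch_unsigned_lt_via_bias (uint32_t a, uint32_t b)
     {
       const uint32_t bias = (uint32_t) 1 << 31;
       return (int32_t) (a + bias) < (int32_t) (b + bias);
     }

   The narrower-than-address-size path instead just masks both operands down
   to GET_MODE_MASK (op_mode), after which the plain signed comparison is
   already correct.  */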
14785
14786 /* Return location descriptor for {U,S}{MIN,MAX}. */
14787
14788 static dw_loc_descr_ref
14789 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14790 machine_mode mem_mode)
14791 {
14792 enum dwarf_location_atom op;
14793 dw_loc_descr_ref op0, op1, ret;
14794 dw_loc_descr_ref bra_node, drop_node;
14795
14796 scalar_int_mode int_mode;
14797 if (dwarf_strict
14798 && dwarf_version < 5
14799 && (!is_a <scalar_int_mode> (mode, &int_mode)
14800 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14801 return NULL;
14802
14803 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14804 VAR_INIT_STATUS_INITIALIZED);
14805 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14806 VAR_INIT_STATUS_INITIALIZED);
14807
14808 if (op0 == NULL || op1 == NULL)
14809 return NULL;
14810
14811 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14812 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14813 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14814 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14815 {
14816 /* Checked by the caller. */
14817 int_mode = as_a <scalar_int_mode> (mode);
14818 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14819 {
14820 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14821 add_loc_descr (&op0, int_loc_descriptor (mask));
14822 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14823 add_loc_descr (&op1, int_loc_descriptor (mask));
14824 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14825 }
14826 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14827 {
14828 HOST_WIDE_INT bias = 1;
14829 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14830 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14831 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14832 }
14833 }
14834 else if (is_a <scalar_int_mode> (mode, &int_mode)
14835 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14836 {
14837 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14838 add_loc_descr (&op0, int_loc_descriptor (shift));
14839 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14840 add_loc_descr (&op1, int_loc_descriptor (shift));
14841 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14842 }
14843 else if (is_a <scalar_int_mode> (mode, &int_mode)
14844 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14845 {
14846 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14847 dw_loc_descr_ref cvt;
14848 if (type_die == NULL)
14849 return NULL;
14850 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14851 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14852 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14853 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14854 add_loc_descr (&op0, cvt);
14855 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14856 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14857 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14858 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14859 add_loc_descr (&op1, cvt);
14860 }
14861
14862 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14863 op = DW_OP_lt;
14864 else
14865 op = DW_OP_gt;
14866 ret = op0;
14867 add_loc_descr (&ret, op1);
14868 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14869 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14870 add_loc_descr (&ret, bra_node);
14871 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14872 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14873 add_loc_descr (&ret, drop_node);
14874 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14875 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14876 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14877 && is_a <scalar_int_mode> (mode, &int_mode)
14878 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14879 ret = convert_descriptor_to_mode (int_mode, ret);
14880 return ret;
14881 }
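
/* Illustrative stack trace for SMIN (a, b), ignoring the masking, biasing
   and widening fixups above (the stack is shown oldest entry first):

     <op0>            a
     DW_OP_dup        a a
     <op1>            a a b
     DW_OP_swap       a b a
     DW_OP_over       a b a b
     DW_OP_lt         a b (a<b)
     DW_OP_bra <L>    a b          branch taken when a < b
     DW_OP_swap       b a          fall-through case, a >= b
     L: DW_OP_drop    a if the branch was taken, otherwise b

   so the value left on the stack is min (a, b); SMAX and UMAX use
   DW_OP_gt instead of DW_OP_lt.  */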
14882
14883 /* Helper function for mem_loc_descriptor. Perform the binary operation
14884 OP after converting both arguments to TYPE_DIE, then convert the
14885 result back to unsigned. */
14886
14887 static dw_loc_descr_ref
14888 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14889 scalar_int_mode mode, machine_mode mem_mode)
14890 {
14891 dw_loc_descr_ref cvt, op0, op1;
14892
14893 if (type_die == NULL)
14894 return NULL;
14895 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14896 VAR_INIT_STATUS_INITIALIZED);
14897 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14898 VAR_INIT_STATUS_INITIALIZED);
14899 if (op0 == NULL || op1 == NULL)
14900 return NULL;
14901 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14902 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14903 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14904 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14905 add_loc_descr (&op0, cvt);
14906 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14907 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14908 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14909 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14910 add_loc_descr (&op1, cvt);
14911 add_loc_descr (&op0, op1);
14912 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14913 return convert_descriptor_to_mode (mode, op0);
14914 }
14915
14916 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14917 const0 is DW_OP_lit0 or corresponding typed constant,
14918 const1 is DW_OP_lit1 or corresponding typed constant
14919 and constMSB is constant with just the MSB bit set
14920 for the mode):
14921 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14922 L1: const0 DW_OP_swap
14923 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14924 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14925 L3: DW_OP_drop
14926 L4: DW_OP_nop
14927
14928 CTZ is similar:
14929 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14930 L1: const0 DW_OP_swap
14931 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14932 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14933 L3: DW_OP_drop
14934 L4: DW_OP_nop
14935
14936 FFS is similar:
14937 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14938 L1: const1 DW_OP_swap
14939 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14940 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14941 L3: DW_OP_drop
14942 L4: DW_OP_nop */
14943
14944 static dw_loc_descr_ref
14945 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14946 machine_mode mem_mode)
14947 {
14948 dw_loc_descr_ref op0, ret, tmp;
14949 HOST_WIDE_INT valv;
14950 dw_loc_descr_ref l1jump, l1label;
14951 dw_loc_descr_ref l2jump, l2label;
14952 dw_loc_descr_ref l3jump, l3label;
14953 dw_loc_descr_ref l4jump, l4label;
14954 rtx msb;
14955
14956 if (GET_MODE (XEXP (rtl, 0)) != mode)
14957 return NULL;
14958
14959 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14960 VAR_INIT_STATUS_INITIALIZED);
14961 if (op0 == NULL)
14962 return NULL;
14963 ret = op0;
14964 if (GET_CODE (rtl) == CLZ)
14965 {
14966 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14967 valv = GET_MODE_BITSIZE (mode);
14968 }
14969 else if (GET_CODE (rtl) == FFS)
14970 valv = 0;
14971 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14972 valv = GET_MODE_BITSIZE (mode);
14973 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14974 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14975 add_loc_descr (&ret, l1jump);
14976 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14977 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14978 VAR_INIT_STATUS_INITIALIZED);
14979 if (tmp == NULL)
14980 return NULL;
14981 add_loc_descr (&ret, tmp);
14982 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14983 add_loc_descr (&ret, l4jump);
14984 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14985 ? const1_rtx : const0_rtx,
14986 mode, mem_mode,
14987 VAR_INIT_STATUS_INITIALIZED);
14988 if (l1label == NULL)
14989 return NULL;
14990 add_loc_descr (&ret, l1label);
14991 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14992 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14993 add_loc_descr (&ret, l2label);
14994 if (GET_CODE (rtl) != CLZ)
14995 msb = const1_rtx;
14996 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14997 msb = GEN_INT (HOST_WIDE_INT_1U
14998 << (GET_MODE_BITSIZE (mode) - 1));
14999 else
15000 msb = immed_wide_int_const
15001 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15002 GET_MODE_PRECISION (mode)), mode);
15003 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15004 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15005 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15006 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15007 else
15008 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15009 VAR_INIT_STATUS_INITIALIZED);
15010 if (tmp == NULL)
15011 return NULL;
15012 add_loc_descr (&ret, tmp);
15013 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15014 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15015 add_loc_descr (&ret, l3jump);
15016 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15017 VAR_INIT_STATUS_INITIALIZED);
15018 if (tmp == NULL)
15019 return NULL;
15020 add_loc_descr (&ret, tmp);
15021 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15022 ? DW_OP_shl : DW_OP_shr, 0, 0));
15023 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15024 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15025 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15026 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15027 add_loc_descr (&ret, l2jump);
15028 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15029 add_loc_descr (&ret, l3label);
15030 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15031 add_loc_descr (&ret, l4label);
15032 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15033 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15034 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15035 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15036 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15037 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15038 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15039 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15040 return ret;
15041 }
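
/* In rough C, the CLZ loop encoded above computes (illustration only; the
   helper name is invented and nothing here is compiled):

     static int
     sketch_clz (unsigned HOST_WIDE_INT x, int bitsize, int value_at_zero)
     {
       unsigned HOST_WIDE_INT msb
         = (unsigned HOST_WIDE_INT) 1 << (bitsize - 1);
       int count = 0;
       if (x == 0)
         return value_at_zero;      // the constV / GET_MODE_BITSIZE case
       while ((x & msb) == 0)
         {
           x <<= 1;
           count++;
         }
       return count;
     }

   CTZ tests the low bit and shifts right instead; FFS does the same but
   starts the counter at 1 and returns 0 for a zero input.  */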
15042
15043 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15044 const1 is DW_OP_lit1 or corresponding typed constant):
15045 const0 DW_OP_swap
15046 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15047 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15048 L2: DW_OP_drop
15049
15050 PARITY is similar:
15051 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15052 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15053 L2: DW_OP_drop */
15054
15055 static dw_loc_descr_ref
15056 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15057 machine_mode mem_mode)
15058 {
15059 dw_loc_descr_ref op0, ret, tmp;
15060 dw_loc_descr_ref l1jump, l1label;
15061 dw_loc_descr_ref l2jump, l2label;
15062
15063 if (GET_MODE (XEXP (rtl, 0)) != mode)
15064 return NULL;
15065
15066 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15067 VAR_INIT_STATUS_INITIALIZED);
15068 if (op0 == NULL)
15069 return NULL;
15070 ret = op0;
15071 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15072 VAR_INIT_STATUS_INITIALIZED);
15073 if (tmp == NULL)
15074 return NULL;
15075 add_loc_descr (&ret, tmp);
15076 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15077 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15078 add_loc_descr (&ret, l1label);
15079 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15080 add_loc_descr (&ret, l2jump);
15081 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15082 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15083 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15084 VAR_INIT_STATUS_INITIALIZED);
15085 if (tmp == NULL)
15086 return NULL;
15087 add_loc_descr (&ret, tmp);
15088 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15089 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15090 ? DW_OP_plus : DW_OP_xor, 0, 0));
15091 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15092 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15093 VAR_INIT_STATUS_INITIALIZED);
15094 add_loc_descr (&ret, tmp);
15095 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15096 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15097 add_loc_descr (&ret, l1jump);
15098 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15099 add_loc_descr (&ret, l2label);
15100 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15101 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15102 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15103 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15104 return ret;
15105 }
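
/* The bit-serial loop described by the listing above is, in rough C
   (illustration only; the helper name is invented):

     static int
     sketch_popcount (unsigned HOST_WIDE_INT x)
     {
       int count = 0;
       while (x != 0)
         {
           count += x & 1;
           x >>= 1;
         }
       return count;
     }

   PARITY has the same shape but accumulates with DW_OP_xor, so the result
   is the parity bit rather than the full count.  */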
15106
15107 /* BSWAP (constS is initial shift count, either 56 or 24):
15108 constS const0
15109 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15110 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15111 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15112 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15113 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15114
15115 static dw_loc_descr_ref
15116 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15117 machine_mode mem_mode)
15118 {
15119 dw_loc_descr_ref op0, ret, tmp;
15120 dw_loc_descr_ref l1jump, l1label;
15121 dw_loc_descr_ref l2jump, l2label;
15122
15123 if (BITS_PER_UNIT != 8
15124 || (GET_MODE_BITSIZE (mode) != 32
15125 && GET_MODE_BITSIZE (mode) != 64))
15126 return NULL;
15127
15128 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15129 VAR_INIT_STATUS_INITIALIZED);
15130 if (op0 == NULL)
15131 return NULL;
15132
15133 ret = op0;
15134 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15135 mode, mem_mode,
15136 VAR_INIT_STATUS_INITIALIZED);
15137 if (tmp == NULL)
15138 return NULL;
15139 add_loc_descr (&ret, tmp);
15140 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15141 VAR_INIT_STATUS_INITIALIZED);
15142 if (tmp == NULL)
15143 return NULL;
15144 add_loc_descr (&ret, tmp);
15145 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15146 add_loc_descr (&ret, l1label);
15147 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15148 mode, mem_mode,
15149 VAR_INIT_STATUS_INITIALIZED);
15150 add_loc_descr (&ret, tmp);
15151 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15152 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15153 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15154 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15155 VAR_INIT_STATUS_INITIALIZED);
15156 if (tmp == NULL)
15157 return NULL;
15158 add_loc_descr (&ret, tmp);
15159 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15160 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15161 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15162 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15163 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15164 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15165 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15166 VAR_INIT_STATUS_INITIALIZED);
15167 add_loc_descr (&ret, tmp);
15168 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15169 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15170 add_loc_descr (&ret, l2jump);
15171 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15172 VAR_INIT_STATUS_INITIALIZED);
15173 add_loc_descr (&ret, tmp);
15174 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15175 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15176 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15177 add_loc_descr (&ret, l1jump);
15178 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15179 add_loc_descr (&ret, l2label);
15180 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15181 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15182 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15183 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15184 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15185 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15186 return ret;
15187 }
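
/* In rough C, for the 32-bit case (illustration only; the helper name is
   invented), the sequence above computes:

     static uint32_t
     sketch_bswap32 (uint32_t x)
     {
       uint32_t r = 0;
       int s = 24;                  // the initial shift count constS
       for (;;)
         {
           r |= ((x >> (24 - s)) & 0xff) << s;
           if (s == 0)
             break;
           s -= 8;
         }
       return r;
     }

   i.e. the lowest byte of X ends up in the highest byte of the result and
   so on; the 64-bit case starts with S == 56.  */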
15188
15189 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15190 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15191 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15192 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15193
15194 ROTATERT is similar:
15195 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15196 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15197 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15198
15199 static dw_loc_descr_ref
15200 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15201 machine_mode mem_mode)
15202 {
15203 rtx rtlop1 = XEXP (rtl, 1);
15204 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15205 int i;
15206
15207 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15208 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15209 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15210 VAR_INIT_STATUS_INITIALIZED);
15211 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15212 VAR_INIT_STATUS_INITIALIZED);
15213 if (op0 == NULL || op1 == NULL)
15214 return NULL;
15215 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15216 for (i = 0; i < 2; i++)
15217 {
15218 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15219 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15220 mode, mem_mode,
15221 VAR_INIT_STATUS_INITIALIZED);
15222 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15223 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15224 ? DW_OP_const4u
15225 : HOST_BITS_PER_WIDE_INT == 64
15226 ? DW_OP_const8u : DW_OP_constu,
15227 GET_MODE_MASK (mode), 0);
15228 else
15229 mask[i] = NULL;
15230 if (mask[i] == NULL)
15231 return NULL;
15232 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15233 }
15234 ret = op0;
15235 add_loc_descr (&ret, op1);
15236 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15237 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15238 if (GET_CODE (rtl) == ROTATERT)
15239 {
15240 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15241 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15242 GET_MODE_BITSIZE (mode), 0));
15243 }
15244 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15245 if (mask[0] != NULL)
15246 add_loc_descr (&ret, mask[0]);
15247 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15248 if (mask[1] != NULL)
15249 {
15250 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15251 add_loc_descr (&ret, mask[1]);
15252 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15253 }
15254 if (GET_CODE (rtl) == ROTATE)
15255 {
15256 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15257 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15258 GET_MODE_BITSIZE (mode), 0));
15259 }
15260 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15261 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15262 return ret;
15263 }
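
/* For a narrow mode the sequence above amounts to the usual two-shift
   rotate.  In rough C for a 16-bit mode held in a 32-bit stack slot
   (illustration only; the helper name is invented):

     static uint32_t
     sketch_rotl16 (uint32_t x, unsigned int n)   // ROTATE, 0 <= n <= 15
     {
       const uint32_t mask = 0xffff;              // GET_MODE_MASK (mode)
       return ((x << n) & mask) | ((x & mask) >> (16 - n));
     }

   ROTATERT shifts left by BITSIZE - n and right by n instead, as the two
   pseudo-DWARF listings before the function show.  */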
15264
15265 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15266 for DEBUG_PARAMETER_REF RTL. */
15267
15268 static dw_loc_descr_ref
15269 parameter_ref_descriptor (rtx rtl)
15270 {
15271 dw_loc_descr_ref ret;
15272 dw_die_ref ref;
15273
15274 if (dwarf_strict)
15275 return NULL;
15276 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15277 /* With LTO during LTRANS we get the late DIE that refers to the early
15278 DIE, thus we add another indirection here. This seems to confuse
15279 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15280 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15281 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15282 if (ref)
15283 {
15284 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15285 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15286 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15287 }
15288 else
15289 {
15290 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15291 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15292 }
15293 return ret;
15294 }
15295
15296 /* The following routine converts the RTL for a variable or parameter
15297 (resident in memory) into an equivalent Dwarf representation of a
15298 mechanism for getting the address of that same variable onto the top of a
15299 hypothetical "address evaluation" stack.
15300
15301 When creating memory location descriptors, we are effectively transforming
15302 the RTL for a memory-resident object into its Dwarf postfix expression
15303 equivalent. This routine recursively descends an RTL tree, turning
15304 it into Dwarf postfix code as it goes.
15305
15306 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15307
15308 MEM_MODE is the mode of the memory reference, needed to handle some
15309 autoincrement addressing modes.
15310
15311 Return 0 if we can't represent the location. */
15312
15313 dw_loc_descr_ref
15314 mem_loc_descriptor (rtx rtl, machine_mode mode,
15315 machine_mode mem_mode,
15316 enum var_init_status initialized)
15317 {
15318 dw_loc_descr_ref mem_loc_result = NULL;
15319 enum dwarf_location_atom op;
15320 dw_loc_descr_ref op0, op1;
15321 rtx inner = NULL_RTX;
15322 poly_int64 offset;
15323
15324 if (mode == VOIDmode)
15325 mode = GET_MODE (rtl);
15326
15327 /* Note that for a dynamically sized array, the location we will generate a
15328 description of here will be the lowest numbered location which is
15329 actually within the array. That's *not* necessarily the same as the
15330 zeroth element of the array. */
15331
15332 rtl = targetm.delegitimize_address (rtl);
15333
15334 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15335 return NULL;
15336
15337 scalar_int_mode int_mode, inner_mode, op1_mode;
15338 switch (GET_CODE (rtl))
15339 {
15340 case POST_INC:
15341 case POST_DEC:
15342 case POST_MODIFY:
15343 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15344
15345 case SUBREG:
15346 /* The case of a subreg may arise when we have a local (register)
15347 variable or a formal (register) parameter which doesn't quite fill
15348 up an entire register. For now, just assume that it is
15349 legitimate to make the Dwarf info refer to the whole register which
15350 contains the given subreg. */
15351 if (!subreg_lowpart_p (rtl))
15352 break;
15353 inner = SUBREG_REG (rtl);
15354 /* FALLTHRU */
15355 case TRUNCATE:
15356 if (inner == NULL_RTX)
15357 inner = XEXP (rtl, 0);
15358 if (is_a <scalar_int_mode> (mode, &int_mode)
15359 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15360 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15361 #ifdef POINTERS_EXTEND_UNSIGNED
15362 || (int_mode == Pmode && mem_mode != VOIDmode)
15363 #endif
15364 )
15365 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15366 {
15367 mem_loc_result = mem_loc_descriptor (inner,
15368 inner_mode,
15369 mem_mode, initialized);
15370 break;
15371 }
15372 if (dwarf_strict && dwarf_version < 5)
15373 break;
15374 if (is_a <scalar_int_mode> (mode, &int_mode)
15375 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15376 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15377 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15378 {
15379 dw_die_ref type_die;
15380 dw_loc_descr_ref cvt;
15381
15382 mem_loc_result = mem_loc_descriptor (inner,
15383 GET_MODE (inner),
15384 mem_mode, initialized);
15385 if (mem_loc_result == NULL)
15386 break;
15387 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15388 if (type_die == NULL)
15389 {
15390 mem_loc_result = NULL;
15391 break;
15392 }
15393 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15394 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15395 else
15396 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15397 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15398 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15399 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15400 add_loc_descr (&mem_loc_result, cvt);
15401 if (is_a <scalar_int_mode> (mode, &int_mode)
15402 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15403 {
15404 /* Convert it to untyped afterwards. */
15405 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15406 add_loc_descr (&mem_loc_result, cvt);
15407 }
15408 }
15409 break;
15410
15411 case REG:
15412 if (!is_a <scalar_int_mode> (mode, &int_mode)
15413 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15414 && rtl != arg_pointer_rtx
15415 && rtl != frame_pointer_rtx
15416 #ifdef POINTERS_EXTEND_UNSIGNED
15417 && (int_mode != Pmode || mem_mode == VOIDmode)
15418 #endif
15419 ))
15420 {
15421 dw_die_ref type_die;
15422 unsigned int dbx_regnum;
15423
15424 if (dwarf_strict && dwarf_version < 5)
15425 break;
15426 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15427 break;
15428 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15429 if (type_die == NULL)
15430 break;
15431
15432 dbx_regnum = dbx_reg_number (rtl);
15433 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15434 break;
15435 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15436 dbx_regnum, 0);
15437 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15438 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15439 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15440 break;
15441 }
15442 /* Whenever a register number forms a part of the description of the
15443 method for calculating the (dynamic) address of a memory resident
15444 object, DWARF rules require the register number be referred to as
15445 a "base register". This distinction is not based in any way upon
15446 what category of register the hardware believes the given register
15447 belongs to. This is strictly DWARF terminology we're dealing with
15448 here. Note that in cases where the location of a memory-resident
15449 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15450 OP_CONST (0)) the actual DWARF location descriptor that we generate
15451 may just be OP_BASEREG (basereg). This may look deceptively like
15452 the object in question was allocated to a register (rather than in
15453 memory) so DWARF consumers need to be aware of the subtle
15454 distinction between OP_REG and OP_BASEREG. */
15455 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15456 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15457 else if (stack_realign_drap
15458 && crtl->drap_reg
15459 && crtl->args.internal_arg_pointer == rtl
15460 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15461 {
15462 /* If RTL is internal_arg_pointer, which has been optimized
15463 out, use DRAP instead. */
15464 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15465 VAR_INIT_STATUS_INITIALIZED);
15466 }
15467 break;
15468
15469 case SIGN_EXTEND:
15470 case ZERO_EXTEND:
15471 if (!is_a <scalar_int_mode> (mode, &int_mode)
15472 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15473 break;
15474 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15475 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15476 if (op0 == 0)
15477 break;
15478 else if (GET_CODE (rtl) == ZERO_EXTEND
15479 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15480 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15481 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15482 to expand zero extend as two shifts instead of
15483 masking. */
15484 && GET_MODE_SIZE (inner_mode) <= 4)
15485 {
15486 mem_loc_result = op0;
15487 add_loc_descr (&mem_loc_result,
15488 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15489 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15490 }
15491 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15492 {
15493 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15494 shift *= BITS_PER_UNIT;
15495 if (GET_CODE (rtl) == SIGN_EXTEND)
15496 op = DW_OP_shra;
15497 else
15498 op = DW_OP_shr;
15499 mem_loc_result = op0;
15500 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15501 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15502 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15503 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15504 }
15505 else if (!dwarf_strict || dwarf_version >= 5)
15506 {
15507 dw_die_ref type_die1, type_die2;
15508 dw_loc_descr_ref cvt;
15509
15510 type_die1 = base_type_for_mode (inner_mode,
15511 GET_CODE (rtl) == ZERO_EXTEND);
15512 if (type_die1 == NULL)
15513 break;
15514 type_die2 = base_type_for_mode (int_mode, 1);
15515 if (type_die2 == NULL)
15516 break;
15517 mem_loc_result = op0;
15518 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15519 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15520 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15521 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15522 add_loc_descr (&mem_loc_result, cvt);
15523 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15524 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15525 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15526 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15527 add_loc_descr (&mem_loc_result, cvt);
15528 }
15529 break;
15530
15531 case MEM:
15532 {
15533 rtx new_rtl = avoid_constant_pool_reference (rtl);
15534 if (new_rtl != rtl)
15535 {
15536 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15537 initialized);
15538 if (mem_loc_result != NULL)
15539 return mem_loc_result;
15540 }
15541 }
15542 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15543 get_address_mode (rtl), mode,
15544 VAR_INIT_STATUS_INITIALIZED);
15545 if (mem_loc_result == NULL)
15546 mem_loc_result = tls_mem_loc_descriptor (rtl);
15547 if (mem_loc_result != NULL)
15548 {
15549 if (!is_a <scalar_int_mode> (mode, &int_mode)
15550 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15551 {
15552 dw_die_ref type_die;
15553 dw_loc_descr_ref deref;
15554 HOST_WIDE_INT size;
15555
15556 if (dwarf_strict && dwarf_version < 5)
15557 return NULL;
15558 if (!GET_MODE_SIZE (mode).is_constant (&size))
15559 return NULL;
15560 type_die
15561 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15562 if (type_die == NULL)
15563 return NULL;
15564 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15565 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15566 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15567 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15568 add_loc_descr (&mem_loc_result, deref);
15569 }
15570 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15571 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15572 else
15573 add_loc_descr (&mem_loc_result,
15574 new_loc_descr (DW_OP_deref_size,
15575 GET_MODE_SIZE (int_mode), 0));
15576 }
15577 break;
15578
15579 case LO_SUM:
15580 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15581
15582 case LABEL_REF:
15583 /* Some ports can transform a symbol ref into a label ref, because
15584 the symbol ref is too far away and has to be dumped into a constant
15585 pool. */
15586 case CONST:
15587 case SYMBOL_REF:
15588 if (!is_a <scalar_int_mode> (mode, &int_mode)
15589 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15590 #ifdef POINTERS_EXTEND_UNSIGNED
15591 && (int_mode != Pmode || mem_mode == VOIDmode)
15592 #endif
15593 ))
15594 break;
15595 if (GET_CODE (rtl) == SYMBOL_REF
15596 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15597 {
15598 dw_loc_descr_ref temp;
15599
15600 /* If this is not defined, we have no way to emit the data. */
15601 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15602 break;
15603
15604 temp = new_addr_loc_descr (rtl, dtprel_true);
15605
15606 /* We check for DWARF 5 here because gdb did not implement
15607 DW_OP_form_tls_address until after 7.12. */
15608 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15609 ? DW_OP_form_tls_address
15610 : DW_OP_GNU_push_tls_address),
15611 0, 0);
15612 add_loc_descr (&mem_loc_result, temp);
15613
15614 break;
15615 }
15616
15617 if (!const_ok_for_output (rtl))
15618 {
15619 if (GET_CODE (rtl) == CONST)
15620 switch (GET_CODE (XEXP (rtl, 0)))
15621 {
15622 case NOT:
15623 op = DW_OP_not;
15624 goto try_const_unop;
15625 case NEG:
15626 op = DW_OP_neg;
15627 goto try_const_unop;
15628 try_const_unop:
15629 rtx arg;
15630 arg = XEXP (XEXP (rtl, 0), 0);
15631 if (!CONSTANT_P (arg))
15632 arg = gen_rtx_CONST (int_mode, arg);
15633 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15634 initialized);
15635 if (op0)
15636 {
15637 mem_loc_result = op0;
15638 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15639 }
15640 break;
15641 default:
15642 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15643 mem_mode, initialized);
15644 break;
15645 }
15646 break;
15647 }
15648
15649 symref:
15650 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15651 vec_safe_push (used_rtx_array, rtl);
15652 break;
15653
15654 case CONCAT:
15655 case CONCATN:
15656 case VAR_LOCATION:
15657 case DEBUG_IMPLICIT_PTR:
15658 expansion_failed (NULL_TREE, rtl,
15659 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15660 return 0;
15661
15662 case ENTRY_VALUE:
15663 if (dwarf_strict && dwarf_version < 5)
15664 return NULL;
15665 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15666 {
15667 if (!is_a <scalar_int_mode> (mode, &int_mode)
15668 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15669 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15670 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15671 else
15672 {
15673 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15674 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15675 return NULL;
15676 op0 = one_reg_loc_descriptor (dbx_regnum,
15677 VAR_INIT_STATUS_INITIALIZED);
15678 }
15679 }
15680 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15681 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15682 {
15683 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15684 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15685 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15686 return NULL;
15687 }
15688 else
15689 gcc_unreachable ();
15690 if (op0 == NULL)
15691 return NULL;
15692 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15693 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15694 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15695 break;
15696
15697 case DEBUG_PARAMETER_REF:
15698 mem_loc_result = parameter_ref_descriptor (rtl);
15699 break;
15700
15701 case PRE_MODIFY:
15702 /* Extract the PLUS expression nested inside and fall into
15703 PLUS code below. */
15704 rtl = XEXP (rtl, 1);
15705 goto plus;
15706
15707 case PRE_INC:
15708 case PRE_DEC:
15709 /* Turn these into a PLUS expression and fall into the PLUS code
15710 below. */
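      /* E.g. a (pre_inc (reg X)) addressing a 4-byte access becomes
	 (plus (reg X) (const_int 4)) here.  */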
15711 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15712 gen_int_mode (GET_CODE (rtl) == PRE_INC
15713 ? GET_MODE_UNIT_SIZE (mem_mode)
15714 : -GET_MODE_UNIT_SIZE (mem_mode),
15715 mode));
15716
15717 /* fall through */
15718
15719 case PLUS:
15720 plus:
15721 if (is_based_loc (rtl)
15722 && is_a <scalar_int_mode> (mode, &int_mode)
15723 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15724 || XEXP (rtl, 0) == arg_pointer_rtx
15725 || XEXP (rtl, 0) == frame_pointer_rtx))
15726 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15727 INTVAL (XEXP (rtl, 1)),
15728 VAR_INIT_STATUS_INITIALIZED);
15729 else
15730 {
15731 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15732 VAR_INIT_STATUS_INITIALIZED);
15733 if (mem_loc_result == 0)
15734 break;
15735
15736 if (CONST_INT_P (XEXP (rtl, 1))
15737 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15738 <= DWARF2_ADDR_SIZE))
15739 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15740 else
15741 {
15742 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15743 VAR_INIT_STATUS_INITIALIZED);
15744 if (op1 == 0)
15745 return NULL;
15746 add_loc_descr (&mem_loc_result, op1);
15747 add_loc_descr (&mem_loc_result,
15748 new_loc_descr (DW_OP_plus, 0, 0));
15749 }
15750 }
15751 break;
15752
15753 /* If a pseudo-reg is optimized away, it is possible for it to
15754 be replaced with a MEM containing a multiply or shift. */
15755 case MINUS:
15756 op = DW_OP_minus;
15757 goto do_binop;
15758
15759 case MULT:
15760 op = DW_OP_mul;
15761 goto do_binop;
15762
15763 case DIV:
15764 if ((!dwarf_strict || dwarf_version >= 5)
15765 && is_a <scalar_int_mode> (mode, &int_mode)
15766 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15767 {
15768 mem_loc_result = typed_binop (DW_OP_div, rtl,
15769 base_type_for_mode (mode, 0),
15770 int_mode, mem_mode);
15771 break;
15772 }
15773 op = DW_OP_div;
15774 goto do_binop;
15775
15776 case UMOD:
15777 op = DW_OP_mod;
15778 goto do_binop;
15779
15780 case ASHIFT:
15781 op = DW_OP_shl;
15782 goto do_shift;
15783
15784 case ASHIFTRT:
15785 op = DW_OP_shra;
15786 goto do_shift;
15787
15788 case LSHIFTRT:
15789 op = DW_OP_shr;
15790 goto do_shift;
15791
15792 do_shift:
15793 if (!is_a <scalar_int_mode> (mode, &int_mode))
15794 break;
15795 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15796 VAR_INIT_STATUS_INITIALIZED);
15797 {
15798 rtx rtlop1 = XEXP (rtl, 1);
15799 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15800 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15801 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15802 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15803 VAR_INIT_STATUS_INITIALIZED);
15804 }
15805
15806 if (op0 == 0 || op1 == 0)
15807 break;
15808
15809 mem_loc_result = op0;
15810 add_loc_descr (&mem_loc_result, op1);
15811 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15812 break;
15813
15814 case AND:
15815 op = DW_OP_and;
15816 goto do_binop;
15817
15818 case IOR:
15819 op = DW_OP_or;
15820 goto do_binop;
15821
15822 case XOR:
15823 op = DW_OP_xor;
15824 goto do_binop;
15825
15826 do_binop:
15827 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15828 VAR_INIT_STATUS_INITIALIZED);
15829 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15830 VAR_INIT_STATUS_INITIALIZED);
15831
15832 if (op0 == 0 || op1 == 0)
15833 break;
15834
15835 mem_loc_result = op0;
15836 add_loc_descr (&mem_loc_result, op1);
15837 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15838 break;
15839
15840 case MOD:
15841 if ((!dwarf_strict || dwarf_version >= 5)
15842 && is_a <scalar_int_mode> (mode, &int_mode)
15843 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15844 {
15845 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15846 base_type_for_mode (mode, 0),
15847 int_mode, mem_mode);
15848 break;
15849 }
15850
15851 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15852 VAR_INIT_STATUS_INITIALIZED);
15853 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15854 VAR_INIT_STATUS_INITIALIZED);
15855
15856 if (op0 == 0 || op1 == 0)
15857 break;
15858
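      /* With op0 and op1 on the stack, the sequence DW_OP_over, DW_OP_over,
	 DW_OP_div, DW_OP_mul, DW_OP_minus below leaves
	 op0 - (op0 / op1) * op1, i.e. the modulus, using only generic
	 stack operations.  */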
15859 mem_loc_result = op0;
15860 add_loc_descr (&mem_loc_result, op1);
15861 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15862 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15863 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15864 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15865 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15866 break;
15867
15868 case UDIV:
15869 if ((!dwarf_strict || dwarf_version >= 5)
15870 && is_a <scalar_int_mode> (mode, &int_mode))
15871 {
15872 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15873 {
15874 op = DW_OP_div;
15875 goto do_binop;
15876 }
15877 mem_loc_result = typed_binop (DW_OP_div, rtl,
15878 base_type_for_mode (int_mode, 1),
15879 int_mode, mem_mode);
15880 }
15881 break;
15882
15883 case NOT:
15884 op = DW_OP_not;
15885 goto do_unop;
15886
15887 case ABS:
15888 op = DW_OP_abs;
15889 goto do_unop;
15890
15891 case NEG:
15892 op = DW_OP_neg;
15893 goto do_unop;
15894
15895 do_unop:
15896 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15897 VAR_INIT_STATUS_INITIALIZED);
15898
15899 if (op0 == 0)
15900 break;
15901
15902 mem_loc_result = op0;
15903 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15904 break;
15905
15906 case CONST_INT:
15907 if (!is_a <scalar_int_mode> (mode, &int_mode)
15908 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15909 #ifdef POINTERS_EXTEND_UNSIGNED
15910 || (int_mode == Pmode
15911 && mem_mode != VOIDmode
15912 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15913 #endif
15914 )
15915 {
15916 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15917 break;
15918 }
15919 if ((!dwarf_strict || dwarf_version >= 5)
15920 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15921 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15922 {
15923 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15924 scalar_int_mode amode;
15925 if (type_die == NULL)
15926 return NULL;
15927 if (INTVAL (rtl) >= 0
15928 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15929 .exists (&amode))
15930 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15931 /* const DW_OP_convert <XXX> vs.
15932 DW_OP_const_type <XXX, 1, const>. */
15933 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15934 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15935 {
15936 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15937 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15938 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15939 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15940 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15941 add_loc_descr (&mem_loc_result, op0);
15942 return mem_loc_result;
15943 }
15944 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15945 INTVAL (rtl));
15946 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15947 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15948 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15949 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15950 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15951 else
15952 {
15953 mem_loc_result->dw_loc_oprnd2.val_class
15954 = dw_val_class_const_double;
15955 mem_loc_result->dw_loc_oprnd2.v.val_double
15956 = double_int::from_shwi (INTVAL (rtl));
15957 }
15958 }
15959 break;
15960
15961 case CONST_DOUBLE:
15962 if (!dwarf_strict || dwarf_version >= 5)
15963 {
15964 dw_die_ref type_die;
15965
15966 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15967 CONST_DOUBLE rtx could represent either a large integer
15968 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15969 the value is always a floating point constant.
15970
15971 When it is an integer, a CONST_DOUBLE is used whenever
15972 the constant requires 2 HWIs to be adequately represented.
15973 We output CONST_DOUBLEs as blocks. */
15974 if (mode == VOIDmode
15975 || (GET_MODE (rtl) == VOIDmode
15976 && maybe_ne (GET_MODE_BITSIZE (mode),
15977 HOST_BITS_PER_DOUBLE_INT)))
15978 break;
15979 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15980 if (type_die == NULL)
15981 return NULL;
15982 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15983 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15984 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15985 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15986 #if TARGET_SUPPORTS_WIDE_INT == 0
15987 if (!SCALAR_FLOAT_MODE_P (mode))
15988 {
15989 mem_loc_result->dw_loc_oprnd2.val_class
15990 = dw_val_class_const_double;
15991 mem_loc_result->dw_loc_oprnd2.v.val_double
15992 = rtx_to_double_int (rtl);
15993 }
15994 else
15995 #endif
15996 {
15997 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
15998 unsigned int length = GET_MODE_SIZE (float_mode);
15999 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16000
16001 insert_float (rtl, array);
16002 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16003 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16004 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16005 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16006 }
16007 }
16008 break;
16009
16010 case CONST_WIDE_INT:
16011 if (!dwarf_strict || dwarf_version >= 5)
16012 {
16013 dw_die_ref type_die;
16014
16015 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16016 if (type_die == NULL)
16017 return NULL;
16018 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16019 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16020 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16021 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16022 mem_loc_result->dw_loc_oprnd2.val_class
16023 = dw_val_class_wide_int;
16024 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16025 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16026 }
16027 break;
16028
16029 case CONST_POLY_INT:
16030 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16031 break;
16032
16033 case EQ:
16034 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16035 break;
16036
16037 case GE:
16038 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16039 break;
16040
16041 case GT:
16042 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16043 break;
16044
16045 case LE:
16046 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16047 break;
16048
16049 case LT:
16050 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16051 break;
16052
16053 case NE:
16054 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16055 break;
16056
16057 case GEU:
16058 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16059 break;
16060
16061 case GTU:
16062 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16063 break;
16064
16065 case LEU:
16066 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16067 break;
16068
16069 case LTU:
16070 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16071 break;
16072
16073 case UMIN:
16074 case UMAX:
16075 if (!SCALAR_INT_MODE_P (mode))
16076 break;
16077 /* FALLTHRU */
16078 case SMIN:
16079 case SMAX:
16080 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16081 break;
16082
16083 case ZERO_EXTRACT:
16084 case SIGN_EXTRACT:
16085 if (CONST_INT_P (XEXP (rtl, 1))
16086 && CONST_INT_P (XEXP (rtl, 2))
16087 && is_a <scalar_int_mode> (mode, &int_mode)
16088 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16089 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16090 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16091 && ((unsigned) INTVAL (XEXP (rtl, 1))
16092 + (unsigned) INTVAL (XEXP (rtl, 2))
16093 <= GET_MODE_BITSIZE (int_mode)))
16094 {
16095 int shift, size;
16096 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16097 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16098 if (op0 == 0)
16099 break;
16100 if (GET_CODE (rtl) == SIGN_EXTRACT)
16101 op = DW_OP_shra;
16102 else
16103 op = DW_OP_shr;
16104 mem_loc_result = op0;
16105 size = INTVAL (XEXP (rtl, 1));
16106 shift = INTVAL (XEXP (rtl, 2));
16107 if (BITS_BIG_ENDIAN)
16108 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
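	  /* Shift the field to the most significant end of the
	     DWARF2_ADDR_SIZE-byte stack value with DW_OP_shl, then shift it
	     back down with DW_OP_shr or DW_OP_shra, which also performs the
	     zero or sign extension.  */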
16109 if (shift + size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16110 {
16111 add_loc_descr (&mem_loc_result,
16112 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT
16113 - shift - size));
16114 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16115 }
16116 if (size != (int) DWARF2_ADDR_SIZE * BITS_PER_UNIT)
16117 {
16118 add_loc_descr (&mem_loc_result,
16119 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT - size));
16120 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16121 }
16122 }
16123 break;
16124
16125 case IF_THEN_ELSE:
16126 {
16127 dw_loc_descr_ref op2, bra_node, drop_node;
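	/* Push the two result candidates and then the condition; DW_OP_bra
	   skips the DW_OP_swap when the condition is nonzero, so the final
	   DW_OP_drop leaves op1 on the stack if the condition holds and op2
	   otherwise.  */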
16128 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16129 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16130 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16131 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16132 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16133 VAR_INIT_STATUS_INITIALIZED);
16134 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16135 VAR_INIT_STATUS_INITIALIZED);
16136 if (op0 == NULL || op1 == NULL || op2 == NULL)
16137 break;
16138
16139 mem_loc_result = op1;
16140 add_loc_descr (&mem_loc_result, op2);
16141 add_loc_descr (&mem_loc_result, op0);
16142 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16143 add_loc_descr (&mem_loc_result, bra_node);
16144 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16145 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16146 add_loc_descr (&mem_loc_result, drop_node);
16147 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16148 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16149 }
16150 break;
16151
16152 case FLOAT_EXTEND:
16153 case FLOAT_TRUNCATE:
16154 case FLOAT:
16155 case UNSIGNED_FLOAT:
16156 case FIX:
16157 case UNSIGNED_FIX:
16158 if (!dwarf_strict || dwarf_version >= 5)
16159 {
16160 dw_die_ref type_die;
16161 dw_loc_descr_ref cvt;
16162
16163 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16164 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16165 if (op0 == NULL)
16166 break;
16167 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16168 && (GET_CODE (rtl) == FLOAT
16169 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16170 {
16171 type_die = base_type_for_mode (int_mode,
16172 GET_CODE (rtl) == UNSIGNED_FLOAT);
16173 if (type_die == NULL)
16174 break;
16175 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16176 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16177 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16178 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16179 add_loc_descr (&op0, cvt);
16180 }
16181 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16182 if (type_die == NULL)
16183 break;
16184 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16185 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16186 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16187 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16188 add_loc_descr (&op0, cvt);
16189 if (is_a <scalar_int_mode> (mode, &int_mode)
16190 && (GET_CODE (rtl) == FIX
16191 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16192 {
16193 op0 = convert_descriptor_to_mode (int_mode, op0);
16194 if (op0 == NULL)
16195 break;
16196 }
16197 mem_loc_result = op0;
16198 }
16199 break;
16200
16201 case CLZ:
16202 case CTZ:
16203 case FFS:
16204 if (is_a <scalar_int_mode> (mode, &int_mode))
16205 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16206 break;
16207
16208 case POPCOUNT:
16209 case PARITY:
16210 if (is_a <scalar_int_mode> (mode, &int_mode))
16211 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16212 break;
16213
16214 case BSWAP:
16215 if (is_a <scalar_int_mode> (mode, &int_mode))
16216 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16217 break;
16218
16219 case ROTATE:
16220 case ROTATERT:
16221 if (is_a <scalar_int_mode> (mode, &int_mode))
16222 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16223 break;
16224
16225 case COMPARE:
16226 /* In theory, we could implement the above. */
16227 /* DWARF cannot represent the unsigned compare operations
16228 natively. */
16229 case SS_MULT:
16230 case US_MULT:
16231 case SS_DIV:
16232 case US_DIV:
16233 case SS_PLUS:
16234 case US_PLUS:
16235 case SS_MINUS:
16236 case US_MINUS:
16237 case SS_NEG:
16238 case US_NEG:
16239 case SS_ABS:
16240 case SS_ASHIFT:
16241 case US_ASHIFT:
16242 case SS_TRUNCATE:
16243 case US_TRUNCATE:
16244 case UNORDERED:
16245 case ORDERED:
16246 case UNEQ:
16247 case UNGE:
16248 case UNGT:
16249 case UNLE:
16250 case UNLT:
16251 case LTGT:
16252 case FRACT_CONVERT:
16253 case UNSIGNED_FRACT_CONVERT:
16254 case SAT_FRACT:
16255 case UNSIGNED_SAT_FRACT:
16256 case SQRT:
16257 case ASM_OPERANDS:
16258 case VEC_MERGE:
16259 case VEC_SELECT:
16260 case VEC_CONCAT:
16261 case VEC_DUPLICATE:
16262 case VEC_SERIES:
16263 case UNSPEC:
16264 case HIGH:
16265 case FMA:
16266 case STRICT_LOW_PART:
16267 case CONST_VECTOR:
16268 case CONST_FIXED:
16269 case CLRSB:
16270 case CLOBBER:
16271 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16272 can't express it in the debug info. This can happen e.g. with some
16273 TLS UNSPECs. */
16274 break;
16275
16276 case CONST_STRING:
16277 resolve_one_addr (&rtl);
16278 goto symref;
16279
16280 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16281 the expression. An UNSPEC rtx represents a raw DWARF operation, for
16282 which new_loc_descr is called to build the operation directly.
16283 Otherwise mem_loc_descriptor is called recursively. */
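/* For example, an UNSPEC whose XINT is DW_OP_plus_uconst and whose two
   operands are (const_int 8) and (const_int 0) is emitted directly as
   DW_OP_plus_uconst 8.  */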
16284 case PARALLEL:
16285 {
16286 int index = 0;
16287 dw_loc_descr_ref exp_result = NULL;
16288
16289 for (; index < XVECLEN (rtl, 0); index++)
16290 {
16291 rtx elem = XVECEXP (rtl, 0, index);
16292 if (GET_CODE (elem) == UNSPEC)
16293 {
16294 /* Each DWARF operation UNSPEC contains two operands; if
16295 one operand is not used for the operation, const0_rtx is
16296 passed. */
16297 gcc_assert (XVECLEN (elem, 0) == 2);
16298
16299 HOST_WIDE_INT dw_op = XINT (elem, 1);
16300 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16301 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16302 exp_result
16303 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16304 oprnd2);
16305 }
16306 else
16307 exp_result
16308 = mem_loc_descriptor (elem, mode, mem_mode,
16309 VAR_INIT_STATUS_INITIALIZED);
16310
16311 if (!mem_loc_result)
16312 mem_loc_result = exp_result;
16313 else
16314 add_loc_descr (&mem_loc_result, exp_result);
16315 }
16316
16317 break;
16318 }
16319
16320 default:
16321 if (flag_checking)
16322 {
16323 print_rtl (stderr, rtl);
16324 gcc_unreachable ();
16325 }
16326 break;
16327 }
16328
16329 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16330 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16331
16332 return mem_loc_result;
16333 }
16334
16335 /* Return a descriptor that describes the concatenation of two locations.
16336 This is typically a complex variable. */
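/* For instance, for a complex value held in two registers the result has the
   form DW_OP_regN, DW_OP_piece <size0>, DW_OP_regM, DW_OP_piece <size1>.  */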
16337
16338 static dw_loc_descr_ref
16339 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16340 {
16341 /* At present we only track constant-sized pieces. */
16342 unsigned int size0, size1;
16343 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16344 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16345 return 0;
16346
16347 dw_loc_descr_ref cc_loc_result = NULL;
16348 dw_loc_descr_ref x0_ref
16349 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16350 dw_loc_descr_ref x1_ref
16351 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16352
16353 if (x0_ref == 0 || x1_ref == 0)
16354 return 0;
16355
16356 cc_loc_result = x0_ref;
16357 add_loc_descr_op_piece (&cc_loc_result, size0);
16358
16359 add_loc_descr (&cc_loc_result, x1_ref);
16360 add_loc_descr_op_piece (&cc_loc_result, size1);
16361
16362 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16363 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16364
16365 return cc_loc_result;
16366 }
16367
16368 /* Return a descriptor that describes the concatenation of N
16369 locations. */
16370
16371 static dw_loc_descr_ref
16372 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16373 {
16374 unsigned int i;
16375 dw_loc_descr_ref cc_loc_result = NULL;
16376 unsigned int n = XVECLEN (concatn, 0);
16377 unsigned int size;
16378
16379 for (i = 0; i < n; ++i)
16380 {
16381 dw_loc_descr_ref ref;
16382 rtx x = XVECEXP (concatn, 0, i);
16383
16384 /* At present we only track constant-sized pieces. */
16385 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16386 return NULL;
16387
16388 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16389 if (ref == NULL)
16390 return NULL;
16391
16392 add_loc_descr (&cc_loc_result, ref);
16393 add_loc_descr_op_piece (&cc_loc_result, size);
16394 }
16395
16396 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16397 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16398
16399 return cc_loc_result;
16400 }
16401
16402 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16403 for DEBUG_IMPLICIT_PTR RTL. */
16404
16405 static dw_loc_descr_ref
16406 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16407 {
16408 dw_loc_descr_ref ret;
16409 dw_die_ref ref;
16410
16411 if (dwarf_strict && dwarf_version < 5)
16412 return NULL;
16413 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16414 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16415 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16416 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16417 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16418 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16419 if (ref)
16420 {
16421 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16422 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16423 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16424 }
16425 else
16426 {
16427 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16428 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16429 }
16430 return ret;
16431 }
16432
16433 /* Output a proper Dwarf location descriptor for a variable or parameter
16434 which is either allocated in a register or in a memory location. For a
16435 register, we just generate an OP_REG and the register number. For a
16436 memory location we provide a Dwarf postfix expression describing how to
16437 generate the (dynamic) address of the object onto the address stack.
16438
16439 MODE is mode of the decl if this loc_descriptor is going to be used in
16440 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16441 allowed, VOIDmode otherwise.
16442
16443 If we don't know how to describe it, return 0. */
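/* For example, on x86-64 a value living in %rbx is described by DW_OP_reg3,
   while a value in a stack slot is typically described through
   mem_loc_descriptor as DW_OP_fbreg <offset> or DW_OP_breg<N> <offset>.  */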
16444
16445 static dw_loc_descr_ref
16446 loc_descriptor (rtx rtl, machine_mode mode,
16447 enum var_init_status initialized)
16448 {
16449 dw_loc_descr_ref loc_result = NULL;
16450 scalar_int_mode int_mode;
16451
16452 switch (GET_CODE (rtl))
16453 {
16454 case SUBREG:
16455 /* The case of a subreg may arise when we have a local (register)
16456 variable or a formal (register) parameter which doesn't quite fill
16457 up an entire register. For now, just assume that it is
16458 legitimate to make the Dwarf info refer to the whole register which
16459 contains the given subreg. */
16460 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16461 loc_result = loc_descriptor (SUBREG_REG (rtl),
16462 GET_MODE (SUBREG_REG (rtl)), initialized);
16463 else
16464 goto do_default;
16465 break;
16466
16467 case REG:
16468 loc_result = reg_loc_descriptor (rtl, initialized);
16469 break;
16470
16471 case MEM:
16472 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16473 GET_MODE (rtl), initialized);
16474 if (loc_result == NULL)
16475 loc_result = tls_mem_loc_descriptor (rtl);
16476 if (loc_result == NULL)
16477 {
16478 rtx new_rtl = avoid_constant_pool_reference (rtl);
16479 if (new_rtl != rtl)
16480 loc_result = loc_descriptor (new_rtl, mode, initialized);
16481 }
16482 break;
16483
16484 case CONCAT:
16485 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16486 initialized);
16487 break;
16488
16489 case CONCATN:
16490 loc_result = concatn_loc_descriptor (rtl, initialized);
16491 break;
16492
16493 case VAR_LOCATION:
16494 /* Single part. */
16495 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16496 {
16497 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16498 if (GET_CODE (loc) == EXPR_LIST)
16499 loc = XEXP (loc, 0);
16500 loc_result = loc_descriptor (loc, mode, initialized);
16501 break;
16502 }
16503
16504 rtl = XEXP (rtl, 1);
16505 /* FALLTHRU */
16506
16507 case PARALLEL:
16508 {
16509 rtvec par_elems = XVEC (rtl, 0);
16510 int num_elem = GET_NUM_ELEM (par_elems);
16511 machine_mode mode;
16512 int i, size;
16513
16514 /* Create the first one, so we have something to add to. */
16515 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16516 VOIDmode, initialized);
16517 if (loc_result == NULL)
16518 return NULL;
16519 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16520 /* At present we only track constant-sized pieces. */
16521 if (!GET_MODE_SIZE (mode).is_constant (&size))
16522 return NULL;
16523 add_loc_descr_op_piece (&loc_result, size);
16524 for (i = 1; i < num_elem; i++)
16525 {
16526 dw_loc_descr_ref temp;
16527
16528 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16529 VOIDmode, initialized);
16530 if (temp == NULL)
16531 return NULL;
16532 add_loc_descr (&loc_result, temp);
16533 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16534 /* At present we only track constant-sized pieces. */
16535 if (!GET_MODE_SIZE (mode).is_constant (&size))
16536 return NULL;
16537 add_loc_descr_op_piece (&loc_result, size);
16538 }
16539 }
16540 break;
16541
16542 case CONST_INT:
16543 if (mode != VOIDmode && mode != BLKmode)
16544 {
16545 int_mode = as_a <scalar_int_mode> (mode);
16546 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16547 INTVAL (rtl));
16548 }
16549 break;
16550
16551 case CONST_DOUBLE:
16552 if (mode == VOIDmode)
16553 mode = GET_MODE (rtl);
16554
16555 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16556 {
16557 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16558
16559 /* Note that a CONST_DOUBLE rtx could represent either an integer
16560 or a floating-point constant. A CONST_DOUBLE is used whenever
16561 the constant requires more than one word in order to be
16562 adequately represented. We output CONST_DOUBLEs as blocks. */
16563 scalar_mode smode = as_a <scalar_mode> (mode);
16564 loc_result = new_loc_descr (DW_OP_implicit_value,
16565 GET_MODE_SIZE (smode), 0);
16566 #if TARGET_SUPPORTS_WIDE_INT == 0
16567 if (!SCALAR_FLOAT_MODE_P (smode))
16568 {
16569 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16570 loc_result->dw_loc_oprnd2.v.val_double
16571 = rtx_to_double_int (rtl);
16572 }
16573 else
16574 #endif
16575 {
16576 unsigned int length = GET_MODE_SIZE (smode);
16577 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16578
16579 insert_float (rtl, array);
16580 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16581 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16582 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16583 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16584 }
16585 }
16586 break;
16587
16588 case CONST_WIDE_INT:
16589 if (mode == VOIDmode)
16590 mode = GET_MODE (rtl);
16591
16592 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16593 {
16594 int_mode = as_a <scalar_int_mode> (mode);
16595 loc_result = new_loc_descr (DW_OP_implicit_value,
16596 GET_MODE_SIZE (int_mode), 0);
16597 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16598 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16599 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16600 }
16601 break;
16602
16603 case CONST_VECTOR:
16604 if (mode == VOIDmode)
16605 mode = GET_MODE (rtl);
16606
16607 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16608 {
16609 unsigned int length;
16610 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16611 return NULL;
16612
16613 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16614 unsigned char *array
16615 = ggc_vec_alloc<unsigned char> (length * elt_size);
16616 unsigned int i;
16617 unsigned char *p;
16618 machine_mode imode = GET_MODE_INNER (mode);
16619
16620 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16621 switch (GET_MODE_CLASS (mode))
16622 {
16623 case MODE_VECTOR_INT:
16624 for (i = 0, p = array; i < length; i++, p += elt_size)
16625 {
16626 rtx elt = CONST_VECTOR_ELT (rtl, i);
16627 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16628 }
16629 break;
16630
16631 case MODE_VECTOR_FLOAT:
16632 for (i = 0, p = array; i < length; i++, p += elt_size)
16633 {
16634 rtx elt = CONST_VECTOR_ELT (rtl, i);
16635 insert_float (elt, p);
16636 }
16637 break;
16638
16639 default:
16640 gcc_unreachable ();
16641 }
16642
16643 loc_result = new_loc_descr (DW_OP_implicit_value,
16644 length * elt_size, 0);
16645 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16646 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16647 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16648 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16649 }
16650 break;
16651
16652 case CONST:
16653 if (mode == VOIDmode
16654 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16655 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16656 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16657 {
16658 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16659 break;
16660 }
16661 /* FALLTHROUGH */
16662 case SYMBOL_REF:
16663 if (!const_ok_for_output (rtl))
16664 break;
16665 /* FALLTHROUGH */
16666 case LABEL_REF:
16667 if (is_a <scalar_int_mode> (mode, &int_mode)
16668 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16669 && (dwarf_version >= 4 || !dwarf_strict))
16670 {
16671 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16672 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16673 vec_safe_push (used_rtx_array, rtl);
16674 }
16675 break;
16676
16677 case DEBUG_IMPLICIT_PTR:
16678 loc_result = implicit_ptr_descriptor (rtl, 0);
16679 break;
16680
16681 case PLUS:
16682 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16683 && CONST_INT_P (XEXP (rtl, 1)))
16684 {
16685 loc_result
16686 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16687 break;
16688 }
16689 /* FALLTHRU */
16690 do_default:
16691 default:
16692 if ((is_a <scalar_int_mode> (mode, &int_mode)
16693 && GET_MODE (rtl) == int_mode
16694 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16695 && dwarf_version >= 4)
16696 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16697 {
16698 /* Value expression. */
16699 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16700 if (loc_result)
16701 add_loc_descr (&loc_result,
16702 new_loc_descr (DW_OP_stack_value, 0, 0));
16703 }
16704 break;
16705 }
16706
16707 return loc_result;
16708 }
16709
16710 /* We need to figure out what section we should use as the base for the
16711 address ranges where a given location is valid.
16712 1. If this particular DECL has a section associated with it, use that.
16713 2. If this function has a section associated with it, use that.
16714 3. Otherwise, use the text section.
16715 XXX: If you split a variable across multiple sections, we won't notice. */
16716
16717 static const char *
16718 secname_for_decl (const_tree decl)
16719 {
16720 const char *secname;
16721
16722 if (VAR_OR_FUNCTION_DECL_P (decl)
16723 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16724 && DECL_SECTION_NAME (decl))
16725 secname = DECL_SECTION_NAME (decl);
16726 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16727 secname = DECL_SECTION_NAME (current_function_decl);
16728 else if (cfun && in_cold_section_p)
16729 secname = crtl->subsections.cold_section_label;
16730 else
16731 secname = text_section_label;
16732
16733 return secname;
16734 }
16735
16736 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16737
16738 static bool
16739 decl_by_reference_p (tree decl)
16740 {
16741 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16742 || VAR_P (decl))
16743 && DECL_BY_REFERENCE (decl));
16744 }
16745
16746 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16747 for VARLOC. */
16748
16749 static dw_loc_descr_ref
16750 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16751 enum var_init_status initialized)
16752 {
16753 int have_address = 0;
16754 dw_loc_descr_ref descr;
16755 machine_mode mode;
16756
16757 if (want_address != 2)
16758 {
16759 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16760 /* Single part. */
16761 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16762 {
16763 varloc = PAT_VAR_LOCATION_LOC (varloc);
16764 if (GET_CODE (varloc) == EXPR_LIST)
16765 varloc = XEXP (varloc, 0);
16766 mode = GET_MODE (varloc);
16767 if (MEM_P (varloc))
16768 {
16769 rtx addr = XEXP (varloc, 0);
16770 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16771 mode, initialized);
16772 if (descr)
16773 have_address = 1;
16774 else
16775 {
16776 rtx x = avoid_constant_pool_reference (varloc);
16777 if (x != varloc)
16778 descr = mem_loc_descriptor (x, mode, VOIDmode,
16779 initialized);
16780 }
16781 }
16782 else
16783 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16784 }
16785 else
16786 return 0;
16787 }
16788 else
16789 {
16790 if (GET_CODE (varloc) == VAR_LOCATION)
16791 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16792 else
16793 mode = DECL_MODE (loc);
16794 descr = loc_descriptor (varloc, mode, initialized);
16795 have_address = 1;
16796 }
16797
16798 if (!descr)
16799 return 0;
16800
16801 if (want_address == 2 && !have_address
16802 && (dwarf_version >= 4 || !dwarf_strict))
16803 {
16804 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16805 {
16806 expansion_failed (loc, NULL_RTX,
16807 "DWARF address size mismatch");
16808 return 0;
16809 }
16810 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16811 have_address = 1;
16812 }
16813 /* Show if we can't fill the request for an address. */
16814 if (want_address && !have_address)
16815 {
16816 expansion_failed (loc, NULL_RTX,
16817 "Want address and only have value");
16818 return 0;
16819 }
16820
16821 /* If we've got an address and don't want one, dereference. */
16822 if (!want_address && have_address)
16823 {
16824 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16825 enum dwarf_location_atom op;
16826
16827 if (size > DWARF2_ADDR_SIZE || size == -1)
16828 {
16829 expansion_failed (loc, NULL_RTX,
16830 "DWARF address size mismatch");
16831 return 0;
16832 }
16833 else if (size == DWARF2_ADDR_SIZE)
16834 op = DW_OP_deref;
16835 else
16836 op = DW_OP_deref_size;
16837
16838 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16839 }
16840
16841 return descr;
16842 }
16843
16844 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16845 if it is not possible. */
16846
16847 static dw_loc_descr_ref
16848 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16849 {
16850 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16851 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16852 else if (dwarf_version >= 3 || !dwarf_strict)
16853 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16854 else
16855 return NULL;
16856 }
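/* For example, a 16-bit piece at bit offset 0 becomes DW_OP_piece 2, while a
   12-bit piece needs DW_OP_bit_piece 12, <offset>, which is emitted only for
   DWARF 3 and later or when not in strict DWARF mode.  */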
16857
16858 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16859 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16860
16861 static dw_loc_descr_ref
16862 dw_sra_loc_expr (tree decl, rtx loc)
16863 {
16864 rtx p;
16865 unsigned HOST_WIDE_INT padsize = 0;
16866 dw_loc_descr_ref descr, *descr_tail;
16867 unsigned HOST_WIDE_INT decl_size;
16868 rtx varloc;
16869 enum var_init_status initialized;
16870
16871 if (DECL_SIZE (decl) == NULL
16872 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16873 return NULL;
16874
16875 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16876 descr = NULL;
16877 descr_tail = &descr;
16878
16879 for (p = loc; p; p = XEXP (p, 1))
16880 {
16881 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16882 rtx loc_note = *decl_piece_varloc_ptr (p);
16883 dw_loc_descr_ref cur_descr;
16884 dw_loc_descr_ref *tail, last = NULL;
16885 unsigned HOST_WIDE_INT opsize = 0;
16886
16887 if (loc_note == NULL_RTX
16888 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16889 {
16890 padsize += bitsize;
16891 continue;
16892 }
16893 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16894 varloc = NOTE_VAR_LOCATION (loc_note);
16895 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16896 if (cur_descr == NULL)
16897 {
16898 padsize += bitsize;
16899 continue;
16900 }
16901
16902 /* Check that cur_descr either doesn't use
16903 DW_OP_*piece operations, or their sum is equal
16904 to bitsize. Otherwise we can't embed it. */
16905 for (tail = &cur_descr; *tail != NULL;
16906 tail = &(*tail)->dw_loc_next)
16907 if ((*tail)->dw_loc_opc == DW_OP_piece)
16908 {
16909 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16910 * BITS_PER_UNIT;
16911 last = *tail;
16912 }
16913 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16914 {
16915 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16916 last = *tail;
16917 }
16918
16919 if (last != NULL && opsize != bitsize)
16920 {
16921 padsize += bitsize;
16922 /* Discard the current piece of the descriptor and release any
16923 addr_table entries it uses. */
16924 remove_loc_list_addr_table_entries (cur_descr);
16925 continue;
16926 }
16927
16928 /* If there is a hole, add DW_OP_*piece after an empty DWARF
16929 expression, which means that those bits are optimized out. */
16930 if (padsize)
16931 {
16932 if (padsize > decl_size)
16933 {
16934 remove_loc_list_addr_table_entries (cur_descr);
16935 goto discard_descr;
16936 }
16937 decl_size -= padsize;
16938 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16939 if (*descr_tail == NULL)
16940 {
16941 remove_loc_list_addr_table_entries (cur_descr);
16942 goto discard_descr;
16943 }
16944 descr_tail = &(*descr_tail)->dw_loc_next;
16945 padsize = 0;
16946 }
16947 *descr_tail = cur_descr;
16948 descr_tail = tail;
16949 if (bitsize > decl_size)
16950 goto discard_descr;
16951 decl_size -= bitsize;
16952 if (last == NULL)
16953 {
16954 HOST_WIDE_INT offset = 0;
16955 if (GET_CODE (varloc) == VAR_LOCATION
16956 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16957 {
16958 varloc = PAT_VAR_LOCATION_LOC (varloc);
16959 if (GET_CODE (varloc) == EXPR_LIST)
16960 varloc = XEXP (varloc, 0);
16961 }
16962 do
16963 {
16964 if (GET_CODE (varloc) == CONST
16965 || GET_CODE (varloc) == SIGN_EXTEND
16966 || GET_CODE (varloc) == ZERO_EXTEND)
16967 varloc = XEXP (varloc, 0);
16968 else if (GET_CODE (varloc) == SUBREG)
16969 varloc = SUBREG_REG (varloc);
16970 else
16971 break;
16972 }
16973 while (1);
16974 /* The DW_OP_bit_piece offset should be zero for register
16975 or implicit location descriptions and empty location
16976 descriptions, but for memory addresses it needs big endian
16977 adjustment. */
16978 if (MEM_P (varloc))
16979 {
16980 unsigned HOST_WIDE_INT memsize;
16981 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16982 goto discard_descr;
16983 memsize *= BITS_PER_UNIT;
16984 if (memsize != bitsize)
16985 {
16986 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16987 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16988 goto discard_descr;
16989 if (memsize < bitsize)
16990 goto discard_descr;
16991 if (BITS_BIG_ENDIAN)
16992 offset = memsize - bitsize;
16993 }
16994 }
16995
16996 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16997 if (*descr_tail == NULL)
16998 goto discard_descr;
16999 descr_tail = &(*descr_tail)->dw_loc_next;
17000 }
17001 }
17002
17003 /* If there were any non-empty expressions, add padding till the end of
17004 the decl. */
17005 if (descr != NULL && decl_size != 0)
17006 {
17007 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17008 if (*descr_tail == NULL)
17009 goto discard_descr;
17010 }
17011 return descr;
17012
17013 discard_descr:
17014 /* Discard the descriptor and release any addr_table entries it uses. */
17015 remove_loc_list_addr_table_entries (descr);
17016 return NULL;
17017 }
17018
17019 /* Return the dwarf representation of the location list LOC_LIST of
17020 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17021 function. */
17022
17023 static dw_loc_list_ref
17024 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17025 {
17026 const char *endname, *secname;
17027 var_loc_view endview;
17028 rtx varloc;
17029 enum var_init_status initialized;
17030 struct var_loc_node *node;
17031 dw_loc_descr_ref descr;
17032 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17033 dw_loc_list_ref list = NULL;
17034 dw_loc_list_ref *listp = &list;
17035
17036 /* Now that we know what section we are using for a base,
17037 actually construct the list of locations.
17038 The first location information is what is passed to the
17039 function that creates the location list, and the remaining
17040 locations just get added on to that list.
17041 Note that we only know the start address for a location
17042 (i.e. location changes), so to build the range, we use
17043 the range [current location start, next location start].
17044 This means we have to special case the last node, and generate
17045 a range of [last location start, end of function label]. */
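  /* For example, if a variable's location changes at labels L1, L2 and L3
     in a function ending at LEND, we emit the ranges [L1, L2), [L2, L3)
     and [L3, LEND).  */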
17046
17047 if (cfun && crtl->has_bb_partition)
17048 {
17049 bool save_in_cold_section_p = in_cold_section_p;
17050 in_cold_section_p = first_function_block_is_cold;
17051 if (loc_list->last_before_switch == NULL)
17052 in_cold_section_p = !in_cold_section_p;
17053 secname = secname_for_decl (decl);
17054 in_cold_section_p = save_in_cold_section_p;
17055 }
17056 else
17057 secname = secname_for_decl (decl);
17058
17059 for (node = loc_list->first; node; node = node->next)
17060 {
17061 bool range_across_switch = false;
17062 if (GET_CODE (node->loc) == EXPR_LIST
17063 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17064 {
17065 if (GET_CODE (node->loc) == EXPR_LIST)
17066 {
17067 descr = NULL;
17068 /* This requires DW_OP_{,bit_}piece, which is not usable
17069 inside DWARF expressions. */
17070 if (want_address == 2)
17071 descr = dw_sra_loc_expr (decl, node->loc);
17072 }
17073 else
17074 {
17075 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17076 varloc = NOTE_VAR_LOCATION (node->loc);
17077 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17078 }
17079 if (descr)
17080 {
17081 /* If section switch happens in between node->label
17082 and node->next->label (or end of function) and
17083 we can't emit it as a single entry list,
17084 emit two ranges, first one ending at the end
17085 of first partition and second one starting at the
17086 beginning of second partition. */
17087 if (node == loc_list->last_before_switch
17088 && (node != loc_list->first || loc_list->first->next)
17089 && current_function_decl)
17090 {
17091 endname = cfun->fde->dw_fde_end;
17092 endview = 0;
17093 range_across_switch = true;
17094 }
17095 /* The variable has a location between NODE->LABEL and
17096 NODE->NEXT->LABEL. */
17097 else if (node->next)
17098 endname = node->next->label, endview = node->next->view;
17099 /* If the variable has a location at the last label
17100 it keeps its location until the end of function. */
17101 else if (!current_function_decl)
17102 endname = text_end_label, endview = 0;
17103 else
17104 {
17105 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17106 current_function_funcdef_no);
17107 endname = ggc_strdup (label_id);
17108 endview = 0;
17109 }
17110
17111 *listp = new_loc_list (descr, node->label, node->view,
17112 endname, endview, secname);
17113 if (TREE_CODE (decl) == PARM_DECL
17114 && node == loc_list->first
17115 && NOTE_P (node->loc)
17116 && strcmp (node->label, endname) == 0)
17117 (*listp)->force = true;
17118 listp = &(*listp)->dw_loc_next;
17119 }
17120 }
17121
17122 if (cfun
17123 && crtl->has_bb_partition
17124 && node == loc_list->last_before_switch)
17125 {
17126 bool save_in_cold_section_p = in_cold_section_p;
17127 in_cold_section_p = !first_function_block_is_cold;
17128 secname = secname_for_decl (decl);
17129 in_cold_section_p = save_in_cold_section_p;
17130 }
17131
17132 if (range_across_switch)
17133 {
17134 if (GET_CODE (node->loc) == EXPR_LIST)
17135 descr = dw_sra_loc_expr (decl, node->loc);
17136 else
17137 {
17138 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17139 varloc = NOTE_VAR_LOCATION (node->loc);
17140 descr = dw_loc_list_1 (decl, varloc, want_address,
17141 initialized);
17142 }
17143 gcc_assert (descr);
17144 /* The variable has a location between NODE->LABEL and
17145 NODE->NEXT->LABEL. */
17146 if (node->next)
17147 endname = node->next->label, endview = node->next->view;
17148 else
17149 endname = cfun->fde->dw_fde_second_end, endview = 0;
17150 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17151 endname, endview, secname);
17152 listp = &(*listp)->dw_loc_next;
17153 }
17154 }
17155
17156 /* Try to avoid the overhead of a location list by emitting a location
17157 expression instead, but only if we didn't have more than one
17158 location entry in the first place. If some entries were not
17159 representable, we don't want to pretend that a single entry that was
17160 applies to the entire scope in which the variable is
17161 available. */
17162 if (list && loc_list->first->next)
17163 gen_llsym (list);
17164 else
17165 maybe_gen_llsym (list);
17166
17167 return list;
17168 }
17169
17170 /* Return true if the loc_list has only a single element and thus can be
17171 represented as a location description. */
17172
17173 static bool
17174 single_element_loc_list_p (dw_loc_list_ref list)
17175 {
17176 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17177 return !list->ll_symbol;
17178 }
17179
17180 /* Duplicate a single element of location list. */
17181
17182 static inline dw_loc_descr_ref
17183 copy_loc_descr (dw_loc_descr_ref ref)
17184 {
17185 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17186 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17187 return copy;
17188 }
17189
17190 /* To each location in list LIST append loc descr REF. */
17191
17192 static void
17193 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17194 {
17195 dw_loc_descr_ref copy;
17196 add_loc_descr (&list->expr, ref);
17197 list = list->dw_loc_next;
17198 while (list)
17199 {
17200 copy = copy_loc_descr (ref);
17201 add_loc_descr (&list->expr, copy);
17202 while (copy->dw_loc_next)
17203 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17204 list = list->dw_loc_next;
17205 }
17206 }
17207
17208 /* To each location in list LIST prepend loc descr REF. */
17209
17210 static void
17211 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17212 {
17213 dw_loc_descr_ref copy;
17214 dw_loc_descr_ref ref_end = list->expr;
17215 add_loc_descr (&ref, list->expr);
17216 list->expr = ref;
17217 list = list->dw_loc_next;
17218 while (list)
17219 {
17220 dw_loc_descr_ref end = list->expr;
17221 list->expr = copy = copy_loc_descr (ref);
17222 while (copy->dw_loc_next != ref_end)
17223 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17224 copy->dw_loc_next = end;
17225 list = list->dw_loc_next;
17226 }
17227 }
17228
17229 /* Given two lists RET and LIST,
17230 produce a location list that is the result of adding the expression in LIST
17231 to the expression in RET at each position in the program.
17232 Might be destructive on both RET and LIST.
17233
17234 TODO: We handle only the simple cases of RET or LIST having at most one
17235 element. The general case would involve sorting the lists in program order
17236 and merging them, which will need some additional work.
17237 Adding that will improve the quality of debug info, especially for SRA-ed
17238 structures. */
17239
17240 static void
17241 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17242 {
17243 if (!list)
17244 return;
17245 if (!*ret)
17246 {
17247 *ret = list;
17248 return;
17249 }
17250 if (!list->dw_loc_next)
17251 {
17252 add_loc_descr_to_each (*ret, list->expr);
17253 return;
17254 }
17255 if (!(*ret)->dw_loc_next)
17256 {
17257 prepend_loc_descr_to_each (list, (*ret)->expr);
17258 *ret = list;
17259 return;
17260 }
17261 expansion_failed (NULL_TREE, NULL_RTX,
17262 "Don't know how to merge two non-trivial"
17263 " location lists.\n");
17264 *ret = NULL;
17265 return;
17266 }
17267
17268 /* LOC is a constant expression. Try our luck: look it up in the constant
17269 pool and return the loc_descr of its address. */
17270
17271 static dw_loc_descr_ref
17272 cst_pool_loc_descr (tree loc)
17273 {
17274 /* Get an RTL for this, if something has been emitted. */
17275 rtx rtl = lookup_constant_def (loc);
17276
17277 if (!rtl || !MEM_P (rtl))
17278 {
17279 gcc_assert (!rtl);
17280 return 0;
17281 }
17282 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17283
17284 /* TODO: We might get more coverage if we were actually delaying expansion
17285 of all expressions till the end of compilation, when constant pools are
17286 fully populated. */
17287 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17288 {
17289 expansion_failed (loc, NULL_RTX,
17290 "CST value in contant pool but not marked.");
17291 return 0;
17292 }
17293 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17294 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17295 }
17296
17297 /* Return a dw_loc_list representing the address of the addr_expr LOC
17298 by looking for an inner INDIRECT_REF expression and turning
17299 it into simple arithmetic.
17300
17301 See loc_list_from_tree for the meaning of CONTEXT. */
17302
17303 static dw_loc_list_ref
17304 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17305 loc_descr_context *context)
17306 {
17307 tree obj, offset;
17308 poly_int64 bitsize, bitpos, bytepos;
17309 machine_mode mode;
17310 int unsignedp, reversep, volatilep = 0;
17311 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17312
17313 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17314 &bitsize, &bitpos, &offset, &mode,
17315 &unsignedp, &reversep, &volatilep);
17316 STRIP_NOPS (obj);
17317 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17318 {
17319 expansion_failed (loc, NULL_RTX, "bitfield access");
17320 return 0;
17321 }
17322 if (!INDIRECT_REF_P (obj))
17323 {
17324 expansion_failed (obj,
17325 NULL_RTX, "no indirect ref in inner refrence");
17326 return 0;
17327 }
17328 if (!offset && known_eq (bitpos, 0))
17329 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17330 context);
17331 else if (toplev
17332 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17333 && (dwarf_version >= 4 || !dwarf_strict))
17334 {
17335 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17336 if (!list_ret)
17337 return 0;
17338 if (offset)
17339 {
17340 /* Variable offset. */
17341 list_ret1 = loc_list_from_tree (offset, 0, context);
17342 if (list_ret1 == 0)
17343 return 0;
17344 add_loc_list (&list_ret, list_ret1);
17345 if (!list_ret)
17346 return 0;
17347 add_loc_descr_to_each (list_ret,
17348 new_loc_descr (DW_OP_plus, 0, 0));
17349 }
17350 HOST_WIDE_INT value;
17351 if (bytepos.is_constant (&value) && value > 0)
17352 add_loc_descr_to_each (list_ret,
17353 new_loc_descr (DW_OP_plus_uconst, value, 0));
17354 else if (maybe_ne (bytepos, 0))
17355 loc_list_plus_const (list_ret, bytepos);
17356 add_loc_descr_to_each (list_ret,
17357 new_loc_descr (DW_OP_stack_value, 0, 0));
17358 }
17359 return list_ret;
17360 }
17361
17362 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17363 all operations from LOC are nops, move to the last one. Insert into NOPS all
17364 operations that are skipped. */
17365
17366 static void
17367 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17368 hash_set<dw_loc_descr_ref> &nops)
17369 {
17370 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17371 {
17372 nops.add (loc);
17373 loc = loc->dw_loc_next;
17374 }
17375 }
17376
17377 /* Helper for loc_descr_without_nops: free the location description operation
17378 P. */
17379
17380 bool
17381 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17382 {
17383 ggc_free (loc);
17384 return true;
17385 }
17386
17387 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17388 finishes LOC. */
17389
17390 static void
17391 loc_descr_without_nops (dw_loc_descr_ref &loc)
17392 {
17393 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17394 return;
17395
17396 /* Set of all DW_OP_nop operations we remove. */
17397 hash_set<dw_loc_descr_ref> nops;
17398
17399 /* First, strip all prefix NOP operations in order to keep the head of the
17400 operations list. */
17401 loc_descr_to_next_no_nop (loc, nops);
17402
17403 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17404 {
17405 /* For control flow operations: strip "prefix" nops in destination
17406 labels. */
17407 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17408 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17409 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17410 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17411
17412 /* Do the same for the operations that follow, then move to the next
17413 iteration. */
17414 if (cur->dw_loc_next != NULL)
17415 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17416 cur = cur->dw_loc_next;
17417 }
17418
17419 nops.traverse<void *, free_loc_descr> (NULL);
17420 }
17421
17422
17423 struct dwarf_procedure_info;
17424
17425 /* Helper structure for location descriptions generation. */
17426 struct loc_descr_context
17427 {
17428 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17429 NULL_TREE if DW_OP_push_object_address is invalid for this location
17430 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17431 tree context_type;
17432 /* The ..._DECL node that should be translated as a
17433 DW_OP_push_object_address operation. */
17434 tree base_decl;
17435 /* Information about the DWARF procedure we are currently generating. NULL if
17436 we are not generating a DWARF procedure. */
17437 struct dwarf_procedure_info *dpi;
17438 /* True if an integral PLACEHOLDER_EXPR stands for the first argument passed
17439 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17440 bool placeholder_arg;
17441 /* True if PLACEHOLDER_EXPR has been seen. */
17442 bool placeholder_seen;
17443 };
17444
17445 /* DWARF procedures generation
17446
17447 DWARF expressions (aka. location descriptions) are used to encode variable
17448 quantities such as sizes or offsets. Such computations can have redundant parts
17449 that can be factorized in order to reduce the size of the output debug
17450 information. This is the whole point of DWARF procedures.
17451
17452 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17453 already factorized into functions ("size functions") in order to handle very
17454 big and complex types. Such functions are quite simple: they have integral
17455 arguments, they return an integral result and their body contains only a
17456 return statement with arithmetic expressions. This is the only kind of
17457 function we are interested in translating into DWARF procedures here.
17458
17459 DWARF expressions and DWARF procedures are executed using a stack, so we have
17460 to define some calling convention for them to interact. Let's say that:
17461
17462 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17463 all arguments in reverse order (right-to-left) so that when the DWARF
17464 procedure execution starts, the first argument is the top of the stack.
17465
17466 - Then, when returning, the DWARF procedure must have consumed all arguments
17467 on the stack, must have pushed the result and touched nothing else.
17468
17469 - Each argument and the result have integral types and can each be held in a
17470 single stack slot.
17471
17472 - We call "frame offset" the number of stack slots that are "under DWARF
17473 procedure control": it includes the argument slots, the temporaries and
17474 the result slot. Thus, it is equal to the number of arguments when the
17475 procedure execution starts and must be equal to one (the result) when it
17476 returns. */
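
/* As an illustration of the convention above (a sketch only, not generated
   output): for a hypothetical size function SZ taking two arguments A and B,
   a caller would emit

     ...ops pushing B...        stack: B
     ...ops pushing A...        stack: A B
     DW_OP_call4 <DIE for SZ>   stack: SZ (A, B)

   and inside the procedure the frame offset starts at 2 (the two argument
   slots) and must be exactly 1 (the result slot) once the last operation has
   been executed. */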
17477
17478 /* Helper structure used when generating operations for a DWARF procedure. */
17479 struct dwarf_procedure_info
17480 {
17481 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17482 currently translated. */
17483 tree fndecl;
17484 /* The number of arguments FNDECL takes. */
17485 unsigned args_count;
17486 };
17487
17488 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17489 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17490 equate it to this DIE. */
17491
17492 static dw_die_ref
17493 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17494 dw_die_ref parent_die)
17495 {
17496 dw_die_ref dwarf_proc_die;
17497
17498 if ((dwarf_version < 3 && dwarf_strict)
17499 || location == NULL)
17500 return NULL;
17501
17502 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17503 if (fndecl)
17504 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17505 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17506 return dwarf_proc_die;
17507 }
17508
17509 /* Return whether TYPE is a supported type as a DWARF procedure argument
17510 type or return type (we handle only scalar types and pointer types that
17511 aren't wider than a DWARF expression evaluation stack slot). */
17512
17513 static bool
17514 is_handled_procedure_type (tree type)
17515 {
17516 return ((INTEGRAL_TYPE_P (type)
17517 || TREE_CODE (type) == OFFSET_TYPE
17518 || TREE_CODE (type) == POINTER_TYPE)
17519 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17520 }
17521
17522 /* Helper for resolve_args_picking: do the same but stop when coming across
17523 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17524 offset *before* evaluating the corresponding operation. */
17525
17526 static bool
17527 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17528 struct dwarf_procedure_info *dpi,
17529 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17530 {
17531 /* The "frame_offset" identifier is already used to name a macro... */
17532 unsigned frame_offset_ = initial_frame_offset;
17533 dw_loc_descr_ref l;
17534
17535 for (l = loc; l != NULL;)
17536 {
17537 bool existed;
17538 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17539
17540 /* If we already met this node, there is nothing to compute anymore. */
17541 if (existed)
17542 {
17543 /* Make sure that the stack size is consistent wherever the execution
17544 flow comes from. */
17545 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17546 break;
17547 }
17548 l_frame_offset = frame_offset_;
17549
17550 /* If needed, relocate the picking offset with respect to the frame
17551 offset. */
17552 if (l->frame_offset_rel)
17553 {
17554 unsigned HOST_WIDE_INT off;
17555 switch (l->dw_loc_opc)
17556 {
17557 case DW_OP_pick:
17558 off = l->dw_loc_oprnd1.v.val_unsigned;
17559 break;
17560 case DW_OP_dup:
17561 off = 0;
17562 break;
17563 case DW_OP_over:
17564 off = 1;
17565 break;
17566 default:
17567 gcc_unreachable ();
17568 }
17569 /* frame_offset_ is the size of the current stack frame, including
17570 incoming arguments. Moreover, the arguments are pushed
17571 right-to-left. Thus, in order to access the Nth argument from
17572 this operation node, the picking has to skip temporaries *plus*
17573 one stack slot per argument (0 for the first one, 1 for the second
17574 one, etc.).
17575
17576 The targeted argument number (N) is already set as the operand,
17577 and the number of temporaries can be computed with:
17578 frame_offset_ - dpi->args_count */
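/* For instance (illustrative numbers only): with dpi->args_count == 2 and
   frame_offset_ == 3, i.e. two arguments plus one temporary on the stack,
   accessing argument N == 0 yields off = 0 + 3 - 2 = 1, which is rewritten
   as DW_OP_over below. */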
17579 off += frame_offset_ - dpi->args_count;
17580
17581 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17582 if (off > 255)
17583 return false;
17584
17585 if (off == 0)
17586 {
17587 l->dw_loc_opc = DW_OP_dup;
17588 l->dw_loc_oprnd1.v.val_unsigned = 0;
17589 }
17590 else if (off == 1)
17591 {
17592 l->dw_loc_opc = DW_OP_over;
17593 l->dw_loc_oprnd1.v.val_unsigned = 0;
17594 }
17595 else
17596 {
17597 l->dw_loc_opc = DW_OP_pick;
17598 l->dw_loc_oprnd1.v.val_unsigned = off;
17599 }
17600 }
17601
17602 /* Update frame_offset according to the effect the current operation has
17603 on the stack. */
17604 switch (l->dw_loc_opc)
17605 {
17606 case DW_OP_deref:
17607 case DW_OP_swap:
17608 case DW_OP_rot:
17609 case DW_OP_abs:
17610 case DW_OP_neg:
17611 case DW_OP_not:
17612 case DW_OP_plus_uconst:
17613 case DW_OP_skip:
17614 case DW_OP_reg0:
17615 case DW_OP_reg1:
17616 case DW_OP_reg2:
17617 case DW_OP_reg3:
17618 case DW_OP_reg4:
17619 case DW_OP_reg5:
17620 case DW_OP_reg6:
17621 case DW_OP_reg7:
17622 case DW_OP_reg8:
17623 case DW_OP_reg9:
17624 case DW_OP_reg10:
17625 case DW_OP_reg11:
17626 case DW_OP_reg12:
17627 case DW_OP_reg13:
17628 case DW_OP_reg14:
17629 case DW_OP_reg15:
17630 case DW_OP_reg16:
17631 case DW_OP_reg17:
17632 case DW_OP_reg18:
17633 case DW_OP_reg19:
17634 case DW_OP_reg20:
17635 case DW_OP_reg21:
17636 case DW_OP_reg22:
17637 case DW_OP_reg23:
17638 case DW_OP_reg24:
17639 case DW_OP_reg25:
17640 case DW_OP_reg26:
17641 case DW_OP_reg27:
17642 case DW_OP_reg28:
17643 case DW_OP_reg29:
17644 case DW_OP_reg30:
17645 case DW_OP_reg31:
17646 case DW_OP_bregx:
17647 case DW_OP_piece:
17648 case DW_OP_deref_size:
17649 case DW_OP_nop:
17650 case DW_OP_bit_piece:
17651 case DW_OP_implicit_value:
17652 case DW_OP_stack_value:
17653 break;
17654
17655 case DW_OP_addr:
17656 case DW_OP_const1u:
17657 case DW_OP_const1s:
17658 case DW_OP_const2u:
17659 case DW_OP_const2s:
17660 case DW_OP_const4u:
17661 case DW_OP_const4s:
17662 case DW_OP_const8u:
17663 case DW_OP_const8s:
17664 case DW_OP_constu:
17665 case DW_OP_consts:
17666 case DW_OP_dup:
17667 case DW_OP_over:
17668 case DW_OP_pick:
17669 case DW_OP_lit0:
17670 case DW_OP_lit1:
17671 case DW_OP_lit2:
17672 case DW_OP_lit3:
17673 case DW_OP_lit4:
17674 case DW_OP_lit5:
17675 case DW_OP_lit6:
17676 case DW_OP_lit7:
17677 case DW_OP_lit8:
17678 case DW_OP_lit9:
17679 case DW_OP_lit10:
17680 case DW_OP_lit11:
17681 case DW_OP_lit12:
17682 case DW_OP_lit13:
17683 case DW_OP_lit14:
17684 case DW_OP_lit15:
17685 case DW_OP_lit16:
17686 case DW_OP_lit17:
17687 case DW_OP_lit18:
17688 case DW_OP_lit19:
17689 case DW_OP_lit20:
17690 case DW_OP_lit21:
17691 case DW_OP_lit22:
17692 case DW_OP_lit23:
17693 case DW_OP_lit24:
17694 case DW_OP_lit25:
17695 case DW_OP_lit26:
17696 case DW_OP_lit27:
17697 case DW_OP_lit28:
17698 case DW_OP_lit29:
17699 case DW_OP_lit30:
17700 case DW_OP_lit31:
17701 case DW_OP_breg0:
17702 case DW_OP_breg1:
17703 case DW_OP_breg2:
17704 case DW_OP_breg3:
17705 case DW_OP_breg4:
17706 case DW_OP_breg5:
17707 case DW_OP_breg6:
17708 case DW_OP_breg7:
17709 case DW_OP_breg8:
17710 case DW_OP_breg9:
17711 case DW_OP_breg10:
17712 case DW_OP_breg11:
17713 case DW_OP_breg12:
17714 case DW_OP_breg13:
17715 case DW_OP_breg14:
17716 case DW_OP_breg15:
17717 case DW_OP_breg16:
17718 case DW_OP_breg17:
17719 case DW_OP_breg18:
17720 case DW_OP_breg19:
17721 case DW_OP_breg20:
17722 case DW_OP_breg21:
17723 case DW_OP_breg22:
17724 case DW_OP_breg23:
17725 case DW_OP_breg24:
17726 case DW_OP_breg25:
17727 case DW_OP_breg26:
17728 case DW_OP_breg27:
17729 case DW_OP_breg28:
17730 case DW_OP_breg29:
17731 case DW_OP_breg30:
17732 case DW_OP_breg31:
17733 case DW_OP_fbreg:
17734 case DW_OP_push_object_address:
17735 case DW_OP_call_frame_cfa:
17736 case DW_OP_GNU_variable_value:
17737 ++frame_offset_;
17738 break;
17739
17740 case DW_OP_drop:
17741 case DW_OP_xderef:
17742 case DW_OP_and:
17743 case DW_OP_div:
17744 case DW_OP_minus:
17745 case DW_OP_mod:
17746 case DW_OP_mul:
17747 case DW_OP_or:
17748 case DW_OP_plus:
17749 case DW_OP_shl:
17750 case DW_OP_shr:
17751 case DW_OP_shra:
17752 case DW_OP_xor:
17753 case DW_OP_bra:
17754 case DW_OP_eq:
17755 case DW_OP_ge:
17756 case DW_OP_gt:
17757 case DW_OP_le:
17758 case DW_OP_lt:
17759 case DW_OP_ne:
17760 case DW_OP_regx:
17761 case DW_OP_xderef_size:
17762 --frame_offset_;
17763 break;
17764
17765 case DW_OP_call2:
17766 case DW_OP_call4:
17767 case DW_OP_call_ref:
17768 {
17769 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17770 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17771
17772 if (stack_usage == NULL)
17773 return false;
17774 frame_offset_ += *stack_usage;
17775 break;
17776 }
17777
17778 case DW_OP_implicit_pointer:
17779 case DW_OP_entry_value:
17780 case DW_OP_const_type:
17781 case DW_OP_regval_type:
17782 case DW_OP_deref_type:
17783 case DW_OP_convert:
17784 case DW_OP_reinterpret:
17785 case DW_OP_form_tls_address:
17786 case DW_OP_GNU_push_tls_address:
17787 case DW_OP_GNU_uninit:
17788 case DW_OP_GNU_encoded_addr:
17789 case DW_OP_GNU_implicit_pointer:
17790 case DW_OP_GNU_entry_value:
17791 case DW_OP_GNU_const_type:
17792 case DW_OP_GNU_regval_type:
17793 case DW_OP_GNU_deref_type:
17794 case DW_OP_GNU_convert:
17795 case DW_OP_GNU_reinterpret:
17796 case DW_OP_GNU_parameter_ref:
17797 /* loc_list_from_tree will probably not output these operations for
17798 size functions, so assume they will not appear here. */
17799 /* Fall through... */
17800
17801 default:
17802 gcc_unreachable ();
17803 }
17804
17805 /* Now, follow the control flow (except subroutine calls). */
17806 switch (l->dw_loc_opc)
17807 {
17808 case DW_OP_bra:
17809 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17810 frame_offsets))
17811 return false;
17812 /* Fall through. */
17813
17814 case DW_OP_skip:
17815 l = l->dw_loc_oprnd1.v.val_loc;
17816 break;
17817
17818 case DW_OP_stack_value:
17819 return true;
17820
17821 default:
17822 l = l->dw_loc_next;
17823 break;
17824 }
17825 }
17826
17827 return true;
17828 }
17829
17830 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17831 operations) in order to resolve the operand of DW_OP_pick operations that
17832 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17833 offset *before* LOC is executed. Return whether all relocations were
17834 successful. */
17835
17836 static bool
17837 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17838 struct dwarf_procedure_info *dpi)
17839 {
17840 /* Associate to all visited operations the frame offset *before* evaluating
17841 this operation. */
17842 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17843
17844 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17845 frame_offsets);
17846 }
17847
17848 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17849 Return NULL if it is not possible. */
17850
17851 static dw_die_ref
17852 function_to_dwarf_procedure (tree fndecl)
17853 {
17854 struct loc_descr_context ctx;
17855 struct dwarf_procedure_info dpi;
17856 dw_die_ref dwarf_proc_die;
17857 tree tree_body = DECL_SAVED_TREE (fndecl);
17858 dw_loc_descr_ref loc_body, epilogue;
17859
17860 tree cursor;
17861 unsigned i;
17862
17863 /* Do not generate multiple DWARF procedures for the same function
17864 declaration. */
17865 dwarf_proc_die = lookup_decl_die (fndecl);
17866 if (dwarf_proc_die != NULL)
17867 return dwarf_proc_die;
17868
17869 /* DWARF procedures are available starting with the DWARFv3 standard. */
17870 if (dwarf_version < 3 && dwarf_strict)
17871 return NULL;
17872
17873 /* We handle only functions for which we still have a body, that return a
17874 supported type and that take arguments with supported types. Note that
17875 there is no point in translating functions that return nothing. */
17876 if (tree_body == NULL_TREE
17877 || DECL_RESULT (fndecl) == NULL_TREE
17878 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17879 return NULL;
17880
17881 for (cursor = DECL_ARGUMENTS (fndecl);
17882 cursor != NULL_TREE;
17883 cursor = TREE_CHAIN (cursor))
17884 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17885 return NULL;
17886
17887 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17888 if (TREE_CODE (tree_body) != RETURN_EXPR)
17889 return NULL;
17890 tree_body = TREE_OPERAND (tree_body, 0);
17891 if (TREE_CODE (tree_body) != MODIFY_EXPR
17892 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17893 return NULL;
17894 tree_body = TREE_OPERAND (tree_body, 1);
17895
17896 /* Try to translate the body expression itself. Note that this will probably
17897 cause an infinite recursion if its call graph has a cycle. This is very
17898 unlikely for size functions, however, so don't bother with such things at
17899 the moment. */
17900 ctx.context_type = NULL_TREE;
17901 ctx.base_decl = NULL_TREE;
17902 ctx.dpi = &dpi;
17903 ctx.placeholder_arg = false;
17904 ctx.placeholder_seen = false;
17905 dpi.fndecl = fndecl;
17906 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17907 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17908 if (!loc_body)
17909 return NULL;
17910
17911 /* After evaluating all operations in "loc_body", we should still have on the
17912 stack all arguments plus the desired function result (top of the stack).
17913 Generate code in order to keep only the result in our stack frame. */
17914 epilogue = NULL;
17915 for (i = 0; i < dpi.args_count; ++i)
17916 {
17917 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17918 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17919 op_couple->dw_loc_next->dw_loc_next = epilogue;
17920 epilogue = op_couple;
17921 }
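/* For instance, with dpi.args_count == 2 the epilogue reads: DW_OP_swap,
   DW_OP_drop, DW_OP_swap, DW_OP_drop; each swap/drop pair discards the
   argument slot that sits just below the result. */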
17922 add_loc_descr (&loc_body, epilogue);
17923 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17924 return NULL;
17925
17926 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17927 because they are considered useful. Now that there is an epilogue, they no
17928 longer are, so give it another try. */
17929 loc_descr_without_nops (loc_body);
17930
17931 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17932 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17933 though, given that size functions do not come from source, so they should
17934 not have a dedicated DW_TAG_subprogram DIE. */
17935 dwarf_proc_die
17936 = new_dwarf_proc_die (loc_body, fndecl,
17937 get_context_die (DECL_CONTEXT (fndecl)));
17938
17939 /* The called DWARF procedure consumes one stack slot per argument and
17940 returns one stack slot. */
17941 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17942
17943 return dwarf_proc_die;
17944 }
17945
17946
17947 /* Generate a DWARF location list representing LOC.
17948 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
17949 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
17950 If WANT_ADDRESS is 2, an expression computing an address usable in a location
17951 description is returned (i.e. DW_OP_reg can be used
17952 to refer to register values).
17953
17954 CONTEXT provides information to customize the location descriptions
17955 generation. Its context_type field specifies what type is implicitly
17956 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17957 will not be generated.
17958
17959 Its DPI field determines whether we are generating a DWARF expression for a
17960 DWARF procedure, in which case PARM_DECL references are processed specially.
17961
17962 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17963 and dpi fields were null. */
17964
17965 static dw_loc_list_ref
17966 loc_list_from_tree_1 (tree loc, int want_address,
17967 struct loc_descr_context *context)
17968 {
17969 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17970 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17971 int have_address = 0;
17972 enum dwarf_location_atom op;
17973
17974 /* ??? Most of the time we do not take proper care of sign/zero
17975 extending the values. Hopefully this won't be a real
17976 problem... */
17977
17978 if (context != NULL
17979 && context->base_decl == loc
17980 && want_address == 0)
17981 {
17982 if (dwarf_version >= 3 || !dwarf_strict)
17983 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17984 NULL, 0, NULL, 0, NULL);
17985 else
17986 return NULL;
17987 }
17988
17989 switch (TREE_CODE (loc))
17990 {
17991 case ERROR_MARK:
17992 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17993 return 0;
17994
17995 case PLACEHOLDER_EXPR:
17996 /* This case involves extracting fields from an object to determine the
17997 position of other fields. It is supposed to appear only as the first
17998 operand of COMPONENT_REF nodes and to reference precisely the type
17999 that the context allows. */
18000 if (context != NULL
18001 && TREE_TYPE (loc) == context->context_type
18002 && want_address >= 1)
18003 {
18004 if (dwarf_version >= 3 || !dwarf_strict)
18005 {
18006 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18007 have_address = 1;
18008 break;
18009 }
18010 else
18011 return NULL;
18012 }
18013 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18014 the single argument passed by the consumer. */
18015 else if (context != NULL
18016 && context->placeholder_arg
18017 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18018 && want_address == 0)
18019 {
18020 ret = new_loc_descr (DW_OP_pick, 0, 0);
18021 ret->frame_offset_rel = 1;
18022 context->placeholder_seen = true;
18023 break;
18024 }
18025 else
18026 expansion_failed (loc, NULL_RTX,
18027 "PLACEHOLDER_EXPR for an unexpected type");
18028 break;
18029
18030 case CALL_EXPR:
18031 {
18032 const int nargs = call_expr_nargs (loc);
18033 tree callee = get_callee_fndecl (loc);
18034 int i;
18035 dw_die_ref dwarf_proc;
18036
18037 if (callee == NULL_TREE)
18038 goto call_expansion_failed;
18039
18040 /* We handle only functions that return a supported scalar type. */
18041 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18042 goto call_expansion_failed;
18043
18044 dwarf_proc = function_to_dwarf_procedure (callee);
18045 if (dwarf_proc == NULL)
18046 goto call_expansion_failed;
18047
18048 /* Evaluate arguments right-to-left so that the first argument will
18049 be the top-most one on the stack. */
18050 for (i = nargs - 1; i >= 0; --i)
18051 {
18052 dw_loc_descr_ref loc_descr
18053 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18054 context);
18055
18056 if (loc_descr == NULL)
18057 goto call_expansion_failed;
18058
18059 add_loc_descr (&ret, loc_descr);
18060 }
18061
18062 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18063 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18064 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18065 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18066 add_loc_descr (&ret, ret1);
18067 break;
18068
18069 call_expansion_failed:
18070 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18071 /* We have no way to encode this call as a DWARF expression. */
18072 return 0;
18073 }
18074
18075 case PREINCREMENT_EXPR:
18076 case PREDECREMENT_EXPR:
18077 case POSTINCREMENT_EXPR:
18078 case POSTDECREMENT_EXPR:
18079 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18080 /* There are no opcodes for these operations. */
18081 return 0;
18082
18083 case ADDR_EXPR:
18084 /* If we already want an address, see if there is an INDIRECT_REF inside,
18085 e.g. for &this->field. */
18086 if (want_address)
18087 {
18088 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18089 (loc, want_address == 2, context);
18090 if (list_ret)
18091 have_address = 1;
18092 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18093 && (ret = cst_pool_loc_descr (loc)))
18094 have_address = 1;
18095 }
18096 /* Otherwise, process the argument and look for the address. */
18097 if (!list_ret && !ret)
18098 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18099 else
18100 {
18101 if (want_address)
18102 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18103 return NULL;
18104 }
18105 break;
18106
18107 case VAR_DECL:
18108 if (DECL_THREAD_LOCAL_P (loc))
18109 {
18110 rtx rtl;
18111 enum dwarf_location_atom tls_op;
18112 enum dtprel_bool dtprel = dtprel_false;
18113
18114 if (targetm.have_tls)
18115 {
18116 /* If this is not defined, we have no way to emit the
18117 data. */
18118 if (!targetm.asm_out.output_dwarf_dtprel)
18119 return 0;
18120
18121 /* The way DW_OP_GNU_push_tls_address is specified, we
18122 can only look up addresses of objects in the current
18123 module. We used DW_OP_addr as the first op, but that's
18124 wrong, because DW_OP_addr is relocated by the debug
18125 info consumer, while the DW_OP_GNU_push_tls_address
18126 operand shouldn't be. */
18127 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18128 return 0;
18129 dtprel = dtprel_true;
18130 /* We check for DWARF 5 here because gdb did not implement
18131 DW_OP_form_tls_address until after 7.12. */
18132 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18133 : DW_OP_GNU_push_tls_address);
18134 }
18135 else
18136 {
18137 if (!targetm.emutls.debug_form_tls_address
18138 || !(dwarf_version >= 3 || !dwarf_strict))
18139 return 0;
18140 /* We stuffed the control variable into the DECL_VALUE_EXPR
18141 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18142 no longer appear in gimple code. We used the control
18143 variable specifically so that we could pick it up here. */
18144 loc = DECL_VALUE_EXPR (loc);
18145 tls_op = DW_OP_form_tls_address;
18146 }
18147
18148 rtl = rtl_for_decl_location (loc);
18149 if (rtl == NULL_RTX)
18150 return 0;
18151
18152 if (!MEM_P (rtl))
18153 return 0;
18154 rtl = XEXP (rtl, 0);
18155 if (! CONSTANT_P (rtl))
18156 return 0;
18157
18158 ret = new_addr_loc_descr (rtl, dtprel);
18159 ret1 = new_loc_descr (tls_op, 0, 0);
18160 add_loc_descr (&ret, ret1);
18161
18162 have_address = 1;
18163 break;
18164 }
18165 /* FALLTHRU */
18166
18167 case PARM_DECL:
18168 if (context != NULL && context->dpi != NULL
18169 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18170 {
18171 /* We are generating code for a DWARF procedure and we want to access
18172 one of its arguments: find the appropriate argument offset and let
18173 the resolve_args_picking pass compute the offset that complies
18174 with the stack frame size. */
18175 unsigned i = 0;
18176 tree cursor;
18177
18178 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18179 cursor != NULL_TREE && cursor != loc;
18180 cursor = TREE_CHAIN (cursor), ++i)
18181 ;
18182 /* If we are translating a DWARF procedure, all referenced parameters
18183 must belong to the current function. */
18184 gcc_assert (cursor != NULL_TREE);
18185
18186 ret = new_loc_descr (DW_OP_pick, i, 0);
18187 ret->frame_offset_rel = 1;
18188 break;
18189 }
18190 /* FALLTHRU */
18191
18192 case RESULT_DECL:
18193 if (DECL_HAS_VALUE_EXPR_P (loc))
18194 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18195 want_address, context);
18196 /* FALLTHRU */
18197
18198 case FUNCTION_DECL:
18199 {
18200 rtx rtl;
18201 var_loc_list *loc_list = lookup_decl_loc (loc);
18202
18203 if (loc_list && loc_list->first)
18204 {
18205 list_ret = dw_loc_list (loc_list, loc, want_address);
18206 have_address = want_address != 0;
18207 break;
18208 }
18209 rtl = rtl_for_decl_location (loc);
18210 if (rtl == NULL_RTX)
18211 {
18212 if (TREE_CODE (loc) != FUNCTION_DECL
18213 && early_dwarf
18214 && current_function_decl
18215 && want_address != 1
18216 && ! DECL_IGNORED_P (loc)
18217 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18218 || POINTER_TYPE_P (TREE_TYPE (loc)))
18219 && DECL_CONTEXT (loc) == current_function_decl
18220 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18221 <= DWARF2_ADDR_SIZE))
18222 {
18223 dw_die_ref ref = lookup_decl_die (loc);
18224 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18225 if (ref)
18226 {
18227 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18228 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18229 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18230 }
18231 else
18232 {
18233 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18234 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18235 }
18236 break;
18237 }
18238 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18239 return 0;
18240 }
18241 else if (CONST_INT_P (rtl))
18242 {
18243 HOST_WIDE_INT val = INTVAL (rtl);
18244 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18245 val &= GET_MODE_MASK (DECL_MODE (loc));
18246 ret = int_loc_descriptor (val);
18247 }
18248 else if (GET_CODE (rtl) == CONST_STRING)
18249 {
18250 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18251 return 0;
18252 }
18253 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18254 ret = new_addr_loc_descr (rtl, dtprel_false);
18255 else
18256 {
18257 machine_mode mode, mem_mode;
18258
18259 /* Certain constructs can only be represented at top-level. */
18260 if (want_address == 2)
18261 {
18262 ret = loc_descriptor (rtl, VOIDmode,
18263 VAR_INIT_STATUS_INITIALIZED);
18264 have_address = 1;
18265 }
18266 else
18267 {
18268 mode = GET_MODE (rtl);
18269 mem_mode = VOIDmode;
18270 if (MEM_P (rtl))
18271 {
18272 mem_mode = mode;
18273 mode = get_address_mode (rtl);
18274 rtl = XEXP (rtl, 0);
18275 have_address = 1;
18276 }
18277 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18278 VAR_INIT_STATUS_INITIALIZED);
18279 }
18280 if (!ret)
18281 expansion_failed (loc, rtl,
18282 "failed to produce loc descriptor for rtl");
18283 }
18284 }
18285 break;
18286
18287 case MEM_REF:
18288 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18289 {
18290 have_address = 1;
18291 goto do_plus;
18292 }
18293 /* Fallthru. */
18294 case INDIRECT_REF:
18295 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18296 have_address = 1;
18297 break;
18298
18299 case TARGET_MEM_REF:
18300 case SSA_NAME:
18301 case DEBUG_EXPR_DECL:
18302 return NULL;
18303
18304 case COMPOUND_EXPR:
18305 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18306 context);
18307
18308 CASE_CONVERT:
18309 case VIEW_CONVERT_EXPR:
18310 case SAVE_EXPR:
18311 case MODIFY_EXPR:
18312 case NON_LVALUE_EXPR:
18313 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18314 context);
18315
18316 case COMPONENT_REF:
18317 case BIT_FIELD_REF:
18318 case ARRAY_REF:
18319 case ARRAY_RANGE_REF:
18320 case REALPART_EXPR:
18321 case IMAGPART_EXPR:
18322 {
18323 tree obj, offset;
18324 poly_int64 bitsize, bitpos, bytepos;
18325 machine_mode mode;
18326 int unsignedp, reversep, volatilep = 0;
18327
18328 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18329 &unsignedp, &reversep, &volatilep);
18330
18331 gcc_assert (obj != loc);
18332
18333 list_ret = loc_list_from_tree_1 (obj,
18334 want_address == 2
18335 && known_eq (bitpos, 0)
18336 && !offset ? 2 : 1,
18337 context);
18338 /* TODO: We can extract the value of a small expression via shifting even
18339 for a nonzero bitpos. */
18340 if (list_ret == 0)
18341 return 0;
18342 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18343 || !multiple_p (bitsize, BITS_PER_UNIT))
18344 {
18345 expansion_failed (loc, NULL_RTX,
18346 "bitfield access");
18347 return 0;
18348 }
18349
18350 if (offset != NULL_TREE)
18351 {
18352 /* Variable offset. */
18353 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18354 if (list_ret1 == 0)
18355 return 0;
18356 add_loc_list (&list_ret, list_ret1);
18357 if (!list_ret)
18358 return 0;
18359 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18360 }
18361
18362 HOST_WIDE_INT value;
18363 if (bytepos.is_constant (&value) && value > 0)
18364 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18365 value, 0));
18366 else if (maybe_ne (bytepos, 0))
18367 loc_list_plus_const (list_ret, bytepos);
18368
18369 have_address = 1;
18370 break;
18371 }
18372
18373 case INTEGER_CST:
18374 if ((want_address || !tree_fits_shwi_p (loc))
18375 && (ret = cst_pool_loc_descr (loc)))
18376 have_address = 1;
18377 else if (want_address == 2
18378 && tree_fits_shwi_p (loc)
18379 && (ret = address_of_int_loc_descriptor
18380 (int_size_in_bytes (TREE_TYPE (loc)),
18381 tree_to_shwi (loc))))
18382 have_address = 1;
18383 else if (tree_fits_shwi_p (loc))
18384 ret = int_loc_descriptor (tree_to_shwi (loc));
18385 else if (tree_fits_uhwi_p (loc))
18386 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18387 else
18388 {
18389 expansion_failed (loc, NULL_RTX,
18390 "Integer operand is not host integer");
18391 return 0;
18392 }
18393 break;
18394
18395 case CONSTRUCTOR:
18396 case REAL_CST:
18397 case STRING_CST:
18398 case COMPLEX_CST:
18399 if ((ret = cst_pool_loc_descr (loc)))
18400 have_address = 1;
18401 else if (TREE_CODE (loc) == CONSTRUCTOR)
18402 {
18403 tree type = TREE_TYPE (loc);
18404 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18405 unsigned HOST_WIDE_INT offset = 0;
18406 unsigned HOST_WIDE_INT cnt;
18407 constructor_elt *ce;
18408
18409 if (TREE_CODE (type) == RECORD_TYPE)
18410 {
18411 /* This is very limited, but it's enough to output
18412 pointers to member functions, as long as the
18413 referenced function is defined in the current
18414 translation unit. */
18415 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18416 {
18417 tree val = ce->value;
18418
18419 tree field = ce->index;
18420
18421 if (val)
18422 STRIP_NOPS (val);
18423
18424 if (!field || DECL_BIT_FIELD (field))
18425 {
18426 expansion_failed (loc, NULL_RTX,
18427 "bitfield in record type constructor");
18428 size = offset = (unsigned HOST_WIDE_INT)-1;
18429 ret = NULL;
18430 break;
18431 }
18432
18433 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18434 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18435 gcc_assert (pos + fieldsize <= size);
18436 if (pos < offset)
18437 {
18438 expansion_failed (loc, NULL_RTX,
18439 "out-of-order fields in record constructor");
18440 size = offset = (unsigned HOST_WIDE_INT)-1;
18441 ret = NULL;
18442 break;
18443 }
18444 if (pos > offset)
18445 {
18446 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18447 add_loc_descr (&ret, ret1);
18448 offset = pos;
18449 }
18450 if (val && fieldsize != 0)
18451 {
18452 ret1 = loc_descriptor_from_tree (val, want_address, context);
18453 if (!ret1)
18454 {
18455 expansion_failed (loc, NULL_RTX,
18456 "unsupported expression in field");
18457 size = offset = (unsigned HOST_WIDE_INT)-1;
18458 ret = NULL;
18459 break;
18460 }
18461 add_loc_descr (&ret, ret1);
18462 }
18463 if (fieldsize)
18464 {
18465 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18466 add_loc_descr (&ret, ret1);
18467 offset = pos + fieldsize;
18468 }
18469 }
18470
18471 if (offset != size)
18472 {
18473 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18474 add_loc_descr (&ret, ret1);
18475 offset = size;
18476 }
18477
18478 have_address = !!want_address;
18479 }
18480 else
18481 expansion_failed (loc, NULL_RTX,
18482 "constructor of non-record type");
18483 }
18484 else
18485 /* We can construct small constants here using int_loc_descriptor. */
18486 expansion_failed (loc, NULL_RTX,
18487 "constructor or constant not in constant pool");
18488 break;
18489
18490 case TRUTH_AND_EXPR:
18491 case TRUTH_ANDIF_EXPR:
18492 case BIT_AND_EXPR:
18493 op = DW_OP_and;
18494 goto do_binop;
18495
18496 case TRUTH_XOR_EXPR:
18497 case BIT_XOR_EXPR:
18498 op = DW_OP_xor;
18499 goto do_binop;
18500
18501 case TRUTH_OR_EXPR:
18502 case TRUTH_ORIF_EXPR:
18503 case BIT_IOR_EXPR:
18504 op = DW_OP_or;
18505 goto do_binop;
18506
18507 case FLOOR_DIV_EXPR:
18508 case CEIL_DIV_EXPR:
18509 case ROUND_DIV_EXPR:
18510 case TRUNC_DIV_EXPR:
18511 case EXACT_DIV_EXPR:
18512 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18513 return 0;
18514 op = DW_OP_div;
18515 goto do_binop;
18516
18517 case MINUS_EXPR:
18518 op = DW_OP_minus;
18519 goto do_binop;
18520
18521 case FLOOR_MOD_EXPR:
18522 case CEIL_MOD_EXPR:
18523 case ROUND_MOD_EXPR:
18524 case TRUNC_MOD_EXPR:
18525 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18526 {
18527 op = DW_OP_mod;
18528 goto do_binop;
18529 }
18530 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18531 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18532 if (list_ret == 0 || list_ret1 == 0)
18533 return 0;
18534
18535 add_loc_list (&list_ret, list_ret1);
18536 if (list_ret == 0)
18537 return 0;
18538 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18539 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18540 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18541 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18542 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
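/* The sequence just emitted computes the signed remainder as
   op0 - (op0 / op1) * op1; the single DW_OP_mod operation is reserved for
   the TYPE_UNSIGNED case above because DW_OP_mod is an unsigned modulo. */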
18543 break;
18544
18545 case MULT_EXPR:
18546 op = DW_OP_mul;
18547 goto do_binop;
18548
18549 case LSHIFT_EXPR:
18550 op = DW_OP_shl;
18551 goto do_binop;
18552
18553 case RSHIFT_EXPR:
18554 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18555 goto do_binop;
18556
18557 case POINTER_PLUS_EXPR:
18558 case PLUS_EXPR:
18559 do_plus:
18560 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18561 {
18562 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18563 smarter to encode their opposite. The DW_OP_plus_uconst operation
18564 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18565 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18566 bytes, Y being the size of the operation that pushes the opposite
18567 of the addend. So let's choose the smallest representation. */
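/* For example (assuming a 64-bit target, sizes are illustrative): an addend
   of 0xfffffffffffffff8, i.e. -8 once sign-extended, would require a 10-byte
   ULEB128 operand for DW_OP_plus_uconst, whereas the equivalent
   "DW_OP_lit8; DW_OP_minus" sequence takes only 2 bytes. */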
18568 const tree tree_addend = TREE_OPERAND (loc, 1);
18569 offset_int wi_addend;
18570 HOST_WIDE_INT shwi_addend;
18571 dw_loc_descr_ref loc_naddend;
18572
18573 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18574 if (list_ret == 0)
18575 return 0;
18576
18577 /* Try to get the literal to push. It is the opposite of the addend, so,
18578 as we rely on wrapping during DWARF evaluation, first decode the addend
18579 as a "DWARF-sized" signed number. */
18580 wi_addend = wi::to_offset (tree_addend);
18581 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18582 shwi_addend = wi_addend.to_shwi ();
18583 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18584 ? int_loc_descriptor (-shwi_addend)
18585 : NULL;
18586
18587 if (loc_naddend != NULL
18588 && ((unsigned) size_of_uleb128 (shwi_addend)
18589 > size_of_loc_descr (loc_naddend)))
18590 {
18591 add_loc_descr_to_each (list_ret, loc_naddend);
18592 add_loc_descr_to_each (list_ret,
18593 new_loc_descr (DW_OP_minus, 0, 0));
18594 }
18595 else
18596 {
18597 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18598 {
18599 loc_naddend = loc_cur;
18600 loc_cur = loc_cur->dw_loc_next;
18601 ggc_free (loc_naddend);
18602 }
18603 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18604 }
18605 break;
18606 }
18607
18608 op = DW_OP_plus;
18609 goto do_binop;
18610
18611 case LE_EXPR:
18612 op = DW_OP_le;
18613 goto do_comp_binop;
18614
18615 case GE_EXPR:
18616 op = DW_OP_ge;
18617 goto do_comp_binop;
18618
18619 case LT_EXPR:
18620 op = DW_OP_lt;
18621 goto do_comp_binop;
18622
18623 case GT_EXPR:
18624 op = DW_OP_gt;
18625 goto do_comp_binop;
18626
18627 do_comp_binop:
18628 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18629 {
18630 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18631 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18632 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18633 TREE_CODE (loc));
18634 break;
18635 }
18636 else
18637 goto do_binop;
18638
18639 case EQ_EXPR:
18640 op = DW_OP_eq;
18641 goto do_binop;
18642
18643 case NE_EXPR:
18644 op = DW_OP_ne;
18645 goto do_binop;
18646
18647 do_binop:
18648 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18649 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18650 if (list_ret == 0 || list_ret1 == 0)
18651 return 0;
18652
18653 add_loc_list (&list_ret, list_ret1);
18654 if (list_ret == 0)
18655 return 0;
18656 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18657 break;
18658
18659 case TRUTH_NOT_EXPR:
18660 case BIT_NOT_EXPR:
18661 op = DW_OP_not;
18662 goto do_unop;
18663
18664 case ABS_EXPR:
18665 op = DW_OP_abs;
18666 goto do_unop;
18667
18668 case NEGATE_EXPR:
18669 op = DW_OP_neg;
18670 goto do_unop;
18671
18672 do_unop:
18673 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18674 if (list_ret == 0)
18675 return 0;
18676
18677 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18678 break;
18679
18680 case MIN_EXPR:
18681 case MAX_EXPR:
18682 {
18683 const enum tree_code code =
18684 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18685
18686 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18687 build2 (code, integer_type_node,
18688 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18689 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18690 }
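/* MIN_EXPR (a, b) has thus been rewritten as (a > b) ? b : a, and
   MAX_EXPR (a, b) as (a < b) ? b : a; both are then handled by the
   COND_EXPR code below. */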
18691
18692 /* fall through */
18693
18694 case COND_EXPR:
18695 {
18696 dw_loc_descr_ref lhs
18697 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18698 dw_loc_list_ref rhs
18699 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18700 dw_loc_descr_ref bra_node, jump_node, tmp;
18701
18702 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18703 if (list_ret == 0 || lhs == 0 || rhs == 0)
18704 return 0;
18705
18706 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18707 add_loc_descr_to_each (list_ret, bra_node);
18708
18709 add_loc_list (&list_ret, rhs);
18710 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18711 add_loc_descr_to_each (list_ret, jump_node);
18712
18713 add_loc_descr_to_each (list_ret, lhs);
18714 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18715 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18716
18717 /* ??? Need a node to point the skip at. Use a nop. */
18718 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18719 add_loc_descr_to_each (list_ret, tmp);
18720 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18721 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
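/* The resulting layout is therefore: <condition ops>, DW_OP_bra -> L1,
   <ops for operand 2>, DW_OP_skip -> L2, L1: <ops for operand 1>,
   L2: DW_OP_nop. */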
18722 }
18723 break;
18724
18725 case FIX_TRUNC_EXPR:
18726 return 0;
18727
18728 default:
18729 /* Leave front-end specific codes as simply unknown. This comes
18730 up, for instance, with the C STMT_EXPR. */
18731 if ((unsigned int) TREE_CODE (loc)
18732 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18733 {
18734 expansion_failed (loc, NULL_RTX,
18735 "language specific tree node");
18736 return 0;
18737 }
18738
18739 /* Otherwise this is a generic code; we should just list all of
18740 these explicitly. We forgot one. */
18741 if (flag_checking)
18742 gcc_unreachable ();
18743
18744 /* In a release build, we want to degrade gracefully: better to
18745 generate incomplete debugging information than to crash. */
18746 return NULL;
18747 }
18748
18749 if (!ret && !list_ret)
18750 return 0;
18751
18752 if (want_address == 2 && !have_address
18753 && (dwarf_version >= 4 || !dwarf_strict))
18754 {
18755 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18756 {
18757 expansion_failed (loc, NULL_RTX,
18758 "DWARF address size mismatch");
18759 return 0;
18760 }
18761 if (ret)
18762 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18763 else
18764 add_loc_descr_to_each (list_ret,
18765 new_loc_descr (DW_OP_stack_value, 0, 0));
18766 have_address = 1;
18767 }
18768 /* Report if we can't fill the request for an address. */
18769 if (want_address && !have_address)
18770 {
18771 expansion_failed (loc, NULL_RTX,
18772 "Want address and only have value");
18773 return 0;
18774 }
18775
18776 gcc_assert (!ret || !list_ret);
18777
18778 /* If we've got an address and don't want one, dereference. */
18779 if (!want_address && have_address)
18780 {
18781 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18782
18783 if (size > DWARF2_ADDR_SIZE || size == -1)
18784 {
18785 expansion_failed (loc, NULL_RTX,
18786 "DWARF address size mismatch");
18787 return 0;
18788 }
18789 else if (size == DWARF2_ADDR_SIZE)
18790 op = DW_OP_deref;
18791 else
18792 op = DW_OP_deref_size;
18793
18794 if (ret)
18795 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18796 else
18797 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18798 }
18799 if (ret)
18800 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18801
18802 return list_ret;
18803 }
18804
18805 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18806 expressions. */
18807
18808 static dw_loc_list_ref
18809 loc_list_from_tree (tree loc, int want_address,
18810 struct loc_descr_context *context)
18811 {
18812 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18813
18814 for (dw_loc_list_ref loc_cur = result;
18815 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18816 loc_descr_without_nops (loc_cur->expr);
18817 return result;
18818 }
18819
18820 /* Same as above, but return only a single location expression. */
18821 static dw_loc_descr_ref
18822 loc_descriptor_from_tree (tree loc, int want_address,
18823 struct loc_descr_context *context)
18824 {
18825 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18826 if (!ret)
18827 return NULL;
18828 if (ret->dw_loc_next)
18829 {
18830 expansion_failed (loc, NULL_RTX,
18831 "Location list where only loc descriptor needed");
18832 return NULL;
18833 }
18834 return ret->expr;
18835 }
18836
18837 /* Given a value, round it up to the lowest multiple of `boundary'
18838 which is not less than the value itself. */
18839
18840 static inline HOST_WIDE_INT
18841 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18842 {
18843 return (((value + boundary - 1) / boundary) * boundary);
18844 }
18845
18846 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18847 pointer to the declared type for the relevant field variable, or return
18848 `integer_type_node' if the given node turns out to be an
18849 ERROR_MARK node. */
18850
18851 static inline tree
18852 field_type (const_tree decl)
18853 {
18854 tree type;
18855
18856 if (TREE_CODE (decl) == ERROR_MARK)
18857 return integer_type_node;
18858
18859 type = DECL_BIT_FIELD_TYPE (decl);
18860 if (type == NULL_TREE)
18861 type = TREE_TYPE (decl);
18862
18863 return type;
18864 }
18865
18866 /* Given a pointer to a tree node, return the alignment in bits for
18867 it, or else return BITS_PER_WORD if the node actually turns out to
18868 be an ERROR_MARK node. */
18869
18870 static inline unsigned
18871 simple_type_align_in_bits (const_tree type)
18872 {
18873 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18874 }
18875
18876 static inline unsigned
18877 simple_decl_align_in_bits (const_tree decl)
18878 {
18879 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18880 }
18881
18882 /* Return the result of rounding T up to ALIGN. */
18883
18884 static inline offset_int
18885 round_up_to_align (const offset_int &t, unsigned int align)
18886 {
18887 return wi::udiv_trunc (t + align - 1, align) * align;
18888 }
18889
18890 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18891 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18892 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18893 if we fail to return the size in one of these two forms. */
18894
18895 static dw_loc_descr_ref
18896 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18897 {
18898 tree tree_size;
18899 struct loc_descr_context ctx;
18900
18901 /* Prefer returning a constant integer, if possible. */
18902 *cst_size = int_size_in_bytes (type);
18903 if (*cst_size != -1)
18904 return NULL;
18905
18906 ctx.context_type = const_cast<tree> (type);
18907 ctx.base_decl = NULL_TREE;
18908 ctx.dpi = NULL;
18909 ctx.placeholder_arg = false;
18910 ctx.placeholder_seen = false;
18911
18912 type = TYPE_MAIN_VARIANT (type);
18913 tree_size = TYPE_SIZE_UNIT (type);
18914 return ((tree_size != NULL_TREE)
18915 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18916 : NULL);
18917 }
18918
18919 /* Helper structure for RECORD_TYPE processing. */
18920 struct vlr_context
18921 {
18922 /* Root RECORD_TYPE. It is needed to generate data member location
18923 descriptions in variable-length records (VLR), but also to cope with
18924 variants, which are composed of nested structures multiplexed with
18925 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18926 function processing a FIELD_DECL, it is required to be non null. */
18927 tree struct_type;
18928 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18929 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18930 this variant part as part of the root record (in storage units). For
18931 regular records, it must be NULL_TREE. */
18932 tree variant_part_offset;
18933 };
18934
18935 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18936 addressed byte of the "containing object" for the given FIELD_DECL. If
18937 possible, return a native constant through CST_OFFSET (in which case NULL is
18938 returned); otherwise return a DWARF expression that computes the offset.
18939
18940 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18941 that offset is, either because the argument turns out to be a pointer to an
18942 ERROR_MARK node, or because the offset expression is too complex for us.
18943
18944 CTX is required: see the comment for VLR_CONTEXT. */
18945
18946 static dw_loc_descr_ref
18947 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18948 HOST_WIDE_INT *cst_offset)
18949 {
18950 tree tree_result;
18951 dw_loc_list_ref loc_result;
18952
18953 *cst_offset = 0;
18954
18955 if (TREE_CODE (decl) == ERROR_MARK)
18956 return NULL;
18957 else
18958 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18959
18960 /* We cannot handle variable bit offsets at the moment, so abort if that's
18961 the case. */
18962 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18963 return NULL;
18964
18965 #ifdef PCC_BITFIELD_TYPE_MATTERS
18966 /* We used to handle only constant offsets in all cases. Now, we properly
18967 handle dynamic byte offsets only when the PCC bitfield type doesn't
18968 matter. */
18969 if (PCC_BITFIELD_TYPE_MATTERS
18970 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18971 {
18972 offset_int object_offset_in_bits;
18973 offset_int object_offset_in_bytes;
18974 offset_int bitpos_int;
18975 tree type;
18976 tree field_size_tree;
18977 offset_int deepest_bitpos;
18978 offset_int field_size_in_bits;
18979 unsigned int type_align_in_bits;
18980 unsigned int decl_align_in_bits;
18981 offset_int type_size_in_bits;
18982
18983 bitpos_int = wi::to_offset (bit_position (decl));
18984 type = field_type (decl);
18985 type_size_in_bits = offset_int_type_size_in_bits (type);
18986 type_align_in_bits = simple_type_align_in_bits (type);
18987
18988 field_size_tree = DECL_SIZE (decl);
18989
18990 /* The size could be unspecified if there was an error, or for
18991 a flexible array member. */
18992 if (!field_size_tree)
18993 field_size_tree = bitsize_zero_node;
18994
18995 /* If the size of the field is not constant, use the type size. */
18996 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18997 field_size_in_bits = wi::to_offset (field_size_tree);
18998 else
18999 field_size_in_bits = type_size_in_bits;
19000
19001 decl_align_in_bits = simple_decl_align_in_bits (decl);
19002
19003 /* The GCC front-end doesn't make any attempt to keep track of the
19004 starting bit offset (relative to the start of the containing
19005 structure type) of the hypothetical "containing object" for a
19006 bit-field. Thus, when computing the byte offset value for the
19007 start of the "containing object" of a bit-field, we must deduce
19008 this information on our own. This can be rather tricky to do in
19009 some cases. For example, handling the following structure type
19010 definition when compiling for an i386/i486 target (which only
19011 aligns long long's to 32-bit boundaries) can be very tricky:
19012
19013 struct S { int field1; long long field2:31; };
19014
19015 Fortunately, there is a simple rule-of-thumb which can be used
19016 in such cases. When compiling for an i386/i486, GCC will
19017 allocate 8 bytes for the structure shown above. It decides to
19018 do this based upon one simple rule for bit-field allocation.
19019 GCC allocates each "containing object" for each bit-field at
19020 the first (i.e. lowest addressed) legitimate alignment boundary
19021 (based upon the required minimum alignment for the declared
19022 type of the field) which it can possibly use, subject to the
19023 condition that there is still enough available space remaining
19024 in the containing object (when allocated at the selected point)
19025 to fully accommodate all of the bits of the bit-field itself.
19026
19027 This simple rule makes it obvious why GCC allocates 8 bytes for
19028 each object of the structure type shown above. When looking
19029 for a place to allocate the "containing object" for `field2',
19030 the compiler simply tries to allocate a 64-bit "containing
19031 object" at each successive 32-bit boundary (starting at zero)
19032 until it finds a place to allocate that 64-bit field such that
19033 at least 31 contiguous (and previously unallocated) bits remain
19034 within that selected 64 bit field. (As it turns out, for the
19035 example above, the compiler finds it is OK to allocate the
19036 "containing object" 64-bit field at bit-offset zero within the
19037 structure type.)
19038
19039 Here we attempt to work backwards from the limited set of facts
19040 we're given, and we try to deduce from those facts, where GCC
19041 must have believed that the containing object started (within
19042 the structure type). The value we deduce is then used (by the
19043 callers of this routine) to generate DW_AT_location and
19044 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19045 the case of DW_AT_location, regular fields as well). */
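
/* As a worked example of the deduction below (using the i386 layout and the
   struct S described above, so the numbers are illustrative): bitpos_int is
   32 and field_size_in_bits is 31, so deepest_bitpos is 63. Subtracting the
   64-bit type size gives -1, which rounds up to the 32-bit type alignment as
   0: the "containing object" is deduced to start at byte 0 of S. */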
19046
19047 /* Figure out the bit-distance from the start of the structure to
19048 the "deepest" bit of the bit-field. */
19049 deepest_bitpos = bitpos_int + field_size_in_bits;
19050
19051 /* This is the tricky part. Use some fancy footwork to deduce
19052 where the lowest addressed bit of the containing object must
19053 be. */
19054 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19055
19056 /* Round up to type_align by default. This works best for
19057 bitfields. */
19058 object_offset_in_bits
19059 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19060
19061 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19062 {
19063 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19064
19065 /* Round up to decl_align instead. */
19066 object_offset_in_bits
19067 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19068 }
19069
19070 object_offset_in_bytes
19071 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19072 if (ctx->variant_part_offset == NULL_TREE)
19073 {
19074 *cst_offset = object_offset_in_bytes.to_shwi ();
19075 return NULL;
19076 }
19077 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19078 }
19079 else
19080 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19081 tree_result = byte_position (decl);
19082
19083 if (ctx->variant_part_offset != NULL_TREE)
19084 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19085 ctx->variant_part_offset, tree_result);
19086
19087 /* If the byte offset is a constant, it's simpler to handle a native
19088 constant rather than a DWARF expression. */
19089 if (TREE_CODE (tree_result) == INTEGER_CST)
19090 {
19091 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19092 return NULL;
19093 }
19094 struct loc_descr_context loc_ctx = {
19095 ctx->struct_type, /* context_type */
19096 NULL_TREE, /* base_decl */
19097 NULL, /* dpi */
19098 false, /* placeholder_arg */
19099 false /* placeholder_seen */
19100 };
19101 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19102
19103 /* We want a DWARF expression: abort if we only have a location list with
19104 multiple elements. */
19105 if (!loc_result || !single_element_loc_list_p (loc_result))
19106 return NULL;
19107 else
19108 return loc_result->expr;
19109 }
19110 \f
19111 /* The following routines define various Dwarf attributes and any data
19112 associated with them. */
19113
19114 /* Add a location description attribute value to a DIE.
19115
19116 This emits location attributes suitable for whole variables and
19117 whole parameters. Note that the location attributes for struct fields are
19118 generated by the routine `data_member_location_attribute' below. */
19119
19120 static inline void
19121 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19122 dw_loc_list_ref descr)
19123 {
19124 bool check_no_locviews = true;
19125 if (descr == 0)
19126 return;
19127 if (single_element_loc_list_p (descr))
19128 add_AT_loc (die, attr_kind, descr->expr);
19129 else
19130 {
19131 add_AT_loc_list (die, attr_kind, descr);
19132 gcc_assert (descr->ll_symbol);
19133 if (attr_kind == DW_AT_location && descr->vl_symbol
19134 && dwarf2out_locviews_in_attribute ())
19135 {
19136 add_AT_view_list (die, DW_AT_GNU_locviews);
19137 check_no_locviews = false;
19138 }
19139 }
19140
19141 if (check_no_locviews)
19142 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19143 }
19144
19145 /* Add DW_AT_accessibility attribute to DIE if needed. */
19146
19147 static void
19148 add_accessibility_attribute (dw_die_ref die, tree decl)
19149 {
19150 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19151 children, otherwise the default is DW_ACCESS_public. In DWARF2
19152 the default has always been DW_ACCESS_public. */
19153 if (TREE_PROTECTED (decl))
19154 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19155 else if (TREE_PRIVATE (decl))
19156 {
19157 if (dwarf_version == 2
19158 || die->die_parent == NULL
19159 || die->die_parent->die_tag != DW_TAG_class_type)
19160 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19161 }
19162 else if (dwarf_version > 2
19163 && die->die_parent
19164 && die->die_parent->die_tag == DW_TAG_class_type)
19165 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19166 }
19167
19168 /* Attach the specialized form of location attribute used for data members of
19169 struct and union types. In the special case of a FIELD_DECL node which
19170 represents a bit-field, the "offset" part of this special location
19171 descriptor must indicate the distance in bytes from the lowest-addressed
19172 byte of the containing struct or union type to the lowest-addressed byte of
19173 the "containing object" for the bit-field. (See the `field_byte_offset'
19174 function above).
19175
19176 For any given bit-field, the "containing object" is a hypothetical object
19177 (of some integral or enum type) within which the given bit-field lives. The
19178 type of this hypothetical "containing object" is always the same as the
19179 declared type of the individual bit-field itself (for GCC anyway... the
19180 DWARF spec doesn't actually mandate this). Note that it is the size (in
19181 bytes) of the hypothetical "containing object" which will be given in the
19182 DW_AT_byte_size attribute for this bit-field. (See the
19183 `byte_size_attribute' function below.) It is also used when calculating the
19184 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19185 function below.)
19186
19187 CTX is required: see the comment for VLR_CONTEXT. */
19188
19189 static void
19190 add_data_member_location_attribute (dw_die_ref die,
19191 tree decl,
19192 struct vlr_context *ctx)
19193 {
19194 HOST_WIDE_INT offset;
19195 dw_loc_descr_ref loc_descr = 0;
19196
19197 if (TREE_CODE (decl) == TREE_BINFO)
19198 {
19199 /* We're working on the TAG_inheritance for a base class. */
19200 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19201 {
19202 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19203 aren't at a fixed offset from all (sub)objects of the same
19204 type. We need to extract the appropriate offset from our
19205 vtable. The following dwarf expression means
19206
19207 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19208
19209 This is specific to the V3 ABI, of course. */
19210
19211 dw_loc_descr_ref tmp;
19212
19213 /* Make a copy of the object address. */
19214 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19215 add_loc_descr (&loc_descr, tmp);
19216
19217 /* Extract the vtable address. */
19218 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19219 add_loc_descr (&loc_descr, tmp);
19220
19221 /* Calculate the address of the offset. */
19222 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19223 gcc_assert (offset < 0);
19224
19225 tmp = int_loc_descriptor (-offset);
19226 add_loc_descr (&loc_descr, tmp);
19227 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19228 add_loc_descr (&loc_descr, tmp);
19229
19230 /* Extract the offset. */
19231 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19232 add_loc_descr (&loc_descr, tmp);
19233
19234 /* Add it to the object address. */
19235 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19236 add_loc_descr (&loc_descr, tmp);
19237 }
19238 else
19239 offset = tree_to_shwi (BINFO_OFFSET (decl));
19240 }
19241 else
19242 {
19243 loc_descr = field_byte_offset (decl, ctx, &offset);
19244
19245 /* If loc_descr is available then we know the field offset is dynamic.
19246 However, GDB does not handle dynamic field offsets very well at the
19247 moment. */
19248 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19249 {
19250 loc_descr = NULL;
19251 offset = 0;
19252 }
19253
19254 /* Data member location evaluation starts with the base address on the
19255 stack. Compute the field offset and add it to this base address. */
19256 else if (loc_descr != NULL)
19257 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19258 }
19259
19260 if (! loc_descr)
19261 {
19262 /* While DW_AT_data_bit_offset was already added in DWARF4,
19263 GDB, for example, only added support for it in November 2016. For
19264 DWARF5 we need newer debug info consumers anyway. We might change
19265 this to dwarf_version >= 4 once most consumers have caught up. */
19266 if (dwarf_version >= 5
19267 && TREE_CODE (decl) == FIELD_DECL
19268 && DECL_BIT_FIELD_TYPE (decl))
19269 {
19270 tree off = bit_position (decl);
19271 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19272 {
19273 remove_AT (die, DW_AT_byte_size);
19274 remove_AT (die, DW_AT_bit_offset);
19275 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19276 return;
19277 }
19278 }
19279 if (dwarf_version > 2)
19280 {
19281 /* Don't need to output a location expression, just the constant. */
19282 if (offset < 0)
19283 add_AT_int (die, DW_AT_data_member_location, offset);
19284 else
19285 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19286 return;
19287 }
19288 else
19289 {
19290 enum dwarf_location_atom op;
19291
19292 /* The DWARF2 standard says that we should assume that the structure
19293 address is already on the stack, so we can specify a structure
19294 field address by using DW_OP_plus_uconst. */
19295 op = DW_OP_plus_uconst;
19296 loc_descr = new_loc_descr (op, offset, 0);
19297 }
19298 }
19299
19300 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19301 }
19302
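/* Editorial sketch (not part of the original sources): the virtual-base
   branch above builds the following DWARF expression, shown here with the
   stack contents after each op for a hypothetical BINFO_VPTR_FIELD offset
   of -24 (so the code pushes 24):

       start              [ObAddr]
       DW_OP_dup          [ObAddr, ObAddr]
       DW_OP_deref        [ObAddr, vptr]            ; vptr = *ObAddr
       <push 24>          [ObAddr, vptr, 24]        ; via int_loc_descriptor
       DW_OP_minus        [ObAddr, vptr - 24]       ; address of vtable slot
       DW_OP_deref        [ObAddr, *(vptr - 24)]    ; vbase offset value
       DW_OP_plus         [ObAddr + *(vptr - 24)]   ; BaseAddr

   which matches BaseAddr = ObAddr + *((*ObAddr) - Offset) with Offset = 24.
   The exact constant opcode used for the push is whatever
   int_loc_descriptor selects for the value.  */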
19303 /* Writes integer values to dw_vec_const array. */
19304
19305 static void
19306 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19307 {
19308 while (size != 0)
19309 {
19310 *dest++ = val & 0xff;
19311 val >>= 8;
19312 --size;
19313 }
19314 }
19315
19316 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19317
19318 static HOST_WIDE_INT
19319 extract_int (const unsigned char *src, unsigned int size)
19320 {
19321 HOST_WIDE_INT val = 0;
19322
19323 src += size;
19324 while (size != 0)
19325 {
19326 val <<= 8;
19327 val |= *--src & 0xff;
19328 --size;
19329 }
19330 return val;
19331 }
19332
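/* Editorial sketch (not part of the original sources, kept out of the
   build): a self-contained illustration of the byte order used by
   insert_int and extract_int above -- the least significant byte always
   lands in the lowest-addressed array slot, independently of the host's
   endianness, and extract_int reverses the transformation.  */
#if 0
static void
insert_extract_int_example (void)
{
  unsigned char buf[4];

  /* 0x0a0b0c0d is written least-significant byte first.  */
  insert_int (0x0a0b0c0d, 4, buf);
  gcc_assert (buf[0] == 0x0d && buf[1] == 0x0c
	      && buf[2] == 0x0b && buf[3] == 0x0a);

  /* Reading the bytes back yields the original value.  */
  gcc_assert (extract_int (buf, 4) == 0x0a0b0c0d);
}
#endif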
19333 /* Writes wide_int values to dw_vec_const array. */
19334
19335 static void
19336 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19337 {
19338 int i;
19339
19340 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19341 {
19342 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19343 return;
19344 }
19345
19346 /* We'd have to extend this code to support odd sizes. */
19347 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19348
19349 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19350
19351 if (WORDS_BIG_ENDIAN)
19352 for (i = n - 1; i >= 0; i--)
19353 {
19354 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19355 dest += sizeof (HOST_WIDE_INT);
19356 }
19357 else
19358 for (i = 0; i < n; i++)
19359 {
19360 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19361 dest += sizeof (HOST_WIDE_INT);
19362 }
19363 }
19364
19365 /* Writes floating point values to dw_vec_const array. */
19366
19367 static void
19368 insert_float (const_rtx rtl, unsigned char *array)
19369 {
19370 long val[4];
19371 int i;
19372 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19373
19374 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19375
19376 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19377 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19378 {
19379 insert_int (val[i], 4, array);
19380 array += 4;
19381 }
19382 }
19383
19384 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19385 does not have a "location" either in memory or in a register. These
19386 things can arise in GNU C when a constant is passed as an actual parameter
19387 to an inlined function. They can also arise in C++ where declared
19388 constants do not necessarily get memory "homes". */
19389
19390 static bool
19391 add_const_value_attribute (dw_die_ref die, rtx rtl)
19392 {
19393 switch (GET_CODE (rtl))
19394 {
19395 case CONST_INT:
19396 {
19397 HOST_WIDE_INT val = INTVAL (rtl);
19398
19399 if (val < 0)
19400 add_AT_int (die, DW_AT_const_value, val);
19401 else
19402 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19403 }
19404 return true;
19405
19406 case CONST_WIDE_INT:
19407 {
19408 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19409 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19410 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19411 wide_int w = wi::zext (w1, prec);
19412 add_AT_wide (die, DW_AT_const_value, w);
19413 }
19414 return true;
19415
19416 case CONST_DOUBLE:
19417 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19418 floating-point constant. A CONST_DOUBLE is used whenever the
19419 constant requires more than one word in order to be adequately
19420 represented. */
19421 if (TARGET_SUPPORTS_WIDE_INT == 0
19422 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19423 add_AT_double (die, DW_AT_const_value,
19424 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19425 else
19426 {
19427 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19428 unsigned int length = GET_MODE_SIZE (mode);
19429 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19430
19431 insert_float (rtl, array);
19432 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19433 }
19434 return true;
19435
19436 case CONST_VECTOR:
19437 {
19438 unsigned int length;
19439 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19440 return false;
19441
19442 machine_mode mode = GET_MODE (rtl);
19443 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19444 unsigned char *array
19445 = ggc_vec_alloc<unsigned char> (length * elt_size);
19446 unsigned int i;
19447 unsigned char *p;
19448 machine_mode imode = GET_MODE_INNER (mode);
19449
19450 switch (GET_MODE_CLASS (mode))
19451 {
19452 case MODE_VECTOR_INT:
19453 for (i = 0, p = array; i < length; i++, p += elt_size)
19454 {
19455 rtx elt = CONST_VECTOR_ELT (rtl, i);
19456 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19457 }
19458 break;
19459
19460 case MODE_VECTOR_FLOAT:
19461 for (i = 0, p = array; i < length; i++, p += elt_size)
19462 {
19463 rtx elt = CONST_VECTOR_ELT (rtl, i);
19464 insert_float (elt, p);
19465 }
19466 break;
19467
19468 default:
19469 gcc_unreachable ();
19470 }
19471
19472 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19473 }
19474 return true;
19475
19476 case CONST_STRING:
19477 if (dwarf_version >= 4 || !dwarf_strict)
19478 {
19479 dw_loc_descr_ref loc_result;
19480 resolve_one_addr (&rtl);
19481 rtl_addr:
19482 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19483 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19484 add_AT_loc (die, DW_AT_location, loc_result);
19485 vec_safe_push (used_rtx_array, rtl);
19486 return true;
19487 }
19488 return false;
19489
19490 case CONST:
19491 if (CONSTANT_P (XEXP (rtl, 0)))
19492 return add_const_value_attribute (die, XEXP (rtl, 0));
19493 /* FALLTHROUGH */
19494 case SYMBOL_REF:
19495 if (!const_ok_for_output (rtl))
19496 return false;
19497 /* FALLTHROUGH */
19498 case LABEL_REF:
19499 if (dwarf_version >= 4 || !dwarf_strict)
19500 goto rtl_addr;
19501 return false;
19502
19503 case PLUS:
19504 /* In cases where an inlined instance of an inline function is passed
19505 the address of an `auto' variable (which is local to the caller) we
19506 can get a situation where the DECL_RTL of the artificial local
19507 variable (for the inlining) which acts as a stand-in for the
19508 corresponding formal parameter (of the inline function) will look
19509 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19510 exactly a compile-time constant expression, but it isn't the address
19511 of the (artificial) local variable either. Rather, it represents the
19512 *value* which the artificial local variable always has during its
19513 lifetime. We currently have no way to represent such quasi-constant
19514 values in Dwarf, so for now we just punt and generate nothing. */
19515 return false;
19516
19517 case HIGH:
19518 case CONST_FIXED:
19519 return false;
19520
19521 case MEM:
19522 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19523 && MEM_READONLY_P (rtl)
19524 && GET_MODE (rtl) == BLKmode)
19525 {
19526 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19527 return true;
19528 }
19529 return false;
19530
19531 default:
19532 /* No other kinds of rtx should be possible here. */
19533 gcc_unreachable ();
19534 }
19535 return false;
19536 }
19537
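/* Editorial note (not part of the original sources): a quick sketch of the
   dispatch above -- (const_int 42) becomes an unsigned DW_AT_const_value
   while (const_int -1) becomes a signed one, so the value's signedness is
   preserved in the chosen form; a CONST_STRING, and SYMBOL_REF/LABEL_REF
   addresses, are instead emitted (when DWARF 4+ or non-strict DWARF is in
   use) as a DW_AT_location of the form DW_OP_addr <address>;
   DW_OP_stack_value.  */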
19538 /* Determine whether the evaluation of EXPR references any variables
19539 or functions which aren't otherwise used (and therefore may not be
19540 output). */
19541 static tree
19542 reference_to_unused (tree * tp, int * walk_subtrees,
19543 void * data ATTRIBUTE_UNUSED)
19544 {
19545 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19546 *walk_subtrees = 0;
19547
19548 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19549 && ! TREE_ASM_WRITTEN (*tp))
19550 return *tp;
19551 /* ??? The C++ FE emits debug information for using decls, so
19552 putting gcc_unreachable here falls over. See PR31899. For now
19553 be conservative. */
19554 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19555 return *tp;
19556 else if (VAR_P (*tp))
19557 {
19558 varpool_node *node = varpool_node::get (*tp);
19559 if (!node || !node->definition)
19560 return *tp;
19561 }
19562 else if (TREE_CODE (*tp) == FUNCTION_DECL
19563 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19564 {
19565 /* The call graph machinery must have finished analyzing,
19566 optimizing and gimplifying the CU by now.
19567 So if *TP has no call graph node associated
19568 to it, it means *TP will not be emitted. */
19569 if (!cgraph_node::get (*tp))
19570 return *tp;
19571 }
19572 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19573 return *tp;
19574
19575 return NULL_TREE;
19576 }
19577
19578 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19579 for use in a later add_const_value_attribute call. */
19580
19581 static rtx
19582 rtl_for_decl_init (tree init, tree type)
19583 {
19584 rtx rtl = NULL_RTX;
19585
19586 STRIP_NOPS (init);
19587
19588 /* If a variable is initialized with a string constant without embedded
19589 zeros, build CONST_STRING. */
19590 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19591 {
19592 tree enttype = TREE_TYPE (type);
19593 tree domain = TYPE_DOMAIN (type);
19594 scalar_int_mode mode;
19595
19596 if (is_int_mode (TYPE_MODE (enttype), &mode)
19597 && GET_MODE_SIZE (mode) == 1
19598 && domain
19599 && TYPE_MAX_VALUE (domain)
19600 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19601 && integer_zerop (TYPE_MIN_VALUE (domain))
19602 && compare_tree_int (TYPE_MAX_VALUE (domain),
19603 TREE_STRING_LENGTH (init) - 1) == 0
19604 && ((size_t) TREE_STRING_LENGTH (init)
19605 == strlen (TREE_STRING_POINTER (init)) + 1))
19606 {
19607 rtl = gen_rtx_CONST_STRING (VOIDmode,
19608 ggc_strdup (TREE_STRING_POINTER (init)));
19609 rtl = gen_rtx_MEM (BLKmode, rtl);
19610 MEM_READONLY_P (rtl) = 1;
19611 }
19612 }
19613 /* Other aggregates, and complex values, could be represented using
19614 CONCAT: FIXME! */
19615 else if (AGGREGATE_TYPE_P (type)
19616 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19617 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19618 || TREE_CODE (type) == COMPLEX_TYPE)
19619 ;
19620 /* Vectors only work if their mode is supported by the target.
19621 FIXME: generic vectors ought to work too. */
19622 else if (TREE_CODE (type) == VECTOR_TYPE
19623 && !VECTOR_MODE_P (TYPE_MODE (type)))
19624 ;
19625 /* If the initializer is something that we know will expand into an
19626 immediate RTL constant, expand it now. We must be careful not to
19627 reference variables which won't be output. */
19628 else if (initializer_constant_valid_p (init, type)
19629 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19630 {
19631 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19632 possible. */
19633 if (TREE_CODE (type) == VECTOR_TYPE)
19634 switch (TREE_CODE (init))
19635 {
19636 case VECTOR_CST:
19637 break;
19638 case CONSTRUCTOR:
19639 if (TREE_CONSTANT (init))
19640 {
19641 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19642 bool constant_p = true;
19643 tree value;
19644 unsigned HOST_WIDE_INT ix;
19645
19646 /* Even when ctor is constant, it might contain non-*_CST
19647 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19648 belong in VECTOR_CST nodes. */
19649 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19650 if (!CONSTANT_CLASS_P (value))
19651 {
19652 constant_p = false;
19653 break;
19654 }
19655
19656 if (constant_p)
19657 {
19658 init = build_vector_from_ctor (type, elts);
19659 break;
19660 }
19661 }
19662 /* FALLTHRU */
19663
19664 default:
19665 return NULL;
19666 }
19667
19668 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19669
19670 /* If expand_expr returns a MEM, it wasn't immediate. */
19671 gcc_assert (!rtl || !MEM_P (rtl));
19672 }
19673
19674 return rtl;
19675 }
19676
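/* Editorial note (not part of the original sources): as a sketch of the
   STRING_CST case above,

       static const char greeting[] = "abc";

   satisfies every check -- single-byte element mode, a zero-based domain
   whose maximum equals TREE_STRING_LENGTH - 1, and no embedded zeros per
   the strlen comparison -- so its initializer is represented as a
   read-only BLKmode MEM wrapping a CONST_STRING.  An initializer that
   contains an embedded '\0', or whose array domain does not exactly cover
   the string including its terminating NUL, falls through with rtl still
   NULL_RTX.  */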
19677 /* Generate RTL for the variable DECL to represent its location. */
19678
19679 static rtx
19680 rtl_for_decl_location (tree decl)
19681 {
19682 rtx rtl;
19683
19684 /* Here we have to decide where we are going to say the parameter "lives"
19685 (as far as the debugger is concerned). We only have a couple of
19686 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19687
19688 DECL_RTL normally indicates where the parameter lives during most of the
19689 activation of the function. If optimization is enabled however, this
19690 could be either NULL or else a pseudo-reg. Both of those cases indicate
19691 that the parameter doesn't really live anywhere (as far as the code
19692 generation parts of GCC are concerned) during most of the function's
19693 activation. That will happen (for example) if the parameter is never
19694 referenced within the function.
19695
19696 We could just generate a location descriptor here for all non-NULL
19697 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19698 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19699 where DECL_RTL is NULL or is a pseudo-reg.
19700
19701 Note however that we can only get away with using DECL_INCOMING_RTL as
19702 a backup substitute for DECL_RTL in certain limited cases. In cases
19703 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19704 we can be sure that the parameter was passed using the same type as it is
19705 declared to have within the function, and that its DECL_INCOMING_RTL
19706 points us to a place where a value of that type is passed.
19707
19708 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19709 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19710 because in these cases DECL_INCOMING_RTL points us to a value of some
19711 type which is *different* from the type of the parameter itself. Thus,
19712 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19713 such cases, the debugger would end up (for example) trying to fetch a
19714 `float' from a place which actually contains the first part of a
19715 `double'. That would lead to really incorrect and confusing
19716 output at debug-time.
19717
19718 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19719 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19720 are a couple of exceptions however. On little-endian machines we can
19721 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19722 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19723 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19724 when (on a little-endian machine) a non-prototyped function has a
19725 parameter declared to be of type `short' or `char'. In such cases,
19726 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19727 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19728 passed `int' value. If the debugger then uses that address to fetch
19729 a `short' or a `char' (on a little-endian machine) the result will be
19730 the correct data, so we allow for such exceptional cases below.
19731
19732 Note that our goal here is to describe the place where the given formal
19733 parameter lives during most of the function's activation (i.e. between the
19734 end of the prologue and the start of the epilogue). We'll do that as best
19735 as we can. Note however that if the given formal parameter is modified
19736 sometime during the execution of the function, then a stack backtrace (at
19737 debug-time) will show the function as having been called with the *new*
19738 value rather than the value which was originally passed in. This happens
19739 rarely enough that it is not a major problem, but it *is* a problem, and
19740 I'd like to fix it.
19741
19742 A future version of dwarf2out.c may generate two additional attributes for
19743 any given DW_TAG_formal_parameter DIE which will describe the "passed
19744 type" and the "passed location" for the given formal parameter in addition
19745 to the attributes we now generate to indicate the "declared type" and the
19746 "active location" for each parameter. This additional set of attributes
19747 could be used by debuggers for stack backtraces. Separately, note that
19748 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19749 This happens (for example) for inlined-instances of inline function formal
19750 parameters which are never referenced. This really shouldn't be
19751 happening. All PARM_DECL nodes should get valid non-NULL
19752 DECL_INCOMING_RTL values. FIXME. */
19753
19754 /* Use DECL_RTL as the "location" unless we find something better. */
19755 rtl = DECL_RTL_IF_SET (decl);
19756
19757 /* When generating abstract instances, ignore everything except
19758 constants, symbols living in memory, and symbols living in
19759 fixed registers. */
19760 if (! reload_completed)
19761 {
19762 if (rtl
19763 && (CONSTANT_P (rtl)
19764 || (MEM_P (rtl)
19765 && CONSTANT_P (XEXP (rtl, 0)))
19766 || (REG_P (rtl)
19767 && VAR_P (decl)
19768 && TREE_STATIC (decl))))
19769 {
19770 rtl = targetm.delegitimize_address (rtl);
19771 return rtl;
19772 }
19773 rtl = NULL_RTX;
19774 }
19775 else if (TREE_CODE (decl) == PARM_DECL)
19776 {
19777 if (rtl == NULL_RTX
19778 || is_pseudo_reg (rtl)
19779 || (MEM_P (rtl)
19780 && is_pseudo_reg (XEXP (rtl, 0))
19781 && DECL_INCOMING_RTL (decl)
19782 && MEM_P (DECL_INCOMING_RTL (decl))
19783 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19784 {
19785 tree declared_type = TREE_TYPE (decl);
19786 tree passed_type = DECL_ARG_TYPE (decl);
19787 machine_mode dmode = TYPE_MODE (declared_type);
19788 machine_mode pmode = TYPE_MODE (passed_type);
19789
19790 /* This decl represents a formal parameter which was optimized out.
19791 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19792 all cases where (rtl == NULL_RTX) just below. */
19793 if (dmode == pmode)
19794 rtl = DECL_INCOMING_RTL (decl);
19795 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19796 && SCALAR_INT_MODE_P (dmode)
19797 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19798 && DECL_INCOMING_RTL (decl))
19799 {
19800 rtx inc = DECL_INCOMING_RTL (decl);
19801 if (REG_P (inc))
19802 rtl = inc;
19803 else if (MEM_P (inc))
19804 {
19805 if (BYTES_BIG_ENDIAN)
19806 rtl = adjust_address_nv (inc, dmode,
19807 GET_MODE_SIZE (pmode)
19808 - GET_MODE_SIZE (dmode));
19809 else
19810 rtl = inc;
19811 }
19812 }
19813 }
19814
19815 /* If the parm was passed in registers, but lives on the stack, then
19816 make a big endian correction if the mode of the type of the
19817 parameter is not the same as the mode of the rtl. */
19818 /* ??? This is the same series of checks that are made in dbxout.c before
19819 we reach the big endian correction code there. It isn't clear if all
19820 of these checks are necessary here, but keeping them all is the safe
19821 thing to do. */
19822 else if (MEM_P (rtl)
19823 && XEXP (rtl, 0) != const0_rtx
19824 && ! CONSTANT_P (XEXP (rtl, 0))
19825 /* Not passed in memory. */
19826 && !MEM_P (DECL_INCOMING_RTL (decl))
19827 /* Not passed by invisible reference. */
19828 && (!REG_P (XEXP (rtl, 0))
19829 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19830 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19831 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19832 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19833 #endif
19834 )
19835 /* Big endian correction check. */
19836 && BYTES_BIG_ENDIAN
19837 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19838 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19839 UNITS_PER_WORD))
19840 {
19841 machine_mode addr_mode = get_address_mode (rtl);
19842 poly_int64 offset = (UNITS_PER_WORD
19843 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19844
19845 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19846 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19847 }
19848 }
19849 else if (VAR_P (decl)
19850 && rtl
19851 && MEM_P (rtl)
19852 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19853 {
19854 machine_mode addr_mode = get_address_mode (rtl);
19855 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19856 GET_MODE (rtl));
19857
19858 /* If a variable is declared "register" yet is smaller than
19859 a register, then if we store the variable to memory, it
19860 looks like we're storing a register-sized value, when in
19861 fact we are not. We need to adjust the offset of the
19862 storage location to reflect the actual value's bytes,
19863 else gdb will not be able to display it. */
19864 if (maybe_ne (offset, 0))
19865 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19866 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19867 }
19868
19869 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19870 and will have been substituted directly into all expressions that use it.
19871 C does not have such a concept, but C++ and other languages do. */
19872 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19873 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19874
19875 if (rtl)
19876 rtl = targetm.delegitimize_address (rtl);
19877
19878 /* If we don't look past the constant pool, we risk emitting a
19879 reference to a constant pool entry that isn't referenced from
19880 code, and thus is not emitted. */
19881 if (rtl)
19882 rtl = avoid_constant_pool_reference (rtl);
19883
19884 /* Try harder to get a rtl. If this symbol ends up not being emitted
19885 in the current CU, resolve_addr will remove the expression referencing
19886 it. */
19887 if (rtl == NULL_RTX
19888 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19889 && VAR_P (decl)
19890 && !DECL_EXTERNAL (decl)
19891 && TREE_STATIC (decl)
19892 && DECL_NAME (decl)
19893 && !DECL_HARD_REGISTER (decl)
19894 && DECL_MODE (decl) != VOIDmode)
19895 {
19896 rtl = make_decl_rtl_for_debug (decl);
19897 if (!MEM_P (rtl)
19898 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19899 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19900 rtl = NULL_RTX;
19901 }
19902
19903 return rtl;
19904 }
19905
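/* Editorial sketch (not part of the original sources): for the
   DECL_INCOMING_RTL fallback above, consider a non-prototyped function on
   a big-endian target where a 'short' parameter (dmode HImode, 2 bytes) is
   promoted and passed as an 'int' (pmode SImode, 4 bytes) in a stack slot.
   adjust_address_nv offsets the incoming MEM by
   GET_MODE_SIZE (pmode) - GET_MODE_SIZE (dmode) = 2 bytes, so the location
   handed to the debugger covers the two bytes that actually hold the
   'short' value; on a little-endian target the unadjusted address already
   points at them.  */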
19906 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19907 returned. If so, the decl for the COMMON block is returned, and the
19908 value is the offset into the common block for the symbol. */
19909
19910 static tree
19911 fortran_common (tree decl, HOST_WIDE_INT *value)
19912 {
19913 tree val_expr, cvar;
19914 machine_mode mode;
19915 poly_int64 bitsize, bitpos;
19916 tree offset;
19917 HOST_WIDE_INT cbitpos;
19918 int unsignedp, reversep, volatilep = 0;
19919
19920 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19921 it does not have a value (the offset into the common area), or if it
19922 is thread local (as opposed to global) then it isn't common, and shouldn't
19923 be handled as such. */
19924 if (!VAR_P (decl)
19925 || !TREE_STATIC (decl)
19926 || !DECL_HAS_VALUE_EXPR_P (decl)
19927 || !is_fortran ())
19928 return NULL_TREE;
19929
19930 val_expr = DECL_VALUE_EXPR (decl);
19931 if (TREE_CODE (val_expr) != COMPONENT_REF)
19932 return NULL_TREE;
19933
19934 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19935 &unsignedp, &reversep, &volatilep);
19936
19937 if (cvar == NULL_TREE
19938 || !VAR_P (cvar)
19939 || DECL_ARTIFICIAL (cvar)
19940 || !TREE_PUBLIC (cvar)
19941 /* We don't expect to have to cope with variable offsets,
19942 since at present all static data must have a constant size. */
19943 || !bitpos.is_constant (&cbitpos))
19944 return NULL_TREE;
19945
19946 *value = 0;
19947 if (offset != NULL)
19948 {
19949 if (!tree_fits_shwi_p (offset))
19950 return NULL_TREE;
19951 *value = tree_to_shwi (offset);
19952 }
19953 if (cbitpos != 0)
19954 *value += cbitpos / BITS_PER_UNIT;
19955
19956 return cvar;
19957 }
19958
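/* Editorial sketch (not part of the original sources): for a Fortran
   declaration along the lines of

       INTEGER I, J
       COMMON /BLK/ I, J

   the DECL_VALUE_EXPR of J is a COMPONENT_REF into the variable that
   represents BLK, so (assuming 4-byte INTEGERs) fortran_common for J's
   decl returns the decl of BLK and sets *value to 4, the byte offset of J
   within the common block.  */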
19959 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19960 data attribute for a variable or a parameter. We generate the
19961 DW_AT_const_value attribute only in those cases where the given variable
19962 or parameter does not have a true "location" either in memory or in a
19963 register. This can happen (for example) when a constant is passed as an
19964 actual argument in a call to an inline function. (It's possible that
19965 these things can crop up in other ways also.) Note that one type of
19966 constant value which can be passed into an inlined function is a constant
19967 pointer. This can happen for example if an actual argument in an inlined
19968 function call evaluates to a compile-time constant address.
19969
19970 CACHE_P is true if it is worth caching the location list for DECL,
19971 so that future calls can reuse it rather than regenerate it from scratch.
19972 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19973 since we will need to refer to them each time the function is inlined. */
19974
19975 static bool
19976 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19977 {
19978 rtx rtl;
19979 dw_loc_list_ref list;
19980 var_loc_list *loc_list;
19981 cached_dw_loc_list *cache;
19982
19983 if (early_dwarf)
19984 return false;
19985
19986 if (TREE_CODE (decl) == ERROR_MARK)
19987 return false;
19988
19989 if (get_AT (die, DW_AT_location)
19990 || get_AT (die, DW_AT_const_value))
19991 return true;
19992
19993 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19994 || TREE_CODE (decl) == RESULT_DECL);
19995
19996 /* Try to get some constant RTL for this decl, and use that as the value of
19997 the location. */
19998
19999 rtl = rtl_for_decl_location (decl);
20000 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20001 && add_const_value_attribute (die, rtl))
20002 return true;
20003
20004 /* See if we have a single-element location list that is equivalent to a
20005 constant value; in that case it is better to use add_const_value_attribute
20006 rather than expanding the constant value equivalent. */
20007 loc_list = lookup_decl_loc (decl);
20008 if (loc_list
20009 && loc_list->first
20010 && loc_list->first->next == NULL
20011 && NOTE_P (loc_list->first->loc)
20012 && NOTE_VAR_LOCATION (loc_list->first->loc)
20013 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20014 {
20015 struct var_loc_node *node;
20016
20017 node = loc_list->first;
20018 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20019 if (GET_CODE (rtl) == EXPR_LIST)
20020 rtl = XEXP (rtl, 0);
20021 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20022 && add_const_value_attribute (die, rtl))
20023 return true;
20024 }
20025 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20026 list several times. See if we've already cached the contents. */
20027 list = NULL;
20028 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20029 cache_p = false;
20030 if (cache_p)
20031 {
20032 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20033 if (cache)
20034 list = cache->loc_list;
20035 }
20036 if (list == NULL)
20037 {
20038 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20039 NULL);
20040 /* It is usually worth caching this result if the decl is from
20041 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20042 if (cache_p && list && list->dw_loc_next)
20043 {
20044 cached_dw_loc_list **slot
20045 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20046 DECL_UID (decl),
20047 INSERT);
20048 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20049 cache->decl_id = DECL_UID (decl);
20050 cache->loc_list = list;
20051 *slot = cache;
20052 }
20053 }
20054 if (list)
20055 {
20056 add_AT_location_description (die, DW_AT_location, list);
20057 return true;
20058 }
20059 /* None of that worked, so it must not really have a location;
20060 try adding a constant value attribute from the DECL_INITIAL. */
20061 return tree_add_const_value_attribute_for_decl (die, decl);
20062 }
20063
20064 /* Helper function for tree_add_const_value_attribute. Natively encode
20065 initializer INIT into an array. Return true if successful. */
20066
20067 static bool
20068 native_encode_initializer (tree init, unsigned char *array, int size)
20069 {
20070 tree type;
20071
20072 if (init == NULL_TREE)
20073 return false;
20074
20075 STRIP_NOPS (init);
20076 switch (TREE_CODE (init))
20077 {
20078 case STRING_CST:
20079 type = TREE_TYPE (init);
20080 if (TREE_CODE (type) == ARRAY_TYPE)
20081 {
20082 tree enttype = TREE_TYPE (type);
20083 scalar_int_mode mode;
20084
20085 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20086 || GET_MODE_SIZE (mode) != 1)
20087 return false;
20088 if (int_size_in_bytes (type) != size)
20089 return false;
20090 if (size > TREE_STRING_LENGTH (init))
20091 {
20092 memcpy (array, TREE_STRING_POINTER (init),
20093 TREE_STRING_LENGTH (init));
20094 memset (array + TREE_STRING_LENGTH (init),
20095 '\0', size - TREE_STRING_LENGTH (init));
20096 }
20097 else
20098 memcpy (array, TREE_STRING_POINTER (init), size);
20099 return true;
20100 }
20101 return false;
20102 case CONSTRUCTOR:
20103 type = TREE_TYPE (init);
20104 if (int_size_in_bytes (type) != size)
20105 return false;
20106 if (TREE_CODE (type) == ARRAY_TYPE)
20107 {
20108 HOST_WIDE_INT min_index;
20109 unsigned HOST_WIDE_INT cnt;
20110 int curpos = 0, fieldsize;
20111 constructor_elt *ce;
20112
20113 if (TYPE_DOMAIN (type) == NULL_TREE
20114 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20115 return false;
20116
20117 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20118 if (fieldsize <= 0)
20119 return false;
20120
20121 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20122 memset (array, '\0', size);
20123 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20124 {
20125 tree val = ce->value;
20126 tree index = ce->index;
20127 int pos = curpos;
20128 if (index && TREE_CODE (index) == RANGE_EXPR)
20129 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20130 * fieldsize;
20131 else if (index)
20132 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20133
20134 if (val)
20135 {
20136 STRIP_NOPS (val);
20137 if (!native_encode_initializer (val, array + pos, fieldsize))
20138 return false;
20139 }
20140 curpos = pos + fieldsize;
20141 if (index && TREE_CODE (index) == RANGE_EXPR)
20142 {
20143 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20144 - tree_to_shwi (TREE_OPERAND (index, 0));
20145 while (count-- > 0)
20146 {
20147 if (val)
20148 memcpy (array + curpos, array + pos, fieldsize);
20149 curpos += fieldsize;
20150 }
20151 }
20152 gcc_assert (curpos <= size);
20153 }
20154 return true;
20155 }
20156 else if (TREE_CODE (type) == RECORD_TYPE
20157 || TREE_CODE (type) == UNION_TYPE)
20158 {
20159 tree field = NULL_TREE;
20160 unsigned HOST_WIDE_INT cnt;
20161 constructor_elt *ce;
20162
20163 if (int_size_in_bytes (type) != size)
20164 return false;
20165
20166 if (TREE_CODE (type) == RECORD_TYPE)
20167 field = TYPE_FIELDS (type);
20168
20169 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20170 {
20171 tree val = ce->value;
20172 int pos, fieldsize;
20173
20174 if (ce->index != 0)
20175 field = ce->index;
20176
20177 if (val)
20178 STRIP_NOPS (val);
20179
20180 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20181 return false;
20182
20183 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20184 && TYPE_DOMAIN (TREE_TYPE (field))
20185 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20186 return false;
20187 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20188 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20189 return false;
20190 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20191 pos = int_byte_position (field);
20192 gcc_assert (pos + fieldsize <= size);
20193 if (val && fieldsize != 0
20194 && !native_encode_initializer (val, array + pos, fieldsize))
20195 return false;
20196 }
20197 return true;
20198 }
20199 return false;
20200 case VIEW_CONVERT_EXPR:
20201 case NON_LVALUE_EXPR:
20202 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20203 default:
20204 return native_encode_expr (init, array, size) == size;
20205 }
20206 }
20207
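/* Editorial sketch (not part of the original sources): for an array
   initializer such as

       static const short tbl[4] = { 1, 2, 3, 4 };

   native_encode_initializer is called with size = 8 (assuming a 2-byte
   short) and, via the CONSTRUCTOR/ARRAY_TYPE case above, writes each
   element in target byte order at index * fieldsize, so on a little-endian
   target the buffer becomes 01 00 02 00 03 00 04 00.  A RANGE_EXPR index
   such as the GNU extension [0 ... 3] = 5 is handled by encoding the value
   once and then memcpy'ing that fieldsize-byte chunk into each remaining
   position of the range.  */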
20208 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20209 attribute is the const value T. */
20210
20211 static bool
20212 tree_add_const_value_attribute (dw_die_ref die, tree t)
20213 {
20214 tree init;
20215 tree type = TREE_TYPE (t);
20216 rtx rtl;
20217
20218 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20219 return false;
20220
20221 init = t;
20222 gcc_assert (!DECL_P (init));
20223
20224 if (TREE_CODE (init) == INTEGER_CST)
20225 {
20226 if (tree_fits_uhwi_p (init))
20227 {
20228 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20229 return true;
20230 }
20231 if (tree_fits_shwi_p (init))
20232 {
20233 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20234 return true;
20235 }
20236 }
20237 if (! early_dwarf)
20238 {
20239 rtl = rtl_for_decl_init (init, type);
20240 if (rtl)
20241 return add_const_value_attribute (die, rtl);
20242 }
20243 /* If the host and target are sane, try harder. */
20244 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20245 && initializer_constant_valid_p (init, type))
20246 {
20247 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20248 if (size > 0 && (int) size == size)
20249 {
20250 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20251
20252 if (native_encode_initializer (init, array, size))
20253 {
20254 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20255 return true;
20256 }
20257 ggc_free (array);
20258 }
20259 }
20260 return false;
20261 }
20262
20263 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20264 attribute is the const value of T, where T is an integral constant
20265 variable with static storage duration
20266 (so it can't be a PARM_DECL or a RESULT_DECL). */
20267
20268 static bool
20269 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20270 {
20271
20272 if (!decl
20273 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20274 || (VAR_P (decl) && !TREE_STATIC (decl)))
20275 return false;
20276
20277 if (TREE_READONLY (decl)
20278 && ! TREE_THIS_VOLATILE (decl)
20279 && DECL_INITIAL (decl))
20280 /* OK */;
20281 else
20282 return false;
20283
20284 /* Don't add DW_AT_const_value if abstract origin already has one. */
20285 if (get_AT (var_die, DW_AT_const_value))
20286 return false;
20287
20288 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20289 }
20290
20291 /* Convert the CFI instructions for the current function into a
20292 location list. This is used for DW_AT_frame_base when we are targeting
20293 a DWARF2 consumer that does not support the DWARF3
20294 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20295 expressions. */
20296
20297 static dw_loc_list_ref
20298 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20299 {
20300 int ix;
20301 dw_fde_ref fde;
20302 dw_loc_list_ref list, *list_tail;
20303 dw_cfi_ref cfi;
20304 dw_cfa_location last_cfa, next_cfa;
20305 const char *start_label, *last_label, *section;
20306 dw_cfa_location remember;
20307
20308 fde = cfun->fde;
20309 gcc_assert (fde != NULL);
20310
20311 section = secname_for_decl (current_function_decl);
20312 list_tail = &list;
20313 list = NULL;
20314
20315 memset (&next_cfa, 0, sizeof (next_cfa));
20316 next_cfa.reg = INVALID_REGNUM;
20317 remember = next_cfa;
20318
20319 start_label = fde->dw_fde_begin;
20320
20321 /* ??? Bald assumption that the CIE opcode list does not contain
20322 advance opcodes. */
20323 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20324 lookup_cfa_1 (cfi, &next_cfa, &remember);
20325
20326 last_cfa = next_cfa;
20327 last_label = start_label;
20328
20329 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20330 {
20331 /* If the first partition contained no CFI adjustments, the
20332 CIE opcodes apply to the whole first partition. */
20333 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20334 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20335 list_tail = &(*list_tail)->dw_loc_next;
20336 start_label = last_label = fde->dw_fde_second_begin;
20337 }
20338
20339 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20340 {
20341 switch (cfi->dw_cfi_opc)
20342 {
20343 case DW_CFA_set_loc:
20344 case DW_CFA_advance_loc1:
20345 case DW_CFA_advance_loc2:
20346 case DW_CFA_advance_loc4:
20347 if (!cfa_equal_p (&last_cfa, &next_cfa))
20348 {
20349 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20350 start_label, 0, last_label, 0, section);
20351
20352 list_tail = &(*list_tail)->dw_loc_next;
20353 last_cfa = next_cfa;
20354 start_label = last_label;
20355 }
20356 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20357 break;
20358
20359 case DW_CFA_advance_loc:
20360 /* The encoding is complex enough that we should never emit this. */
20361 gcc_unreachable ();
20362
20363 default:
20364 lookup_cfa_1 (cfi, &next_cfa, &remember);
20365 break;
20366 }
20367 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20368 {
20369 if (!cfa_equal_p (&last_cfa, &next_cfa))
20370 {
20371 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20372 start_label, 0, last_label, 0, section);
20373
20374 list_tail = &(*list_tail)->dw_loc_next;
20375 last_cfa = next_cfa;
20376 start_label = last_label;
20377 }
20378 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20379 start_label, 0, fde->dw_fde_end, 0, section);
20380 list_tail = &(*list_tail)->dw_loc_next;
20381 start_label = last_label = fde->dw_fde_second_begin;
20382 }
20383 }
20384
20385 if (!cfa_equal_p (&last_cfa, &next_cfa))
20386 {
20387 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20388 start_label, 0, last_label, 0, section);
20389 list_tail = &(*list_tail)->dw_loc_next;
20390 start_label = last_label;
20391 }
20392
20393 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20394 start_label, 0,
20395 fde->dw_fde_second_begin
20396 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20397 section);
20398
20399 maybe_gen_llsym (list);
20400
20401 return list;
20402 }
20403
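/* Editorial note (not part of the original sources): the list built above
   has one entry per address range over which the CFA rule stays the same,
   each entry's expression being build_cfa_loc's rendering of that rule
   (e.g. a register plus displacement) with OFFSET folded in, so a
   DWARF2-only consumer can evaluate DW_AT_frame_base without
   DW_OP_call_frame_cfa support.  */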
20404 /* Compute a displacement from the "steady-state frame pointer" to the
20405 frame base (often the same as the CFA), and store it in
20406 frame_pointer_fb_offset. OFFSET is added to the displacement
20407 before the latter is negated. */
20408
20409 static void
20410 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20411 {
20412 rtx reg, elim;
20413
20414 #ifdef FRAME_POINTER_CFA_OFFSET
20415 reg = frame_pointer_rtx;
20416 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20417 #else
20418 reg = arg_pointer_rtx;
20419 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20420 #endif
20421
20422 elim = (ira_use_lra_p
20423 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20424 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20425 elim = strip_offset_and_add (elim, &offset);
20426
20427 frame_pointer_fb_offset = -offset;
20428
20429 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20430 in which to eliminate. This is because its stack pointer isn't
20431 directly accessible as a register within the ISA. To work around
20432 this, assume that while we cannot provide a proper value for
20433 frame_pointer_fb_offset, we won't need one either. */
20434 frame_pointer_fb_offset_valid
20435 = ((SUPPORTS_STACK_ALIGNMENT
20436 && (elim == hard_frame_pointer_rtx
20437 || elim == stack_pointer_rtx))
20438 || elim == (frame_pointer_needed
20439 ? hard_frame_pointer_rtx
20440 : stack_pointer_rtx));
20441 }
20442
20443 /* Generate a DW_AT_name attribute given some string value to be included as
20444 the value of the attribute. */
20445
20446 static void
20447 add_name_attribute (dw_die_ref die, const char *name_string)
20448 {
20449 if (name_string != NULL && *name_string != 0)
20450 {
20451 if (demangle_name_func)
20452 name_string = (*demangle_name_func) (name_string);
20453
20454 add_AT_string (die, DW_AT_name, name_string);
20455 }
20456 }
20457
20458 /* Retrieve the descriptive type of TYPE, if any; make sure it has a
20459 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20460 of TYPE accordingly.
20461
20462 ??? This is a temporary measure until after we're able to generate
20463 regular DWARF for the complex Ada type system. */
20464
20465 static void
20466 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20467 dw_die_ref context_die)
20468 {
20469 tree dtype;
20470 dw_die_ref dtype_die;
20471
20472 if (!lang_hooks.types.descriptive_type)
20473 return;
20474
20475 dtype = lang_hooks.types.descriptive_type (type);
20476 if (!dtype)
20477 return;
20478
20479 dtype_die = lookup_type_die (dtype);
20480 if (!dtype_die)
20481 {
20482 gen_type_die (dtype, context_die);
20483 dtype_die = lookup_type_die (dtype);
20484 gcc_assert (dtype_die);
20485 }
20486
20487 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20488 }
20489
20490 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20491
20492 static const char *
20493 comp_dir_string (void)
20494 {
20495 const char *wd;
20496 char *wd1;
20497 static const char *cached_wd = NULL;
20498
20499 if (cached_wd != NULL)
20500 return cached_wd;
20501
20502 wd = get_src_pwd ();
20503 if (wd == NULL)
20504 return NULL;
20505
20506 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20507 {
20508 int wdlen;
20509
20510 wdlen = strlen (wd);
20511 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20512 strcpy (wd1, wd);
20513 wd1 [wdlen] = DIR_SEPARATOR;
20514 wd1 [wdlen + 1] = 0;
20515 wd = wd1;
20516 }
20517
20518 cached_wd = remap_debug_filename (wd);
20519 return cached_wd;
20520 }
20521
20522 /* Generate a DW_AT_comp_dir attribute for DIE. */
20523
20524 static void
20525 add_comp_dir_attribute (dw_die_ref die)
20526 {
20527 const char * wd = comp_dir_string ();
20528 if (wd != NULL)
20529 add_AT_string (die, DW_AT_comp_dir, wd);
20530 }
20531
20532 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20533 pointer computation, ...), output a representation for that bound according
20534 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20535 loc_list_from_tree for the meaning of CONTEXT. */
20536
20537 static void
20538 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20539 int forms, struct loc_descr_context *context)
20540 {
20541 dw_die_ref context_die, decl_die;
20542 dw_loc_list_ref list;
20543 bool strip_conversions = true;
20544 bool placeholder_seen = false;
20545
20546 while (strip_conversions)
20547 switch (TREE_CODE (value))
20548 {
20549 case ERROR_MARK:
20550 case SAVE_EXPR:
20551 return;
20552
20553 CASE_CONVERT:
20554 case VIEW_CONVERT_EXPR:
20555 value = TREE_OPERAND (value, 0);
20556 break;
20557
20558 default:
20559 strip_conversions = false;
20560 break;
20561 }
20562
20563 /* If possible and permitted, output the attribute as a constant. */
20564 if ((forms & dw_scalar_form_constant) != 0
20565 && TREE_CODE (value) == INTEGER_CST)
20566 {
20567 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20568
20569 /* If HOST_WIDE_INT is big enough then represent the bound as
20570 a constant value. We need to choose a form based on
20571 whether the type is signed or unsigned. We cannot just
20572 call add_AT_unsigned if the value itself is positive
20573 (add_AT_unsigned might add the unsigned value encoded as
20574 DW_FORM_data[1248]). Some DWARF consumers will look up the
20575 bounds type and then sign extend any unsigned values found
20576 for signed types. This is needed only for
20577 DW_AT_{lower,upper}_bound, since for most other attributes,
20578 consumers will treat DW_FORM_data[1248] as unsigned values,
20579 regardless of the underlying type. */
20580 if (prec <= HOST_BITS_PER_WIDE_INT
20581 || tree_fits_uhwi_p (value))
20582 {
20583 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20584 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20585 else
20586 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20587 }
20588 else
20589 /* Otherwise represent the bound as an unsigned value with
20590 the precision of its type. The precision and signedness
20591 of the type will be necessary to re-interpret it
20592 unambiguously. */
20593 add_AT_wide (die, attr, wi::to_wide (value));
20594 return;
20595 }
20596
20597 /* Otherwise, if it's possible and permitted too, output a reference to
20598 another DIE. */
20599 if ((forms & dw_scalar_form_reference) != 0)
20600 {
20601 tree decl = NULL_TREE;
20602
20603 /* Some type attributes reference an outer type. For instance, the upper
20604 bound of an array may reference an embedding record (this happens in
20605 Ada). */
20606 if (TREE_CODE (value) == COMPONENT_REF
20607 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20608 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20609 decl = TREE_OPERAND (value, 1);
20610
20611 else if (VAR_P (value)
20612 || TREE_CODE (value) == PARM_DECL
20613 || TREE_CODE (value) == RESULT_DECL)
20614 decl = value;
20615
20616 if (decl != NULL_TREE)
20617 {
20618 dw_die_ref decl_die = lookup_decl_die (decl);
20619
20620 /* ??? Can this happen, or should the variable have been bound
20621 first? Probably it can, since I imagine that we try to create
20622 the types of parameters in the order in which they exist in
20623 the list, and won't have created a forward reference to a
20624 later parameter. */
20625 if (decl_die != NULL)
20626 {
20627 add_AT_die_ref (die, attr, decl_die);
20628 return;
20629 }
20630 }
20631 }
20632
20633 /* Last chance: try to create a stack operation procedure to evaluate the
20634 value. Do nothing if even that is not possible or permitted. */
20635 if ((forms & dw_scalar_form_exprloc) == 0)
20636 return;
20637
20638 list = loc_list_from_tree (value, 2, context);
20639 if (context && context->placeholder_arg)
20640 {
20641 placeholder_seen = context->placeholder_seen;
20642 context->placeholder_seen = false;
20643 }
20644 if (list == NULL || single_element_loc_list_p (list))
20645 {
20646 /* If this attribute is neither a reference nor a constant, it is
20647 a DWARF expression rather than a location description. For that,
20648 loc_list_from_tree (value, 0, &context) is needed. */
20649 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20650 if (list2 && single_element_loc_list_p (list2))
20651 {
20652 if (placeholder_seen)
20653 {
20654 struct dwarf_procedure_info dpi;
20655 dpi.fndecl = NULL_TREE;
20656 dpi.args_count = 1;
20657 if (!resolve_args_picking (list2->expr, 1, &dpi))
20658 return;
20659 }
20660 add_AT_loc (die, attr, list2->expr);
20661 return;
20662 }
20663 }
20664
20665 /* If that failed to give a single element location list, fall back to
20666 outputting this as a reference... still if permitted. */
20667 if (list == NULL
20668 || (forms & dw_scalar_form_reference) == 0
20669 || placeholder_seen)
20670 return;
20671
20672 if (current_function_decl == 0)
20673 context_die = comp_unit_die ();
20674 else
20675 context_die = lookup_decl_die (current_function_decl);
20676
20677 decl_die = new_die (DW_TAG_variable, context_die, value);
20678 add_AT_flag (decl_die, DW_AT_artificial, 1);
20679 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20680 context_die);
20681 add_AT_location_description (decl_die, DW_AT_location, list);
20682 add_AT_die_ref (die, attr, decl_die);
20683 }
20684
20685 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20686 default. */
20687
20688 static int
20689 lower_bound_default (void)
20690 {
20691 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20692 {
20693 case DW_LANG_C:
20694 case DW_LANG_C89:
20695 case DW_LANG_C99:
20696 case DW_LANG_C11:
20697 case DW_LANG_C_plus_plus:
20698 case DW_LANG_C_plus_plus_11:
20699 case DW_LANG_C_plus_plus_14:
20700 case DW_LANG_ObjC:
20701 case DW_LANG_ObjC_plus_plus:
20702 return 0;
20703 case DW_LANG_Fortran77:
20704 case DW_LANG_Fortran90:
20705 case DW_LANG_Fortran95:
20706 case DW_LANG_Fortran03:
20707 case DW_LANG_Fortran08:
20708 return 1;
20709 case DW_LANG_UPC:
20710 case DW_LANG_D:
20711 case DW_LANG_Python:
20712 return dwarf_version >= 4 ? 0 : -1;
20713 case DW_LANG_Ada95:
20714 case DW_LANG_Ada83:
20715 case DW_LANG_Cobol74:
20716 case DW_LANG_Cobol85:
20717 case DW_LANG_Modula2:
20718 case DW_LANG_PLI:
20719 return dwarf_version >= 4 ? 1 : -1;
20720 default:
20721 return -1;
20722 }
20723 }
20724
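/* Editorial note (not part of the original sources): as used by
   add_bound_info below, this means that for a C or C++ compilation unit
   the default lower bound is 0, so an array like 'int a[10]' gets only a
   DW_AT_upper_bound of 9 and the DW_AT_lower_bound attribute is omitted;
   for Fortran the default is 1, and for the languages handled with a
   dwarf_version check (UPC, D, Python, Ada, COBOL, Modula-2, PL/I) a
   default is only assumed when emitting DWARF 4 or later.  */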
20725 /* Given a tree node describing an array bound (either lower or upper) output
20726 a representation for that bound. */
20727
20728 static void
20729 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20730 tree bound, struct loc_descr_context *context)
20731 {
20732 int dflt;
20733
20734 while (1)
20735 switch (TREE_CODE (bound))
20736 {
20737 /* Strip all conversions. */
20738 CASE_CONVERT:
20739 case VIEW_CONVERT_EXPR:
20740 bound = TREE_OPERAND (bound, 0);
20741 break;
20742
20743 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20744 are even omitted when they are the default. */
20745 case INTEGER_CST:
20746 /* If the value for this bound is the default one, we can even omit the
20747 attribute. */
20748 if (bound_attr == DW_AT_lower_bound
20749 && tree_fits_shwi_p (bound)
20750 && (dflt = lower_bound_default ()) != -1
20751 && tree_to_shwi (bound) == dflt)
20752 return;
20753
20754 /* FALLTHRU */
20755
20756 default:
20757 /* Because of the complex interactions there can be with other GNAT
20758 encodings, GDB isn't ready yet to handle a proper DWARF description
20759 for self-referential subrange bounds: let GNAT encodings do the
20760 magic in such a case. */
20761 if (is_ada ()
20762 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20763 && contains_placeholder_p (bound))
20764 return;
20765
20766 add_scalar_info (subrange_die, bound_attr, bound,
20767 dw_scalar_form_constant
20768 | dw_scalar_form_exprloc
20769 | dw_scalar_form_reference,
20770 context);
20771 return;
20772 }
20773 }
20774
20775 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20776 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20777 Note that the block of subscript information for an array type also
20778 includes information about the element type of the given array type.
20779
20780 This function reuses previously set type and bound information if
20781 available. */
20782
20783 static void
20784 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20785 {
20786 unsigned dimension_number;
20787 tree lower, upper;
20788 dw_die_ref child = type_die->die_child;
20789
20790 for (dimension_number = 0;
20791 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20792 type = TREE_TYPE (type), dimension_number++)
20793 {
20794 tree domain = TYPE_DOMAIN (type);
20795
20796 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20797 break;
20798
20799 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20800 and (in GNU C only) variable bounds. Handle all three forms
20801 here. */
20802
20803 /* Find and reuse a previously generated DW_TAG_subrange_type if
20804 available.
20805
20806 For multi-dimensional arrays, as we iterate through the
20807 various dimensions in the enclosing for loop above, we also
20808 iterate through the DIE children and pick at each
20809 DW_TAG_subrange_type previously generated (if available).
20810 Each child DW_TAG_subrange_type DIE describes the range of
20811 the current dimension. At this point we should have as many
20812 DW_TAG_subrange_type's as we have dimensions in the
20813 array. */
20814 dw_die_ref subrange_die = NULL;
20815 if (child)
20816 while (1)
20817 {
20818 child = child->die_sib;
20819 if (child->die_tag == DW_TAG_subrange_type)
20820 subrange_die = child;
20821 if (child == type_die->die_child)
20822 {
20823 /* If we wrapped around, stop looking next time. */
20824 child = NULL;
20825 break;
20826 }
20827 if (child->die_tag == DW_TAG_subrange_type)
20828 break;
20829 }
20830 if (!subrange_die)
20831 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20832
20833 if (domain)
20834 {
20835 /* We have an array type with specified bounds. */
20836 lower = TYPE_MIN_VALUE (domain);
20837 upper = TYPE_MAX_VALUE (domain);
20838
20839 /* Define the index type. */
20840 if (TREE_TYPE (domain)
20841 && !get_AT (subrange_die, DW_AT_type))
20842 {
20843 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20844 TREE_TYPE field. We can't emit debug info for this
20845 because it is an unnamed integral type. */
20846 if (TREE_CODE (domain) == INTEGER_TYPE
20847 && TYPE_NAME (domain) == NULL_TREE
20848 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20849 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20850 ;
20851 else
20852 add_type_attribute (subrange_die, TREE_TYPE (domain),
20853 TYPE_UNQUALIFIED, false, type_die);
20854 }
20855
20856 /* ??? If upper is NULL, the array has unspecified length,
20857 but it does have a lower bound. This happens with Fortran
20858 dimension arr(N:*)
20859 Since the debugger is definitely going to need to know N
20860 to produce useful results, go ahead and output the lower
20861 bound solo, and hope the debugger can cope. */
20862
20863 if (!get_AT (subrange_die, DW_AT_lower_bound))
20864 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20865 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20866 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20867 }
20868
20869 /* Otherwise we have an array type with an unspecified length. The
20870 DWARF-2 spec does not say how to handle this; let's just leave out the
20871 bounds. */
20872 }
20873 }
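
/* Illustrative sketch, not part of the implementation: for a C declaration
   such as

       int a[3][5];

   calling add_subscript_info with COLLAPSE_P set typically results in a
   single DW_TAG_array_type DIE with two DW_TAG_subrange_type children
   whose DW_AT_upper_bound values are 2 and 4.  The lower bound of 0 is
   omitted by add_bound_info because it is the default for C (see
   lower_bound_default).  */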
20874
20875 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20876
20877 static void
20878 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20879 {
20880 dw_die_ref decl_die;
20881 HOST_WIDE_INT size;
20882 dw_loc_descr_ref size_expr = NULL;
20883
20884 switch (TREE_CODE (tree_node))
20885 {
20886 case ERROR_MARK:
20887 size = 0;
20888 break;
20889 case ENUMERAL_TYPE:
20890 case RECORD_TYPE:
20891 case UNION_TYPE:
20892 case QUAL_UNION_TYPE:
20893 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20894 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20895 {
20896 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20897 return;
20898 }
20899 size_expr = type_byte_size (tree_node, &size);
20900 break;
20901 case FIELD_DECL:
20902 /* For a data member of a struct or union, the DW_AT_byte_size is
20903 generally given as the number of bytes normally allocated for an
20904 object of the *declared* type of the member itself. This is true
20905 even for bit-fields. */
20906 size = int_size_in_bytes (field_type (tree_node));
20907 break;
20908 default:
20909 gcc_unreachable ();
20910 }
20911
20912 /* Support for dynamically sized objects was introduced in DWARF 3.
20913 At the moment, however, GDB does not handle variable byte sizes very
20914 well. */
20915 if ((dwarf_version >= 3 || !dwarf_strict)
20916 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20917 && size_expr != NULL)
20918 add_AT_loc (die, DW_AT_byte_size, size_expr);
20919
20920 /* Note that `size' might be -1 when we get to this point. If it is, that
20921 indicates that the byte size of the entity in question is variable and
20922 that we could not generate a DWARF expression that computes it. */
20923 if (size >= 0)
20924 add_AT_unsigned (die, DW_AT_byte_size, size);
20925 }
20926
20927 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20928 alignment. */
20929
20930 static void
20931 add_alignment_attribute (dw_die_ref die, tree tree_node)
20932 {
20933 if (dwarf_version < 5 && dwarf_strict)
20934 return;
20935
20936 unsigned align;
20937
20938 if (DECL_P (tree_node))
20939 {
20940 if (!DECL_USER_ALIGN (tree_node))
20941 return;
20942
20943 align = DECL_ALIGN_UNIT (tree_node);
20944 }
20945 else if (TYPE_P (tree_node))
20946 {
20947 if (!TYPE_USER_ALIGN (tree_node))
20948 return;
20949
20950 align = TYPE_ALIGN_UNIT (tree_node);
20951 }
20952 else
20953 gcc_unreachable ();
20954
20955 add_AT_unsigned (die, DW_AT_alignment, align);
20956 }
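
/* A minimal sketch of the effect, assuming a C11 declaration such as

       _Alignas (16) int x;

   Because the alignment is user-specified, the variable's DIE typically
   gets DW_AT_alignment with value 16 (the alignment in bytes), provided
   we target DWARF 5 or are not restricted by -gstrict-dwarf.  */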
20957
20958 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20959 which specifies the distance in bits from the highest order bit of the
20960 "containing object" for the bit-field to the highest order bit of the
20961 bit-field itself.
20962
20963 For any given bit-field, the "containing object" is a hypothetical object
20964 (of some integral or enum type) within which the given bit-field lives. The
20965 type of this hypothetical "containing object" is always the same as the
20966 declared type of the individual bit-field itself. The determination of the
20967 exact location of the "containing object" for a bit-field is rather
20968 complicated. It's handled by the `field_byte_offset' function (above).
20969
20970 CTX is required: see the comment for VLR_CONTEXT.
20971
20972 Note that it is the size (in bytes) of the hypothetical "containing object"
20973 which will be given in the DW_AT_byte_size attribute for this bit-field.
20974 (See `add_byte_size_attribute' above.) */
20975
20976 static inline void
20977 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20978 {
20979 HOST_WIDE_INT object_offset_in_bytes;
20980 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20981 HOST_WIDE_INT bitpos_int;
20982 HOST_WIDE_INT highest_order_object_bit_offset;
20983 HOST_WIDE_INT highest_order_field_bit_offset;
20984 HOST_WIDE_INT bit_offset;
20985
20986 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20987
20988 /* Must be a field and a bit field. */
20989 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20990
20991 /* We can't yet handle bit-fields whose offsets are variable, so if we
20992 encounter such things, just return without generating any attribute
20993 whatsoever. Likewise for variable or too large size. */
20994 if (! tree_fits_shwi_p (bit_position (decl))
20995 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
20996 return;
20997
20998 bitpos_int = int_bit_position (decl);
20999
21000 /* Note that the bit offset is always the distance (in bits) from the
21001 highest-order bit of the "containing object" to the highest-order bit of
21002 the bit-field itself. Since the "high-order end" of any object or field
21003 is different on big-endian and little-endian machines, the computation
21004 below must take account of these differences. */
21005 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21006 highest_order_field_bit_offset = bitpos_int;
21007
21008 if (! BYTES_BIG_ENDIAN)
21009 {
21010 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21011 highest_order_object_bit_offset +=
21012 simple_type_size_in_bits (original_type);
21013 }
21014
21015 bit_offset
21016 = (! BYTES_BIG_ENDIAN
21017 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21018 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21019
21020 if (bit_offset < 0)
21021 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21022 else
21023 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21024 }
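
/* Worked example (a sketch, not normative): assume a little-endian target
   with 32-bit int and a layout where the first field occupies the least
   significant bits of its containing int, for

       struct s { int a : 3; int b : 5; };

   For `b', the containing object is the 4-byte int at byte offset 0, so
   highest_order_object_bit_offset becomes 0 + 32 and
   highest_order_field_bit_offset becomes 3 + 5, giving a DW_AT_bit_offset
   of 32 - 8 = 24, i.e. the distance from the MSB of the containing int to
   the MSB of the bit-field.  */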
21025
21026 /* For a FIELD_DECL node which represents a bit field, output an attribute
21027 which specifies the length in bits of the given field. */
21028
21029 static inline void
21030 add_bit_size_attribute (dw_die_ref die, tree decl)
21031 {
21032 /* Must be a field and a bit field. */
21033 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21034 && DECL_BIT_FIELD_TYPE (decl));
21035
21036 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21037 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21038 }
21039
21040 /* If the compiled language is a C dialect or Objective-C, add a 'prototyped'
21041 attribute when argument types are given for the parameters of a function. */
21042
21043 static inline void
21044 add_prototyped_attribute (dw_die_ref die, tree func_type)
21045 {
21046 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21047 {
21048 case DW_LANG_C:
21049 case DW_LANG_C89:
21050 case DW_LANG_C99:
21051 case DW_LANG_C11:
21052 case DW_LANG_ObjC:
21053 if (prototype_p (func_type))
21054 add_AT_flag (die, DW_AT_prototyped, 1);
21055 break;
21056 default:
21057 break;
21058 }
21059 }
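
/* Illustrative sketch: given C declarations such as

       int f (int);
       int g ();

   the subprogram DIE for `f' typically gets DW_AT_prototyped 1, while the
   DIE for the unprototyped `g' does not; the attribute is only added when
   the CU's DW_AT_language is one of the C dialects or Objective-C listed
   above.  */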
21060
21061 /* Add an 'abstract_origin' attribute below a given DIE. The origin DIE is
21062 found by looking in the type declaration, the object declaration equate
21063 table, or the block mapping. */
21064
21065 static inline dw_die_ref
21066 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21067 {
21068 dw_die_ref origin_die = NULL;
21069
21070 if (DECL_P (origin))
21071 {
21072 dw_die_ref c;
21073 origin_die = lookup_decl_die (origin);
21074 /* "Unwrap" the decls DIE which we put in the imported unit context.
21075 We are looking for the abstract copy here. */
21076 if (in_lto_p
21077 && origin_die
21078 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21079 /* ??? Identify this better. */
21080 && c->with_offset)
21081 origin_die = c;
21082 }
21083 else if (TYPE_P (origin))
21084 origin_die = lookup_type_die (origin);
21085 else if (TREE_CODE (origin) == BLOCK)
21086 origin_die = BLOCK_DIE (origin);
21087
21088 /* XXX: Functions that are never lowered don't always have correct block
21089 trees (in the case of Java, and in some other languages, they simply
21090 have no block tree at all). For these functions, there is nothing we can
21091 really do to output correct debug info for inlined functions in all
21092 cases. Rather than die, we'll just produce deficient debug info now, in that we will
21093 have variables without a proper abstract origin. In the future, when all
21094 functions are lowered, we should re-add a gcc_assert (origin_die)
21095 here. */
21096
21097 if (origin_die)
21098 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21099 return origin_die;
21100 }
21101
21102 /* We do not currently support the pure_virtual attribute. */
21103
21104 static inline void
21105 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21106 {
21107 if (DECL_VINDEX (func_decl))
21108 {
21109 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21110
21111 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21112 add_AT_loc (die, DW_AT_vtable_elem_location,
21113 new_loc_descr (DW_OP_constu,
21114 tree_to_shwi (DECL_VINDEX (func_decl)),
21115 0));
21116
21117 /* GNU extension: Record what type this method came from originally. */
21118 if (debug_info_level > DINFO_LEVEL_TERSE
21119 && DECL_CONTEXT (func_decl))
21120 add_AT_die_ref (die, DW_AT_containing_type,
21121 lookup_type_die (DECL_CONTEXT (func_decl)));
21122 }
21123 }
21124 \f
21125 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21126 given decl. This was a vendor extension until DWARF 4
21127 standardized it. */
21128
21129 static void
21130 add_linkage_attr (dw_die_ref die, tree decl)
21131 {
21132 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21133
21134 /* Mimic what assemble_name_raw does with a leading '*'. */
21135 if (name[0] == '*')
21136 name = &name[1];
21137
21138 if (dwarf_version >= 4)
21139 add_AT_string (die, DW_AT_linkage_name, name);
21140 else
21141 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21142 }
21143
21144 /* Add source coordinate attributes for the given decl. */
21145
21146 static void
21147 add_src_coords_attributes (dw_die_ref die, tree decl)
21148 {
21149 expanded_location s;
21150
21151 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21152 return;
21153 s = expand_location (DECL_SOURCE_LOCATION (decl));
21154 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21155 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21156 if (debug_column_info && s.column)
21157 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21158 }
21159
21160 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21161
21162 static void
21163 add_linkage_name_raw (dw_die_ref die, tree decl)
21164 {
21165 /* Defer until we have an assembler name set. */
21166 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21167 {
21168 limbo_die_node *asm_name;
21169
21170 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21171 asm_name->die = die;
21172 asm_name->created_for = decl;
21173 asm_name->next = deferred_asm_name;
21174 deferred_asm_name = asm_name;
21175 }
21176 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21177 add_linkage_attr (die, decl);
21178 }
21179
21180 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21181
21182 static void
21183 add_linkage_name (dw_die_ref die, tree decl)
21184 {
21185 if (debug_info_level > DINFO_LEVEL_NONE
21186 && VAR_OR_FUNCTION_DECL_P (decl)
21187 && TREE_PUBLIC (decl)
21188 && !(VAR_P (decl) && DECL_REGISTER (decl))
21189 && die->die_tag != DW_TAG_member)
21190 add_linkage_name_raw (die, decl);
21191 }
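
/* A hedged example: for a C++ function such as

       namespace n { void f (int) { } }

   the assembler name differs from DECL_NAME, so the subprogram DIE
   typically carries DW_AT_linkage_name "_ZN1n1fEi" (or
   DW_AT_MIPS_linkage_name before DWARF 4).  The exact mangled string
   depends on the target ABI; the Itanium C++ ABI name is shown here.  */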
21192
21193 /* Add a DW_AT_name attribute and source coordinate attribute for the
21194 given decl, but only if it actually has a name. */
21195
21196 static void
21197 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21198 bool no_linkage_name)
21199 {
21200 tree decl_name;
21201
21202 decl_name = DECL_NAME (decl);
21203 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21204 {
21205 const char *name = dwarf2_name (decl, 0);
21206 if (name)
21207 add_name_attribute (die, name);
21208 if (! DECL_ARTIFICIAL (decl))
21209 add_src_coords_attributes (die, decl);
21210
21211 if (!no_linkage_name)
21212 add_linkage_name (die, decl);
21213 }
21214
21215 #ifdef VMS_DEBUGGING_INFO
21216 /* Get the function's name, as described by its RTL. This may be different
21217 from the DECL_NAME name used in the source file. */
21218 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21219 {
21220 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21221 XEXP (DECL_RTL (decl), 0), false);
21222 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21223 }
21224 #endif /* VMS_DEBUGGING_INFO */
21225 }
21226
21227 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21228
21229 static void
21230 add_discr_value (dw_die_ref die, dw_discr_value *value)
21231 {
21232 dw_attr_node attr;
21233
21234 attr.dw_attr = DW_AT_discr_value;
21235 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21236 attr.dw_attr_val.val_entry = NULL;
21237 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21238 if (value->pos)
21239 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21240 else
21241 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21242 add_dwarf_attr (die, &attr);
21243 }
21244
21245 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21246
21247 static void
21248 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21249 {
21250 dw_attr_node attr;
21251
21252 attr.dw_attr = DW_AT_discr_list;
21253 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21254 attr.dw_attr_val.val_entry = NULL;
21255 attr.dw_attr_val.v.val_discr_list = discr_list;
21256 add_dwarf_attr (die, &attr);
21257 }
21258
21259 static inline dw_discr_list_ref
21260 AT_discr_list (dw_attr_node *attr)
21261 {
21262 return attr->dw_attr_val.v.val_discr_list;
21263 }
21264
21265 #ifdef VMS_DEBUGGING_INFO
21266 /* Output the debug main pointer DIE for VMS. */
21267
21268 void
21269 dwarf2out_vms_debug_main_pointer (void)
21270 {
21271 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21272 dw_die_ref die;
21273
21274 /* Allocate the VMS debug main subprogram die. */
21275 die = new_die_raw (DW_TAG_subprogram);
21276 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21277 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21278 current_function_funcdef_no);
21279 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21280
21281 /* Make it the first child of comp_unit_die (). */
21282 die->die_parent = comp_unit_die ();
21283 if (comp_unit_die ()->die_child)
21284 {
21285 die->die_sib = comp_unit_die ()->die_child->die_sib;
21286 comp_unit_die ()->die_child->die_sib = die;
21287 }
21288 else
21289 {
21290 die->die_sib = die;
21291 comp_unit_die ()->die_child = die;
21292 }
21293 }
21294 #endif /* VMS_DEBUGGING_INFO */
21295
21296 /* Push a new declaration scope. */
21297
21298 static void
21299 push_decl_scope (tree scope)
21300 {
21301 vec_safe_push (decl_scope_table, scope);
21302 }
21303
21304 /* Pop a declaration scope. */
21305
21306 static inline void
21307 pop_decl_scope (void)
21308 {
21309 decl_scope_table->pop ();
21310 }
21311
21312 /* walk_tree helper function for uses_local_type, below. */
21313
21314 static tree
21315 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21316 {
21317 if (!TYPE_P (*tp))
21318 *walk_subtrees = 0;
21319 else
21320 {
21321 tree name = TYPE_NAME (*tp);
21322 if (name && DECL_P (name) && decl_function_context (name))
21323 return *tp;
21324 }
21325 return NULL_TREE;
21326 }
21327
21328 /* If TYPE involves a function-local type (including a local typedef to a
21329 non-local type), returns that type; otherwise returns NULL_TREE. */
21330
21331 static tree
21332 uses_local_type (tree type)
21333 {
21334 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21335 return used;
21336 }
21337
21338 /* Return the DIE for the scope that immediately contains this type.
21339 Non-named types that do not involve a function-local type get global
21340 scope. Named types nested in namespaces or other types get their
21341 containing scope. All other types (i.e. function-local named types) get
21342 the current active scope. */
21343
21344 static dw_die_ref
21345 scope_die_for (tree t, dw_die_ref context_die)
21346 {
21347 dw_die_ref scope_die = NULL;
21348 tree containing_scope;
21349
21350 /* Non-types always go in the current scope. */
21351 gcc_assert (TYPE_P (t));
21352
21353 /* Use the scope of the typedef, rather than the scope of the type
21354 it refers to. */
21355 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21356 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21357 else
21358 containing_scope = TYPE_CONTEXT (t);
21359
21360 /* Use the containing namespace if there is one. */
21361 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21362 {
21363 if (context_die == lookup_decl_die (containing_scope))
21364 /* OK */;
21365 else if (debug_info_level > DINFO_LEVEL_TERSE)
21366 context_die = get_context_die (containing_scope);
21367 else
21368 containing_scope = NULL_TREE;
21369 }
21370
21371 /* Ignore function type "scopes" from the C frontend. They mean that
21372 a tagged type is local to a parmlist of a function declarator, but
21373 that isn't useful to DWARF. */
21374 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21375 containing_scope = NULL_TREE;
21376
21377 if (SCOPE_FILE_SCOPE_P (containing_scope))
21378 {
21379 /* If T uses a local type keep it local as well, to avoid references
21380 to function-local DIEs from outside the function. */
21381 if (current_function_decl && uses_local_type (t))
21382 scope_die = context_die;
21383 else
21384 scope_die = comp_unit_die ();
21385 }
21386 else if (TYPE_P (containing_scope))
21387 {
21388 /* For types, we can just look up the appropriate DIE. */
21389 if (debug_info_level > DINFO_LEVEL_TERSE)
21390 scope_die = get_context_die (containing_scope);
21391 else
21392 {
21393 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21394 if (scope_die == NULL)
21395 scope_die = comp_unit_die ();
21396 }
21397 }
21398 else
21399 scope_die = context_die;
21400
21401 return scope_die;
21402 }
21403
21404 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21405
21406 static inline int
21407 local_scope_p (dw_die_ref context_die)
21408 {
21409 for (; context_die; context_die = context_die->die_parent)
21410 if (context_die->die_tag == DW_TAG_inlined_subroutine
21411 || context_die->die_tag == DW_TAG_subprogram)
21412 return 1;
21413
21414 return 0;
21415 }
21416
21417 /* Returns nonzero if CONTEXT_DIE is a class. */
21418
21419 static inline int
21420 class_scope_p (dw_die_ref context_die)
21421 {
21422 return (context_die
21423 && (context_die->die_tag == DW_TAG_structure_type
21424 || context_die->die_tag == DW_TAG_class_type
21425 || context_die->die_tag == DW_TAG_interface_type
21426 || context_die->die_tag == DW_TAG_union_type));
21427 }
21428
21429 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21430 whether or not to treat a DIE in this context as a declaration. */
21431
21432 static inline int
21433 class_or_namespace_scope_p (dw_die_ref context_die)
21434 {
21435 return (class_scope_p (context_die)
21436 || (context_die && context_die->die_tag == DW_TAG_namespace));
21437 }
21438
21439 /* Many forms of DIEs require a "type description" attribute. This
21440 routine locates the proper "type descriptor" die for the type given
21441 by 'type' plus any additional qualifiers given by 'cv_quals', and
21442 adds a DW_AT_type attribute below the given die. */
21443
21444 static void
21445 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21446 bool reverse, dw_die_ref context_die)
21447 {
21448 enum tree_code code = TREE_CODE (type);
21449 dw_die_ref type_die = NULL;
21450
21451 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21452 or fixed-point type, use the inner type. This is because we have no
21453 support for unnamed types in base_type_die. This can happen if this is
21454 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21455 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21456 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21457 type = TREE_TYPE (type), code = TREE_CODE (type);
21458
21459 if (code == ERROR_MARK
21460 /* Handle a special case. For functions whose return type is void, we
21461 generate *no* type attribute. (Note that no object may have type
21462 `void', so this only applies to function return types). */
21463 || code == VOID_TYPE)
21464 return;
21465
21466 type_die = modified_type_die (type,
21467 cv_quals | TYPE_QUALS (type),
21468 reverse,
21469 context_die);
21470
21471 if (type_die != NULL)
21472 add_AT_die_ref (object_die, DW_AT_type, type_die);
21473 }
21474
21475 /* Given an object die, add the calling convention attribute for the
21476 function call type. */
21477 static void
21478 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21479 {
21480 enum dwarf_calling_convention value = DW_CC_normal;
21481
21482 value = ((enum dwarf_calling_convention)
21483 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21484
21485 if (is_fortran ()
21486 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21487 {
21488 /* DWARF 2 doesn't provide a way to identify a program's source-level
21489 entry point. DW_AT_calling_convention attributes are only meant
21490 to describe functions' calling conventions. However, lacking a
21491 better way to signal the Fortran main program, we used this for
21492 a long time, following existing custom. Now, DWARF 4 has
21493 DW_AT_main_subprogram, which we add below, but some tools still
21494 rely on the old way, which we thus keep. */
21495 value = DW_CC_program;
21496
21497 if (dwarf_version >= 4 || !dwarf_strict)
21498 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21499 }
21500
21501 /* Only add the attribute if the backend requests it and the
21502 value is not DW_CC_normal. */
21503 if (value && (value != DW_CC_normal))
21504 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21505 }
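
/* Illustrative sketch: for a Fortran main program such as

       program p
       end program p

   the front end names the function MAIN__, so the subprogram DIE gets
   DW_AT_calling_convention DW_CC_program and, for DWARF 4 or non-strict
   DWARF, DW_AT_main_subprogram as well.  */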
21506
21507 /* Given a tree pointer to a struct, class, union, or enum type node, return
21508 a pointer to the (string) tag name for the given type, or zero if the type
21509 was declared without a tag. */
21510
21511 static const char *
21512 type_tag (const_tree type)
21513 {
21514 const char *name = 0;
21515
21516 if (TYPE_NAME (type) != 0)
21517 {
21518 tree t = 0;
21519
21520 /* Find the IDENTIFIER_NODE for the type name. */
21521 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21522 && !TYPE_NAMELESS (type))
21523 t = TYPE_NAME (type);
21524
21525 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21526 a TYPE_DECL node, regardless of whether or not a `typedef' was
21527 involved. */
21528 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21529 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21530 {
21531 /* We want to be extra verbose. Don't call dwarf_name if
21532 DECL_NAME isn't set. The default hook for decl_printable_name
21533 doesn't like that, and in this context it's correct to return
21534 0, instead of "<anonymous>" or the like. */
21535 if (DECL_NAME (TYPE_NAME (type))
21536 && !DECL_NAMELESS (TYPE_NAME (type)))
21537 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21538 }
21539
21540 /* Now get the name as a string, or invent one. */
21541 if (!name && t != 0)
21542 name = IDENTIFIER_POINTER (t);
21543 }
21544
21545 return (name == 0 || *name == '\0') ? 0 : name;
21546 }
21547
21548 /* Return the type associated with a data member, making a special check
21549 for bit-field types. */
21550
21551 static inline tree
21552 member_declared_type (const_tree member)
21553 {
21554 return (DECL_BIT_FIELD_TYPE (member)
21555 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21556 }
21557
21558 /* Get the decl's label, as described by its RTL. This may be different
21559 from the DECL_NAME name used in the source file. */
21560
21561 #if 0
21562 static const char *
21563 decl_start_label (tree decl)
21564 {
21565 rtx x;
21566 const char *fnname;
21567
21568 x = DECL_RTL (decl);
21569 gcc_assert (MEM_P (x));
21570
21571 x = XEXP (x, 0);
21572 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21573
21574 fnname = XSTR (x, 0);
21575 return fnname;
21576 }
21577 #endif
21578 \f
21579 /* For variable-length arrays that have been previously generated, but
21580 may be incomplete due to missing subscript info, fill the subscript
21581 info. Return TRUE if this is one of those cases. */
21582 static bool
21583 fill_variable_array_bounds (tree type)
21584 {
21585 if (TREE_ASM_WRITTEN (type)
21586 && TREE_CODE (type) == ARRAY_TYPE
21587 && variably_modified_type_p (type, NULL))
21588 {
21589 dw_die_ref array_die = lookup_type_die (type);
21590 if (!array_die)
21591 return false;
21592 add_subscript_info (array_die, type, !is_ada ());
21593 return true;
21594 }
21595 return false;
21596 }
21597
21598 /* These routines generate the internal representation of the DIE's for
21599 the compilation unit. Debugging information is collected by walking
21600 the declaration trees passed in from dwarf2out_decl(). */
21601
21602 static void
21603 gen_array_type_die (tree type, dw_die_ref context_die)
21604 {
21605 dw_die_ref array_die;
21606
21607 /* GNU compilers represent multidimensional array types as sequences of one
21608 dimensional array types whose element types are themselves array types.
21609 We sometimes squish that down to a single array_type DIE with multiple
21610 subscripts in the Dwarf debugging info. The draft Dwarf specification
21611 says that we are allowed to do this kind of compression in C, because
21612 there is no difference between an array of arrays and a multidimensional
21613 array. We don't do this for Ada so as to remain as close as possible to
21614 the actual representation, which is especially important given the
21615 language's flexibility with respect to arrays of variable size. */
21616
21617 bool collapse_nested_arrays = !is_ada ();
21618
21619 if (fill_variable_array_bounds (type))
21620 return;
21621
21622 dw_die_ref scope_die = scope_die_for (type, context_die);
21623 tree element_type;
21624
21625 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21626 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21627 if (TYPE_STRING_FLAG (type)
21628 && TREE_CODE (type) == ARRAY_TYPE
21629 && is_fortran ()
21630 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21631 {
21632 HOST_WIDE_INT size;
21633
21634 array_die = new_die (DW_TAG_string_type, scope_die, type);
21635 add_name_attribute (array_die, type_tag (type));
21636 equate_type_number_to_die (type, array_die);
21637 size = int_size_in_bytes (type);
21638 if (size >= 0)
21639 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21640 /* ??? We can't annotate types late, but for LTO we may not
21641 generate a location early either (gfortran.dg/save_6.f90). */
21642 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21643 && TYPE_DOMAIN (type) != NULL_TREE
21644 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21645 {
21646 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21647 tree rszdecl = szdecl;
21648
21649 size = int_size_in_bytes (TREE_TYPE (szdecl));
21650 if (!DECL_P (szdecl))
21651 {
21652 if (TREE_CODE (szdecl) == INDIRECT_REF
21653 && DECL_P (TREE_OPERAND (szdecl, 0)))
21654 {
21655 rszdecl = TREE_OPERAND (szdecl, 0);
21656 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21657 != DWARF2_ADDR_SIZE)
21658 size = 0;
21659 }
21660 else
21661 size = 0;
21662 }
21663 if (size > 0)
21664 {
21665 dw_loc_list_ref loc
21666 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21667 NULL);
21668 if (loc)
21669 {
21670 add_AT_location_description (array_die, DW_AT_string_length,
21671 loc);
21672 if (size != DWARF2_ADDR_SIZE)
21673 add_AT_unsigned (array_die, dwarf_version >= 5
21674 ? DW_AT_string_length_byte_size
21675 : DW_AT_byte_size, size);
21676 }
21677 }
21678 }
21679 return;
21680 }
21681
21682 array_die = new_die (DW_TAG_array_type, scope_die, type);
21683 add_name_attribute (array_die, type_tag (type));
21684 equate_type_number_to_die (type, array_die);
21685
21686 if (TREE_CODE (type) == VECTOR_TYPE)
21687 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21688
21689 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21690 if (is_fortran ()
21691 && TREE_CODE (type) == ARRAY_TYPE
21692 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21693 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21694 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21695
21696 #if 0
21697 /* We default the array ordering. Debuggers will probably do the right
21698 things even if DW_AT_ordering is not present. It's not even an issue
21699 until we start to get into multidimensional arrays anyway. If a debugger
21700 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21701 then we'll have to put the DW_AT_ordering attribute back in. (But if
21702 and when we find out that we need to put these in, we will only do so
21703 for multidimensional arrays.) */
21704 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21705 #endif
21706
21707 if (TREE_CODE (type) == VECTOR_TYPE)
21708 {
21709 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21710 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21711 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21712 add_bound_info (subrange_die, DW_AT_upper_bound,
21713 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21714 }
21715 else
21716 add_subscript_info (array_die, type, collapse_nested_arrays);
21717
21718 /* Add representation of the type of the elements of this array type and
21719 emit the corresponding DIE if we haven't done it already. */
21720 element_type = TREE_TYPE (type);
21721 if (collapse_nested_arrays)
21722 while (TREE_CODE (element_type) == ARRAY_TYPE)
21723 {
21724 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21725 break;
21726 element_type = TREE_TYPE (element_type);
21727 }
21728
21729 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21730 TREE_CODE (type) == ARRAY_TYPE
21731 && TYPE_REVERSE_STORAGE_ORDER (type),
21732 context_die);
21733
21734 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21735 if (TYPE_ARTIFICIAL (type))
21736 add_AT_flag (array_die, DW_AT_artificial, 1);
21737
21738 if (get_AT (array_die, DW_AT_name))
21739 add_pubtype (type, array_die);
21740
21741 add_alignment_attribute (array_die, type);
21742 }
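
/* Illustrative sketch: a GNU C vector type such as

       typedef int v4si __attribute__ ((vector_size (16)));

   is typically described as a DW_TAG_array_type DIE carrying the
   DW_AT_GNU_vector flag and a single DW_TAG_subrange_type child whose
   bounds are 0 and TYPE_VECTOR_SUBPARTS - 1, i.e. 0 and 3 here.  */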
21743
21744 /* This routine generates a DIE for an array with a hidden descriptor;
21745 the details are filled into *info by a langhook. */
21746
21747 static void
21748 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21749 dw_die_ref context_die)
21750 {
21751 const dw_die_ref scope_die = scope_die_for (type, context_die);
21752 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21753 struct loc_descr_context context = { type, info->base_decl, NULL,
21754 false, false };
21755 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21756 int dim;
21757
21758 add_name_attribute (array_die, type_tag (type));
21759 equate_type_number_to_die (type, array_die);
21760
21761 if (info->ndimensions > 1)
21762 switch (info->ordering)
21763 {
21764 case array_descr_ordering_row_major:
21765 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21766 break;
21767 case array_descr_ordering_column_major:
21768 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21769 break;
21770 default:
21771 break;
21772 }
21773
21774 if (dwarf_version >= 3 || !dwarf_strict)
21775 {
21776 if (info->data_location)
21777 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21778 dw_scalar_form_exprloc, &context);
21779 if (info->associated)
21780 add_scalar_info (array_die, DW_AT_associated, info->associated,
21781 dw_scalar_form_constant
21782 | dw_scalar_form_exprloc
21783 | dw_scalar_form_reference, &context);
21784 if (info->allocated)
21785 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21786 dw_scalar_form_constant
21787 | dw_scalar_form_exprloc
21788 | dw_scalar_form_reference, &context);
21789 if (info->stride)
21790 {
21791 const enum dwarf_attribute attr
21792 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21793 const int forms
21794 = (info->stride_in_bits)
21795 ? dw_scalar_form_constant
21796 : (dw_scalar_form_constant
21797 | dw_scalar_form_exprloc
21798 | dw_scalar_form_reference);
21799
21800 add_scalar_info (array_die, attr, info->stride, forms, &context);
21801 }
21802 }
21803 if (dwarf_version >= 5)
21804 {
21805 if (info->rank)
21806 {
21807 add_scalar_info (array_die, DW_AT_rank, info->rank,
21808 dw_scalar_form_constant
21809 | dw_scalar_form_exprloc, &context);
21810 subrange_tag = DW_TAG_generic_subrange;
21811 context.placeholder_arg = true;
21812 }
21813 }
21814
21815 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21816
21817 for (dim = 0; dim < info->ndimensions; dim++)
21818 {
21819 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21820
21821 if (info->dimen[dim].bounds_type)
21822 add_type_attribute (subrange_die,
21823 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21824 false, context_die);
21825 if (info->dimen[dim].lower_bound)
21826 add_bound_info (subrange_die, DW_AT_lower_bound,
21827 info->dimen[dim].lower_bound, &context);
21828 if (info->dimen[dim].upper_bound)
21829 add_bound_info (subrange_die, DW_AT_upper_bound,
21830 info->dimen[dim].upper_bound, &context);
21831 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21832 add_scalar_info (subrange_die, DW_AT_byte_stride,
21833 info->dimen[dim].stride,
21834 dw_scalar_form_constant
21835 | dw_scalar_form_exprloc
21836 | dw_scalar_form_reference,
21837 &context);
21838 }
21839
21840 gen_type_die (info->element_type, context_die);
21841 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21842 TREE_CODE (type) == ARRAY_TYPE
21843 && TYPE_REVERSE_STORAGE_ORDER (type),
21844 context_die);
21845
21846 if (get_AT (array_die, DW_AT_name))
21847 add_pubtype (type, array_die);
21848
21849 add_alignment_attribute (array_die, type);
21850 }
21851
21852 #if 0
21853 static void
21854 gen_entry_point_die (tree decl, dw_die_ref context_die)
21855 {
21856 tree origin = decl_ultimate_origin (decl);
21857 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21858
21859 if (origin != NULL)
21860 add_abstract_origin_attribute (decl_die, origin);
21861 else
21862 {
21863 add_name_and_src_coords_attributes (decl_die, decl);
21864 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21865 TYPE_UNQUALIFIED, false, context_die);
21866 }
21867
21868 if (DECL_ABSTRACT_P (decl))
21869 equate_decl_number_to_die (decl, decl_die);
21870 else
21871 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21872 }
21873 #endif
21874
21875 /* Walk through the list of incomplete types again, trying once more to
21876 emit full debugging info for them. */
21877
21878 static void
21879 retry_incomplete_types (void)
21880 {
21881 set_early_dwarf s;
21882 int i;
21883
21884 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21885 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21886 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21887 vec_safe_truncate (incomplete_types, 0);
21888 }
21889
21890 /* Determine what tag to use for a record type. */
21891
21892 static enum dwarf_tag
21893 record_type_tag (tree type)
21894 {
21895 if (! lang_hooks.types.classify_record)
21896 return DW_TAG_structure_type;
21897
21898 switch (lang_hooks.types.classify_record (type))
21899 {
21900 case RECORD_IS_STRUCT:
21901 return DW_TAG_structure_type;
21902
21903 case RECORD_IS_CLASS:
21904 return DW_TAG_class_type;
21905
21906 case RECORD_IS_INTERFACE:
21907 if (dwarf_version >= 3 || !dwarf_strict)
21908 return DW_TAG_interface_type;
21909 return DW_TAG_structure_type;
21910
21911 default:
21912 gcc_unreachable ();
21913 }
21914 }
21915
21916 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21917 include all of the information about the enumeration values also. Each
21918 enumerated type name/value is listed as a child of the enumerated type
21919 DIE. */
21920
21921 static dw_die_ref
21922 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21923 {
21924 dw_die_ref type_die = lookup_type_die (type);
21925 dw_die_ref orig_type_die = type_die;
21926
21927 if (type_die == NULL)
21928 {
21929 type_die = new_die (DW_TAG_enumeration_type,
21930 scope_die_for (type, context_die), type);
21931 equate_type_number_to_die (type, type_die);
21932 add_name_attribute (type_die, type_tag (type));
21933 if ((dwarf_version >= 4 || !dwarf_strict)
21934 && ENUM_IS_SCOPED (type))
21935 add_AT_flag (type_die, DW_AT_enum_class, 1);
21936 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21937 add_AT_flag (type_die, DW_AT_declaration, 1);
21938 if (!dwarf_strict)
21939 add_AT_unsigned (type_die, DW_AT_encoding,
21940 TYPE_UNSIGNED (type)
21941 ? DW_ATE_unsigned
21942 : DW_ATE_signed);
21943 }
21944 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21945 return type_die;
21946 else
21947 remove_AT (type_die, DW_AT_declaration);
21948
21949 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21950 given enum type is incomplete, do not generate the DW_AT_byte_size
21951 attribute or the DW_AT_element_list attribute. */
21952 if (TYPE_SIZE (type))
21953 {
21954 tree link;
21955
21956 if (!ENUM_IS_OPAQUE (type))
21957 TREE_ASM_WRITTEN (type) = 1;
21958 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
21959 add_byte_size_attribute (type_die, type);
21960 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
21961 add_alignment_attribute (type_die, type);
21962 if ((dwarf_version >= 3 || !dwarf_strict)
21963 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
21964 {
21965 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21966 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21967 context_die);
21968 }
21969 if (TYPE_STUB_DECL (type) != NULL_TREE)
21970 {
21971 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
21972 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21973 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
21974 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21975 }
21976
21977 /* If the first reference to this type was as the return type of an
21978 inline function, then it may not have a parent. Fix this now. */
21979 if (type_die->die_parent == NULL)
21980 add_child_die (scope_die_for (type, context_die), type_die);
21981
21982 for (link = TYPE_VALUES (type);
21983 link != NULL; link = TREE_CHAIN (link))
21984 {
21985 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21986 tree value = TREE_VALUE (link);
21987
21988 gcc_assert (!ENUM_IS_OPAQUE (type));
21989 add_name_attribute (enum_die,
21990 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21991
21992 if (TREE_CODE (value) == CONST_DECL)
21993 value = DECL_INITIAL (value);
21994
21995 if (simple_type_size_in_bits (TREE_TYPE (value))
21996 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
21997 {
21998 /* For constant forms created by add_AT_unsigned, DWARF
21999 consumers (GDB, elfutils, etc.) always zero-extend
22000 the value. Only when the actual value is negative
22001 do we need to use add_AT_int to generate a constant
22002 form that can represent negative values. */
22003 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22004 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22005 add_AT_unsigned (enum_die, DW_AT_const_value,
22006 (unsigned HOST_WIDE_INT) val);
22007 else
22008 add_AT_int (enum_die, DW_AT_const_value, val);
22009 }
22010 else
22011 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22012 that here. TODO: This should be re-worked to use correct
22013 signed/unsigned double tags for all cases. */
22014 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22015 }
22016
22017 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22018 if (TYPE_ARTIFICIAL (type)
22019 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22020 add_AT_flag (type_die, DW_AT_artificial, 1);
22021 }
22022 else
22023 add_AT_flag (type_die, DW_AT_declaration, 1);
22024
22025 add_pubtype (type, type_die);
22026
22027 return type_die;
22028 }
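
/* Illustrative sketch: for a C enumeration such as

       enum e { RED = 1, BLUE = 2 };

   this typically produces a DW_TAG_enumeration_type DIE with a
   DW_AT_byte_size and two DW_TAG_enumerator children named "RED" and
   "BLUE" whose DW_AT_const_value attributes are 1 and 2.  */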
22029
22030 /* Generate a DIE to represent either a real live formal parameter decl or to
22031 represent just the type of some formal parameter position in some function
22032 type.
22033
22034 Note that this routine is a bit unusual because its argument may be a
22035 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22036 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22037 node. If it's the former then this function is being called to output a
22038 DIE to represent a formal parameter object (or some inlining thereof). If
22039 it's the latter, then this function is only being called to output a
22040 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22041 argument type of some subprogram type.
22042 If EMIT_NAME_P is true, name and source coordinate attributes
22043 are emitted. */
22044
22045 static dw_die_ref
22046 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22047 dw_die_ref context_die)
22048 {
22049 tree node_or_origin = node ? node : origin;
22050 tree ultimate_origin;
22051 dw_die_ref parm_die = NULL;
22052
22053 if (DECL_P (node_or_origin))
22054 {
22055 parm_die = lookup_decl_die (node);
22056
22057 /* If the contexts differ, we may not be talking about the same
22058 thing.
22059 ??? When in LTO the DIE parent is the "abstract" copy and the
22060 context_die is the specification "copy". But this whole block
22061 should eventually no longer be needed. */
22062 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22063 {
22064 if (!DECL_ABSTRACT_P (node))
22065 {
22066 /* This can happen when creating an inlined instance, in
22067 which case we need to create a new DIE that will get
22068 annotated with DW_AT_abstract_origin. */
22069 parm_die = NULL;
22070 }
22071 else
22072 gcc_unreachable ();
22073 }
22074
22075 if (parm_die && parm_die->die_parent == NULL)
22076 {
22077 /* Check that parm_die already has the right attributes that
22078 we would have added below. If any attributes are
22079 missing, fall through to add them. */
22080 if (! DECL_ABSTRACT_P (node_or_origin)
22081 && !get_AT (parm_die, DW_AT_location)
22082 && !get_AT (parm_die, DW_AT_const_value))
22083 /* We are missing location info, and are about to add it. */
22084 ;
22085 else
22086 {
22087 add_child_die (context_die, parm_die);
22088 return parm_die;
22089 }
22090 }
22091 }
22092
22093 /* If we have a previously generated DIE, use it, unless this is a
22094 concrete instance (origin != NULL), in which case we need a new
22095 DIE with a corresponding DW_AT_abstract_origin. */
22096 bool reusing_die;
22097 if (parm_die && origin == NULL)
22098 reusing_die = true;
22099 else
22100 {
22101 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22102 reusing_die = false;
22103 }
22104
22105 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22106 {
22107 case tcc_declaration:
22108 ultimate_origin = decl_ultimate_origin (node_or_origin);
22109 if (node || ultimate_origin)
22110 origin = ultimate_origin;
22111
22112 if (reusing_die)
22113 goto add_location;
22114
22115 if (origin != NULL)
22116 add_abstract_origin_attribute (parm_die, origin);
22117 else if (emit_name_p)
22118 add_name_and_src_coords_attributes (parm_die, node);
22119 if (origin == NULL
22120 || (! DECL_ABSTRACT_P (node_or_origin)
22121 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22122 decl_function_context
22123 (node_or_origin))))
22124 {
22125 tree type = TREE_TYPE (node_or_origin);
22126 if (decl_by_reference_p (node_or_origin))
22127 add_type_attribute (parm_die, TREE_TYPE (type),
22128 TYPE_UNQUALIFIED,
22129 false, context_die);
22130 else
22131 add_type_attribute (parm_die, type,
22132 decl_quals (node_or_origin),
22133 false, context_die);
22134 }
22135 if (origin == NULL && DECL_ARTIFICIAL (node))
22136 add_AT_flag (parm_die, DW_AT_artificial, 1);
22137 add_location:
22138 if (node && node != origin)
22139 equate_decl_number_to_die (node, parm_die);
22140 if (! DECL_ABSTRACT_P (node_or_origin))
22141 add_location_or_const_value_attribute (parm_die, node_or_origin,
22142 node == NULL);
22143
22144 break;
22145
22146 case tcc_type:
22147 /* We were called with some kind of a ..._TYPE node. */
22148 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22149 context_die);
22150 break;
22151
22152 default:
22153 gcc_unreachable ();
22154 }
22155
22156 return parm_die;
22157 }
22158
22159 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22160 children DW_TAG_formal_parameter DIEs representing the arguments of the
22161 parameter pack.
22162
22163 PARM_PACK must be a function parameter pack.
22164 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22165 must point to the subsequent arguments of the function PACK_ARG belongs to.
22166 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22167 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22168 following the last one for which a DIE was generated. */
22169
22170 static dw_die_ref
22171 gen_formal_parameter_pack_die (tree parm_pack,
22172 tree pack_arg,
22173 dw_die_ref subr_die,
22174 tree *next_arg)
22175 {
22176 tree arg;
22177 dw_die_ref parm_pack_die;
22178
22179 gcc_assert (parm_pack
22180 && lang_hooks.function_parameter_pack_p (parm_pack)
22181 && subr_die);
22182
22183 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22184 add_src_coords_attributes (parm_pack_die, parm_pack);
22185
22186 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22187 {
22188 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22189 parm_pack))
22190 break;
22191 gen_formal_parameter_die (arg, NULL,
22192 false /* Don't emit name attribute. */,
22193 parm_pack_die);
22194 }
22195 if (next_arg)
22196 *next_arg = arg;
22197 return parm_pack_die;
22198 }
22199
22200 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22201 at the end of an (ANSI prototyped) formal parameter list. */
22202
22203 static void
22204 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22205 {
22206 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22207 }
22208
22209 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22210 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22211 parameters as specified in some function type specification (except for
22212 those which appear as part of a function *definition*). */
22213
22214 static void
22215 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22216 {
22217 tree link;
22218 tree formal_type = NULL;
22219 tree first_parm_type;
22220 tree arg;
22221
22222 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22223 {
22224 arg = DECL_ARGUMENTS (function_or_method_type);
22225 function_or_method_type = TREE_TYPE (function_or_method_type);
22226 }
22227 else
22228 arg = NULL_TREE;
22229
22230 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22231
22232 /* Make our first pass over the list of formal parameter types and output a
22233 DW_TAG_formal_parameter DIE for each one. */
22234 for (link = first_parm_type; link; )
22235 {
22236 dw_die_ref parm_die;
22237
22238 formal_type = TREE_VALUE (link);
22239 if (formal_type == void_type_node)
22240 break;
22241
22242 /* Output a (nameless) DIE to represent the formal parameter itself. */
22243 if (!POINTER_BOUNDS_TYPE_P (formal_type))
22244 {
22245 parm_die = gen_formal_parameter_die (formal_type, NULL,
22246 true /* Emit name attribute. */,
22247 context_die);
22248 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22249 && link == first_parm_type)
22250 {
22251 add_AT_flag (parm_die, DW_AT_artificial, 1);
22252 if (dwarf_version >= 3 || !dwarf_strict)
22253 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22254 }
22255 else if (arg && DECL_ARTIFICIAL (arg))
22256 add_AT_flag (parm_die, DW_AT_artificial, 1);
22257 }
22258
22259 link = TREE_CHAIN (link);
22260 if (arg)
22261 arg = DECL_CHAIN (arg);
22262 }
22263
22264 /* If this function type has an ellipsis, add a
22265 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22266 if (formal_type != void_type_node)
22267 gen_unspecified_parameters_die (function_or_method_type, context_die);
22268
22269 /* Make our second (and final) pass over the list of formal parameter types
22270 and output DIEs to represent those types (as necessary). */
22271 for (link = TYPE_ARG_TYPES (function_or_method_type);
22272 link && TREE_VALUE (link);
22273 link = TREE_CHAIN (link))
22274 gen_type_die (TREE_VALUE (link), context_die);
22275 }
22276 }
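
/* Illustrative sketch: for a function type such as

       int (*fp) (int, ...);

   the first pass emits one nameless DW_TAG_formal_parameter DIE for the
   int argument, and because the argument list ends with an ellipsis
   rather than void_type_node, a DW_TAG_unspecified_parameters DIE is
   appended.  */
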
22277 /* We want to generate the DIE for TYPE so that we can generate the
22278 die for MEMBER, which has been defined; we will need to refer back
22279 to the member declaration nested within TYPE. If we're trying to
22280 generate minimal debug info for TYPE, processing TYPE won't do the
22281 trick; we need to attach the member declaration by hand. */
22282
22283 static void
22284 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22285 {
22286 gen_type_die (type, context_die);
22287
22288 /* If we're trying to avoid duplicate debug info, we may not have
22289 emitted the member decl for this function. Emit it now. */
22290 if (TYPE_STUB_DECL (type)
22291 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22292 && ! lookup_decl_die (member))
22293 {
22294 dw_die_ref type_die;
22295 gcc_assert (!decl_ultimate_origin (member));
22296
22297 push_decl_scope (type);
22298 type_die = lookup_type_die_strip_naming_typedef (type);
22299 if (TREE_CODE (member) == FUNCTION_DECL)
22300 gen_subprogram_die (member, type_die);
22301 else if (TREE_CODE (member) == FIELD_DECL)
22302 {
22303 /* Ignore the nameless fields that are used to skip bits but handle
22304 C++ anonymous unions and structs. */
22305 if (DECL_NAME (member) != NULL_TREE
22306 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22307 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22308 {
22309 struct vlr_context vlr_ctx = {
22310 DECL_CONTEXT (member), /* struct_type */
22311 NULL_TREE /* variant_part_offset */
22312 };
22313 gen_type_die (member_declared_type (member), type_die);
22314 gen_field_die (member, &vlr_ctx, type_die);
22315 }
22316 }
22317 else
22318 gen_variable_die (member, NULL_TREE, type_die);
22319
22320 pop_decl_scope ();
22321 }
22322 }
22323 \f
22324 /* Forward declare these functions, because they are mutually recursive
22325 with their set_block_* pairing functions. */
22326 static void set_decl_origin_self (tree);
22327
22328 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22329 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22330 that it points to the node itself, thus indicating that the node is its
22331 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22332 the given node is NULL, recursively descend the decl/block tree which
22333 it is the root of, and for each other ..._DECL or BLOCK node contained
22334 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22335 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22336 values to point to themselves. */
22337
22338 static void
22339 set_block_origin_self (tree stmt)
22340 {
22341 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22342 {
22343 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22344
22345 {
22346 tree local_decl;
22347
22348 for (local_decl = BLOCK_VARS (stmt);
22349 local_decl != NULL_TREE;
22350 local_decl = DECL_CHAIN (local_decl))
22351 /* Do not recurse on nested functions since the inlining status
22352 of parent and child can be different as per the DWARF spec. */
22353 if (TREE_CODE (local_decl) != FUNCTION_DECL
22354 && !DECL_EXTERNAL (local_decl))
22355 set_decl_origin_self (local_decl);
22356 }
22357
22358 {
22359 tree subblock;
22360
22361 for (subblock = BLOCK_SUBBLOCKS (stmt);
22362 subblock != NULL_TREE;
22363 subblock = BLOCK_CHAIN (subblock))
22364 set_block_origin_self (subblock); /* Recurse. */
22365 }
22366 }
22367 }
22368
22369 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22370 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22371 node so that it points to the node itself, thus indicating that the
22372 node represents its own (abstract) origin. Additionally, if the
22373 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22374 the decl/block tree of which the given node is the root, and for
22375 each other ..._DECL or BLOCK node contained therein whose
22376 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22377 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22378 point to themselves. */
22379
22380 static void
22381 set_decl_origin_self (tree decl)
22382 {
22383 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22384 {
22385 DECL_ABSTRACT_ORIGIN (decl) = decl;
22386 if (TREE_CODE (decl) == FUNCTION_DECL)
22387 {
22388 tree arg;
22389
22390 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22391 DECL_ABSTRACT_ORIGIN (arg) = arg;
22392 if (DECL_INITIAL (decl) != NULL_TREE
22393 && DECL_INITIAL (decl) != error_mark_node)
22394 set_block_origin_self (DECL_INITIAL (decl));
22395 }
22396 }
22397 }
22398 \f
22399 /* Mark the early DIE for DECL as the abstract instance. */
22400
22401 static void
22402 dwarf2out_abstract_function (tree decl)
22403 {
22404 dw_die_ref old_die;
22405
22406 /* Make sure we have the actual abstract inline, not a clone. */
22407 decl = DECL_ORIGIN (decl);
22408
22409 if (DECL_IGNORED_P (decl))
22410 return;
22411
22412 old_die = lookup_decl_die (decl);
22413 /* With early debug we always have an old DIE unless we are in LTO
22414 and the user did not compile with debug info but only linked with it. */
22415 if (in_lto_p && ! old_die)
22416 return;
22417 gcc_assert (old_die != NULL);
22418 if (get_AT (old_die, DW_AT_inline)
22419 || get_AT (old_die, DW_AT_abstract_origin))
22420 /* We've already generated the abstract instance. */
22421 return;
22422
22423 /* Go ahead and put DW_AT_inline on the DIE. */
22424 if (DECL_DECLARED_INLINE_P (decl))
22425 {
22426 if (cgraph_function_possibly_inlined_p (decl))
22427 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22428 else
22429 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22430 }
22431 else
22432 {
22433 if (cgraph_function_possibly_inlined_p (decl))
22434 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22435 else
22436 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22437 }
22438
22439 if (DECL_DECLARED_INLINE_P (decl)
22440 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22441 add_AT_flag (old_die, DW_AT_artificial, 1);
22442
22443 set_decl_origin_self (decl);
22444 }
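/* Rough illustration, assuming a C++ translation unit along the lines of

     inline int twice (int x) { return 2 * x; }
     int user (int x) { return twice (x); }

   If the inliner actually inlines the call, the abstract DIE for `twice'
   gets DW_AT_inline = DW_INL_declared_inlined above; if it never does,
   DW_INL_declared_not_inlined is used, and functions not declared inline
   get the DW_INL_inlined / DW_INL_not_inlined pair instead.  */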
22445
22446 /* Helper function of premark_used_types() which gets called through
22447    hash_set::traverse.
22448
22449    Marks the DIE of the given TYPE as perennial, so it never gets
22450 marked as unused by prune_unused_types. */
22451
22452 bool
22453 premark_used_types_helper (tree const &type, void *)
22454 {
22455 dw_die_ref die;
22456
22457 die = lookup_type_die (type);
22458 if (die != NULL)
22459 die->die_perennial_p = 1;
22460 return true;
22461 }
22462
22463 /* Helper function of premark_types_used_by_global_vars which gets called
22464    through hash_table::traverse.
22465
22466 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22467 marked as unused by prune_unused_types. The DIE of the type is marked
22468 only if the global variable using the type will actually be emitted. */
22469
22470 int
22471 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22472 void *)
22473 {
22474 struct types_used_by_vars_entry *entry;
22475 dw_die_ref die;
22476
22477 entry = (struct types_used_by_vars_entry *) *slot;
22478 gcc_assert (entry->type != NULL
22479 && entry->var_decl != NULL);
22480 die = lookup_type_die (entry->type);
22481 if (die)
22482 {
22483 /* Ask cgraph if the global variable really is to be emitted.
22484 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22485 varpool_node *node = varpool_node::get (entry->var_decl);
22486 if (node && node->definition)
22487 {
22488 die->die_perennial_p = 1;
22489 /* Keep the parent DIEs as well. */
22490 while ((die = die->die_parent) && die->die_perennial_p == 0)
22491 die->die_perennial_p = 1;
22492 }
22493 }
22494 return 1;
22495 }
22496
22497 /* Mark all members of used_types_hash as perennial. */
22498
22499 static void
22500 premark_used_types (struct function *fun)
22501 {
22502 if (fun && fun->used_types_hash)
22503 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22504 }
22505
22506 /* Mark all members of types_used_by_vars_entry as perennial. */
22507
22508 static void
22509 premark_types_used_by_global_vars (void)
22510 {
22511 if (types_used_by_vars_hash)
22512 types_used_by_vars_hash
22513 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22514 }
22515
22516 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22517 for CA_LOC call arg loc node. */
22518
22519 static dw_die_ref
22520 gen_call_site_die (tree decl, dw_die_ref subr_die,
22521 struct call_arg_loc_node *ca_loc)
22522 {
22523 dw_die_ref stmt_die = NULL, die;
22524 tree block = ca_loc->block;
22525
22526 while (block
22527 && block != DECL_INITIAL (decl)
22528 && TREE_CODE (block) == BLOCK)
22529 {
22530 stmt_die = BLOCK_DIE (block);
22531 if (stmt_die)
22532 break;
22533 block = BLOCK_SUPERCONTEXT (block);
22534 }
22535 if (stmt_die == NULL)
22536 stmt_die = subr_die;
22537 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22538 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22539 if (ca_loc->tail_call_p)
22540 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22541 if (ca_loc->symbol_ref)
22542 {
22543 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22544 if (tdie)
22545 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22546 else
22547 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22548 false);
22549 }
22550 return die;
22551 }
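/* Sketch of the output, assuming a direct call whose target is known:

     extern int callee (int);
     int caller (int x) { return callee (x); }

   Under `caller's DW_TAG_subprogram this yields roughly

     DW_TAG_call_site
       DW_AT_call_return_pc   label just past the call
       DW_AT_call_origin      reference to `callee's DIE

   plus DW_AT_call_tail_call for tail calls; with strict pre-DWARF 5 output
   dwarf_TAG and dwarf_AT substitute the older GNU extension equivalents.  */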
22552
22553 /* Generate a DIE to represent a declared function (either file-scope or
22554 block-local). */
22555
22556 static void
22557 gen_subprogram_die (tree decl, dw_die_ref context_die)
22558 {
22559 tree origin = decl_ultimate_origin (decl);
22560 dw_die_ref subr_die;
22561 dw_die_ref old_die = lookup_decl_die (decl);
22562
22563 /* This function gets called multiple times for different stages of
22564 the debug process. For example, for func() in this code:
22565
22566 namespace S
22567 {
22568 void func() { ... }
22569 }
22570
22571 ...we get called 4 times. Twice in early debug and twice in
22572 late debug:
22573
22574 Early debug
22575 -----------
22576
22577 1. Once while generating func() within the namespace. This is
22578 the declaration. The declaration bit below is set, as the
22579 context is the namespace.
22580
22581 A new DIE will be generated with DW_AT_declaration set.
22582
22583 2. Once for func() itself. This is the specification. The
22584 declaration bit below is clear as the context is the CU.
22585
22586 We will use the cached DIE from (1) to create a new DIE with
22587 DW_AT_specification pointing to the declaration in (1).
22588
22589 Late debug via rest_of_handle_final()
22590 -------------------------------------
22591
22592      3. Once while generating func() within the namespace. This is also the
22593 declaration, as in (1), but this time we will early exit below
22594 as we have a cached DIE and a declaration needs no additional
22595 annotations (no locations), as the source declaration line
22596 info is enough.
22597
22598 4. Once for func() itself. As in (2), this is the specification,
22599 but this time we will re-use the cached DIE, and just annotate
22600 it with the location information that should now be available.
22601
22602 For something without namespaces, but with abstract instances, we
22603      are also called multiple times:
22604
22605 class Base
22606 {
22607 public:
22608 Base (); // constructor declaration (1)
22609 };
22610
22611 Base::Base () { } // constructor specification (2)
22612
22613 Early debug
22614 -----------
22615
22616 1. Once for the Base() constructor by virtue of it being a
22617 member of the Base class. This is done via
22618 rest_of_type_compilation.
22619
22620 This is a declaration, so a new DIE will be created with
22621 DW_AT_declaration.
22622
22623 2. Once for the Base() constructor definition, but this time
22624 while generating the abstract instance of the base
22625 constructor (__base_ctor) which is being generated via early
22626 debug of reachable functions.
22627
22628 Even though we have a cached version of the declaration (1),
22629 we will create a DW_AT_specification of the declaration DIE
22630 in (1).
22631
22632 3. Once for the __base_ctor itself, but this time, we generate
22633      a DW_AT_abstract_origin version of the DW_AT_specification in
22634 (2).
22635
22636 Late debug via rest_of_handle_final
22637 -----------------------------------
22638
22639 4. One final time for the __base_ctor (which will have a cached
22640      DIE with DW_AT_abstract_origin created in (3)). This time,
22641 we will just annotate the location information now
22642 available.
22643 */
22644 int declaration = (current_function_decl != decl
22645 || class_or_namespace_scope_p (context_die));
22646
22647 /* A declaration that has been previously dumped needs no
22648 additional information. */
22649 if (old_die && declaration)
22650 return;
22651
22652 /* Now that the C++ front end lazily declares artificial member fns, we
22653 might need to retrofit the declaration into its class. */
22654 if (!declaration && !origin && !old_die
22655 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22656 && !class_or_namespace_scope_p (context_die)
22657 && debug_info_level > DINFO_LEVEL_TERSE)
22658 old_die = force_decl_die (decl);
22659
22660 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22661 if (origin != NULL)
22662 {
22663 gcc_assert (!declaration || local_scope_p (context_die));
22664
22665 /* Fixup die_parent for the abstract instance of a nested
22666 inline function. */
22667 if (old_die && old_die->die_parent == NULL)
22668 add_child_die (context_die, old_die);
22669
22670 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22671 {
22672 /* If we have a DW_AT_abstract_origin we have a working
22673 cached version. */
22674 subr_die = old_die;
22675 }
22676 else
22677 {
22678 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22679 add_abstract_origin_attribute (subr_die, origin);
22680 /* This is where the actual code for a cloned function is.
22681        Let's emit the linkage name attribute for it. This helps
22682        debuggers to, e.g., set breakpoints in
22683 constructors/destructors when the user asks "break
22684 K::K". */
22685 add_linkage_name (subr_die, decl);
22686 }
22687 }
22688 /* A cached copy, possibly from early dwarf generation. Reuse as
22689 much as possible. */
22690 else if (old_die)
22691 {
22692 if (!get_AT_flag (old_die, DW_AT_declaration)
22693 /* We can have a normal definition following an inline one in the
22694 case of redefinition of GNU C extern inlines.
22695 It seems reasonable to use AT_specification in this case. */
22696 && !get_AT (old_die, DW_AT_inline))
22697 {
22698 /* Detect and ignore this case, where we are trying to output
22699 something we have already output. */
22700 if (get_AT (old_die, DW_AT_low_pc)
22701 || get_AT (old_die, DW_AT_ranges))
22702 return;
22703
22704 /* If we have no location information, this must be a
22705 partially generated DIE from early dwarf generation.
22706 Fall through and generate it. */
22707 }
22708
22709 /* If the definition comes from the same place as the declaration,
22710 maybe use the old DIE. We always want the DIE for this function
22711 that has the *_pc attributes to be under comp_unit_die so the
22712 debugger can find it. We also need to do this for abstract
22713 instances of inlines, since the spec requires the out-of-line copy
22714 to have the same parent. For local class methods, this doesn't
22715 apply; we just use the old DIE. */
22716 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22717 struct dwarf_file_data * file_index = lookup_filename (s.file);
22718 if ((is_cu_die (old_die->die_parent)
22719 /* This condition fixes the inconsistency/ICE with the
22720 following Fortran test (or some derivative thereof) while
22721 building libgfortran:
22722
22723 module some_m
22724 contains
22725 logical function funky (FLAG)
22726 funky = .true.
22727 end function
22728 end module
22729 */
22730 || (old_die->die_parent
22731 && old_die->die_parent->die_tag == DW_TAG_module)
22732 || context_die == NULL)
22733 && (DECL_ARTIFICIAL (decl)
22734 /* The location attributes may be in the abstract origin
22735 which in the case of LTO might be not available to
22736 look at. */
22737 || get_AT (old_die, DW_AT_abstract_origin)
22738 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22739 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22740 == (unsigned) s.line)
22741 && (!debug_column_info
22742 || s.column == 0
22743 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22744 == (unsigned) s.column)))))
22745 {
22746 subr_die = old_die;
22747
22748 /* Clear out the declaration attribute, but leave the
22749 parameters so they can be augmented with location
22750 information later. Unless this was a declaration, in
22751 which case, wipe out the nameless parameters and recreate
22752 them further down. */
22753 if (remove_AT (subr_die, DW_AT_declaration))
22754 {
22755
22756 remove_AT (subr_die, DW_AT_object_pointer);
22757 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22758 }
22759 }
22760 /* Make a specification pointing to the previously built
22761 declaration. */
22762 else
22763 {
22764 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22765 add_AT_specification (subr_die, old_die);
22766 add_pubname (decl, subr_die);
22767 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22768 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22769 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22770 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22771 if (debug_column_info
22772 && s.column
22773 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22774 != (unsigned) s.column))
22775 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22776
22777 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22778 emit the real type on the definition die. */
22779 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22780 {
22781 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22782 if (die == auto_die || die == decltype_auto_die)
22783 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22784 TYPE_UNQUALIFIED, false, context_die);
22785 }
22786
22787 /* When we process the method declaration, we haven't seen
22788 the out-of-class defaulted definition yet, so we have to
22789 recheck now. */
22790 if ((dwarf_version >= 5 || ! dwarf_strict)
22791 && !get_AT (subr_die, DW_AT_defaulted))
22792 {
22793 int defaulted
22794 = lang_hooks.decls.decl_dwarf_attribute (decl,
22795 DW_AT_defaulted);
22796 if (defaulted != -1)
22797 {
22798 /* Other values must have been handled before. */
22799 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22800 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22801 }
22802 }
22803 }
22804 }
22805 /* Create a fresh DIE for anything else. */
22806 else
22807 {
22808 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22809
22810 if (TREE_PUBLIC (decl))
22811 add_AT_flag (subr_die, DW_AT_external, 1);
22812
22813 add_name_and_src_coords_attributes (subr_die, decl);
22814 add_pubname (decl, subr_die);
22815 if (debug_info_level > DINFO_LEVEL_TERSE)
22816 {
22817 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22818 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22819 TYPE_UNQUALIFIED, false, context_die);
22820 }
22821
22822 add_pure_or_virtual_attribute (subr_die, decl);
22823 if (DECL_ARTIFICIAL (decl))
22824 add_AT_flag (subr_die, DW_AT_artificial, 1);
22825
22826 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22827 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22828
22829 add_alignment_attribute (subr_die, decl);
22830
22831 add_accessibility_attribute (subr_die, decl);
22832 }
22833
22834 /* Unless we have an existing non-declaration DIE, equate the new
22835 DIE. */
22836 if (!old_die || is_declaration_die (old_die))
22837 equate_decl_number_to_die (decl, subr_die);
22838
22839 if (declaration)
22840 {
22841 if (!old_die || !get_AT (old_die, DW_AT_inline))
22842 {
22843 add_AT_flag (subr_die, DW_AT_declaration, 1);
22844
22845 /* If this is an explicit function declaration then generate
22846 a DW_AT_explicit attribute. */
22847 if ((dwarf_version >= 3 || !dwarf_strict)
22848 && lang_hooks.decls.decl_dwarf_attribute (decl,
22849 DW_AT_explicit) == 1)
22850 add_AT_flag (subr_die, DW_AT_explicit, 1);
22851
22852 /* If this is a C++11 deleted special function member then generate
22853 a DW_AT_deleted attribute. */
22854 if ((dwarf_version >= 5 || !dwarf_strict)
22855 && lang_hooks.decls.decl_dwarf_attribute (decl,
22856 DW_AT_deleted) == 1)
22857 add_AT_flag (subr_die, DW_AT_deleted, 1);
22858
22859 /* If this is a C++11 defaulted special function member then
22860 generate a DW_AT_defaulted attribute. */
22861 if (dwarf_version >= 5 || !dwarf_strict)
22862 {
22863 int defaulted
22864 = lang_hooks.decls.decl_dwarf_attribute (decl,
22865 DW_AT_defaulted);
22866 if (defaulted != -1)
22867 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22868 }
22869
22870 /* If this is a C++11 non-static member function with & ref-qualifier
22871 then generate a DW_AT_reference attribute. */
22872 if ((dwarf_version >= 5 || !dwarf_strict)
22873 && lang_hooks.decls.decl_dwarf_attribute (decl,
22874 DW_AT_reference) == 1)
22875 add_AT_flag (subr_die, DW_AT_reference, 1);
22876
22877 /* If this is a C++11 non-static member function with &&
22878 ref-qualifier then generate a DW_AT_reference attribute. */
22879 if ((dwarf_version >= 5 || !dwarf_strict)
22880 && lang_hooks.decls.decl_dwarf_attribute (decl,
22881 DW_AT_rvalue_reference)
22882 == 1)
22883 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22884 }
22885 }
22886 /* For non DECL_EXTERNALs, if range information is available, fill
22887 the DIE with it. */
22888 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22889 {
22890 HOST_WIDE_INT cfa_fb_offset;
22891
22892 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22893
22894 if (!crtl->has_bb_partition)
22895 {
22896 dw_fde_ref fde = fun->fde;
22897 if (fde->dw_fde_begin)
22898 {
22899 /* We have already generated the labels. */
22900 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22901 fde->dw_fde_end, false);
22902 }
22903 else
22904 {
22905 /* Create start/end labels and add the range. */
22906 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22907 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22908 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22909 current_function_funcdef_no);
22910 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22911 current_function_funcdef_no);
22912 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22913 false);
22914 }
22915
22916 #if VMS_DEBUGGING_INFO
22917 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22918 Section 2.3 Prologue and Epilogue Attributes:
22919 When a breakpoint is set on entry to a function, it is generally
22920 desirable for execution to be suspended, not on the very first
22921 instruction of the function, but rather at a point after the
22922 function's frame has been set up, after any language defined local
22923 declaration processing has been completed, and before execution of
22924 the first statement of the function begins. Debuggers generally
22925 cannot properly determine where this point is. Similarly for a
22926 breakpoint set on exit from a function. The prologue and epilogue
22927 attributes allow a compiler to communicate the location(s) to use. */
22928
22929 {
22930 if (fde->dw_fde_vms_end_prologue)
22931 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22932 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22933
22934 if (fde->dw_fde_vms_begin_epilogue)
22935 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22936 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22937 }
22938 #endif
22939
22940 }
22941 else
22942 {
22943 /* Generate pubnames entries for the split function code ranges. */
22944 dw_fde_ref fde = fun->fde;
22945
22946 if (fde->dw_fde_second_begin)
22947 {
22948 if (dwarf_version >= 3 || !dwarf_strict)
22949 {
22950 /* We should use ranges for non-contiguous code section
22951 addresses. Use the actual code range for the initial
22952 section, since the HOT/COLD labels might precede an
22953 alignment offset. */
22954 bool range_list_added = false;
22955 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22956 fde->dw_fde_end, &range_list_added,
22957 false);
22958 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22959 fde->dw_fde_second_end,
22960 &range_list_added, false);
22961 if (range_list_added)
22962 add_ranges (NULL);
22963 }
22964 else
22965 {
22966          /* There is no real support in DW2 for this ... so we make
22967         a work-around. First, emit the pub name for the segment
22968         containing the function label. Then make and emit a
22969         simplified subprogram DIE for the second segment with the
22970         name prefixed by __second_sect_of_. We use the same
22971         linkage name for the second DIE so that gdb will find both
22972         sections when given "b foo". */
22973 const char *name = NULL;
22974 tree decl_name = DECL_NAME (decl);
22975 dw_die_ref seg_die;
22976
22977 /* Do the 'primary' section. */
22978 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22979 fde->dw_fde_end, false);
22980
22981 /* Build a minimal DIE for the secondary section. */
22982 seg_die = new_die (DW_TAG_subprogram,
22983 subr_die->die_parent, decl);
22984
22985 if (TREE_PUBLIC (decl))
22986 add_AT_flag (seg_die, DW_AT_external, 1);
22987
22988 if (decl_name != NULL
22989 && IDENTIFIER_POINTER (decl_name) != NULL)
22990 {
22991 name = dwarf2_name (decl, 1);
22992 if (! DECL_ARTIFICIAL (decl))
22993 add_src_coords_attributes (seg_die, decl);
22994
22995 add_linkage_name (seg_die, decl);
22996 }
22997 gcc_assert (name != NULL);
22998 add_pure_or_virtual_attribute (seg_die, decl);
22999 if (DECL_ARTIFICIAL (decl))
23000 add_AT_flag (seg_die, DW_AT_artificial, 1);
23001
23002 name = concat ("__second_sect_of_", name, NULL);
23003 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23004 fde->dw_fde_second_end, false);
23005 add_name_attribute (seg_die, name);
23006 if (want_pubnames ())
23007 add_pubname_string (name, seg_die);
23008 }
23009 }
23010 else
23011 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23012 false);
23013 }
23014
23015 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23016
23017 /* We define the "frame base" as the function's CFA. This is more
23018 convenient for several reasons: (1) It's stable across the prologue
23019 and epilogue, which makes it better than just a frame pointer,
23020      (2) With DWARF 3, there exists a one-byte encoding that allows us
23021 to reference the .debug_frame data by proxy, but failing that,
23022 (3) We can at least reuse the code inspection and interpretation
23023 code that determines the CFA position at various points in the
23024 function. */
23025 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23026 {
23027 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23028 add_AT_loc (subr_die, DW_AT_frame_base, op);
23029 }
23030 else
23031 {
23032 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23033 if (list->dw_loc_next)
23034 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23035 else
23036 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23037 }
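      /* Concretely (an illustration, using x86-64 DWARF register numbers):
         the DWARF 3+ branch above, taken when the target also unwinds via
         DWARF, emits the one-byte expression DW_OP_call_frame_cfa as
         DW_AT_frame_base, while the fallback derives something like
         DW_OP_breg7 <off> (rsp) or a location list switching between
         DW_OP_breg7 and DW_OP_breg6 (rbp) entries, tracking where the CFA
         lives at each point in the function.  */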
23038
23039 /* Compute a displacement from the "steady-state frame pointer" to
23040 the CFA. The former is what all stack slots and argument slots
23041 will reference in the rtl; the latter is what we've told the
23042 debugger about. We'll need to adjust all frame_base references
23043 by this displacement. */
23044 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23045
23046 if (fun->static_chain_decl)
23047 {
23048 /* DWARF requires here a location expression that computes the
23049 address of the enclosing subprogram's frame base. The machinery
23050 in tree-nested.c is supposed to store this specific address in the
23051 last field of the FRAME record. */
23052 const tree frame_type
23053 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23054 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23055
23056 tree fb_expr
23057 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23058 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23059 fb_expr, fb_decl, NULL_TREE);
23060
23061 add_AT_location_description (subr_die, DW_AT_static_link,
23062 loc_list_from_tree (fb_expr, 0, NULL));
23063 }
23064
23065 resolve_variable_values ();
23066 }
23067
23068   /* Generate child DIEs for template parameters. */
23069 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23070 gen_generic_params_dies (decl);
23071
23072 /* Now output descriptions of the arguments for this function. This gets
23073      (unnecessarily?) complex because the DECL_ARGUMENTS list
23074 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23075 `...' at the end of the formal parameter list. In order to find out if
23076 there was a trailing ellipsis or not, we must instead look at the type
23077 associated with the FUNCTION_DECL. This will be a node of type
23078 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23079 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23080 an ellipsis at the end. */
23081
23082 /* In the case where we are describing a mere function declaration, all we
23083 need to do here (and all we *can* do here) is to describe the *types* of
23084 its formal parameters. */
23085 if (debug_info_level <= DINFO_LEVEL_TERSE)
23086 ;
23087 else if (declaration)
23088 gen_formal_types_die (decl, subr_die);
23089 else
23090 {
23091 /* Generate DIEs to represent all known formal parameters. */
23092 tree parm = DECL_ARGUMENTS (decl);
23093 tree generic_decl = early_dwarf
23094 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23095 tree generic_decl_parm = generic_decl
23096 ? DECL_ARGUMENTS (generic_decl)
23097 : NULL;
23098
23099 /* Now we want to walk the list of parameters of the function and
23100 emit their relevant DIEs.
23101
23102 We consider the case of DECL being an instance of a generic function
23103 as well as it being a normal function.
23104
23105 If DECL is an instance of a generic function we walk the
23106 parameters of the generic function declaration _and_ the parameters of
23107 DECL itself. This is useful because we want to emit specific DIEs for
23108 function parameter packs and those are declared as part of the
23109 generic function declaration. In that particular case,
23110 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23111 That DIE has children DIEs representing the set of arguments
23112 of the pack. Note that the set of pack arguments can be empty.
23113 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23114      child DIEs.
23115
23116 Otherwise, we just consider the parameters of DECL. */
23117 while (generic_decl_parm || parm)
23118 {
23119 if (generic_decl_parm
23120 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23121 gen_formal_parameter_pack_die (generic_decl_parm,
23122 parm, subr_die,
23123 &parm);
23124 else if (parm && !POINTER_BOUNDS_P (parm))
23125 {
23126 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23127
23128 if (early_dwarf
23129 && parm == DECL_ARGUMENTS (decl)
23130 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23131 && parm_die
23132 && (dwarf_version >= 3 || !dwarf_strict))
23133 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23134
23135 parm = DECL_CHAIN (parm);
23136 }
23137 else if (parm)
23138 parm = DECL_CHAIN (parm);
23139
23140 if (generic_decl_parm)
23141 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23142 }
23143
23144       /* Decide whether we need an unspecified_parameters DIE at the end.
23145      There are 2 cases to do this for: 1) the ANSI `...' declaration -
23146      this is detectable when the end of the arg list is not a
23147      void_type_node; 2) an unprototyped function declaration (not a
23148      definition). This just means that we have no info about the
23149      parameters at all. */
23150 if (early_dwarf)
23151 {
23152 if (prototype_p (TREE_TYPE (decl)))
23153 {
23154          /* This is the prototyped case, check for a trailing `...'. */
23155 if (stdarg_p (TREE_TYPE (decl)))
23156 gen_unspecified_parameters_die (decl, subr_die);
23157 }
23158 else if (DECL_INITIAL (decl) == NULL_TREE)
23159 gen_unspecified_parameters_die (decl, subr_die);
23160 }
23161 }
23162
23163 if (subr_die != old_die)
23164 /* Add the calling convention attribute if requested. */
23165 add_calling_convention_attribute (subr_die, decl);
23166
23167 /* Output Dwarf info for all of the stuff within the body of the function
23168 (if it has one - it may be just a declaration).
23169
23170 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23171 a function. This BLOCK actually represents the outermost binding contour
23172 for the function, i.e. the contour in which the function's formal
23173 parameters and labels get declared. Curiously, it appears that the front
23174      end doesn't actually put the PARM_DECL nodes for the current function onto
23175      the BLOCK_VARS list for this outer scope; they are strung off the
23176      DECL_ARGUMENTS list for the function instead.
23177
23178 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23179      the LABEL_DECL nodes for the function, however, and we output DWARF info
23180 for those in decls_for_scope. Just within the `outer_scope' there will be
23181 a BLOCK node representing the function's outermost pair of curly braces,
23182 and any blocks used for the base and member initializers of a C++
23183 constructor function. */
23184 tree outer_scope = DECL_INITIAL (decl);
23185 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23186 {
23187 int call_site_note_count = 0;
23188 int tail_call_site_note_count = 0;
23189
23190 /* Emit a DW_TAG_variable DIE for a named return value. */
23191 if (DECL_NAME (DECL_RESULT (decl)))
23192 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23193
23194 /* The first time through decls_for_scope we will generate the
23195 DIEs for the locals. The second time, we fill in the
23196 location info. */
23197 decls_for_scope (outer_scope, subr_die);
23198
23199 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23200 {
23201 struct call_arg_loc_node *ca_loc;
23202 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23203 {
23204 dw_die_ref die = NULL;
23205 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23206 rtx arg, next_arg;
23207
23208 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23209 ? XEXP (ca_loc->call_arg_loc_note, 0)
23210 : NULL_RTX);
23211 arg; arg = next_arg)
23212 {
23213 dw_loc_descr_ref reg, val;
23214 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23215 dw_die_ref cdie, tdie = NULL;
23216
23217 next_arg = XEXP (arg, 1);
23218 if (REG_P (XEXP (XEXP (arg, 0), 0))
23219 && next_arg
23220 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23221 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23222 && REGNO (XEXP (XEXP (arg, 0), 0))
23223 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23224 next_arg = XEXP (next_arg, 1);
23225 if (mode == VOIDmode)
23226 {
23227 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23228 if (mode == VOIDmode)
23229 mode = GET_MODE (XEXP (arg, 0));
23230 }
23231 if (mode == VOIDmode || mode == BLKmode)
23232 continue;
23233          /* Get dynamic information about the call target only if we
23234 have no static information: we cannot generate both
23235 DW_AT_call_origin and DW_AT_call_target
23236 attributes. */
23237 if (ca_loc->symbol_ref == NULL_RTX)
23238 {
23239 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23240 {
23241 tloc = XEXP (XEXP (arg, 0), 1);
23242 continue;
23243 }
23244 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23245 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23246 {
23247 tlocc = XEXP (XEXP (arg, 0), 1);
23248 continue;
23249 }
23250 }
23251 reg = NULL;
23252 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23253 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23254 VAR_INIT_STATUS_INITIALIZED);
23255 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23256 {
23257 rtx mem = XEXP (XEXP (arg, 0), 0);
23258 reg = mem_loc_descriptor (XEXP (mem, 0),
23259 get_address_mode (mem),
23260 GET_MODE (mem),
23261 VAR_INIT_STATUS_INITIALIZED);
23262 }
23263 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23264 == DEBUG_PARAMETER_REF)
23265 {
23266 tree tdecl
23267 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23268 tdie = lookup_decl_die (tdecl);
23269 if (tdie == NULL)
23270 continue;
23271 }
23272 else
23273 continue;
23274 if (reg == NULL
23275 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23276 != DEBUG_PARAMETER_REF)
23277 continue;
23278 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23279 VOIDmode,
23280 VAR_INIT_STATUS_INITIALIZED);
23281 if (val == NULL)
23282 continue;
23283 if (die == NULL)
23284 die = gen_call_site_die (decl, subr_die, ca_loc);
23285 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23286 NULL_TREE);
23287 if (reg != NULL)
23288 add_AT_loc (cdie, DW_AT_location, reg);
23289 else if (tdie != NULL)
23290 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23291 tdie);
23292 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23293 if (next_arg != XEXP (arg, 1))
23294 {
23295 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23296 if (mode == VOIDmode)
23297 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23298 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23299 0), 1),
23300 mode, VOIDmode,
23301 VAR_INIT_STATUS_INITIALIZED);
23302 if (val != NULL)
23303 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23304 val);
23305 }
23306 }
23307 if (die == NULL
23308 && (ca_loc->symbol_ref || tloc))
23309 die = gen_call_site_die (decl, subr_die, ca_loc);
23310 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23311 {
23312 dw_loc_descr_ref tval = NULL;
23313
23314 if (tloc != NULL_RTX)
23315 tval = mem_loc_descriptor (tloc,
23316 GET_MODE (tloc) == VOIDmode
23317 ? Pmode : GET_MODE (tloc),
23318 VOIDmode,
23319 VAR_INIT_STATUS_INITIALIZED);
23320 if (tval)
23321 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23322 else if (tlocc != NULL_RTX)
23323 {
23324 tval = mem_loc_descriptor (tlocc,
23325 GET_MODE (tlocc) == VOIDmode
23326 ? Pmode : GET_MODE (tlocc),
23327 VOIDmode,
23328 VAR_INIT_STATUS_INITIALIZED);
23329 if (tval)
23330 add_AT_loc (die,
23331 dwarf_AT (DW_AT_call_target_clobbered),
23332 tval);
23333 }
23334 }
23335 if (die != NULL)
23336 {
23337 call_site_note_count++;
23338 if (ca_loc->tail_call_p)
23339 tail_call_site_note_count++;
23340 }
23341 }
23342 }
23343 call_arg_locations = NULL;
23344 call_arg_loc_last = NULL;
23345 if (tail_call_site_count >= 0
23346 && tail_call_site_count == tail_call_site_note_count
23347 && (!dwarf_strict || dwarf_version >= 5))
23348 {
23349 if (call_site_count >= 0
23350 && call_site_count == call_site_note_count)
23351 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23352 else
23353 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23354 }
23355 call_site_count = -1;
23356 tail_call_site_count = -1;
23357 }
23358
23359   /* Mark used types after we have created DIEs for the function's scopes. */
23360 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23361 }
23362
23363 /* Returns a hash value for X (which really is a die_struct). */
23364
23365 hashval_t
23366 block_die_hasher::hash (die_struct *d)
23367 {
23368 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23369 }
23370
23371 /* Return nonzero if decl_id and die_parent of die_struct X are the same
23372 as decl_id and die_parent of die_struct Y. */
23373
23374 bool
23375 block_die_hasher::equal (die_struct *x, die_struct *y)
23376 {
23377 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23378 }
23379
23380 /* Hold information about markers for inlined entry points. */
23381 struct GTY ((for_user)) inline_entry_data
23382 {
23383 /* The block that's the inlined_function_outer_scope for an inlined
23384 function. */
23385 tree block;
23386
23387 /* The label at the inlined entry point. */
23388 const char *label_pfx;
23389 unsigned int label_num;
23390
23391 /* The view number to be used as the inlined entry point. */
23392 var_loc_view view;
23393 };
23394
23395 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23396 {
23397 typedef tree compare_type;
23398 static inline hashval_t hash (const inline_entry_data *);
23399 static inline bool equal (const inline_entry_data *, const_tree);
23400 };
23401
23402 /* Hash table routines for inline_entry_data. */
23403
23404 inline hashval_t
23405 inline_entry_data_hasher::hash (const inline_entry_data *data)
23406 {
23407 return htab_hash_pointer (data->block);
23408 }
23409
23410 inline bool
23411 inline_entry_data_hasher::equal (const inline_entry_data *data,
23412 const_tree block)
23413 {
23414 return data->block == block;
23415 }
23416
23417 /* Inlined entry points pending DIE creation in this compilation unit. */
23418
23419 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23420
23421
23422 /* Return TRUE if DECL, which may have been previously generated as
23423 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23424 true if decl (or its origin) is either an extern declaration or a
23425 class/namespace scoped declaration.
23426
23427 The declare_in_namespace support causes us to get two DIEs for one
23428 variable, both of which are declarations. We want to avoid
23429 considering one to be a specification, so we must test for
23430 DECLARATION and DW_AT_declaration. */
23431 static inline bool
23432 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23433 {
23434 return (old_die && TREE_STATIC (decl) && !declaration
23435 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23436 }
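/* Typical instance, assuming C++ source along the lines of

     struct S { static int count; };   // early DIE: declaration
     int S::count;                     // definition

   The in-class declaration produces a DIE with DW_AT_declaration set, so
   when gen_variable_die later sees the definition this predicate answers
   true and the definition's DIE gets a DW_AT_specification pointing back
   at the declaration instead of repeating name and context.  */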
23437
23438 /* Return true if DECL is a local static. */
23439
23440 static inline bool
23441 local_function_static (tree decl)
23442 {
23443 gcc_assert (VAR_P (decl));
23444 return TREE_STATIC (decl)
23445 && DECL_CONTEXT (decl)
23446 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23447 }
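/* For example (illustrative only):

     void f (void) { static int ncalls; ++ncalls; }

   `ncalls' is TREE_STATIC and its DECL_CONTEXT is the FUNCTION_DECL for
   `f', so local_function_static returns true; a file-scope static or a
   class-scope static data member would not qualify.  */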
23448
23449 /* Generate a DIE to represent a declared data object.
23450 Either DECL or ORIGIN must be non-null. */
23451
23452 static void
23453 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23454 {
23455 HOST_WIDE_INT off = 0;
23456 tree com_decl;
23457 tree decl_or_origin = decl ? decl : origin;
23458 tree ultimate_origin;
23459 dw_die_ref var_die;
23460 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23461 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23462 || class_or_namespace_scope_p (context_die));
23463 bool specialization_p = false;
23464 bool no_linkage_name = false;
23465
23466   /* While C++ inline static data members have definitions inside the
23467      class, force the first DIE to be a declaration, then let gen_member_die
23468      reparent it to the class context and call gen_variable_die again
23469      to create the outside-of-class DIE for the definition. */
23470 if (!declaration
23471 && old_die == NULL
23472 && decl
23473 && DECL_CONTEXT (decl)
23474 && TYPE_P (DECL_CONTEXT (decl))
23475 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23476 {
23477 declaration = true;
23478 if (dwarf_version < 5)
23479 no_linkage_name = true;
23480 }
23481
23482 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23483 if (decl || ultimate_origin)
23484 origin = ultimate_origin;
23485 com_decl = fortran_common (decl_or_origin, &off);
23486
23487   /* A symbol in common gets emitted as a child of the common block, in the form
23488 of a data member. */
23489 if (com_decl)
23490 {
23491 dw_die_ref com_die;
23492 dw_loc_list_ref loc = NULL;
23493 die_node com_die_arg;
23494
23495 var_die = lookup_decl_die (decl_or_origin);
23496 if (var_die)
23497 {
23498 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23499 {
23500 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23501 if (loc)
23502 {
23503 if (off)
23504 {
23505 /* Optimize the common case. */
23506 if (single_element_loc_list_p (loc)
23507 && loc->expr->dw_loc_opc == DW_OP_addr
23508 && loc->expr->dw_loc_next == NULL
23509 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23510 == SYMBOL_REF)
23511 {
23512 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23513 loc->expr->dw_loc_oprnd1.v.val_addr
23514 = plus_constant (GET_MODE (x), x , off);
23515 }
23516 else
23517 loc_list_plus_const (loc, off);
23518 }
23519 add_AT_location_description (var_die, DW_AT_location, loc);
23520 remove_AT (var_die, DW_AT_declaration);
23521 }
23522 }
23523 return;
23524 }
23525
23526 if (common_block_die_table == NULL)
23527 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23528
23529 com_die_arg.decl_id = DECL_UID (com_decl);
23530 com_die_arg.die_parent = context_die;
23531 com_die = common_block_die_table->find (&com_die_arg);
23532 if (! early_dwarf)
23533 loc = loc_list_from_tree (com_decl, 2, NULL);
23534 if (com_die == NULL)
23535 {
23536 const char *cnam
23537 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23538 die_node **slot;
23539
23540 com_die = new_die (DW_TAG_common_block, context_die, decl);
23541 add_name_and_src_coords_attributes (com_die, com_decl);
23542 if (loc)
23543 {
23544 add_AT_location_description (com_die, DW_AT_location, loc);
23545 /* Avoid sharing the same loc descriptor between
23546 DW_TAG_common_block and DW_TAG_variable. */
23547 loc = loc_list_from_tree (com_decl, 2, NULL);
23548 }
23549 else if (DECL_EXTERNAL (decl_or_origin))
23550 add_AT_flag (com_die, DW_AT_declaration, 1);
23551 if (want_pubnames ())
23552 add_pubname_string (cnam, com_die); /* ??? needed? */
23553 com_die->decl_id = DECL_UID (com_decl);
23554 slot = common_block_die_table->find_slot (com_die, INSERT);
23555 *slot = com_die;
23556 }
23557 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23558 {
23559 add_AT_location_description (com_die, DW_AT_location, loc);
23560 loc = loc_list_from_tree (com_decl, 2, NULL);
23561 remove_AT (com_die, DW_AT_declaration);
23562 }
23563 var_die = new_die (DW_TAG_variable, com_die, decl);
23564 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23565 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23566 decl_quals (decl_or_origin), false,
23567 context_die);
23568 add_alignment_attribute (var_die, decl);
23569 add_AT_flag (var_die, DW_AT_external, 1);
23570 if (loc)
23571 {
23572 if (off)
23573 {
23574 /* Optimize the common case. */
23575 if (single_element_loc_list_p (loc)
23576 && loc->expr->dw_loc_opc == DW_OP_addr
23577 && loc->expr->dw_loc_next == NULL
23578 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23579 {
23580 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23581 loc->expr->dw_loc_oprnd1.v.val_addr
23582 = plus_constant (GET_MODE (x), x, off);
23583 }
23584 else
23585 loc_list_plus_const (loc, off);
23586 }
23587 add_AT_location_description (var_die, DW_AT_location, loc);
23588 }
23589 else if (DECL_EXTERNAL (decl_or_origin))
23590 add_AT_flag (var_die, DW_AT_declaration, 1);
23591 if (decl)
23592 equate_decl_number_to_die (decl, var_die);
23593 return;
23594 }
23595
23596 if (old_die)
23597 {
23598 if (declaration)
23599 {
23600      /* A declaration that has been previously dumped needs no
23601 further annotations, since it doesn't need location on
23602 the second pass. */
23603 return;
23604 }
23605 else if (decl_will_get_specification_p (old_die, decl, declaration)
23606 && !get_AT (old_die, DW_AT_specification))
23607 {
23608 /* Fall-thru so we can make a new variable die along with a
23609 DW_AT_specification. */
23610 }
23611 else if (origin && old_die->die_parent != context_die)
23612 {
23613 /* If we will be creating an inlined instance, we need a
23614 new DIE that will get annotated with
23615 DW_AT_abstract_origin. */
23616 gcc_assert (!DECL_ABSTRACT_P (decl));
23617 }
23618 else
23619 {
23620 /* If a DIE was dumped early, it still needs location info.
23621 Skip to where we fill the location bits. */
23622 var_die = old_die;
23623
23624 /* ??? In LTRANS we cannot annotate early created variably
23625 modified type DIEs without copying them and adjusting all
23626 references to them. Thus we dumped them again. Also add a
23627          reference to them, but beware of a -g0 compile and -g link,
23628          in which case the reference will already be present. */
23629 tree type = TREE_TYPE (decl_or_origin);
23630 if (in_lto_p
23631 && ! get_AT (var_die, DW_AT_type)
23632 && variably_modified_type_p
23633 (type, decl_function_context (decl_or_origin)))
23634 {
23635 if (decl_by_reference_p (decl_or_origin))
23636 add_type_attribute (var_die, TREE_TYPE (type),
23637 TYPE_UNQUALIFIED, false, context_die);
23638 else
23639 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23640 false, context_die);
23641 }
23642
23643 goto gen_variable_die_location;
23644 }
23645 }
23646
23647 /* For static data members, the declaration in the class is supposed
23648      to have the DW_TAG_member tag in DWARF{3,4}, and we emit it for compatibility
23649 also in DWARF2; the specification should still be DW_TAG_variable
23650 referencing the DW_TAG_member DIE. */
23651 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23652 var_die = new_die (DW_TAG_member, context_die, decl);
23653 else
23654 var_die = new_die (DW_TAG_variable, context_die, decl);
23655
23656 if (origin != NULL)
23657 add_abstract_origin_attribute (var_die, origin);
23658
23659 /* Loop unrolling can create multiple blocks that refer to the same
23660 static variable, so we must test for the DW_AT_declaration flag.
23661
23662 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23663 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23664 sharing them.
23665
23666 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23667 else if (decl_will_get_specification_p (old_die, decl, declaration))
23668 {
23669 /* This is a definition of a C++ class level static. */
23670 add_AT_specification (var_die, old_die);
23671 specialization_p = true;
23672 if (DECL_NAME (decl))
23673 {
23674 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23675 struct dwarf_file_data * file_index = lookup_filename (s.file);
23676
23677 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23678 add_AT_file (var_die, DW_AT_decl_file, file_index);
23679
23680 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23681 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23682
23683 if (debug_column_info
23684 && s.column
23685 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23686 != (unsigned) s.column))
23687 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23688
23689 if (old_die->die_tag == DW_TAG_member)
23690 add_linkage_name (var_die, decl);
23691 }
23692 }
23693 else
23694 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23695
23696 if ((origin == NULL && !specialization_p)
23697 || (origin != NULL
23698 && !DECL_ABSTRACT_P (decl_or_origin)
23699 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23700 decl_function_context
23701 (decl_or_origin))))
23702 {
23703 tree type = TREE_TYPE (decl_or_origin);
23704
23705 if (decl_by_reference_p (decl_or_origin))
23706 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23707 context_die);
23708 else
23709 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23710 context_die);
23711 }
23712
23713 if (origin == NULL && !specialization_p)
23714 {
23715 if (TREE_PUBLIC (decl))
23716 add_AT_flag (var_die, DW_AT_external, 1);
23717
23718 if (DECL_ARTIFICIAL (decl))
23719 add_AT_flag (var_die, DW_AT_artificial, 1);
23720
23721 add_alignment_attribute (var_die, decl);
23722
23723 add_accessibility_attribute (var_die, decl);
23724 }
23725
23726 if (declaration)
23727 add_AT_flag (var_die, DW_AT_declaration, 1);
23728
23729 if (decl && (DECL_ABSTRACT_P (decl)
23730 || !old_die || is_declaration_die (old_die)))
23731 equate_decl_number_to_die (decl, var_die);
23732
23733 gen_variable_die_location:
23734 if (! declaration
23735 && (! DECL_ABSTRACT_P (decl_or_origin)
23736 /* Local static vars are shared between all clones/inlines,
23737 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23738 already set. */
23739 || (VAR_P (decl_or_origin)
23740 && TREE_STATIC (decl_or_origin)
23741 && DECL_RTL_SET_P (decl_or_origin))))
23742 {
23743 if (early_dwarf)
23744 add_pubname (decl_or_origin, var_die);
23745 else
23746 add_location_or_const_value_attribute (var_die, decl_or_origin,
23747 decl == NULL);
23748 }
23749 else
23750 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23751
23752 if ((dwarf_version >= 4 || !dwarf_strict)
23753 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23754 DW_AT_const_expr) == 1
23755 && !get_AT (var_die, DW_AT_const_expr)
23756 && !specialization_p)
23757 add_AT_flag (var_die, DW_AT_const_expr, 1);
23758
23759 if (!dwarf_strict)
23760 {
23761 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23762 DW_AT_inline);
23763 if (inl != -1
23764 && !get_AT (var_die, DW_AT_inline)
23765 && !specialization_p)
23766 add_AT_unsigned (var_die, DW_AT_inline, inl);
23767 }
23768 }
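/* Minimal sketch of the common case, assuming a plain file-scope definition

     int counter;

   Early debug creates the DW_TAG_variable DIE with DW_AT_name, DW_AT_type
   and DW_AT_external; the late pass then reaches gen_variable_die_location
   and adds a DW_AT_location of the form DW_OP_addr <counter>.  */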
23769
23770 /* Generate a DIE to represent a named constant. */
23771
23772 static void
23773 gen_const_die (tree decl, dw_die_ref context_die)
23774 {
23775 dw_die_ref const_die;
23776 tree type = TREE_TYPE (decl);
23777
23778 const_die = lookup_decl_die (decl);
23779 if (const_die)
23780 return;
23781
23782 const_die = new_die (DW_TAG_constant, context_die, decl);
23783 equate_decl_number_to_die (decl, const_die);
23784 add_name_and_src_coords_attributes (const_die, decl);
23785 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23786 if (TREE_PUBLIC (decl))
23787 add_AT_flag (const_die, DW_AT_external, 1);
23788 if (DECL_ARTIFICIAL (decl))
23789 add_AT_flag (const_die, DW_AT_artificial, 1);
23790 tree_add_const_value_attribute_for_decl (const_die, decl);
23791 }
23792
23793 /* Generate a DIE to represent a label identifier. */
23794
23795 static void
23796 gen_label_die (tree decl, dw_die_ref context_die)
23797 {
23798 tree origin = decl_ultimate_origin (decl);
23799 dw_die_ref lbl_die = lookup_decl_die (decl);
23800 rtx insn;
23801 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23802
23803 if (!lbl_die)
23804 {
23805 lbl_die = new_die (DW_TAG_label, context_die, decl);
23806 equate_decl_number_to_die (decl, lbl_die);
23807
23808 if (origin != NULL)
23809 add_abstract_origin_attribute (lbl_die, origin);
23810 else
23811 add_name_and_src_coords_attributes (lbl_die, decl);
23812 }
23813
23814 if (DECL_ABSTRACT_P (decl))
23815 equate_decl_number_to_die (decl, lbl_die);
23816 else if (! early_dwarf)
23817 {
23818 insn = DECL_RTL_IF_SET (decl);
23819
23820       /* Deleted labels are programmer-specified labels which have been
23821 eliminated because of various optimizations. We still emit them
23822 here so that it is possible to put breakpoints on them. */
23823 if (insn
23824 && (LABEL_P (insn)
23825 || ((NOTE_P (insn)
23826 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23827 {
23828 /* When optimization is enabled (via -O) some parts of the compiler
23829 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23830 represent source-level labels which were explicitly declared by
23831 the user. This really shouldn't be happening though, so catch
23832 it if it ever does happen. */
23833 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23834
23835 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23836 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23837 }
23838 else if (insn
23839 && NOTE_P (insn)
23840 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23841 && CODE_LABEL_NUMBER (insn) != -1)
23842 {
23843 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23844 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23845 }
23846 }
23847 }
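/* Illustration, assuming a user label that survives to RTL:

     void spin (volatile int *p) { retry: if (!*p) goto retry; }

   `retry' gets a DW_TAG_label DIE with DW_AT_name and source coordinates;
   late in compilation its CODE_LABEL (or the NOTE_INSN_DELETED_LABEL the
   optimizers leave behind) supplies the DW_AT_low_pc added above.  */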
23848
23849 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23850 attributes to the DIE for a block STMT, to describe where the inlined
23851 function was called from. This is similar to add_src_coords_attributes. */
23852
23853 static inline void
23854 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23855 {
23856 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23857
23858 if (dwarf_version >= 3 || !dwarf_strict)
23859 {
23860 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23861 add_AT_unsigned (die, DW_AT_call_line, s.line);
23862 if (debug_column_info && s.column)
23863 add_AT_unsigned (die, DW_AT_call_column, s.column);
23864 }
23865 }
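/* Illustration with made-up coordinates: if the inlined call sits at
   foo.c line 42, column 7, say

     int sum (int a, int b) { return add1 (a) + b; }   // foo.c:42:7

   then DW_AT_call_file (foo.c), DW_AT_call_line (42) and, with column
   tracking enabled, DW_AT_call_column (7) end up on the
   DW_TAG_inlined_subroutine DIE that represents the inlined add1.  */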
23866
23867
23868 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23869 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23870
23871 static inline void
23872 add_high_low_attributes (tree stmt, dw_die_ref die)
23873 {
23874 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23875
23876 if (inline_entry_data **iedp
23877 = !inline_entry_data_table ? NULL
23878 : inline_entry_data_table->find_slot_with_hash (stmt,
23879 htab_hash_pointer (stmt),
23880 NO_INSERT))
23881 {
23882 inline_entry_data *ied = *iedp;
23883 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23884 gcc_assert (debug_inline_points);
23885 gcc_assert (inlined_function_outer_scope_p (stmt));
23886
23887 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23888 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23889
23890 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23891 && !dwarf_strict)
23892 {
23893 if (!output_asm_line_debug_info ())
23894 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23895 else
23896 {
23897 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23898 /* FIXME: this will resolve to a small number. Could we
23899 possibly emit smaller data? Ideally we'd emit a
23900 uleb128, but that would make the size of DIEs
23901 impossible for the compiler to compute, since it's
23902 the assembler that computes the value of the view
23903 label in this case. Ideally, we'd have a single form
23904 encompassing both the address and the view, and
23905 indirecting them through a table might make things
23906 easier, but even that would be more wasteful,
23907 space-wise, than what we have now. */
23908 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23909 }
23910 }
23911
23912 inline_entry_data_table->clear_slot (iedp);
23913 }
23914
23915 if (BLOCK_FRAGMENT_CHAIN (stmt)
23916 && (dwarf_version >= 3 || !dwarf_strict))
23917 {
23918 tree chain, superblock = NULL_TREE;
23919 dw_die_ref pdie;
23920 dw_attr_node *attr = NULL;
23921
23922 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23923 {
23924 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23925 BLOCK_NUMBER (stmt));
23926 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23927 }
23928
23929 /* Optimize duplicate .debug_ranges lists or even tails of
23930      lists. If this BLOCK has the same ranges as its supercontext,
23931      look up the DW_AT_ranges attribute in the supercontext (and
23932 recursively so), verify that the ranges_table contains the
23933 right values and use it instead of adding a new .debug_range. */
23934 for (chain = stmt, pdie = die;
23935 BLOCK_SAME_RANGE (chain);
23936 chain = BLOCK_SUPERCONTEXT (chain))
23937 {
23938 dw_attr_node *new_attr;
23939
23940 pdie = pdie->die_parent;
23941 if (pdie == NULL)
23942 break;
23943 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23944 break;
23945 new_attr = get_AT (pdie, DW_AT_ranges);
23946 if (new_attr == NULL
23947 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23948 break;
23949 attr = new_attr;
23950 superblock = BLOCK_SUPERCONTEXT (chain);
23951 }
23952 if (attr != NULL
23953 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23954 == BLOCK_NUMBER (superblock))
23955 && BLOCK_FRAGMENT_CHAIN (superblock))
23956 {
23957 unsigned long off = attr->dw_attr_val.v.val_offset;
23958 unsigned long supercnt = 0, thiscnt = 0;
23959 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23960 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23961 {
23962 ++supercnt;
23963 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23964 == BLOCK_NUMBER (chain));
23965 }
23966 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23967 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23968 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23969 ++thiscnt;
23970 gcc_assert (supercnt >= thiscnt);
23971 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23972 false);
23973 note_rnglist_head (off + supercnt - thiscnt);
23974 return;
23975 }
23976
23977 unsigned int offset = add_ranges (stmt, true);
23978 add_AT_range_list (die, DW_AT_ranges, offset, false);
23979 note_rnglist_head (offset);
23980
23981 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23982 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23983 do
23984 {
23985 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23986 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23987 chain = BLOCK_FRAGMENT_CHAIN (chain);
23988 }
23989 while (chain);
23990 add_ranges (NULL);
23991 }
23992 else
23993 {
23994 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
23995 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23996 BLOCK_NUMBER (stmt));
23997 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
23998 BLOCK_NUMBER (stmt));
23999 add_AT_low_high_pc (die, label, label_high, false);
24000 }
24001 }
24002
24003 /* Generate a DIE for a lexical block. */
24004
24005 static void
24006 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24007 {
24008 dw_die_ref old_die = BLOCK_DIE (stmt);
24009 dw_die_ref stmt_die = NULL;
24010 if (!old_die)
24011 {
24012 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24013 BLOCK_DIE (stmt) = stmt_die;
24014 }
24015
24016 if (BLOCK_ABSTRACT (stmt))
24017 {
24018 if (old_die)
24019 {
24020 /* This must have been generated early and it won't even
24021 need location information since it's a DW_AT_inline
24022 function. */
24023 if (flag_checking)
24024 for (dw_die_ref c = context_die; c; c = c->die_parent)
24025 if (c->die_tag == DW_TAG_inlined_subroutine
24026 || c->die_tag == DW_TAG_subprogram)
24027 {
24028 gcc_assert (get_AT (c, DW_AT_inline));
24029 break;
24030 }
24031 return;
24032 }
24033 }
24034 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24035 {
24036 /* If this is an inlined instance, create a new lexical block DIE
24037 for anything below to attach DW_AT_abstract_origin to. */
24038 if (old_die)
24039 {
24040 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24041 BLOCK_DIE (stmt) = stmt_die;
24042 old_die = NULL;
24043 }
24044
24045 tree origin = block_ultimate_origin (stmt);
24046 if (origin != NULL_TREE && origin != stmt)
24047 add_abstract_origin_attribute (stmt_die, origin);
24048 }
24049
24050 if (old_die)
24051 stmt_die = old_die;
24052
24053 /* A non-abstract block whose blocks have already been reordered
24054 should have the instruction range for this block. If so, set the
24055 high/low attributes. */
24056 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24057 {
24058 gcc_assert (stmt_die);
24059 add_high_low_attributes (stmt, stmt_die);
24060 }
24061
24062 decls_for_scope (stmt, stmt_die);
24063 }
24064
24065 /* Generate a DIE for an inlined subprogram. */
24066
24067 static void
24068 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24069 {
24070 tree decl;
24071
24072 /* The instance of the function that is effectively being inlined shall
24073 not be abstract. */
24074 gcc_assert (! BLOCK_ABSTRACT (stmt));
24075
24076 decl = block_ultimate_origin (stmt);
24077
24078 /* Make sure any inlined functions are known to be inlineable. */
24079 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24080 || cgraph_function_possibly_inlined_p (decl));
24081
24082 if (! BLOCK_ABSTRACT (stmt))
24083 {
24084 dw_die_ref subr_die
24085 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24086
24087 if (call_arg_locations || debug_inline_points)
24088 BLOCK_DIE (stmt) = subr_die;
24089 add_abstract_origin_attribute (subr_die, decl);
24090 if (TREE_ASM_WRITTEN (stmt))
24091 add_high_low_attributes (stmt, subr_die);
24092 add_call_src_coords_attributes (stmt, subr_die);
24093
24094 decls_for_scope (stmt, subr_die);
24095 }
24096 }
24097
24098 /* Generate a DIE for a field in a record or structure. CTX is required: see
24099 the comment for VLR_CONTEXT. */
24100
24101 static void
24102 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24103 {
24104 dw_die_ref decl_die;
24105
24106 if (TREE_TYPE (decl) == error_mark_node)
24107 return;
24108
24109 decl_die = new_die (DW_TAG_member, context_die, decl);
24110 add_name_and_src_coords_attributes (decl_die, decl);
24111 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24112 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24113 context_die);
24114
24115 if (DECL_BIT_FIELD_TYPE (decl))
24116 {
24117 add_byte_size_attribute (decl_die, decl);
24118 add_bit_size_attribute (decl_die, decl);
24119 add_bit_offset_attribute (decl_die, decl, ctx);
24120 }
24121
24122 add_alignment_attribute (decl_die, decl);
24123
24124 /* A variant part offset means we are processing a field inside a variant
24125 of a variant part; such a field's context is never the QUAL_UNION_TYPE
24126 that represents the variant part itself (gen_variant_part handles that). */
24127 gcc_assert (ctx->variant_part_offset == NULL_TREE
24128 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24129 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24130 add_data_member_location_attribute (decl_die, decl, ctx);
24131
24132 if (DECL_ARTIFICIAL (decl))
24133 add_AT_flag (decl_die, DW_AT_artificial, 1);
24134
24135 add_accessibility_attribute (decl_die, decl);
24136
24137 /* Equate decl number to die, so that we can look up this decl later on. */
24138 equate_decl_number_to_die (decl, decl_die);
24139 }
24140
24141 /* Generate a DIE for a pointer to a member type. TYPE can be an
24142 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24143 pointer to member function. */
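/* For illustration (hypothetical C++ declaration): for

     struct S { int i; };
     int S::*pd;   // pointer to data member

   the type of pd is an OFFSET_TYPE; the DIE emitted below is a
   DW_TAG_ptr_to_member_type whose DW_AT_containing_type refers to the
   DIE for S and whose DW_AT_type refers to the DIE for int.  */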
24144
24145 static void
24146 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24147 {
24148 if (lookup_type_die (type))
24149 return;
24150
24151 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24152 scope_die_for (type, context_die), type);
24153
24154 equate_type_number_to_die (type, ptr_die);
24155 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24156 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24157 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24158 context_die);
24159 add_alignment_attribute (ptr_die, type);
24160
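  /* For pointers to data members, also describe how a consumer turns a
     member pointer value into an address: DW_AT_use_location is
     evaluated with the pointer-to-member value and the object's base
     address pushed on the DWARF stack, and since such pointers are
     typically implemented as plain byte offsets, a single DW_OP_plus
     suffices.  */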
24161 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24162 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24163 {
24164 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24165 add_AT_loc (ptr_die, DW_AT_use_location, op);
24166 }
24167 }
24168
24169 static char *producer_string;
24170
24171 /* Return a heap allocated producer string including command line options
24172 if -grecord-gcc-switches. */
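/* For example (hypothetical command line and version number): compiling
   with "-O2 -g -grecord-gcc-switches" might produce a string such as

     "GNU C17 8.1.0 -O2 -g"

   i.e. "<language> <version>" followed by the recorded switches, with
   options that are irrelevant to code generation (output file names,
   diagnostics tweaks, -W*, preprocessor path options, ...) filtered out
   by the loop below.  */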
24173
24174 static char *
24175 gen_producer_string (void)
24176 {
24177 size_t j;
24178 auto_vec<const char *> switches;
24179 const char *language_string = lang_hooks.name;
24180 char *producer, *tail;
24181 const char *p;
24182 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24183 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24184
24185 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24186 switch (save_decoded_options[j].opt_index)
24187 {
24188 case OPT_o:
24189 case OPT_d:
24190 case OPT_dumpbase:
24191 case OPT_dumpdir:
24192 case OPT_auxbase:
24193 case OPT_auxbase_strip:
24194 case OPT_quiet:
24195 case OPT_version:
24196 case OPT_v:
24197 case OPT_w:
24198 case OPT_L:
24199 case OPT_D:
24200 case OPT_I:
24201 case OPT_U:
24202 case OPT_SPECIAL_unknown:
24203 case OPT_SPECIAL_ignore:
24204 case OPT_SPECIAL_program_name:
24205 case OPT_SPECIAL_input_file:
24206 case OPT_grecord_gcc_switches:
24207 case OPT__output_pch_:
24208 case OPT_fdiagnostics_show_location_:
24209 case OPT_fdiagnostics_show_option:
24210 case OPT_fdiagnostics_show_caret:
24211 case OPT_fdiagnostics_color_:
24212 case OPT_fverbose_asm:
24213 case OPT____:
24214 case OPT__sysroot_:
24215 case OPT_nostdinc:
24216 case OPT_nostdinc__:
24217 case OPT_fpreprocessed:
24218 case OPT_fltrans_output_list_:
24219 case OPT_fresolution_:
24220 case OPT_fdebug_prefix_map_:
24221 case OPT_fmacro_prefix_map_:
24222 case OPT_ffile_prefix_map_:
24223 case OPT_fcompare_debug:
24224 /* Ignore these. */
24225 continue;
24226 default:
24227 if (cl_options[save_decoded_options[j].opt_index].flags
24228 & CL_NO_DWARF_RECORD)
24229 continue;
24230 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24231 == '-');
24232 switch (save_decoded_options[j].canonical_option[0][1])
24233 {
24234 case 'M':
24235 case 'i':
24236 case 'W':
24237 continue;
24238 case 'f':
24239 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24240 "dump", 4) == 0)
24241 continue;
24242 break;
24243 default:
24244 break;
24245 }
24246 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24247 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24248 break;
24249 }
24250
24251 producer = XNEWVEC (char, plen + 1 + len + 1);
24252 tail = producer;
24253 sprintf (tail, "%s %s", language_string, version_string);
24254 tail += plen;
24255
24256 FOR_EACH_VEC_ELT (switches, j, p)
24257 {
24258 len = strlen (p);
24259 *tail = ' ';
24260 memcpy (tail + 1, p, len);
24261 tail += len + 1;
24262 }
24263
24264 *tail = '\0';
24265 return producer;
24266 }
24267
24268 /* Given a C and/or C++ language/version string, return the "highest".
24269 C++ is assumed to be "higher" than C in this case. Used for merging
24270 LTO translation unit languages. */
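/* For instance, highest_c_language ("GNU C11", "GNU C++14") returns
   "GNU C++14", and highest_c_language ("GNU C89", "GNU C99") returns
   "GNU C99".  */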
24271 static const char *
24272 highest_c_language (const char *lang1, const char *lang2)
24273 {
24274 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24275 return "GNU C++17";
24276 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24277 return "GNU C++14";
24278 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24279 return "GNU C++11";
24280 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24281 return "GNU C++98";
24282
24283 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24284 return "GNU C17";
24285 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24286 return "GNU C11";
24287 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24288 return "GNU C99";
24289 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24290 return "GNU C89";
24291
24292 gcc_unreachable ();
24293 }
24294
24295
24296 /* Generate the DIE for the compilation unit. */
24297
24298 static dw_die_ref
24299 gen_compile_unit_die (const char *filename)
24300 {
24301 dw_die_ref die;
24302 const char *language_string = lang_hooks.name;
24303 int language;
24304
24305 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24306
24307 if (filename)
24308 {
24309 add_name_attribute (die, filename);
24310 /* Don't add cwd for <built-in>. */
24311 if (filename[0] != '<')
24312 add_comp_dir_attribute (die);
24313 }
24314
24315 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24316
24317 /* If our producer is LTO, try to figure out a common language to use
24318 from the global list of translation units. */
24319 if (strcmp (language_string, "GNU GIMPLE") == 0)
24320 {
24321 unsigned i;
24322 tree t;
24323 const char *common_lang = NULL;
24324
24325 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24326 {
24327 if (!TRANSLATION_UNIT_LANGUAGE (t))
24328 continue;
24329 if (!common_lang)
24330 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24331 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24332 ;
24333 else if (strncmp (common_lang, "GNU C", 5) == 0
24334 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24335 /* Mixing C and C++ is ok, use C++ in that case. */
24336 common_lang = highest_c_language (common_lang,
24337 TRANSLATION_UNIT_LANGUAGE (t));
24338 else
24339 {
24340 /* Fall back to C. */
24341 common_lang = NULL;
24342 break;
24343 }
24344 }
24345
24346 if (common_lang)
24347 language_string = common_lang;
24348 }
24349
24350 language = DW_LANG_C;
24351 if (strncmp (language_string, "GNU C", 5) == 0
24352 && ISDIGIT (language_string[5]))
24353 {
24354 language = DW_LANG_C89;
24355 if (dwarf_version >= 3 || !dwarf_strict)
24356 {
24357 if (strcmp (language_string, "GNU C89") != 0)
24358 language = DW_LANG_C99;
24359
24360 if (dwarf_version >= 5 /* || !dwarf_strict */)
24361 if (strcmp (language_string, "GNU C11") == 0
24362 || strcmp (language_string, "GNU C17") == 0)
24363 language = DW_LANG_C11;
24364 }
24365 }
24366 else if (strncmp (language_string, "GNU C++", 7) == 0)
24367 {
24368 language = DW_LANG_C_plus_plus;
24369 if (dwarf_version >= 5 /* || !dwarf_strict */)
24370 {
24371 if (strcmp (language_string, "GNU C++11") == 0)
24372 language = DW_LANG_C_plus_plus_11;
24373 else if (strcmp (language_string, "GNU C++14") == 0)
24374 language = DW_LANG_C_plus_plus_14;
24375 else if (strcmp (language_string, "GNU C++17") == 0)
24376 /* For now. */
24377 language = DW_LANG_C_plus_plus_14;
24378 }
24379 }
24380 else if (strcmp (language_string, "GNU F77") == 0)
24381 language = DW_LANG_Fortran77;
24382 else if (dwarf_version >= 3 || !dwarf_strict)
24383 {
24384 if (strcmp (language_string, "GNU Ada") == 0)
24385 language = DW_LANG_Ada95;
24386 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24387 {
24388 language = DW_LANG_Fortran95;
24389 if (dwarf_version >= 5 /* || !dwarf_strict */)
24390 {
24391 if (strcmp (language_string, "GNU Fortran2003") == 0)
24392 language = DW_LANG_Fortran03;
24393 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24394 language = DW_LANG_Fortran08;
24395 }
24396 }
24397 else if (strcmp (language_string, "GNU Objective-C") == 0)
24398 language = DW_LANG_ObjC;
24399 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24400 language = DW_LANG_ObjC_plus_plus;
24401 else if (dwarf_version >= 5 || !dwarf_strict)
24402 {
24403 if (strcmp (language_string, "GNU Go") == 0)
24404 language = DW_LANG_Go;
24405 }
24406 }
24407 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24408 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24409 language = DW_LANG_Fortran90;
24410
24411 add_AT_unsigned (die, DW_AT_language, language);
24412
24413 switch (language)
24414 {
24415 case DW_LANG_Fortran77:
24416 case DW_LANG_Fortran90:
24417 case DW_LANG_Fortran95:
24418 case DW_LANG_Fortran03:
24419 case DW_LANG_Fortran08:
24420 /* Fortran has case-insensitive identifiers and the front-end
24421 lowercases everything. */
24422 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24423 break;
24424 default:
24425 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24426 break;
24427 }
24428 return die;
24429 }
24430
24431 /* Generate the DIE for a base class. */
24432
24433 static void
24434 gen_inheritance_die (tree binfo, tree access, tree type,
24435 dw_die_ref context_die)
24436 {
24437 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24438 struct vlr_context ctx = { type, NULL };
24439
24440 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24441 context_die);
24442 add_data_member_location_attribute (die, binfo, &ctx);
24443
24444 if (BINFO_VIRTUAL_P (binfo))
24445 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24446
24447 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24448 children, otherwise the default is DW_ACCESS_public. In DWARF2
24449 the default has always been DW_ACCESS_private. */
24450 if (access == access_public_node)
24451 {
24452 if (dwarf_version == 2
24453 || context_die->die_tag == DW_TAG_class_type)
24454 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24455 }
24456 else if (access == access_protected_node)
24457 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24458 else if (dwarf_version > 2
24459 && context_die->die_tag != DW_TAG_class_type)
24460 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24461 }
24462
24463 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24464 structure. */
24465 static bool
24466 is_variant_part (tree decl)
24467 {
24468 return (TREE_CODE (decl) == FIELD_DECL
24469 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24470 }
24471
24472 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24473 return the FIELD_DECL. Return NULL_TREE otherwise. */
24474
24475 static tree
24476 analyze_discr_in_predicate (tree operand, tree struct_type)
24477 {
24478 bool continue_stripping = true;
24479 while (continue_stripping)
24480 switch (TREE_CODE (operand))
24481 {
24482 CASE_CONVERT:
24483 operand = TREE_OPERAND (operand, 0);
24484 break;
24485 default:
24486 continue_stripping = false;
24487 break;
24488 }
24489
24490 /* Match field access to members of struct_type only. */
24491 if (TREE_CODE (operand) == COMPONENT_REF
24492 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24493 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24494 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24495 return TREE_OPERAND (operand, 1);
24496 else
24497 return NULL_TREE;
24498 }
24499
24500 /* Check that SRC is a constant integer that can be represented as a native
24501 integer constant (either signed or unsigned). If so, store it into DEST and
24502 return true. Return false otherwise. */
24503
24504 static bool
24505 get_discr_value (tree src, dw_discr_value *dest)
24506 {
24507 tree discr_type = TREE_TYPE (src);
24508
24509 if (lang_hooks.types.get_debug_type)
24510 {
24511 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24512 if (debug_type != NULL)
24513 discr_type = debug_type;
24514 }
24515
24516 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24517 return false;
24518
24519 /* Signedness can vary between the original type and the debug type. This
24520 can happen for character types in Ada for instance: the character type
24521 used for code generation can be signed, to be compatible with the C one,
24522 but from a debugger point of view, it must be unsigned. */
24523 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24524 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24525
24526 if (is_orig_unsigned != is_debug_unsigned)
24527 src = fold_convert (discr_type, src);
24528
24529 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24530 return false;
24531
24532 dest->pos = is_debug_unsigned;
24533 if (is_debug_unsigned)
24534 dest->v.uval = tree_to_uhwi (src);
24535 else
24536 dest->v.sval = tree_to_shwi (src);
24537
24538 return true;
24539 }
24540
24541 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24542 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24543 store NULL_TREE in DISCR_DECL. Otherwise:
24544
24545 - store the discriminant field in STRUCT_TYPE that controls the variant
24546 part to *DISCR_DECL
24547
24548 - put in *DISCR_LISTS_P an array where for each variant, the item
24549 represents the corresponding matching list of discriminant values.
24550
24551 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24552 the above array.
24553
24554 Note that when the array is allocated (i.e. when the analysis is
24555 successful), it is up to the caller to free the array. */
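/* As a hypothetical example of the predicate shapes handled below, a
   variant whose DECL_QUALIFIER is equivalent to

     d == 1 || d == 3 || (d >= 5 && d <= 7)

   (where d is a COMPONENT_REF of the discriminant FIELD_DECL in
   STRUCT_TYPE) yields a discriminant list containing the single values
   1 and 3 plus the range [5, 7], while the default variant, whose
   qualifier is boolean_true_node, gets no list at all.  */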
24556
24557 static void
24558 analyze_variants_discr (tree variant_part_decl,
24559 tree struct_type,
24560 tree *discr_decl,
24561 dw_discr_list_ref **discr_lists_p,
24562 unsigned *discr_lists_length)
24563 {
24564 tree variant_part_type = TREE_TYPE (variant_part_decl);
24565 tree variant;
24566 dw_discr_list_ref *discr_lists;
24567 unsigned i;
24568
24569 /* Compute how many variants there are in this variant part. */
24570 *discr_lists_length = 0;
24571 for (variant = TYPE_FIELDS (variant_part_type);
24572 variant != NULL_TREE;
24573 variant = DECL_CHAIN (variant))
24574 ++*discr_lists_length;
24575
24576 *discr_decl = NULL_TREE;
24577 *discr_lists_p
24578 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24579 sizeof (**discr_lists_p));
24580 discr_lists = *discr_lists_p;
24581
24582 /* And then analyze all variants to extract discriminant information for all
24583 of them. This analysis is conservative: as soon as we detect something we
24584 do not support, abort everything and pretend we found nothing. */
24585 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24586 variant != NULL_TREE;
24587 variant = DECL_CHAIN (variant), ++i)
24588 {
24589 tree match_expr = DECL_QUALIFIER (variant);
24590
24591 /* Now, try to analyze the predicate and deduce a discriminant for
24592 it. */
24593 if (match_expr == boolean_true_node)
24594 /* Typically happens for the default variant: it matches all cases that
24595 previous variants rejected. Don't output any matching value for
24596 this one. */
24597 continue;
24598
24599 /* The following loop tries to iterate over each discriminant
24600 possibility: single values or ranges. */
24601 while (match_expr != NULL_TREE)
24602 {
24603 tree next_round_match_expr;
24604 tree candidate_discr = NULL_TREE;
24605 dw_discr_list_ref new_node = NULL;
24606
24607 /* Possibilities are matched one after the other by nested
24608 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24609 continue with the rest at next iteration. */
24610 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24611 {
24612 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24613 match_expr = TREE_OPERAND (match_expr, 1);
24614 }
24615 else
24616 next_round_match_expr = NULL_TREE;
24617
24618 if (match_expr == boolean_false_node)
24619 /* This sub-expression matches nothing: just wait for the next
24620 one. */
24621 ;
24622
24623 else if (TREE_CODE (match_expr) == EQ_EXPR)
24624 {
24625 /* We are matching: <discr_field> == <integer_cst>
24626 This sub-expression matches a single value. */
24627 tree integer_cst = TREE_OPERAND (match_expr, 1);
24628
24629 candidate_discr
24630 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24631 struct_type);
24632
24633 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24634 if (!get_discr_value (integer_cst,
24635 &new_node->dw_discr_lower_bound))
24636 goto abort;
24637 new_node->dw_discr_range = false;
24638 }
24639
24640 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24641 {
24642 /* We are matching:
24643 <discr_field> > <integer_cst>
24644 && <discr_field> < <integer_cst>.
24645 This sub-expression matches the range of values between the
24646 two matched integer constants. Note that comparisons can be
24647 inclusive or exclusive. */
24648 tree candidate_discr_1, candidate_discr_2;
24649 tree lower_cst, upper_cst;
24650 bool lower_cst_included, upper_cst_included;
24651 tree lower_op = TREE_OPERAND (match_expr, 0);
24652 tree upper_op = TREE_OPERAND (match_expr, 1);
24653
24654 /* When the comparison is exclusive, the integer constant is not
24655 the discriminant range bound we are looking for: we will have
24656 to increment or decrement it. */
24657 if (TREE_CODE (lower_op) == GE_EXPR)
24658 lower_cst_included = true;
24659 else if (TREE_CODE (lower_op) == GT_EXPR)
24660 lower_cst_included = false;
24661 else
24662 goto abort;
24663
24664 if (TREE_CODE (upper_op) == LE_EXPR)
24665 upper_cst_included = true;
24666 else if (TREE_CODE (upper_op) == LT_EXPR)
24667 upper_cst_included = false;
24668 else
24669 goto abort;
24670
24671 /* Extract the discriminant from the first operand and check it
24672 is consistent with the same analysis in the second
24673 operand. */
24674 candidate_discr_1
24675 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24676 struct_type);
24677 candidate_discr_2
24678 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24679 struct_type);
24680 if (candidate_discr_1 == candidate_discr_2)
24681 candidate_discr = candidate_discr_1;
24682 else
24683 goto abort;
24684
24685 /* Extract bounds from both. */
24686 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24687 lower_cst = TREE_OPERAND (lower_op, 1);
24688 upper_cst = TREE_OPERAND (upper_op, 1);
24689
24690 if (!lower_cst_included)
24691 lower_cst
24692 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24693 build_int_cst (TREE_TYPE (lower_cst), 1));
24694 if (!upper_cst_included)
24695 upper_cst
24696 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24697 build_int_cst (TREE_TYPE (upper_cst), 1));
24698
24699 if (!get_discr_value (lower_cst,
24700 &new_node->dw_discr_lower_bound)
24701 || !get_discr_value (upper_cst,
24702 &new_node->dw_discr_upper_bound))
24703 goto abort;
24704
24705 new_node->dw_discr_range = true;
24706 }
24707
24708 else
24709 /* Unsupported sub-expression: we cannot determine the set of
24710 matching discriminant values. Abort everything. */
24711 goto abort;
24712
24713 /* If the discriminant info is not consistent with what we saw so
24714 far, consider the analysis failed and abort everything. */
24715 if (candidate_discr == NULL_TREE
24716 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24717 goto abort;
24718 else
24719 *discr_decl = candidate_discr;
24720
24721 if (new_node != NULL)
24722 {
24723 new_node->dw_discr_next = discr_lists[i];
24724 discr_lists[i] = new_node;
24725 }
24726 match_expr = next_round_match_expr;
24727 }
24728 }
24729
24730 /* If we reach this point, we could match everything we were interested
24731 in. */
24732 return;
24733
24734 abort:
24735 /* Clean up all data structures and return no result. */
24736 free (*discr_lists_p);
24737 *discr_lists_p = NULL;
24738 *discr_decl = NULL_TREE;
24739 }
24740
24741 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24742 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24743 under CONTEXT_DIE.
24744
24745 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24746 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members of
24747 this type, which are record types, represent the available variants and each
24748 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24749 values are inferred from these attributes.
24750
24751 In trees, the offsets for the fields inside these sub-records are relative
24752 to the variant part itself, whereas the corresponding DIEs should have
24753 offset attributes that are relative to the embedding record base address.
24754 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24755 must be an expression that computes the offset of the variant part to
24756 describe in DWARF. */
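/* A simplified sketch (Ada-like source, for illustration only):

     type Rec (D : Integer) is record
        case D is
           when 1      => X : Integer;
           when others => Y : Float;
        end case;
     end record;

   is represented as a RECORD_TYPE for Rec containing a FIELD_DECL for D
   and a FIELD_DECL (the VARIANT_PART_DECL) whose type is a
   QUAL_UNION_TYPE with one RECORD_TYPE member per variant (one holding
   X, one holding Y), each carrying a DECL_QUALIFIER predicate over D.
   This function then emits a DW_TAG_variant_part DIE with one
   DW_TAG_variant child per variant.  */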
24757
24758 static void
24759 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24760 dw_die_ref context_die)
24761 {
24762 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24763 tree variant_part_offset = vlr_ctx->variant_part_offset;
24764 struct loc_descr_context ctx = {
24765 vlr_ctx->struct_type, /* context_type */
24766 NULL_TREE, /* base_decl */
24767 NULL, /* dpi */
24768 false, /* placeholder_arg */
24769 false /* placeholder_seen */
24770 };
24771
24772 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24773 NULL_TREE if there is no such field. */
24774 tree discr_decl = NULL_TREE;
24775 dw_discr_list_ref *discr_lists;
24776 unsigned discr_lists_length = 0;
24777 unsigned i;
24778
24779 dw_die_ref dwarf_proc_die = NULL;
24780 dw_die_ref variant_part_die
24781 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24782
24783 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24784
24785 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24786 &discr_decl, &discr_lists, &discr_lists_length);
24787
24788 if (discr_decl != NULL_TREE)
24789 {
24790 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24791
24792 if (discr_die)
24793 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24794 else
24795 /* We have no DIE for the discriminant, so just discard all
24796 discriminant information in the output. */
24797 discr_decl = NULL_TREE;
24798 }
24799
24800 /* If the offset for this variant part is more complex than a constant,
24801 create a DWARF procedure for it so that we will not have to generate DWARF
24802 expressions for it for each member. */
24803 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24804 && (dwarf_version >= 3 || !dwarf_strict))
24805 {
24806 const tree dwarf_proc_fndecl
24807 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24808 build_function_type (TREE_TYPE (variant_part_offset),
24809 NULL_TREE));
24810 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24811 const dw_loc_descr_ref dwarf_proc_body
24812 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24813
24814 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24815 dwarf_proc_fndecl, context_die);
24816 if (dwarf_proc_die != NULL)
24817 variant_part_offset = dwarf_proc_call;
24818 }
24819
24820 /* Output DIEs for all variants. */
24821 i = 0;
24822 for (tree variant = TYPE_FIELDS (variant_part_type);
24823 variant != NULL_TREE;
24824 variant = DECL_CHAIN (variant), ++i)
24825 {
24826 tree variant_type = TREE_TYPE (variant);
24827 dw_die_ref variant_die;
24828
24829 /* All variants (i.e. members of a variant part) are supposed to be
24830 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24831 under these records. */
24832 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24833
24834 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24835 equate_decl_number_to_die (variant, variant_die);
24836
24837 /* Output discriminant values this variant matches, if any. */
24838 if (discr_decl == NULL || discr_lists[i] == NULL)
24839 /* If we have no discriminant information at all, or this variant
24840 has no matching values, it is probably the default variant: as the
24841 standard says, don't output any discriminant value/list attribute. */
24842 ;
24843 else if (discr_lists[i]->dw_discr_next == NULL
24844 && !discr_lists[i]->dw_discr_range)
24845 /* If there is only one accepted value, don't bother outputting a
24846 list. */
24847 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24848 else
24849 add_discr_list (variant_die, discr_lists[i]);
24850
24851 for (tree member = TYPE_FIELDS (variant_type);
24852 member != NULL_TREE;
24853 member = DECL_CHAIN (member))
24854 {
24855 struct vlr_context vlr_sub_ctx = {
24856 vlr_ctx->struct_type, /* struct_type */
24857 NULL /* variant_part_offset */
24858 };
24859 if (is_variant_part (member))
24860 {
24861 /* All offsets for fields inside variant parts are relative to
24862 the top-level embedding RECORD_TYPE's base address. On the
24863 other hand, offsets in GCC's types are relative to the
24864 nested-most variant part. So we have to sum offsets each time
24865 we recurse. */
24866
24867 vlr_sub_ctx.variant_part_offset
24868 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24869 variant_part_offset, byte_position (member));
24870 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24871 }
24872 else
24873 {
24874 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24875 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24876 }
24877 }
24878 }
24879
24880 free (discr_lists);
24881 }
24882
24883 /* Generate a DIE for a class member. */
24884
24885 static void
24886 gen_member_die (tree type, dw_die_ref context_die)
24887 {
24888 tree member;
24889 tree binfo = TYPE_BINFO (type);
24890
24891 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24892
24893 /* If this is not an incomplete type, output descriptions of each of its
24894 members. Note that as we output the DIEs necessary to represent the
24895 members of this record or union type, we will also be trying to output
24896 DIEs to represent the *types* of those members. However the `type'
24897 function (above) will specifically avoid generating type DIEs for member
24898 types *within* the list of member DIEs for this (containing) type except
24899 for those types (of members) which are explicitly marked as also being
24900 members of this (containing) type themselves. The g++ front-end can
24901 force any given type to be treated as a member of some other (containing)
24902 type by setting the TYPE_CONTEXT of the given (member) type to point to
24903 the TREE node representing the appropriate (containing) type. */
24904
24905 /* First output info about the base classes. */
24906 if (binfo)
24907 {
24908 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24909 int i;
24910 tree base;
24911
24912 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24913 gen_inheritance_die (base,
24914 (accesses ? (*accesses)[i] : access_public_node),
24915 type,
24916 context_die);
24917 }
24918
24919 /* Now output info about the data members and type members. */
24920 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24921 {
24922 struct vlr_context vlr_ctx = { type, NULL_TREE };
24923 bool static_inline_p
24924 = (TREE_STATIC (member)
24925 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24926 != -1));
24927
24928 /* Ignore clones. */
24929 if (DECL_ABSTRACT_ORIGIN (member))
24930 continue;
24931
24932 /* If we thought we were generating minimal debug info for TYPE
24933 and then changed our minds, some of the member declarations
24934 may have already been defined. Don't define them again, but
24935 do put them in the right order. */
24936
24937 if (dw_die_ref child = lookup_decl_die (member))
24938 {
24939 /* Handle inline static data members, which only have in-class
24940 declarations. */
24941 dw_die_ref ref = NULL;
24942 if (child->die_tag == DW_TAG_variable
24943 && child->die_parent == comp_unit_die ())
24944 {
24945 ref = get_AT_ref (child, DW_AT_specification);
24946 /* For C++17 inline static data members followed by redundant
24947 out of class redeclaration, we might get here with
24948 child being the DIE created for the out of class
24949 redeclaration and with its DW_AT_specification being
24950 the DIE created for in-class definition. We want to
24951 reparent the latter, and don't want to create another
24952 DIE with DW_AT_specification in that case, because
24953 we already have one. */
24954 if (ref
24955 && static_inline_p
24956 && ref->die_tag == DW_TAG_variable
24957 && ref->die_parent == comp_unit_die ()
24958 && get_AT (ref, DW_AT_specification) == NULL)
24959 {
24960 child = ref;
24961 ref = NULL;
24962 static_inline_p = false;
24963 }
24964 }
24965
24966 if (child->die_tag == DW_TAG_variable
24967 && child->die_parent == comp_unit_die ()
24968 && ref == NULL)
24969 {
24970 reparent_child (child, context_die);
24971 if (dwarf_version < 5)
24972 child->die_tag = DW_TAG_member;
24973 }
24974 else
24975 splice_child_die (context_die, child);
24976 }
24977
24978 /* Do not generate standard DWARF for variant parts if we are generating
24979 the corresponding GNAT encodings: DIEs generated for both would
24980 conflict in our mappings. */
24981 else if (is_variant_part (member)
24982 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24983 {
24984 vlr_ctx.variant_part_offset = byte_position (member);
24985 gen_variant_part (member, &vlr_ctx, context_die);
24986 }
24987 else
24988 {
24989 vlr_ctx.variant_part_offset = NULL_TREE;
24990 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24991 }
24992
24993 /* For C++ inline static data members emit immediately a DW_TAG_variable
24994 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
24995 DW_AT_specification. */
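	  /* For illustration (hypothetical C++17 member): given

	       struct A { static inline int x = 1; };

	     the in-class DIE produced above stays under A's type DIE, while
	     the DW_TAG_variable emitted just below at the compile-unit level
	     refers back to it through DW_AT_specification, presumably so the
	     definition-side information can be attached there.  */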
24996 if (static_inline_p)
24997 {
24998 int old_extern = DECL_EXTERNAL (member);
24999 DECL_EXTERNAL (member) = 0;
25000 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25001 DECL_EXTERNAL (member) = old_extern;
25002 }
25003 }
25004 }
25005
25006 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25007 is set, we pretend that the type was never defined, so we only get the
25008 member DIEs needed by later specification DIEs. */
25009
25010 static void
25011 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25012 enum debug_info_usage usage)
25013 {
25014 if (TREE_ASM_WRITTEN (type))
25015 {
25016 /* Fill in the bound of variable-length fields in late dwarf if
25017 still incomplete. */
25018 if (!early_dwarf && variably_modified_type_p (type, NULL))
25019 for (tree member = TYPE_FIELDS (type);
25020 member;
25021 member = DECL_CHAIN (member))
25022 fill_variable_array_bounds (TREE_TYPE (member));
25023 return;
25024 }
25025
25026 dw_die_ref type_die = lookup_type_die (type);
25027 dw_die_ref scope_die = 0;
25028 int nested = 0;
25029 int complete = (TYPE_SIZE (type)
25030 && (! TYPE_STUB_DECL (type)
25031 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25032 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25033 complete = complete && should_emit_struct_debug (type, usage);
25034
25035 if (type_die && ! complete)
25036 return;
25037
25038 if (TYPE_CONTEXT (type) != NULL_TREE
25039 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25040 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25041 nested = 1;
25042
25043 scope_die = scope_die_for (type, context_die);
25044
25045 /* Generate child DIEs for template parameters. */
25046 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25047 schedule_generic_params_dies_gen (type);
25048
25049 if (! type_die || (nested && is_cu_die (scope_die)))
25050 /* First occurrence of type or toplevel definition of nested class. */
25051 {
25052 dw_die_ref old_die = type_die;
25053
25054 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25055 ? record_type_tag (type) : DW_TAG_union_type,
25056 scope_die, type);
25057 equate_type_number_to_die (type, type_die);
25058 if (old_die)
25059 add_AT_specification (type_die, old_die);
25060 else
25061 add_name_attribute (type_die, type_tag (type));
25062 }
25063 else
25064 remove_AT (type_die, DW_AT_declaration);
25065
25066 /* If this type has been completed, then give it a byte_size attribute and
25067 then give a list of members. */
25068 if (complete && !ns_decl)
25069 {
25070 /* Prevent infinite recursion in cases where the type of some member of
25071 this type is expressed in terms of this type itself. */
25072 TREE_ASM_WRITTEN (type) = 1;
25073 add_byte_size_attribute (type_die, type);
25074 add_alignment_attribute (type_die, type);
25075 if (TYPE_STUB_DECL (type) != NULL_TREE)
25076 {
25077 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25078 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25079 }
25080
25081 /* If the first reference to this type was as the return type of an
25082 inline function, then it may not have a parent. Fix this now. */
25083 if (type_die->die_parent == NULL)
25084 add_child_die (scope_die, type_die);
25085
25086 push_decl_scope (type);
25087 gen_member_die (type, type_die);
25088 pop_decl_scope ();
25089
25090 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25091 if (TYPE_ARTIFICIAL (type))
25092 add_AT_flag (type_die, DW_AT_artificial, 1);
25093
25094 /* GNU extension: Record what type our vtable lives in. */
25095 if (TYPE_VFIELD (type))
25096 {
25097 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25098
25099 gen_type_die (vtype, context_die);
25100 add_AT_die_ref (type_die, DW_AT_containing_type,
25101 lookup_type_die (vtype));
25102 }
25103 }
25104 else
25105 {
25106 add_AT_flag (type_die, DW_AT_declaration, 1);
25107
25108 /* We don't need to do this for function-local types. */
25109 if (TYPE_STUB_DECL (type)
25110 && ! decl_function_context (TYPE_STUB_DECL (type)))
25111 vec_safe_push (incomplete_types, type);
25112 }
25113
25114 if (get_AT (type_die, DW_AT_name))
25115 add_pubtype (type, type_die);
25116 }
25117
25118 /* Generate a DIE for a subroutine _type_. */
25119
25120 static void
25121 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25122 {
25123 tree return_type = TREE_TYPE (type);
25124 dw_die_ref subr_die
25125 = new_die (DW_TAG_subroutine_type,
25126 scope_die_for (type, context_die), type);
25127
25128 equate_type_number_to_die (type, subr_die);
25129 add_prototyped_attribute (subr_die, type);
25130 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25131 context_die);
25132 add_alignment_attribute (subr_die, type);
25133 gen_formal_types_die (type, subr_die);
25134
25135 if (get_AT (subr_die, DW_AT_name))
25136 add_pubtype (type, subr_die);
25137 if ((dwarf_version >= 5 || !dwarf_strict)
25138 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25139 add_AT_flag (subr_die, DW_AT_reference, 1);
25140 if ((dwarf_version >= 5 || !dwarf_strict)
25141 && lang_hooks.types.type_dwarf_attribute (type,
25142 DW_AT_rvalue_reference) != -1)
25143 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25144 }
25145
25146 /* Generate a DIE for a type definition. */
25147
25148 static void
25149 gen_typedef_die (tree decl, dw_die_ref context_die)
25150 {
25151 dw_die_ref type_die;
25152 tree type;
25153
25154 if (TREE_ASM_WRITTEN (decl))
25155 {
25156 if (DECL_ORIGINAL_TYPE (decl))
25157 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25158 return;
25159 }
25160
25161 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25162 checks in process_scope_var and modified_type_die), this should be called
25163 only for original types. */
25164 gcc_assert (decl_ultimate_origin (decl) == NULL
25165 || decl_ultimate_origin (decl) == decl);
25166
25167 TREE_ASM_WRITTEN (decl) = 1;
25168 type_die = new_die (DW_TAG_typedef, context_die, decl);
25169
25170 add_name_and_src_coords_attributes (type_die, decl);
25171 if (DECL_ORIGINAL_TYPE (decl))
25172 {
25173 type = DECL_ORIGINAL_TYPE (decl);
25174 if (type == error_mark_node)
25175 return;
25176
25177 gcc_assert (type != TREE_TYPE (decl));
25178 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25179 }
25180 else
25181 {
25182 type = TREE_TYPE (decl);
25183 if (type == error_mark_node)
25184 return;
25185
25186 if (is_naming_typedef_decl (TYPE_NAME (type)))
25187 {
25188 /* Here, we are in the case of decl being a typedef naming
25189 an anonymous type, e.g:
25190 typedef struct {...} foo;
25191 In that case TREE_TYPE (decl) is not a typedef variant
25192 type and TYPE_NAME of the anonymous type is set to the
25193 TYPE_DECL of the typedef. This construct is emitted by
25194 the C++ FE.
25195
25196 TYPE is the anonymous struct named by the typedef
25197 DECL. As we need the DW_AT_type attribute of the
25198 DW_TAG_typedef to point to the DIE of TYPE, let's
25199 generate that DIE right away. add_type_attribute
25200 called below will then pick (via lookup_type_die) that
25201 anonymous struct DIE. */
25202 if (!TREE_ASM_WRITTEN (type))
25203 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25204
25205 /* This is a GNU Extension. We are adding a
25206 DW_AT_linkage_name attribute to the DIE of the
25207 anonymous struct TYPE. The value of that attribute
25208 is the name of the typedef decl naming the anonymous
25209 struct. This greatly eases the work of consumers of
25210 this debug info. */
25211 add_linkage_name_raw (lookup_type_die (type), decl);
25212 }
25213 }
25214
25215 add_type_attribute (type_die, type, decl_quals (decl), false,
25216 context_die);
25217
25218 if (is_naming_typedef_decl (decl))
25219 /* We want all subsequent calls to lookup_type_die with
25220 TYPE as argument to yield the DW_TAG_typedef we have just
25221 created. */
25222 equate_type_number_to_die (type, type_die);
25223
25224 add_alignment_attribute (type_die, TREE_TYPE (decl));
25225
25226 add_accessibility_attribute (type_die, decl);
25227
25228 if (DECL_ABSTRACT_P (decl))
25229 equate_decl_number_to_die (decl, type_die);
25230
25231 if (get_AT (type_die, DW_AT_name))
25232 add_pubtype (decl, type_die);
25233 }
25234
25235 /* Generate a DIE for a struct, class, enum or union type. */
25236
25237 static void
25238 gen_tagged_type_die (tree type,
25239 dw_die_ref context_die,
25240 enum debug_info_usage usage)
25241 {
25242 int need_pop;
25243
25244 if (type == NULL_TREE
25245 || !is_tagged_type (type))
25246 return;
25247
25248 if (TREE_ASM_WRITTEN (type))
25249 need_pop = 0;
25250 /* If this is a nested type whose containing class hasn't been written
25251 out yet, writing it out will cover this one, too. This does not apply
25252 to instantiations of member class templates; they need to be added to
25253 the containing class as they are generated. FIXME: This hurts the
25254 idea of combining type decls from multiple TUs, since we can't predict
25255 what set of template instantiations we'll get. */
25256 else if (TYPE_CONTEXT (type)
25257 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25258 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25259 {
25260 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25261
25262 if (TREE_ASM_WRITTEN (type))
25263 return;
25264
25265 /* If that failed, attach ourselves to the stub. */
25266 push_decl_scope (TYPE_CONTEXT (type));
25267 context_die = lookup_type_die (TYPE_CONTEXT (type));
25268 need_pop = 1;
25269 }
25270 else if (TYPE_CONTEXT (type) != NULL_TREE
25271 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25272 {
25273 /* If this type is local to a function that hasn't been written
25274 out yet, use a NULL context for now; it will be fixed up in
25275 decls_for_scope. */
25276 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25277 /* A declaration DIE doesn't count; nested types need to go in the
25278 specification. */
25279 if (context_die && is_declaration_die (context_die))
25280 context_die = NULL;
25281 need_pop = 0;
25282 }
25283 else
25284 {
25285 context_die = declare_in_namespace (type, context_die);
25286 need_pop = 0;
25287 }
25288
25289 if (TREE_CODE (type) == ENUMERAL_TYPE)
25290 {
25291 /* This might have been written out by the call to
25292 declare_in_namespace. */
25293 if (!TREE_ASM_WRITTEN (type))
25294 gen_enumeration_type_die (type, context_die);
25295 }
25296 else
25297 gen_struct_or_union_type_die (type, context_die, usage);
25298
25299 if (need_pop)
25300 pop_decl_scope ();
25301
25302 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25303 it up if it is ever completed. gen_*_type_die will set it for us
25304 when appropriate. */
25305 }
25306
25307 /* Generate a type description DIE. */
25308
25309 static void
25310 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25311 enum debug_info_usage usage)
25312 {
25313 struct array_descr_info info;
25314
25315 if (type == NULL_TREE || type == error_mark_node)
25316 return;
25317
25318 if (flag_checking && type)
25319 verify_type (type);
25320
25321 if (TYPE_NAME (type) != NULL_TREE
25322 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25323 && is_redundant_typedef (TYPE_NAME (type))
25324 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25325 /* The DECL of this type is a typedef we don't want to emit debug
25326 info for, but we want debug info for its underlying type.
25327 This can happen e.g. for the injected-class-name of a C++
25328 type. */
25329 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25330
25331 /* If TYPE is a typedef type variant, let's generate debug info
25332 for the parent typedef which TYPE is a type of. */
25333 if (typedef_variant_p (type))
25334 {
25335 if (TREE_ASM_WRITTEN (type))
25336 return;
25337
25338 tree name = TYPE_NAME (type);
25339 tree origin = decl_ultimate_origin (name);
25340 if (origin != NULL && origin != name)
25341 {
25342 gen_decl_die (origin, NULL, NULL, context_die);
25343 return;
25344 }
25345
25346 /* Prevent broken recursion; we can't hand off to the same type. */
25347 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25348
25349 /* Give typedefs the right scope. */
25350 context_die = scope_die_for (type, context_die);
25351
25352 TREE_ASM_WRITTEN (type) = 1;
25353
25354 gen_decl_die (name, NULL, NULL, context_die);
25355 return;
25356 }
25357
25358 /* If type is an anonymous tagged type named by a typedef, let's
25359 generate debug info for the typedef. */
25360 if (is_naming_typedef_decl (TYPE_NAME (type)))
25361 {
25362 /* Use the DIE of the containing namespace as the parent DIE of
25363 the type description DIE we want to generate. */
25364 if (DECL_CONTEXT (TYPE_NAME (type))
25365 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25366 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25367
25368 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25369 return;
25370 }
25371
25372 if (lang_hooks.types.get_debug_type)
25373 {
25374 tree debug_type = lang_hooks.types.get_debug_type (type);
25375
25376 if (debug_type != NULL_TREE && debug_type != type)
25377 {
25378 gen_type_die_with_usage (debug_type, context_die, usage);
25379 return;
25380 }
25381 }
25382
25383 /* We are going to output a DIE to represent the unqualified version
25384 of this type (i.e. without any const or volatile qualifiers) so
25385 get the main variant (i.e. the unqualified version) of this type
25386 now. (Vectors and arrays are special because the debugging info is in the
25387 cloned type itself. Similarly function/method types can contain extra
25388 ref-qualification). */
25389 if (TREE_CODE (type) == FUNCTION_TYPE
25390 || TREE_CODE (type) == METHOD_TYPE)
25391 {
25392 /* For function/method types, can't use type_main_variant here,
25393 because that can have different ref-qualifiers for C++,
25394 but try to canonicalize. */
25395 tree main = TYPE_MAIN_VARIANT (type);
25396 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25397 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25398 && check_base_type (t, main)
25399 && check_lang_type (t, type))
25400 {
25401 type = t;
25402 break;
25403 }
25404 }
25405 else if (TREE_CODE (type) != VECTOR_TYPE
25406 && TREE_CODE (type) != ARRAY_TYPE)
25407 type = type_main_variant (type);
25408
25409 /* If this is an array type with hidden descriptor, handle it first. */
25410 if (!TREE_ASM_WRITTEN (type)
25411 && lang_hooks.types.get_array_descr_info)
25412 {
25413 memset (&info, 0, sizeof (info));
25414 if (lang_hooks.types.get_array_descr_info (type, &info))
25415 {
25416 /* Fortran sometimes emits array types with no dimension. */
25417 gcc_assert (info.ndimensions >= 0
25418 && (info.ndimensions
25419 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25420 gen_descr_array_type_die (type, &info, context_die);
25421 TREE_ASM_WRITTEN (type) = 1;
25422 return;
25423 }
25424 }
25425
25426 if (TREE_ASM_WRITTEN (type))
25427 {
25428 /* Variable-length types may be incomplete even if
25429 TREE_ASM_WRITTEN. For such types, fall through to
25430 gen_array_type_die() and possibly fill in
25431 DW_AT_{upper,lower}_bound attributes. */
25432 if ((TREE_CODE (type) != ARRAY_TYPE
25433 && TREE_CODE (type) != RECORD_TYPE
25434 && TREE_CODE (type) != UNION_TYPE
25435 && TREE_CODE (type) != QUAL_UNION_TYPE)
25436 || !variably_modified_type_p (type, NULL))
25437 return;
25438 }
25439
25440 switch (TREE_CODE (type))
25441 {
25442 case ERROR_MARK:
25443 break;
25444
25445 case POINTER_TYPE:
25446 case REFERENCE_TYPE:
25447 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25448 ensures that the gen_type_die recursion will terminate even if the
25449 type is recursive. Recursive types are possible in Ada. */
25450 /* ??? We could perhaps do this for all types before the switch
25451 statement. */
25452 TREE_ASM_WRITTEN (type) = 1;
25453
25454 /* For these types, all that is required is that we output a DIE (or a
25455 set of DIEs) to represent the "basis" type. */
25456 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25457 DINFO_USAGE_IND_USE);
25458 break;
25459
25460 case OFFSET_TYPE:
25461 /* This code is used for C++ pointer-to-data-member types.
25462 Output a description of the relevant class type. */
25463 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25464 DINFO_USAGE_IND_USE);
25465
25466 /* Output a description of the type of the object pointed to. */
25467 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25468 DINFO_USAGE_IND_USE);
25469
25470 /* Now output a DIE to represent this pointer-to-data-member type
25471 itself. */
25472 gen_ptr_to_mbr_type_die (type, context_die);
25473 break;
25474
25475 case FUNCTION_TYPE:
25476 /* Force out return type (in case it wasn't forced out already). */
25477 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25478 DINFO_USAGE_DIR_USE);
25479 gen_subroutine_type_die (type, context_die);
25480 break;
25481
25482 case METHOD_TYPE:
25483 /* Force out return type (in case it wasn't forced out already). */
25484 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25485 DINFO_USAGE_DIR_USE);
25486 gen_subroutine_type_die (type, context_die);
25487 break;
25488
25489 case ARRAY_TYPE:
25490 case VECTOR_TYPE:
25491 gen_array_type_die (type, context_die);
25492 break;
25493
25494 case ENUMERAL_TYPE:
25495 case RECORD_TYPE:
25496 case UNION_TYPE:
25497 case QUAL_UNION_TYPE:
25498 gen_tagged_type_die (type, context_die, usage);
25499 return;
25500
25501 case VOID_TYPE:
25502 case INTEGER_TYPE:
25503 case REAL_TYPE:
25504 case FIXED_POINT_TYPE:
25505 case COMPLEX_TYPE:
25506 case BOOLEAN_TYPE:
25507 case POINTER_BOUNDS_TYPE:
25508 /* No DIEs needed for fundamental types. */
25509 break;
25510
25511 case NULLPTR_TYPE:
25512 case LANG_TYPE:
25513 /* Just use DW_TAG_unspecified_type. */
25514 {
25515 dw_die_ref type_die = lookup_type_die (type);
25516 if (type_die == NULL)
25517 {
25518 tree name = TYPE_IDENTIFIER (type);
25519 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25520 type);
25521 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25522 equate_type_number_to_die (type, type_die);
25523 }
25524 }
25525 break;
25526
25527 default:
25528 if (is_cxx_auto (type))
25529 {
25530 tree name = TYPE_IDENTIFIER (type);
25531 dw_die_ref *die = (name == get_identifier ("auto")
25532 ? &auto_die : &decltype_auto_die);
25533 if (!*die)
25534 {
25535 *die = new_die (DW_TAG_unspecified_type,
25536 comp_unit_die (), NULL_TREE);
25537 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25538 }
25539 equate_type_number_to_die (type, *die);
25540 break;
25541 }
25542 gcc_unreachable ();
25543 }
25544
25545 TREE_ASM_WRITTEN (type) = 1;
25546 }
25547
25548 static void
25549 gen_type_die (tree type, dw_die_ref context_die)
25550 {
25551 if (type != error_mark_node)
25552 {
25553 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25554 if (flag_checking)
25555 {
25556 dw_die_ref die = lookup_type_die (type);
25557 if (die)
25558 check_die (die);
25559 }
25560 }
25561 }
25562
25563 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25564 things which are local to the given block. */
25565
25566 static void
25567 gen_block_die (tree stmt, dw_die_ref context_die)
25568 {
25569 int must_output_die = 0;
25570 bool inlined_func;
25571
25572 /* Ignore blocks that are NULL. */
25573 if (stmt == NULL_TREE)
25574 return;
25575
25576 inlined_func = inlined_function_outer_scope_p (stmt);
25577
25578 /* If the block is one fragment of a non-contiguous block, do not
25579 process the variables, since they will have been done by the
25580 origin block. Do process subblocks. */
25581 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25582 {
25583 tree sub;
25584
25585 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25586 gen_block_die (sub, context_die);
25587
25588 return;
25589 }
25590
25591 /* Determine if we need to output any Dwarf DIEs at all to represent this
25592 block. */
25593 if (inlined_func)
25594 /* The outer scopes for inlinings *must* always be represented. We
25595 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25596 must_output_die = 1;
25597 else
25598 {
25599 /* Determine if this block directly contains any "significant"
25600 local declarations which we will need to output DIEs for. */
25601 if (debug_info_level > DINFO_LEVEL_TERSE)
25602 /* We are not in terse mode so *any* local declaration counts
25603 as being a "significant" one. */
25604 must_output_die = ((BLOCK_VARS (stmt) != NULL
25605 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25606 && (TREE_USED (stmt)
25607 || TREE_ASM_WRITTEN (stmt)
25608 || BLOCK_ABSTRACT (stmt)));
25609 else if ((TREE_USED (stmt)
25610 || TREE_ASM_WRITTEN (stmt)
25611 || BLOCK_ABSTRACT (stmt))
25612 && !dwarf2out_ignore_block (stmt))
25613 must_output_die = 1;
25614 }
25615
25616 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25617 DIE for any block which contains no significant local declarations at
25618 all. Rather, in such cases we just call `decls_for_scope' so that any
25619 needed Dwarf info for any sub-blocks will get properly generated. Note
25620 that in terse mode, our definition of what constitutes a "significant"
25621 local declaration gets restricted to include only inlined function
25622 instances and local (nested) function definitions. */
25623 if (must_output_die)
25624 {
25625 if (inlined_func)
25626 {
25627 /* If STMT block is abstract, that means we have been called
25628 indirectly from dwarf2out_abstract_function.
25629 That function rightfully marks the descendant blocks (of
25630 the abstract function it is dealing with) as being abstract,
25631 precisely to prevent us from emitting any
25632 DW_TAG_inlined_subroutine DIE as a descendant
25633 of an abstract function instance. So in that case, we should
25634 not call gen_inlined_subroutine_die.
25635
25636 Later though, when cgraph asks dwarf2out to emit info
25637 for the concrete instance of the function decl into which
25638 the concrete instance of STMT got inlined, the latter will lead
25639 to the generation of a DW_TAG_inlined_subroutine DIE. */
25640 if (! BLOCK_ABSTRACT (stmt))
25641 gen_inlined_subroutine_die (stmt, context_die);
25642 }
25643 else
25644 gen_lexical_block_die (stmt, context_die);
25645 }
25646 else
25647 decls_for_scope (stmt, context_die);
25648 }
25649
25650 /* Process variable DECL (or variable with origin ORIGIN) within
25651 block STMT and add it to CONTEXT_DIE. */
25652 static void
25653 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25654 {
25655 dw_die_ref die;
25656 tree decl_or_origin = decl ? decl : origin;
25657
25658 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25659 die = lookup_decl_die (decl_or_origin);
25660 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25661 {
25662 if (TYPE_DECL_IS_STUB (decl_or_origin))
25663 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25664 else
25665 die = lookup_decl_die (decl_or_origin);
25666 /* Avoid re-creating the DIE late if it was optimized away as unused early. */
25667 if (! die && ! early_dwarf)
25668 return;
25669 }
25670 else
25671 die = NULL;
25672
25673 /* Avoid creating DIEs for local typedefs and concrete static variables that
25674 will only be pruned later. */
25675 if ((origin || decl_ultimate_origin (decl))
25676 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25677 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25678 {
25679 origin = decl_ultimate_origin (decl_or_origin);
25680 if (decl && VAR_P (decl) && die != NULL)
25681 {
25682 die = lookup_decl_die (origin);
25683 if (die != NULL)
25684 equate_decl_number_to_die (decl, die);
25685 }
25686 return;
25687 }
25688
25689 if (die != NULL && die->die_parent == NULL)
25690 add_child_die (context_die, die);
25691 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25692 {
25693 if (early_dwarf)
25694 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25695 stmt, context_die);
25696 }
25697 else
25698 {
25699 if (decl && DECL_P (decl))
25700 {
25701 die = lookup_decl_die (decl);
25702
25703 /* Early created DIEs do not have a parent as the decls refer
25704 to the function as DECL_CONTEXT rather than the BLOCK. */
25705 if (die && die->die_parent == NULL)
25706 {
25707 gcc_assert (in_lto_p);
25708 add_child_die (context_die, die);
25709 }
25710 }
25711
25712 gen_decl_die (decl, origin, NULL, context_die);
25713 }
25714 }
25715
25716 /* Generate all of the decls declared within a given scope and (recursively)
25717 all of its sub-blocks. */
25718
25719 static void
25720 decls_for_scope (tree stmt, dw_die_ref context_die)
25721 {
25722 tree decl;
25723 unsigned int i;
25724 tree subblocks;
25725
25726 /* Ignore NULL blocks. */
25727 if (stmt == NULL_TREE)
25728 return;
25729
25730 /* Output the DIEs to represent all of the data objects and typedefs
25731 declared directly within this block but not within any nested
25732 sub-blocks. Also, nested function and tag DIEs have been
25733 generated with a parent of NULL; fix that up now. We don't
25734 have to do this if we're at -g1. */
25735 if (debug_info_level > DINFO_LEVEL_TERSE)
25736 {
25737 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25738 process_scope_var (stmt, decl, NULL_TREE, context_die);
25739 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25740 origin - avoid doing this twice as we have no good way to see
25741 if we've done it once already. */
25742 if (! early_dwarf)
25743 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25744 {
25745 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25746 if (decl == current_function_decl)
25747 /* Ignore declarations of the current function: although they
25748 are only declarations, gen_subprogram_die would treat them
25749 as definitions again (because they are equal to
25750 current_function_decl) and endlessly recurse. */;
25751 else if (TREE_CODE (decl) == FUNCTION_DECL)
25752 process_scope_var (stmt, decl, NULL_TREE, context_die);
25753 else
25754 process_scope_var (stmt, NULL_TREE, decl, context_die);
25755 }
25756 }
25757
25758 /* Even if we're at -g1, we need to process the subblocks in order to get
25759 inlined call information. */
25760
25761 /* Output the DIEs to represent all sub-blocks (and the items declared
25762 therein) of this block. */
25763 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25764 subblocks != NULL;
25765 subblocks = BLOCK_CHAIN (subblocks))
25766 gen_block_die (subblocks, context_die);
25767 }
25768
25769 /* Is this a typedef we can avoid emitting? */
25770
25771 bool
25772 is_redundant_typedef (const_tree decl)
25773 {
25774 if (TYPE_DECL_IS_STUB (decl))
25775 return true;
25776
25777 if (DECL_ARTIFICIAL (decl)
25778 && DECL_CONTEXT (decl)
25779 && is_tagged_type (DECL_CONTEXT (decl))
25780 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25781 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25782 /* Also ignore the artificial member typedef for the class name. */
25783 return true;
25784
25785 return false;
25786 }
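
/* Illustrative sketch (the example is assumed, not from the sources):
   for a C++ class such as

     struct point { int x, y; };

   the front end creates an artificial member TYPE_DECL for the
   injected-class-name `point' inside `point' itself; its DECL_NAME
   matches the TYPE_NAME of its DECL_CONTEXT, so is_redundant_typedef
   returns true and no separate DW_TAG_typedef is emitted for it.  */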
25787
25788 /* Return TRUE if DECL is a typedef that names a type for linkage
25789 purposes. This kind of typedef is produced by the C++ FE for
25790 constructs like:
25791
25792 typedef struct {...} foo;
25793
25794 In that case, there is no typedef variant type produced for foo.
25795 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25796 struct type. */
25797
25798 static bool
25799 is_naming_typedef_decl (const_tree decl)
25800 {
25801 if (decl == NULL_TREE
25802 || TREE_CODE (decl) != TYPE_DECL
25803 || DECL_NAMELESS (decl)
25804 || !is_tagged_type (TREE_TYPE (decl))
25805 || DECL_IS_BUILTIN (decl)
25806 || is_redundant_typedef (decl)
25807 /* It looks like Ada produces TYPE_DECLs that are very similar
25808 to C++ naming typedefs but that have different
25809 semantics. Let's be specific to C++ for now. */
25810 || !is_cxx (decl))
25811 return FALSE;
25812
25813 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25814 && TYPE_NAME (TREE_TYPE (decl)) == decl
25815 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25816 != TYPE_NAME (TREE_TYPE (decl))));
25817 }
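
/* Illustrative contrast with the example in the comment above
   (hypothetical declarations):

     typedef struct bar { int i; } foo;   // the tag `bar' already names it
     typedef int word;                    // target is not a tagged type

   Neither is a naming typedef: the first has a non-NULL
   DECL_ORIGINAL_TYPE and the second fails the is_tagged_type check,
   so is_naming_typedef_decl returns false for both.  */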
25818
25819 /* Looks up the DIE for a context. */
25820
25821 static inline dw_die_ref
25822 lookup_context_die (tree context)
25823 {
25824 if (context)
25825 {
25826 /* Find die that represents this context. */
25827 if (TYPE_P (context))
25828 {
25829 context = TYPE_MAIN_VARIANT (context);
25830 dw_die_ref ctx = lookup_type_die (context);
25831 if (!ctx)
25832 return NULL;
25833 return strip_naming_typedef (context, ctx);
25834 }
25835 else
25836 return lookup_decl_die (context);
25837 }
25838 return comp_unit_die ();
25839 }
25840
25841 /* Returns the DIE for a context. */
25842
25843 static inline dw_die_ref
25844 get_context_die (tree context)
25845 {
25846 if (context)
25847 {
25848 /* Find die that represents this context. */
25849 if (TYPE_P (context))
25850 {
25851 context = TYPE_MAIN_VARIANT (context);
25852 return strip_naming_typedef (context, force_type_die (context));
25853 }
25854 else
25855 return force_decl_die (context);
25856 }
25857 return comp_unit_die ();
25858 }
25859
25860 /* Returns the DIE for decl. A DIE will always be returned. */
25861
25862 static dw_die_ref
25863 force_decl_die (tree decl)
25864 {
25865 dw_die_ref decl_die;
25866 unsigned saved_external_flag;
25867 tree save_fn = NULL_TREE;
25868 decl_die = lookup_decl_die (decl);
25869 if (!decl_die)
25870 {
25871 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25872
25873 decl_die = lookup_decl_die (decl);
25874 if (decl_die)
25875 return decl_die;
25876
25877 switch (TREE_CODE (decl))
25878 {
25879 case FUNCTION_DECL:
25880 /* Clear current_function_decl, so that gen_subprogram_die thinks
25881 that this is a declaration. At this point, we just want to force
25882 a declaration DIE. */
25883 save_fn = current_function_decl;
25884 current_function_decl = NULL_TREE;
25885 gen_subprogram_die (decl, context_die);
25886 current_function_decl = save_fn;
25887 break;
25888
25889 case VAR_DECL:
25890 /* Set the external flag to force a declaration DIE. Restore it after
25891 the gen_decl_die() call. */
25892 saved_external_flag = DECL_EXTERNAL (decl);
25893 DECL_EXTERNAL (decl) = 1;
25894 gen_decl_die (decl, NULL, NULL, context_die);
25895 DECL_EXTERNAL (decl) = saved_external_flag;
25896 break;
25897
25898 case NAMESPACE_DECL:
25899 if (dwarf_version >= 3 || !dwarf_strict)
25900 dwarf2out_decl (decl);
25901 else
25902 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25903 decl_die = comp_unit_die ();
25904 break;
25905
25906 case TRANSLATION_UNIT_DECL:
25907 decl_die = comp_unit_die ();
25908 break;
25909
25910 default:
25911 gcc_unreachable ();
25912 }
25913
25914 /* We should be able to find the DIE now. */
25915 if (!decl_die)
25916 decl_die = lookup_decl_die (decl);
25917 gcc_assert (decl_die);
25918 }
25919
25920 return decl_die;
25921 }
25922
25923 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25924 always returned. */
25925
25926 static dw_die_ref
25927 force_type_die (tree type)
25928 {
25929 dw_die_ref type_die;
25930
25931 type_die = lookup_type_die (type);
25932 if (!type_die)
25933 {
25934 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25935
25936 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25937 false, context_die);
25938 gcc_assert (type_die);
25939 }
25940 return type_die;
25941 }
25942
25943 /* Force out any required namespaces to be able to output DECL,
25944 and return the new context_die for it, if it's changed. */
25945
25946 static dw_die_ref
25947 setup_namespace_context (tree thing, dw_die_ref context_die)
25948 {
25949 tree context = (DECL_P (thing)
25950 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25951 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25952 /* Force out the namespace. */
25953 context_die = force_decl_die (context);
25954
25955 return context_die;
25956 }
25957
25958 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25959 type) within its namespace, if appropriate.
25960
25961 For compatibility with older debuggers, namespace DIEs only contain
25962 declarations; all definitions are emitted at CU scope, with
25963 DW_AT_specification pointing to the declaration (like with class
25964 members). */
25965
25966 static dw_die_ref
25967 declare_in_namespace (tree thing, dw_die_ref context_die)
25968 {
25969 dw_die_ref ns_context;
25970
25971 if (debug_info_level <= DINFO_LEVEL_TERSE)
25972 return context_die;
25973
25974 /* External declarations in the local scope only need to be emitted
25975 once, not once in the namespace and once in the scope.
25976
25977 This avoids declaring the `extern' below in the
25978 namespace DIE as well as in the innermost scope:
25979
25980 namespace S
25981 {
25982 int i=5;
25983 int foo()
25984 {
25985 int i=8;
25986 extern int i;
25987 return i;
25988 }
25989 }
25990 */
25991 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25992 return context_die;
25993
25994 /* If this decl is from an inlined function, then don't try to emit it in its
25995 namespace, as we will get confused. It would have already been emitted
25996 when the abstract instance of the inline function was emitted anyway. */
25997 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
25998 return context_die;
25999
26000 ns_context = setup_namespace_context (thing, context_die);
26001
26002 if (ns_context != context_die)
26003 {
26004 if (is_fortran ())
26005 return ns_context;
26006 if (DECL_P (thing))
26007 gen_decl_die (thing, NULL, NULL, ns_context);
26008 else
26009 gen_type_die (thing, ns_context);
26010 }
26011 return context_die;
26012 }
26013
26014 /* Generate a DIE for a namespace or namespace alias. */
26015
26016 static void
26017 gen_namespace_die (tree decl, dw_die_ref context_die)
26018 {
26019 dw_die_ref namespace_die;
26020
26021 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26022 they are an alias of. */
26023 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26024 {
26025 /* Output a real namespace or module. */
26026 context_die = setup_namespace_context (decl, comp_unit_die ());
26027 namespace_die = new_die (is_fortran ()
26028 ? DW_TAG_module : DW_TAG_namespace,
26029 context_die, decl);
26030 /* For Fortran modules defined in a different CU, don't add src coords. */
26031 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26032 {
26033 const char *name = dwarf2_name (decl, 0);
26034 if (name)
26035 add_name_attribute (namespace_die, name);
26036 }
26037 else
26038 add_name_and_src_coords_attributes (namespace_die, decl);
26039 if (DECL_EXTERNAL (decl))
26040 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26041 equate_decl_number_to_die (decl, namespace_die);
26042 }
26043 else
26044 {
26045 /* Output a namespace alias. */
26046
26047 /* Force out the namespace we are an alias of, if necessary. */
26048 dw_die_ref origin_die
26049 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26050
26051 if (DECL_FILE_SCOPE_P (decl)
26052 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26053 context_die = setup_namespace_context (decl, comp_unit_die ());
26054 /* Now create the namespace alias DIE. */
26055 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26056 add_name_and_src_coords_attributes (namespace_die, decl);
26057 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26058 equate_decl_number_to_die (decl, namespace_die);
26059 }
26060 if ((dwarf_version >= 5 || !dwarf_strict)
26061 && lang_hooks.decls.decl_dwarf_attribute (decl,
26062 DW_AT_export_symbols) == 1)
26063 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26064
26065 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26066 if (want_pubnames ())
26067 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26068 }
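
/* Hypothetical C++ illustration of the two branches above:

     namespace util { int counter; }
     namespace u = util;   // namespace alias

   The definition of `util' gets a DW_TAG_namespace DIE, while the
   alias `u' has a DECL_ABSTRACT_ORIGIN and is emitted as a
   DW_TAG_imported_declaration whose DW_AT_import refers to the DIE
   of `util'.  */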
26069
26070 /* Generate Dwarf debug information for a decl described by DECL.
26071 The return value is currently only meaningful for PARM_DECLs;
26072 for all other decls it returns NULL.
26073
26074 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26075 It can be NULL otherwise. */
26076
26077 static dw_die_ref
26078 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26079 dw_die_ref context_die)
26080 {
26081 tree decl_or_origin = decl ? decl : origin;
26082 tree class_origin = NULL, ultimate_origin;
26083
26084 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26085 return NULL;
26086
26087 /* Ignore pointer bounds decls. */
26088 if (DECL_P (decl_or_origin)
26089 && TREE_TYPE (decl_or_origin)
26090 && POINTER_BOUNDS_P (decl_or_origin))
26091 return NULL;
26092
26093 switch (TREE_CODE (decl_or_origin))
26094 {
26095 case ERROR_MARK:
26096 break;
26097
26098 case CONST_DECL:
26099 if (!is_fortran () && !is_ada ())
26100 {
26101 /* The individual enumerators of an enum type get output when we output
26102 the Dwarf representation of the relevant enum type itself. */
26103 break;
26104 }
26105
26106 /* Emit its type. */
26107 gen_type_die (TREE_TYPE (decl), context_die);
26108
26109 /* And its containing namespace. */
26110 context_die = declare_in_namespace (decl, context_die);
26111
26112 gen_const_die (decl, context_die);
26113 break;
26114
26115 case FUNCTION_DECL:
26116 #if 0
26117 /* FIXME */
26118 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26119 on local redeclarations of global functions. That seems broken. */
26120 if (current_function_decl != decl)
26121 /* This is only a declaration. */;
26122 #endif
26123
26124 /* We should have abstract copies already and should not generate
26125 stray type DIEs in late LTO dumping. */
26126 if (! early_dwarf)
26127 ;
26128
26129 /* If we're emitting a clone, emit info for the abstract instance. */
26130 else if (origin || DECL_ORIGIN (decl) != decl)
26131 dwarf2out_abstract_function (origin
26132 ? DECL_ORIGIN (origin)
26133 : DECL_ABSTRACT_ORIGIN (decl));
26134
26135 /* If we're emitting a possibly inlined function, emit it as an
26136 abstract instance. */
26137 else if (cgraph_function_possibly_inlined_p (decl)
26138 && ! DECL_ABSTRACT_P (decl)
26139 && ! class_or_namespace_scope_p (context_die)
26140 /* dwarf2out_abstract_function won't emit a die if this is just
26141 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26142 that case, because that works only if we have a die. */
26143 && DECL_INITIAL (decl) != NULL_TREE)
26144 dwarf2out_abstract_function (decl);
26145
26146 /* Otherwise we're emitting the primary DIE for this decl. */
26147 else if (debug_info_level > DINFO_LEVEL_TERSE)
26148 {
26149 /* Before we describe the FUNCTION_DECL itself, make sure that we
26150 have its containing type. */
26151 if (!origin)
26152 origin = decl_class_context (decl);
26153 if (origin != NULL_TREE)
26154 gen_type_die (origin, context_die);
26155
26156 /* And its return type. */
26157 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26158
26159 /* And its virtual context. */
26160 if (DECL_VINDEX (decl) != NULL_TREE)
26161 gen_type_die (DECL_CONTEXT (decl), context_die);
26162
26163 /* Make sure we have a member DIE for decl. */
26164 if (origin != NULL_TREE)
26165 gen_type_die_for_member (origin, decl, context_die);
26166
26167 /* And its containing namespace. */
26168 context_die = declare_in_namespace (decl, context_die);
26169 }
26170
26171 /* Now output a DIE to represent the function itself. */
26172 if (decl)
26173 gen_subprogram_die (decl, context_die);
26174 break;
26175
26176 case TYPE_DECL:
26177 /* If we are in terse mode, don't generate any DIEs to represent any
26178 actual typedefs. */
26179 if (debug_info_level <= DINFO_LEVEL_TERSE)
26180 break;
26181
26182 /* In the special case of a TYPE_DECL node representing the declaration
26183 of some type tag, if the given TYPE_DECL is marked as having been
26184 instantiated from some other (original) TYPE_DECL node (e.g. one which
26185 was generated within the original definition of an inline function) we
26186 used to generate a special (abbreviated) DW_TAG_structure_type,
26187 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26188 should actually be referencing those DIEs, as variable DIEs with that
26189 type would already be emitted in the abstract origin, so it was always
26190 removed during unused type pruning. Don't add anything in this
26191 case. */
26192 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26193 break;
26194
26195 if (is_redundant_typedef (decl))
26196 gen_type_die (TREE_TYPE (decl), context_die);
26197 else
26198 /* Output a DIE to represent the typedef itself. */
26199 gen_typedef_die (decl, context_die);
26200 break;
26201
26202 case LABEL_DECL:
26203 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26204 gen_label_die (decl, context_die);
26205 break;
26206
26207 case VAR_DECL:
26208 case RESULT_DECL:
26209 /* If we are in terse mode, don't generate any DIEs to represent any
26210 variable declarations or definitions. */
26211 if (debug_info_level <= DINFO_LEVEL_TERSE)
26212 break;
26213
26214 /* Avoid generating stray type DIEs during late dwarf dumping.
26215 All types have been dumped early. */
26216 if (early_dwarf
26217 /* ??? But in LTRANS we cannot annotate early created variably
26218 modified type DIEs without copying them and adjusting all
26219 references to them. Dump them again as happens for inlining
26220 which copies both the decl and the types. */
26221 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26222 in VLA bound information for example. */
26223 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26224 current_function_decl)))
26225 {
26226 /* Output any DIEs that are needed to specify the type of this data
26227 object. */
26228 if (decl_by_reference_p (decl_or_origin))
26229 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26230 else
26231 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26232 }
26233
26234 if (early_dwarf)
26235 {
26236 /* And its containing type. */
26237 class_origin = decl_class_context (decl_or_origin);
26238 if (class_origin != NULL_TREE)
26239 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26240
26241 /* And its containing namespace. */
26242 context_die = declare_in_namespace (decl_or_origin, context_die);
26243 }
26244
26245 /* Now output the DIE to represent the data object itself. This gets
26246 complicated because of the possibility that the VAR_DECL really
26247 represents an inlined instance of a formal parameter for an inline
26248 function. */
26249 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26250 if (ultimate_origin != NULL_TREE
26251 && TREE_CODE (ultimate_origin) == PARM_DECL)
26252 gen_formal_parameter_die (decl, origin,
26253 true /* Emit name attribute. */,
26254 context_die);
26255 else
26256 gen_variable_die (decl, origin, context_die);
26257 break;
26258
26259 case FIELD_DECL:
26260 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26261 /* Ignore the nameless fields that are used to skip bits but handle C++
26262 anonymous unions and structs. */
26263 if (DECL_NAME (decl) != NULL_TREE
26264 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26265 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26266 {
26267 gen_type_die (member_declared_type (decl), context_die);
26268 gen_field_die (decl, ctx, context_die);
26269 }
26270 break;
26271
26272 case PARM_DECL:
26273 /* Avoid generating stray type DIEs during late dwarf dumping.
26274 All types have been dumped early. */
26275 if (early_dwarf
26276 /* ??? But in LTRANS we cannot annotate early created variably
26277 modified type DIEs without copying them and adjusting all
26278 references to them. Dump them again as happens for inlining
26279 which copies both the decl and the types. */
26280 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26281 in VLA bound information for example. */
26282 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26283 current_function_decl)))
26284 {
26285 if (DECL_BY_REFERENCE (decl_or_origin))
26286 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26287 else
26288 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26289 }
26290 return gen_formal_parameter_die (decl, origin,
26291 true /* Emit name attribute. */,
26292 context_die);
26293
26294 case NAMESPACE_DECL:
26295 if (dwarf_version >= 3 || !dwarf_strict)
26296 gen_namespace_die (decl, context_die);
26297 break;
26298
26299 case IMPORTED_DECL:
26300 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26301 DECL_CONTEXT (decl), context_die);
26302 break;
26303
26304 case NAMELIST_DECL:
26305 gen_namelist_decl (DECL_NAME (decl), context_die,
26306 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26307 break;
26308
26309 default:
26310 /* Probably some frontend-internal decl. Assume we don't care. */
26311 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26312 break;
26313 }
26314
26315 return NULL;
26316 }
26317 \f
26318 /* Output initial debug information for global DECL. Called at the
26319 end of the parsing process.
26320
26321 This is the initial debug generation process. As such, the DIEs
26322 generated may be incomplete. A later debug generation pass
26323 (dwarf2out_late_global_decl) will augment the information generated
26324 in this pass (e.g., with complete location info). */
26325
26326 static void
26327 dwarf2out_early_global_decl (tree decl)
26328 {
26329 set_early_dwarf s;
26330
26331 /* gen_decl_die() will set DECL_ABSTRACT because
26332 cgraph_function_possibly_inlined_p() returns true. This in
26333 turn will cause DW_AT_inline attributes to be set.
26334
26335 This happens because at early dwarf generation, there is no
26336 cgraph information, causing cgraph_function_possibly_inlined_p()
26337 to return true. Trick cgraph_function_possibly_inlined_p()
26338 while we generate dwarf early. */
26339 bool save = symtab->global_info_ready;
26340 symtab->global_info_ready = true;
26341
26342 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26343 other DECLs and they can point to template types or other things
26344 that dwarf2out can't handle when done via dwarf2out_decl. */
26345 if (TREE_CODE (decl) != TYPE_DECL
26346 && TREE_CODE (decl) != PARM_DECL)
26347 {
26348 if (TREE_CODE (decl) == FUNCTION_DECL)
26349 {
26350 tree save_fndecl = current_function_decl;
26351
26352 /* For nested functions, make sure we have DIEs for the parents first
26353 so that all nested DIEs are generated at the proper scope in the
26354 first shot. */
26355 tree context = decl_function_context (decl);
26356 if (context != NULL)
26357 {
26358 dw_die_ref context_die = lookup_decl_die (context);
26359 current_function_decl = context;
26360
26361 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26362 enough so that it lands in its own context. This avoids type
26363 pruning issues later on. */
26364 if (context_die == NULL || is_declaration_die (context_die))
26365 dwarf2out_decl (context);
26366 }
26367
26368 /* Emit an abstract origin of a function first. This happens
26369 with C++ constructor clones, for example, and makes
26370 dwarf2out_abstract_function happy, which requires the early
26371 DIE of the abstract instance to be present. */
26372 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26373 dw_die_ref origin_die;
26374 if (origin != NULL
26375 /* Do not emit the DIE multiple times but make sure to
26376 process it fully here in case we just saw a declaration. */
26377 && ((origin_die = lookup_decl_die (origin)) == NULL
26378 || is_declaration_die (origin_die)))
26379 {
26380 current_function_decl = origin;
26381 dwarf2out_decl (origin);
26382 }
26383
26384 /* Emit the DIE for decl but avoid doing that multiple times. */
26385 dw_die_ref old_die;
26386 if ((old_die = lookup_decl_die (decl)) == NULL
26387 || is_declaration_die (old_die))
26388 {
26389 current_function_decl = decl;
26390 dwarf2out_decl (decl);
26391 }
26392
26393 current_function_decl = save_fndecl;
26394 }
26395 else
26396 dwarf2out_decl (decl);
26397 }
26398 symtab->global_info_ready = save;
26399 }
26400
26401 /* Return whether EXPR is an expression with the following pattern:
26402 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26403
26404 static bool
26405 is_trivial_indirect_ref (tree expr)
26406 {
26407 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26408 return false;
26409
26410 tree nop = TREE_OPERAND (expr, 0);
26411 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26412 return false;
26413
26414 tree int_cst = TREE_OPERAND (nop, 0);
26415 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26416 }
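
/* Assumed illustration of the pattern checked above: a DECL_VALUE_EXPR
   of the shape

     *(int *) 0x1234

   qualifies (an INDIRECT_REF around a NOP_EXPR converting the
   INTEGER_CST 0x1234), whereas something like `*&some_global' does
   not, because the operand of its INDIRECT_REF is an ADDR_EXPR rather
   than a conversion of a constant.  */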
26417
26418 /* Output debug information for global decl DECL. Called from
26419 toplev.c after compilation proper has finished. */
26420
26421 static void
26422 dwarf2out_late_global_decl (tree decl)
26423 {
26424 /* Fill in any location information we were unable to determine
26425 on the first pass. */
26426 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
26427 {
26428 dw_die_ref die = lookup_decl_die (decl);
26429
26430 /* We may have to generate early debug late for LTO in case debug
26431 was not enabled at compile-time or the target doesn't support
26432 the LTO early debug scheme. */
26433 if (! die && in_lto_p)
26434 {
26435 dwarf2out_decl (decl);
26436 die = lookup_decl_die (decl);
26437 }
26438
26439 if (die)
26440 {
26441 /* We get called via the symtab code invoking late_global_decl
26442 for symbols that are optimized out.
26443
26444 Do not add locations for those, except if they have a
26445 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26446 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26447 INDIRECT_REF expression, as this could generate relocations to
26448 text symbols in LTO object files, which is invalid. */
26449 varpool_node *node = varpool_node::get (decl);
26450 if ((! node || ! node->definition)
26451 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26452 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26453 tree_add_const_value_attribute_for_decl (die, decl);
26454 else
26455 add_location_or_const_value_attribute (die, decl, false);
26456 }
26457 }
26458 }
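
/* Assumed scenario for the optimized-out case above (the variable name
   is hypothetical): for

     static const int answer = 42;   // folded away, never emitted

   the varpool node has no definition, so instead of a location that
   would reference a discarded symbol we attach DW_AT_const_value (42)
   via tree_add_const_value_attribute_for_decl.  */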
26459
26460 /* Output debug information for type decl DECL. Called from toplev.c
26461 and from language front ends (to record built-in types). */
26462 static void
26463 dwarf2out_type_decl (tree decl, int local)
26464 {
26465 if (!local)
26466 {
26467 set_early_dwarf s;
26468 dwarf2out_decl (decl);
26469 }
26470 }
26471
26472 /* Output debug information for imported module or decl DECL.
26473 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26474 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26475 that DECL belongs to.
26476 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26477 static void
26478 dwarf2out_imported_module_or_decl_1 (tree decl,
26479 tree name,
26480 tree lexical_block,
26481 dw_die_ref lexical_block_die)
26482 {
26483 expanded_location xloc;
26484 dw_die_ref imported_die = NULL;
26485 dw_die_ref at_import_die;
26486
26487 if (TREE_CODE (decl) == IMPORTED_DECL)
26488 {
26489 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26490 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26491 gcc_assert (decl);
26492 }
26493 else
26494 xloc = expand_location (input_location);
26495
26496 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26497 {
26498 at_import_die = force_type_die (TREE_TYPE (decl));
26499 /* For namespace N { typedef void T; } using N::T; base_type_die
26500 returns NULL, but DW_TAG_imported_declaration requires
26501 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26502 if (!at_import_die)
26503 {
26504 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26505 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26506 at_import_die = lookup_type_die (TREE_TYPE (decl));
26507 gcc_assert (at_import_die);
26508 }
26509 }
26510 else
26511 {
26512 at_import_die = lookup_decl_die (decl);
26513 if (!at_import_die)
26514 {
26515 /* If we're trying to avoid duplicate debug info, we may not have
26516 emitted the member decl for this field. Emit it now. */
26517 if (TREE_CODE (decl) == FIELD_DECL)
26518 {
26519 tree type = DECL_CONTEXT (decl);
26520
26521 if (TYPE_CONTEXT (type)
26522 && TYPE_P (TYPE_CONTEXT (type))
26523 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26524 DINFO_USAGE_DIR_USE))
26525 return;
26526 gen_type_die_for_member (type, decl,
26527 get_context_die (TYPE_CONTEXT (type)));
26528 }
26529 if (TREE_CODE (decl) == NAMELIST_DECL)
26530 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26531 get_context_die (DECL_CONTEXT (decl)),
26532 NULL_TREE);
26533 else
26534 at_import_die = force_decl_die (decl);
26535 }
26536 }
26537
26538 if (TREE_CODE (decl) == NAMESPACE_DECL)
26539 {
26540 if (dwarf_version >= 3 || !dwarf_strict)
26541 imported_die = new_die (DW_TAG_imported_module,
26542 lexical_block_die,
26543 lexical_block);
26544 else
26545 return;
26546 }
26547 else
26548 imported_die = new_die (DW_TAG_imported_declaration,
26549 lexical_block_die,
26550 lexical_block);
26551
26552 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26553 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26554 if (debug_column_info && xloc.column)
26555 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26556 if (name)
26557 add_AT_string (imported_die, DW_AT_name,
26558 IDENTIFIER_POINTER (name));
26559 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26560 }
26561
26562 /* Output debug information for imported module or decl DECL.
26563 NAME is the non-NULL name in the context if the decl has been renamed.
26564 CHILD is true if decl is one of the decls renamed as part of
26565 importing a whole module.
26566 IMPLICIT is set if this hook is called for an implicit import
26567 such as an inline namespace. */
26568
26569 static void
26570 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26571 bool child, bool implicit)
26572 {
26573 /* dw_die_ref at_import_die; */
26574 dw_die_ref scope_die;
26575
26576 if (debug_info_level <= DINFO_LEVEL_TERSE)
26577 return;
26578
26579 gcc_assert (decl);
26580
26581 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26582 should be enough; for DWARF4 and older, even if we emit
26583 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26584 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26585 if (implicit
26586 && dwarf_version >= 5
26587 && lang_hooks.decls.decl_dwarf_attribute (decl,
26588 DW_AT_export_symbols) == 1)
26589 return;
26590
26591 set_early_dwarf s;
26592
26593 /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs.
26594 We need the decl DIE for the reference and the scope DIE. First, get
26595 the DIE for the decl itself. */
26596
26597 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26598 module or decl. If no DIE is found for non-globals, force a new one. */
26599 if (context
26600 && TYPE_P (context)
26601 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26602 return;
26603
26604 scope_die = get_context_die (context);
26605
26606 if (child)
26607 {
26608 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26609 there is nothing we can do here. */
26610 if (dwarf_version < 3 && dwarf_strict)
26611 return;
26612
26613 gcc_assert (scope_die->die_child);
26614 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26615 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26616 scope_die = scope_die->die_child;
26617 }
26618
26619 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26620 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26621 }
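
/* Hypothetical C++ input reaching this hook:

     namespace util { int counter; }
     using namespace util;    // -> DW_TAG_imported_module
     using util::counter;     // -> DW_TAG_imported_declaration

   Both forms end up with a DW_AT_import attribute pointing at the DIE
   of the imported entity, emitted by
   dwarf2out_imported_module_or_decl_1 above.  */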
26622
26623 /* Output debug information for namelists. */
26624
26625 static dw_die_ref
26626 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26627 {
26628 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26629 tree value;
26630 unsigned i;
26631
26632 if (debug_info_level <= DINFO_LEVEL_TERSE)
26633 return NULL;
26634
26635 gcc_assert (scope_die != NULL);
26636 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26637 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26638
26639 /* If there are no item_decls, we have a nondefining namelist, e.g.
26640 with USE association; hence, set DW_AT_declaration. */
26641 if (item_decls == NULL_TREE)
26642 {
26643 add_AT_flag (nml_die, DW_AT_declaration, 1);
26644 return nml_die;
26645 }
26646
26647 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26648 {
26649 nml_item_ref_die = lookup_decl_die (value);
26650 if (!nml_item_ref_die)
26651 nml_item_ref_die = force_decl_die (value);
26652
26653 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26654 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26655 }
26656 return nml_die;
26657 }
26658
26659
26660 /* Write the debugging output for DECL. */
26661
26662 static void
26663 dwarf2out_decl (tree decl)
26664 {
26665 dw_die_ref context_die = comp_unit_die ();
26666
26667 switch (TREE_CODE (decl))
26668 {
26669 case ERROR_MARK:
26670 return;
26671
26672 case FUNCTION_DECL:
26673 /* If we're a nested function, initially use a parent of NULL; if we're
26674 a plain function, this will be fixed up in decls_for_scope. If
26675 we're a method, it will be ignored, since we already have a DIE. */
26676 if (decl_function_context (decl)
26677 /* But if we're in terse mode, we don't care about scope. */
26678 && debug_info_level > DINFO_LEVEL_TERSE)
26679 context_die = NULL;
26680 break;
26681
26682 case VAR_DECL:
26683 /* For local statics, look up the proper context DIE. */
26684 if (local_function_static (decl))
26685 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26686
26687 /* If we are in terse mode, don't generate any DIEs to represent any
26688 variable declarations or definitions. */
26689 if (debug_info_level <= DINFO_LEVEL_TERSE)
26690 return;
26691 break;
26692
26693 case CONST_DECL:
26694 if (debug_info_level <= DINFO_LEVEL_TERSE)
26695 return;
26696 if (!is_fortran () && !is_ada ())
26697 return;
26698 if (TREE_STATIC (decl) && decl_function_context (decl))
26699 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26700 break;
26701
26702 case NAMESPACE_DECL:
26703 case IMPORTED_DECL:
26704 if (debug_info_level <= DINFO_LEVEL_TERSE)
26705 return;
26706 if (lookup_decl_die (decl) != NULL)
26707 return;
26708 break;
26709
26710 case TYPE_DECL:
26711 /* Don't emit stubs for types unless they are needed by other DIEs. */
26712 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26713 return;
26714
26715 /* Don't bother trying to generate any DIEs to represent any of the
26716 normal built-in types for the language we are compiling. */
26717 if (DECL_IS_BUILTIN (decl))
26718 return;
26719
26720 /* If we are in terse mode, don't generate any DIEs for types. */
26721 if (debug_info_level <= DINFO_LEVEL_TERSE)
26722 return;
26723
26724 /* If we're a function-scope tag, initially use a parent of NULL;
26725 this will be fixed up in decls_for_scope. */
26726 if (decl_function_context (decl))
26727 context_die = NULL;
26728
26729 break;
26730
26731 case NAMELIST_DECL:
26732 break;
26733
26734 default:
26735 return;
26736 }
26737
26738 gen_decl_die (decl, NULL, NULL, context_die);
26739
26740 if (flag_checking)
26741 {
26742 dw_die_ref die = lookup_decl_die (decl);
26743 if (die)
26744 check_die (die);
26745 }
26746 }
26747
26748 /* Write the debugging output for DECL. */
26749
26750 static void
26751 dwarf2out_function_decl (tree decl)
26752 {
26753 dwarf2out_decl (decl);
26754 call_arg_locations = NULL;
26755 call_arg_loc_last = NULL;
26756 call_site_count = -1;
26757 tail_call_site_count = -1;
26758 decl_loc_table->empty ();
26759 cached_dw_loc_list_table->empty ();
26760 }
26761
26762 /* Output a marker (i.e. a label) for the beginning of the generated code for
26763 a lexical block. */
26764
26765 static void
26766 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26767 unsigned int blocknum)
26768 {
26769 switch_to_section (current_function_section ());
26770 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26771 }
26772
26773 /* Output a marker (i.e. a label) for the end of the generated code for a
26774 lexical block. */
26775
26776 static void
26777 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26778 {
26779 switch_to_section (current_function_section ());
26780 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26781 }
26782
26783 /* Returns nonzero if it is appropriate not to emit any debugging
26784 information for BLOCK, because it doesn't contain any instructions.
26785
26786 Don't allow this for blocks with nested functions or local classes
26787 as we would end up with orphans, and in the presence of scheduling
26788 we may end up calling them anyway. */
26789
26790 static bool
26791 dwarf2out_ignore_block (const_tree block)
26792 {
26793 tree decl;
26794 unsigned int i;
26795
26796 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26797 if (TREE_CODE (decl) == FUNCTION_DECL
26798 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26799 return 0;
26800 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26801 {
26802 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26803 if (TREE_CODE (decl) == FUNCTION_DECL
26804 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26805 return 0;
26806 }
26807
26808 return 1;
26809 }
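
/* Illustrative sketch (assumed source): a block such as

     {
       struct local { int i; };   // local class, TYPE_DECL stub
     }

   produces no instructions of its own, yet dwarf2out_ignore_block
   still returns 0 for it, since the DIEs for the local class would
   otherwise be left as orphans.  */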
26810
26811 /* Hash table routines for file_hash. */
26812
26813 bool
26814 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26815 {
26816 return filename_cmp (p1->filename, p2) == 0;
26817 }
26818
26819 hashval_t
26820 dwarf_file_hasher::hash (dwarf_file_data *p)
26821 {
26822 return htab_hash_string (p->filename);
26823 }
26824
26825 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26826 dwarf2out.c) and return its "index". The index of each (known) filename is
26827 just a unique number which is associated with only that one filename. We
26828 need such numbers for the sake of generating labels (in the .debug_sfnames
26829 section) and references to those file numbers (in the .debug_srcinfo
26830 and .debug_macinfo sections). If the filename given as an argument is not
26831 found in our current list, add it to the list and assign it the next
26832 available unique index number. */
26833
26834 static struct dwarf_file_data *
26835 lookup_filename (const char *file_name)
26836 {
26837 struct dwarf_file_data * created;
26838
26839 if (!file_name)
26840 return NULL;
26841
26842 dwarf_file_data **slot
26843 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26844 INSERT);
26845 if (*slot)
26846 return *slot;
26847
26848 created = ggc_alloc<dwarf_file_data> ();
26849 created->filename = file_name;
26850 created->emitted_number = 0;
26851 *slot = created;
26852 return created;
26853 }
26854
26855 /* If the assembler will construct the file table, then translate the compiler
26856 internal file table number into the assembler file table number, and emit
26857 a .file directive if we haven't already emitted one yet. The file table
26858 numbers are different because we prune debug info for unused variables and
26859 types, which may include filenames. */
26860
26861 static int
26862 maybe_emit_file (struct dwarf_file_data * fd)
26863 {
26864 if (! fd->emitted_number)
26865 {
26866 if (last_emitted_file)
26867 fd->emitted_number = last_emitted_file->emitted_number + 1;
26868 else
26869 fd->emitted_number = 1;
26870 last_emitted_file = fd;
26871
26872 if (output_asm_line_debug_info ())
26873 {
26874 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26875 output_quoted_string (asm_out_file,
26876 remap_debug_filename (fd->filename));
26877 fputc ('\n', asm_out_file);
26878 }
26879 }
26880
26881 return fd->emitted_number;
26882 }
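
/* Assumed example of the resulting output (the file name and number
   are hypothetical): the first file passed to maybe_emit_file when the
   assembler builds the line table yields a directive like

     .file 1 "foo.c"

   and later references use that emitted number, which may differ from
   the internal file_table order because pruned filenames never get
   one.  */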
26883
26884 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26885 That generation should happen after function debug info has been
26886 generated. The value of the attribute is the constant value of ARG. */
26887
26888 static void
26889 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26890 {
26891 die_arg_entry entry;
26892
26893 if (!die || !arg)
26894 return;
26895
26896 gcc_assert (early_dwarf);
26897
26898 if (!tmpl_value_parm_die_table)
26899 vec_alloc (tmpl_value_parm_die_table, 32);
26900
26901 entry.die = die;
26902 entry.arg = arg;
26903 vec_safe_push (tmpl_value_parm_die_table, entry);
26904 }
26905
26906 /* Return TRUE if T is an instance of a generic type, FALSE
26907 otherwise. */
26908
26909 static bool
26910 generic_type_p (tree t)
26911 {
26912 if (t == NULL_TREE || !TYPE_P (t))
26913 return false;
26914 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26915 }
26916
26917 /* Schedule the generation of the generic parameter DIEs for the
26918 instance of generic type T. The actual generation is done later
26919 by gen_scheduled_generic_parms_dies. */
26920
26921 static void
26922 schedule_generic_params_dies_gen (tree t)
26923 {
26924 if (!generic_type_p (t))
26925 return;
26926
26927 gcc_assert (early_dwarf);
26928
26929 if (!generic_type_instances)
26930 vec_alloc (generic_type_instances, 256);
26931
26932 vec_safe_push (generic_type_instances, t);
26933 }
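
/* Hypothetical C++ instance illustrating what gets scheduled here:

     template <typename T, int N> struct buf { T data[N]; };
     buf<char, 16> b;

   The instantiation buf<char, 16> is a generic type, so it is queued
   and gen_scheduled_generic_parms_dies later adds
   DW_TAG_template_type_parameter (for T = char) and
   DW_TAG_template_value_parameter (for N = 16) children to its DIE.  */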
26934
26935 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26936 by append_entry_to_tmpl_value_parm_die_table. This function must
26937 be called after function DIEs have been generated. */
26938
26939 static void
26940 gen_remaining_tmpl_value_param_die_attribute (void)
26941 {
26942 if (tmpl_value_parm_die_table)
26943 {
26944 unsigned i, j;
26945 die_arg_entry *e;
26946
26947 /* We do this in two phases - first get the cases we can
26948 handle during early-finish, preserving those we cannot
26949 (containing symbolic constants where we don't yet know
26950 whether we are going to output the referenced symbols).
26951 For those we try again at late-finish. */
26952 j = 0;
26953 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26954 {
26955 if (!e->die->removed
26956 && !tree_add_const_value_attribute (e->die, e->arg))
26957 {
26958 dw_loc_descr_ref loc = NULL;
26959 if (! early_dwarf
26960 && (dwarf_version >= 5 || !dwarf_strict))
26961 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26962 if (loc)
26963 add_AT_loc (e->die, DW_AT_location, loc);
26964 else
26965 (*tmpl_value_parm_die_table)[j++] = *e;
26966 }
26967 }
26968 tmpl_value_parm_die_table->truncate (j);
26969 }
26970 }
26971
26972 /* Generate generic parameter DIEs for instances of generic types
26973 that have been previously scheduled by
26974 schedule_generic_params_dies_gen. This function must be called
26975 after all the types of the CU have been laid out. */
26976
26977 static void
26978 gen_scheduled_generic_parms_dies (void)
26979 {
26980 unsigned i;
26981 tree t;
26982
26983 if (!generic_type_instances)
26984 return;
26985
26986 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26987 if (COMPLETE_TYPE_P (t))
26988 gen_generic_params_dies (t);
26989
26990 generic_type_instances = NULL;
26991 }
26992
26993
26994 /* Replace DW_AT_name for the decl with name. */
26995
26996 static void
26997 dwarf2out_set_name (tree decl, tree name)
26998 {
26999 dw_die_ref die;
27000 dw_attr_node *attr;
27001 const char *dname;
27002
27003 die = TYPE_SYMTAB_DIE (decl);
27004 if (!die)
27005 return;
27006
27007 dname = dwarf2_name (name, 0);
27008 if (!dname)
27009 return;
27010
27011 attr = get_AT (die, DW_AT_name);
27012 if (attr)
27013 {
27014 struct indirect_string_node *node;
27015
27016 node = find_AT_string (dname);
27017 /* replace the string. */
27018 attr->dw_attr_val.v.val_str = node;
27019 }
27020
27021 else
27022 add_name_attribute (die, dname);
27023 }
27024
27025 /* True if before or during processing of the first function being emitted. */
27026 static bool in_first_function_p = true;
27027 /* True if the loc_note during a dwarf2out_var_location call might still be
27028 before the first real instruction, at an address equal to .Ltext0. */
27029 static bool maybe_at_text_label_p = true;
27030 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27031 static unsigned int first_loclabel_num_not_at_text_label;
27032
27033 /* Look ahead for a real insn, or for a begin stmt marker. */
27034
27035 static rtx_insn *
27036 dwarf2out_next_real_insn (rtx_insn *loc_note)
27037 {
27038 rtx_insn *next_real = NEXT_INSN (loc_note);
27039
27040 while (next_real)
27041 if (INSN_P (next_real))
27042 break;
27043 else
27044 next_real = NEXT_INSN (next_real);
27045
27046 return next_real;
27047 }
27048
27049 /* Called by the final INSN scan whenever we see a var location. We
27050 use it to drop labels in the right places, and throw the location in
27051 our lookup table. */
27052
27053 static void
27054 dwarf2out_var_location (rtx_insn *loc_note)
27055 {
27056 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27057 struct var_loc_node *newloc;
27058 rtx_insn *next_real, *next_note;
27059 rtx_insn *call_insn = NULL;
27060 static const char *last_label;
27061 static const char *last_postcall_label;
27062 static bool last_in_cold_section_p;
27063 static rtx_insn *expected_next_loc_note;
27064 tree decl;
27065 bool var_loc_p;
27066 var_loc_view view = 0;
27067
27068 if (!NOTE_P (loc_note))
27069 {
27070 if (CALL_P (loc_note))
27071 {
27072 maybe_reset_location_view (loc_note, cur_line_info_table);
27073 call_site_count++;
27074 if (SIBLING_CALL_P (loc_note))
27075 tail_call_site_count++;
27076 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27077 {
27078 call_insn = loc_note;
27079 loc_note = NULL;
27080 var_loc_p = false;
27081
27082 next_real = dwarf2out_next_real_insn (call_insn);
27083 next_note = NULL;
27084 cached_next_real_insn = NULL;
27085 goto create_label;
27086 }
27087 if (optimize == 0 && !flag_var_tracking)
27088 {
27089 /* When the var-tracking pass is not running, there is no note
27090 for indirect calls whose target is compile-time known. In this
27091 case, process such calls specifically so that we generate call
27092 sites for them anyway. */
27093 rtx x = PATTERN (loc_note);
27094 if (GET_CODE (x) == PARALLEL)
27095 x = XVECEXP (x, 0, 0);
27096 if (GET_CODE (x) == SET)
27097 x = SET_SRC (x);
27098 if (GET_CODE (x) == CALL)
27099 x = XEXP (x, 0);
27100 if (!MEM_P (x)
27101 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27102 || !SYMBOL_REF_DECL (XEXP (x, 0))
27103 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27104 != FUNCTION_DECL))
27105 {
27106 call_insn = loc_note;
27107 loc_note = NULL;
27108 var_loc_p = false;
27109
27110 next_real = dwarf2out_next_real_insn (call_insn);
27111 next_note = NULL;
27112 cached_next_real_insn = NULL;
27113 goto create_label;
27114 }
27115 }
27116 }
27117 else if (!debug_variable_location_views)
27118 gcc_unreachable ();
27119 else
27120 maybe_reset_location_view (loc_note, cur_line_info_table);
27121
27122 return;
27123 }
27124
27125 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27126 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27127 return;
27128
27129 /* Optimize processing a large consecutive sequence of location
27130 notes so we don't spend too much time in next_real_insn. If the
27131 next insn is another location note, remember the next_real_insn
27132 calculation for next time. */
27133 next_real = cached_next_real_insn;
27134 if (next_real)
27135 {
27136 if (expected_next_loc_note != loc_note)
27137 next_real = NULL;
27138 }
27139
27140 next_note = NEXT_INSN (loc_note);
27141 if (! next_note
27142 || next_note->deleted ()
27143 || ! NOTE_P (next_note)
27144 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27145 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27146 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27147 next_note = NULL;
27148
27149 if (! next_real)
27150 next_real = dwarf2out_next_real_insn (loc_note);
27151
27152 if (next_note)
27153 {
27154 expected_next_loc_note = next_note;
27155 cached_next_real_insn = next_real;
27156 }
27157 else
27158 cached_next_real_insn = NULL;
27159
27160 /* If there are no instructions which would be affected by this note,
27161 don't do anything. */
27162 if (var_loc_p
27163 && next_real == NULL_RTX
27164 && !NOTE_DURING_CALL_P (loc_note))
27165 return;
27166
27167 create_label:
27168
27169 if (next_real == NULL_RTX)
27170 next_real = get_last_insn ();
27171
27172 /* If there were any real insns between the note we processed last time
27173 and this note (or if it is the first note), clear
27174 last_{,postcall_}label so that they are not reused this time. */
27175 if (last_var_location_insn == NULL_RTX
27176 || last_var_location_insn != next_real
27177 || last_in_cold_section_p != in_cold_section_p)
27178 {
27179 last_label = NULL;
27180 last_postcall_label = NULL;
27181 }
27182
27183 if (var_loc_p)
27184 {
27185 const char *label
27186 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27187 view = cur_line_info_table->view;
27188 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27189 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27190 if (newloc == NULL)
27191 return;
27192 }
27193 else
27194 {
27195 decl = NULL_TREE;
27196 newloc = NULL;
27197 }
27198
27199 /* If there were no real insns between the note we processed last time
27200 and this note, use the label we emitted last time. Otherwise
27201 create a new label and emit it. */
27202 if (last_label == NULL)
27203 {
27204 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27205 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27206 loclabel_num++;
27207 last_label = ggc_strdup (loclabel);
27208 /* See if loclabel might be equal to .Ltext0. If yes,
27209 bump first_loclabel_num_not_at_text_label. */
27210 if (!have_multiple_function_sections
27211 && in_first_function_p
27212 && maybe_at_text_label_p)
27213 {
27214 static rtx_insn *last_start;
27215 rtx_insn *insn;
27216 for (insn = loc_note; insn; insn = previous_insn (insn))
27217 if (insn == last_start)
27218 break;
27219 else if (!NONDEBUG_INSN_P (insn))
27220 continue;
27221 else
27222 {
27223 rtx body = PATTERN (insn);
27224 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27225 continue;
27226 /* Inline asm could occupy zero bytes. */
27227 else if (GET_CODE (body) == ASM_INPUT
27228 || asm_noperands (body) >= 0)
27229 continue;
27230 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27231 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27232 continue;
27233 #endif
27234 else
27235 {
27236 /* Assume insn has non-zero length. */
27237 maybe_at_text_label_p = false;
27238 break;
27239 }
27240 }
27241 if (maybe_at_text_label_p)
27242 {
27243 last_start = loc_note;
27244 first_loclabel_num_not_at_text_label = loclabel_num;
27245 }
27246 }
27247 }
27248
27249 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27250 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27251
27252 if (!var_loc_p)
27253 {
27254 struct call_arg_loc_node *ca_loc
27255 = ggc_cleared_alloc<call_arg_loc_node> ();
27256 rtx_insn *prev = call_insn;
27257
27258 ca_loc->call_arg_loc_note
27259 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27260 ca_loc->next = NULL;
27261 ca_loc->label = last_label;
27262 gcc_assert (prev
27263 && (CALL_P (prev)
27264 || (NONJUMP_INSN_P (prev)
27265 && GET_CODE (PATTERN (prev)) == SEQUENCE
27266 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27267 if (!CALL_P (prev))
27268 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27269 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27270
27271 /* Look for a SYMBOL_REF in the "prev" instruction. */
27272 rtx x = get_call_rtx_from (PATTERN (prev));
27273 if (x)
27274 {
27275 /* Try to get the call symbol, if any. */
27276 if (MEM_P (XEXP (x, 0)))
27277 x = XEXP (x, 0);
27278 /* First, look for a memory access to a symbol_ref. */
27279 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27280 && SYMBOL_REF_DECL (XEXP (x, 0))
27281 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27282 ca_loc->symbol_ref = XEXP (x, 0);
27283 /* Otherwise, look at a compile-time known user-level function
27284 declaration. */
27285 else if (MEM_P (x)
27286 && MEM_EXPR (x)
27287 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27288 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27289 }
27290
27291 ca_loc->block = insn_scope (prev);
27292 if (call_arg_locations)
27293 call_arg_loc_last->next = ca_loc;
27294 else
27295 call_arg_locations = ca_loc;
27296 call_arg_loc_last = ca_loc;
27297 }
27298 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27299 {
27300 newloc->label = last_label;
27301 newloc->view = view;
27302 }
27303 else
27304 {
27305 if (!last_postcall_label)
27306 {
27307 sprintf (loclabel, "%s-1", last_label);
27308 last_postcall_label = ggc_strdup (loclabel);
27309 }
27310 newloc->label = last_postcall_label;
27311 /* ??? This view is at last_label, not last_label-1, but we
27312 could only assume view at last_label-1 is zero if we could
27313 assume calls always have length greater than one. This is
27314 probably true in general, though there might be a rare
27315 exception to this rule, e.g. if a call insn is optimized out
27316 by target magic. Then, even the -1 in the label will be
27317 wrong, which might invalidate the range. Anyway, using view,
27318 though technically possibly incorrect, will work as far as
27319 ranges go: since L-1 is in the middle of the call insn,
27320 (L-1).0 and (L-1).V shouldn't make any difference, and having
27321 the loclist entry refer to the .loc entry might be useful, so
27322 leave it like this. */
27323 newloc->view = view;
27324 }
27325
27326 if (var_loc_p && flag_debug_asm)
27327 {
27328 const char *name, *sep, *patstr;
27329 if (decl && DECL_NAME (decl))
27330 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27331 else
27332 name = "";
27333 if (NOTE_VAR_LOCATION_LOC (loc_note))
27334 {
27335 sep = " => ";
27336 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27337 }
27338 else
27339 {
27340 sep = " ";
27341 patstr = "RESET";
27342 }
27343 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27344 name, sep, patstr);
27345 }
27346
27347 last_var_location_insn = next_real;
27348 last_in_cold_section_p = in_cold_section_p;
27349 }
27350
27351 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27352 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27353 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27354 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27355 BLOCK_FRAGMENT_ORIGIN links. */
27356 static bool
27357 block_within_block_p (tree block, tree outer, bool bothways)
27358 {
27359 if (block == outer)
27360 return true;
27361
27362 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27363 for (tree context = BLOCK_SUPERCONTEXT (block);
27364 context != outer;
27365 context = BLOCK_SUPERCONTEXT (context))
27366 if (!context || TREE_CODE (context) != BLOCK)
27367 return false;
27368
27369 if (!bothways)
27370 return true;
27371
27372 /* Now check that each block is actually referenced by its
27373 parent. */
27374 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27375 context = BLOCK_SUPERCONTEXT (context))
27376 {
27377 if (BLOCK_FRAGMENT_ORIGIN (context))
27378 {
27379 gcc_assert (!BLOCK_SUBBLOCKS (context));
27380 context = BLOCK_FRAGMENT_ORIGIN (context);
27381 }
27382 for (tree sub = BLOCK_SUBBLOCKS (context);
27383 sub != block;
27384 sub = BLOCK_CHAIN (sub))
27385 if (!sub)
27386 return false;
27387 if (context == outer)
27388 return true;
27389 else
27390 block = context;
27391 }
27392 }
27393
27394 /* Called during final while assembling the marker of the entry point
27395 for an inlined function. */
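The inline_entry_data recorded below and the label emitted at the end of this function are, roughly speaking, what later allows attaching DW_AT_entry_pc (and, with location views, entry-view) information to the corresponding DW_TAG_inlined_subroutine DIE.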
27396
27397 static void
27398 dwarf2out_inline_entry (tree block)
27399 {
27400 gcc_assert (debug_inline_points);
27401
27402 /* If we can't represent it, don't bother. */
27403 if (!(dwarf_version >= 3 || !dwarf_strict))
27404 return;
27405
27406 gcc_assert (DECL_P (block_ultimate_origin (block)));
27407
27408 /* Sanity check the block tree. This would catch a case in which
27409 BLOCK got removed from the tree reachable from the outermost
27410 lexical block, but got retained in markers. It would still link
27411 back to its parents, but some ancestor would be missing a link
27412 down the path to the sub BLOCK. If the block got removed, its
27413 BLOCK_NUMBER will not be a usable value. */
27414 if (flag_checking)
27415 gcc_assert (block_within_block_p (block,
27416 DECL_INITIAL (current_function_decl),
27417 true));
27418
27419 gcc_assert (inlined_function_outer_scope_p (block));
27420 gcc_assert (!BLOCK_DIE (block));
27421
27422 if (BLOCK_FRAGMENT_ORIGIN (block))
27423 block = BLOCK_FRAGMENT_ORIGIN (block);
27424 /* Can the entry point ever not be at the beginning of an
27425 unfragmented lexical block? */
27426 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27427 || (cur_line_info_table
27428 && !ZERO_VIEW_P (cur_line_info_table->view))))
27429 return;
27430
27431 if (!inline_entry_data_table)
27432 inline_entry_data_table
27433 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27434
27435
27436 inline_entry_data **iedp
27437 = inline_entry_data_table->find_slot_with_hash (block,
27438 htab_hash_pointer (block),
27439 INSERT);
27440 if (*iedp)
27441 /* ??? Ideally, we'd record all entry points for the same inlined
27442 function (some may have been duplicated by e.g. unrolling), but
27443 we have no way to represent that ATM. */
27444 return;
27445
27446 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27447 ied->block = block;
27448 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27449 ied->label_num = BLOCK_NUMBER (block);
27450 if (cur_line_info_table)
27451 ied->view = cur_line_info_table->view;
27452
27453 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27454
27455 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27456 BLOCK_NUMBER (block));
27457 ASM_OUTPUT_LABEL (asm_out_file, label);
27458 }
27459
27460 /* Called from finalize_size_functions for size functions so that their body
27461 can be encoded in the debug info to describe the layout of variable-length
27462 structures. */
27463
27464 static void
27465 dwarf2out_size_function (tree decl)
27466 {
27467 function_to_dwarf_procedure (decl);
27468 }
27469
27470 /* Note in one location list that the text section has changed. */
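/* When the function is split across text sections (e.g. hot/cold
partitioning), an open location range cannot simply span the section
switch; recording last_before_switch marks the node at which such a
range has to be terminated and restarted. */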
27471
27472 int
27473 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27474 {
27475 var_loc_list *list = *slot;
27476 if (list->first)
27477 list->last_before_switch
27478 = list->last->next ? list->last->next : list->last;
27479 return 1;
27480 }
27481
27482 /* Note in all location lists that the text section has changed. */
27483
27484 static void
27485 var_location_switch_text_section (void)
27486 {
27487 if (decl_loc_table == NULL)
27488 return;
27489
27490 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27491 }
27492
27493 /* Create a new line number table. */
27494
27495 static dw_line_info_table *
27496 new_line_info_table (void)
27497 {
27498 dw_line_info_table *table;
27499
27500 table = ggc_cleared_alloc<dw_line_info_table> ();
27501 table->file_num = 1;
27502 table->line_num = 1;
27503 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27504 FORCE_RESET_NEXT_VIEW (table->view);
27505 table->symviews_since_reset = 0;
27506
27507 return table;
27508 }
27509
27510 /* Look up the "current" table into which we emit line info, so
27511 that we don't have to do it for every source line. */
27512
27513 static void
27514 set_cur_line_info_table (section *sec)
27515 {
27516 dw_line_info_table *table;
27517
27518 if (sec == text_section)
27519 table = text_section_line_info;
27520 else if (sec == cold_text_section)
27521 {
27522 table = cold_text_section_line_info;
27523 if (!table)
27524 {
27525 cold_text_section_line_info = table = new_line_info_table ();
27526 table->end_label = cold_end_label;
27527 }
27528 }
27529 else
27530 {
27531 const char *end_label;
27532
27533 if (crtl->has_bb_partition)
27534 {
27535 if (in_cold_section_p)
27536 end_label = crtl->subsections.cold_section_end_label;
27537 else
27538 end_label = crtl->subsections.hot_section_end_label;
27539 }
27540 else
27541 {
27542 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27543 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27544 current_function_funcdef_no);
27545 end_label = ggc_strdup (label);
27546 }
27547
27548 table = new_line_info_table ();
27549 table->end_label = end_label;
27550
27551 vec_safe_push (separate_line_info, table);
27552 }
27553
27554 if (output_asm_line_debug_info ())
27555 table->is_stmt = (cur_line_info_table
27556 ? cur_line_info_table->is_stmt
27557 : DWARF_LINE_DEFAULT_IS_STMT_START);
27558 cur_line_info_table = table;
27559 }
27560
27561
27562 /* We need to reset the locations at the beginning of each
27563 function. We can't do this in the end_function hook, because the
27564 declarations that use the locations won't have been output when
27565 that hook is called. Also compute have_multiple_function_sections here. */
27566
27567 static void
27568 dwarf2out_begin_function (tree fun)
27569 {
27570 section *sec = function_section (fun);
27571
27572 if (sec != text_section)
27573 have_multiple_function_sections = true;
27574
27575 if (crtl->has_bb_partition && !cold_text_section)
27576 {
27577 gcc_assert (current_function_decl == fun);
27578 cold_text_section = unlikely_text_section ();
27579 switch_to_section (cold_text_section);
27580 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27581 switch_to_section (sec);
27582 }
27583
27584 dwarf2out_note_section_used ();
27585 call_site_count = 0;
27586 tail_call_site_count = 0;
27587
27588 set_cur_line_info_table (sec);
27589 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27590 }
27591
27592 /* Helper function of dwarf2out_end_function, called only after emitting
27593 the very first function into assembly. Check if some .debug_loc range
27594 might end with a .LVL* label that could be equal to .Ltext0.
27595 In that case we must force using absolute addresses in .debug_loc ranges,
27596 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27597 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27598 list terminator.
27599 Set have_multiple_function_sections to true in that case and
27600 terminate htab traversal. */
27601
27602 int
27603 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27604 {
27605 var_loc_list *entry = *slot;
27606 struct var_loc_node *node;
27607
27608 node = entry->first;
27609 if (node && node->next && node->next->label)
27610 {
27611 unsigned int i;
27612 const char *label = node->next->label;
27613 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27614
27615 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27616 {
27617 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27618 if (strcmp (label, loclabel) == 0)
27619 {
27620 have_multiple_function_sections = true;
27621 return 0;
27622 }
27623 }
27624 }
27625 return 1;
27626 }
27627
27628 /* Hook called after emitting a function into assembly.
27629 This does something only for the very first function emitted. */
27630
27631 static void
27632 dwarf2out_end_function (unsigned int)
27633 {
27634 if (in_first_function_p
27635 && !have_multiple_function_sections
27636 && first_loclabel_num_not_at_text_label
27637 && decl_loc_table)
27638 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27639 in_first_function_p = false;
27640 maybe_at_text_label_p = false;
27641 }
27642
27643 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27644 front-ends register a translation unit even before dwarf2out_init is
27645 called. */
27646 static tree main_translation_unit = NULL_TREE;
27647
27648 /* Hook called by front-ends after they have built their main translation unit.
27649 Associate comp_unit_die with UNIT. */
27650
27651 static void
27652 dwarf2out_register_main_translation_unit (tree unit)
27653 {
27654 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27655 && main_translation_unit == NULL_TREE);
27656 main_translation_unit = unit;
27657 /* If dwarf2out_init has not been called yet, it will perform the association
27658 itself looking at main_translation_unit. */
27659 if (decl_die_table != NULL)
27660 equate_decl_number_to_die (unit, comp_unit_die ());
27661 }
27662
27663 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27664
27665 static void
27666 push_dw_line_info_entry (dw_line_info_table *table,
27667 enum dw_line_info_opcode opcode, unsigned int val)
27668 {
27669 dw_line_info_entry e;
27670 e.opcode = opcode;
27671 e.val = val;
27672 vec_safe_push (table->entries, e);
27673 }
27674
27675 /* Output a label to mark the beginning of a source code line entry
27676 and record information relating to this source line in
27677 'line_info_table' for later output of the .debug_line section. */
27678 /* ??? The discriminator parameter ought to be unsigned. */
27679
27680 static void
27681 dwarf2out_source_line (unsigned int line, unsigned int column,
27682 const char *filename,
27683 int discriminator, bool is_stmt)
27684 {
27685 unsigned int file_num;
27686 dw_line_info_table *table;
27687 static var_loc_view lvugid;
27688
27689 if (debug_info_level < DINFO_LEVEL_TERSE)
27690 return;
27691
27692 table = cur_line_info_table;
27693
27694 if (line == 0)
27695 {
27696 if (debug_variable_location_views
27697 && output_asm_line_debug_info ()
27698 && table && !RESETTING_VIEW_P (table->view))
27699 {
27700 /* If we're using the assembler to compute view numbers, we
27701 can't issue a .loc directive for line zero, so we can't
27702 get a view number at this point. We might attempt to
27703 compute it from the previous view, or equate it to a
27704 subsequent view (though it might not be there!), but
27705 since we're omitting the line number entry, we might as
27706 well omit the view number as well. That means pretending
27707 it's a view number zero, which might very well turn out
27708 to be correct. ??? Extend the assembler so that the
27709 compiler could emit e.g. ".locview .LVU#", to output a
27710 view without changing line number information. We'd then
27711 have to count it in symviews_since_reset; when it's omitted,
27712 it doesn't count. */
27713 if (!zero_view_p)
27714 zero_view_p = BITMAP_GGC_ALLOC ();
27715 bitmap_set_bit (zero_view_p, table->view);
27716 if (flag_debug_asm)
27717 {
27718 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27719 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27720 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27721 ASM_COMMENT_START);
27722 assemble_name (asm_out_file, label);
27723 putc ('\n', asm_out_file);
27724 }
27725 table->view = ++lvugid;
27726 }
27727 return;
27728 }
27729
27730 /* The discriminator column was added in DWARF 4. Simplify the code below
27731 by just clearing the discriminator if we're not supposed to output it. */
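/* (For reference: a discriminator distinguishes multiple blocks of code
generated for the same source line, e.g. the two arms of a conditional
expression, so that profiling tools can tell them apart.) */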
27732 if (dwarf_version < 4 && dwarf_strict)
27733 discriminator = 0;
27734
27735 if (!debug_column_info)
27736 column = 0;
27737
27738 file_num = maybe_emit_file (lookup_filename (filename));
27739
27740 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27741 the debugger has used the second (possibly duplicate) line number
27742 at the beginning of the function to mark the end of the prologue.
27743 We could eliminate any other duplicates within the function. For
27744 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27745 that second line number entry. */
27746 /* Recall that this end-of-prologue indication is *not* the same thing
27747 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27748 to which the hook corresponds, follows the last insn that was
27749 emitted by gen_prologue. What we need is to precede the first insn
27750 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27751 insn that corresponds to something the user wrote. These may be
27752 very different locations once scheduling is enabled. */
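/* Note that the duplicate-entry check below is deliberately disabled
(the "0 &&"), so at present every line number entry is emitted even
when it repeats the previous one. */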
27753
27754 if (0 && file_num == table->file_num
27755 && line == table->line_num
27756 && column == table->column_num
27757 && discriminator == table->discrim_num
27758 && is_stmt == table->is_stmt)
27759 return;
27760
27761 switch_to_section (current_function_section ());
27762
27763 /* If requested, emit something human-readable. */
27764 if (flag_debug_asm)
27765 {
27766 if (debug_column_info)
27767 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27768 filename, line, column);
27769 else
27770 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27771 filename, line);
27772 }
27773
27774 if (output_asm_line_debug_info ())
27775 {
27776 /* Emit the .loc directive understood by GNU as. */
27777 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27778 file_num, line, is_stmt, discriminator */
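/* For instance (purely illustrative values), a statement at line 42,
column 7 of file 1 might come out as
.loc 1 42 7 is_stmt 1 view .LVU3
where the is_stmt, discriminator and view operands only appear when
needed (state change, nonzero discriminator, location views enabled). */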
27779 fputs ("\t.loc ", asm_out_file);
27780 fprint_ul (asm_out_file, file_num);
27781 putc (' ', asm_out_file);
27782 fprint_ul (asm_out_file, line);
27783 putc (' ', asm_out_file);
27784 fprint_ul (asm_out_file, column);
27785
27786 if (is_stmt != table->is_stmt)
27787 {
27788 fputs (" is_stmt ", asm_out_file);
27789 putc (is_stmt ? '1' : '0', asm_out_file);
27790 }
27791 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27792 {
27793 gcc_assert (discriminator > 0);
27794 fputs (" discriminator ", asm_out_file);
27795 fprint_ul (asm_out_file, (unsigned long) discriminator);
27796 }
27797 if (debug_variable_location_views)
27798 {
27799 if (!RESETTING_VIEW_P (table->view))
27800 {
27801 table->symviews_since_reset++;
27802 if (table->symviews_since_reset > symview_upper_bound)
27803 symview_upper_bound = table->symviews_since_reset;
27804 /* When we're using the assembler to compute view
27805 numbers, we output symbolic labels after "view" in
27806 .loc directives, and the assembler will set them for
27807 us, so that we can refer to the view numbers in
27808 location lists. The only exceptions are when we know
27809 a view will be zero: "-0" is a forced reset, used
27810 e.g. in the beginning of functions, whereas "0" tells
27811 the assembler to check that there was a PC change
27812 since the previous view, in a way that implicitly
27813 resets the next view. */
27814 fputs (" view ", asm_out_file);
27815 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27816 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27817 assemble_name (asm_out_file, label);
27818 table->view = ++lvugid;
27819 }
27820 else
27821 {
27822 table->symviews_since_reset = 0;
27823 if (FORCE_RESETTING_VIEW_P (table->view))
27824 fputs (" view -0", asm_out_file);
27825 else
27826 fputs (" view 0", asm_out_file);
27827 /* Mark the present view as a zero view. Earlier debug
27828 binds may have already added its id to loclists to be
27829 emitted later, so we can't reuse the id for something
27830 else. However, it's good to know whether a view is
27831 known to be zero, because then we may be able to
27832 optimize out locviews that are all zeros, so take
27833 note of it in zero_view_p. */
27834 if (!zero_view_p)
27835 zero_view_p = BITMAP_GGC_ALLOC ();
27836 bitmap_set_bit (zero_view_p, lvugid);
27837 table->view = ++lvugid;
27838 }
27839 }
27840 putc ('\n', asm_out_file);
27841 }
27842 else
27843 {
27844 unsigned int label_num = ++line_info_label_num;
27845
27846 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27847
27848 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27849 push_dw_line_info_entry (table, LI_adv_address, label_num);
27850 else
27851 push_dw_line_info_entry (table, LI_set_address, label_num);
27852 if (debug_variable_location_views)
27853 {
27854 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27855 if (resetting)
27856 table->view = 0;
27857
27858 if (flag_debug_asm)
27859 fprintf (asm_out_file, "\t%s view %s%d\n",
27860 ASM_COMMENT_START,
27861 resetting ? "-" : "",
27862 table->view);
27863
27864 table->view++;
27865 }
27866 if (file_num != table->file_num)
27867 push_dw_line_info_entry (table, LI_set_file, file_num);
27868 if (discriminator != table->discrim_num)
27869 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27870 if (is_stmt != table->is_stmt)
27871 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27872 push_dw_line_info_entry (table, LI_set_line, line);
27873 if (debug_column_info)
27874 push_dw_line_info_entry (table, LI_set_column, column);
27875 }
27876
27877 table->file_num = file_num;
27878 table->line_num = line;
27879 table->column_num = column;
27880 table->discrim_num = discriminator;
27881 table->is_stmt = is_stmt;
27882 table->in_use = true;
27883 }
27884
27885 /* Record the beginning of a new source file. */
27886
27887 static void
27888 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27889 {
27890 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27891 {
27892 macinfo_entry e;
27893 e.code = DW_MACINFO_start_file;
27894 e.lineno = lineno;
27895 e.info = ggc_strdup (filename);
27896 vec_safe_push (macinfo_table, e);
27897 }
27898 }
27899
27900 /* Record the end of a source file. */
27901
27902 static void
27903 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27904 {
27905 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27906 {
27907 macinfo_entry e;
27908 e.code = DW_MACINFO_end_file;
27909 e.lineno = lineno;
27910 e.info = NULL;
27911 vec_safe_push (macinfo_table, e);
27912 }
27913 }
27914
27915 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27916 the tail part of the directive line, i.e. the part which is past the
27917 initial whitespace, '#', whitespace, directive-name and following whitespace. */
27918
27919 static void
27920 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27921 const char *buffer ATTRIBUTE_UNUSED)
27922 {
27923 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27924 {
27925 macinfo_entry e;
27926 /* Insert a dummy first entry to be able to optimize the whole
27927 predefined macro block using DW_MACRO_import. */
27928 if (macinfo_table->is_empty () && lineno <= 1)
27929 {
27930 e.code = 0;
27931 e.lineno = 0;
27932 e.info = NULL;
27933 vec_safe_push (macinfo_table, e);
27934 }
27935 e.code = DW_MACINFO_define;
27936 e.lineno = lineno;
27937 e.info = ggc_strdup (buffer);
27938 vec_safe_push (macinfo_table, e);
27939 }
27940 }
27941
27942 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27943 the tail part of the directive line, i.e. the part which is past the
27944 initial whitespace, '#', whitespace, directive-name and following whitespace. */
27945
27946 static void
27947 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27948 const char *buffer ATTRIBUTE_UNUSED)
27949 {
27950 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27951 {
27952 macinfo_entry e;
27953 /* Insert a dummy first entry to be able to optimize the whole
27954 predefined macro block using DW_MACRO_import. */
27955 if (macinfo_table->is_empty () && lineno <= 1)
27956 {
27957 e.code = 0;
27958 e.lineno = 0;
27959 e.info = NULL;
27960 vec_safe_push (macinfo_table, e);
27961 }
27962 e.code = DW_MACINFO_undef;
27963 e.lineno = lineno;
27964 e.info = ggc_strdup (buffer);
27965 vec_safe_push (macinfo_table, e);
27966 }
27967 }
27968
27969 /* Helpers to manipulate the hash table of macinfo entries. */
27970
27971 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
27972 {
27973 static inline hashval_t hash (const macinfo_entry *);
27974 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
27975 };
27976
27977 inline hashval_t
27978 macinfo_entry_hasher::hash (const macinfo_entry *entry)
27979 {
27980 return htab_hash_string (entry->info);
27981 }
27982
27983 inline bool
27984 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
27985 const macinfo_entry *entry2)
27986 {
27987 return !strcmp (entry1->info, entry2->info);
27988 }
27989
27990 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
27991
27992 /* Output a single .debug_macinfo entry. */
27993
27994 static void
27995 output_macinfo_op (macinfo_entry *ref)
27996 {
27997 int file_num;
27998 size_t len;
27999 struct indirect_string_node *node;
28000 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28001 struct dwarf_file_data *fd;
28002
28003 switch (ref->code)
28004 {
28005 case DW_MACINFO_start_file:
28006 fd = lookup_filename (ref->info);
28007 file_num = maybe_emit_file (fd);
28008 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28009 dw2_asm_output_data_uleb128 (ref->lineno,
28010 "Included from line number %lu",
28011 (unsigned long) ref->lineno);
28012 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28013 break;
28014 case DW_MACINFO_end_file:
28015 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28016 break;
28017 case DW_MACINFO_define:
28018 case DW_MACINFO_undef:
28019 len = strlen (ref->info) + 1;
28020 if (!dwarf_strict
28021 && len > DWARF_OFFSET_SIZE
28022 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28023 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28024 {
28025 ref->code = ref->code == DW_MACINFO_define
28026 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28027 output_macinfo_op (ref);
28028 return;
28029 }
28030 dw2_asm_output_data (1, ref->code,
28031 ref->code == DW_MACINFO_define
28032 ? "Define macro" : "Undefine macro");
28033 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28034 (unsigned long) ref->lineno);
28035 dw2_asm_output_nstring (ref->info, -1, "The macro");
28036 break;
28037 case DW_MACRO_define_strp:
28038 case DW_MACRO_undef_strp:
28039 node = find_AT_string (ref->info);
28040 gcc_assert (node
28041 && (node->form == DW_FORM_strp
28042 || node->form == DW_FORM_GNU_str_index));
28043 dw2_asm_output_data (1, ref->code,
28044 ref->code == DW_MACRO_define_strp
28045 ? "Define macro strp"
28046 : "Undefine macro strp");
28047 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28048 (unsigned long) ref->lineno);
28049 if (node->form == DW_FORM_strp)
28050 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28051 debug_str_section, "The macro: \"%s\"",
28052 ref->info);
28053 else
28054 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28055 ref->info);
28056 break;
28057 case DW_MACRO_import:
28058 dw2_asm_output_data (1, ref->code, "Import");
28059 ASM_GENERATE_INTERNAL_LABEL (label,
28060 DEBUG_MACRO_SECTION_LABEL,
28061 ref->lineno + macinfo_label_base);
28062 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28063 break;
28064 default:
28065 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28066 ASM_COMMENT_START, (unsigned long) ref->code);
28067 break;
28068 }
28069 }
28070
28071 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28072 other compilation units' .debug_macinfo sections. IDX is the first
28073 index of a define/undef op; return the number of ops that should be
28074 emitted in a comdat .debug_macinfo section and emit
28075 a DW_MACRO_import entry referencing it.
28076 If the define/undef entry should be emitted normally, return 0. */
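/* The idea, roughly: two translation units that include the same header
and therefore record an identical run of define/undef ops will compute
the same group name below, so the linker keeps a single comdat copy of
that run and each unit merely references it with a DW_MACRO_import op. */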
28077
28078 static unsigned
28079 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28080 macinfo_hash_type **macinfo_htab)
28081 {
28082 macinfo_entry *first, *second, *cur, *inc;
28083 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28084 unsigned char checksum[16];
28085 struct md5_ctx ctx;
28086 char *grp_name, *tail;
28087 const char *base;
28088 unsigned int i, count, encoded_filename_len, linebuf_len;
28089 macinfo_entry **slot;
28090
28091 first = &(*macinfo_table)[idx];
28092 second = &(*macinfo_table)[idx + 1];
28093
28094 /* Optimize only if there are at least two consecutive define/undef ops,
28095 and either all of them are before the first DW_MACINFO_start_file
28096 with lineno {0,1} (i.e. the predefined macro block), or all of them are
28097 in some included header file. */
28098 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28099 return 0;
28100 if (vec_safe_is_empty (files))
28101 {
28102 if (first->lineno > 1 || second->lineno > 1)
28103 return 0;
28104 }
28105 else if (first->lineno == 0)
28106 return 0;
28107
28108 /* Find the last define/undef entry that can be grouped together
28109 with FIRST, and at the same time compute the MD5 checksum of their
28110 codes, line numbers and strings. */
28111 md5_init_ctx (&ctx);
28112 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28113 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28114 break;
28115 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28116 break;
28117 else
28118 {
28119 unsigned char code = cur->code;
28120 md5_process_bytes (&code, 1, &ctx);
28121 checksum_uleb128 (cur->lineno, &ctx);
28122 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28123 }
28124 md5_finish_ctx (&ctx, checksum);
28125 count = i - idx;
28126
28127 /* From the containing include filename (if any) pick up just
28128 usable characters from its basename. */
28129 if (vec_safe_is_empty (files))
28130 base = "";
28131 else
28132 base = lbasename (files->last ().info);
28133 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28134 if (ISIDNUM (base[i]) || base[i] == '.')
28135 encoded_filename_len++;
28136 /* Count the '.' appended at the end. */
28137 if (encoded_filename_len)
28138 encoded_filename_len++;
28139
28140 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28141 linebuf_len = strlen (linebuf);
28142
28143 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
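/* A hypothetical example for a 4-byte DWARF offset size and a run of
defines starting at line 1 of an included stdio.h:
"wm4.stdio.h.1.0123456789abcdef0123456789abcdef"
(the final component being the 32-hex-digit MD5 computed above). */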
28144 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28145 + 16 * 2 + 1);
28146 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28147 tail = grp_name + 4;
28148 if (encoded_filename_len)
28149 {
28150 for (i = 0; base[i]; i++)
28151 if (ISIDNUM (base[i]) || base[i] == '.')
28152 *tail++ = base[i];
28153 *tail++ = '.';
28154 }
28155 memcpy (tail, linebuf, linebuf_len);
28156 tail += linebuf_len;
28157 *tail++ = '.';
28158 for (i = 0; i < 16; i++)
28159 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28160
28161 /* Construct a macinfo_entry for DW_MACRO_import
28162 in the empty vector entry before the first define/undef. */
28163 inc = &(*macinfo_table)[idx - 1];
28164 inc->code = DW_MACRO_import;
28165 inc->lineno = 0;
28166 inc->info = ggc_strdup (grp_name);
28167 if (!*macinfo_htab)
28168 *macinfo_htab = new macinfo_hash_type (10);
28169 /* Avoid emitting duplicates. */
28170 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28171 if (*slot != NULL)
28172 {
28173 inc->code = 0;
28174 inc->info = NULL;
28175 /* If such an entry has been used before, just emit
28176 a DW_MACRO_import op. */
28177 inc = *slot;
28178 output_macinfo_op (inc);
28179 /* And clear all macinfo_entry in the range to avoid emitting them
28180 in the second pass. */
28181 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28182 {
28183 cur->code = 0;
28184 cur->info = NULL;
28185 }
28186 }
28187 else
28188 {
28189 *slot = inc;
28190 inc->lineno = (*macinfo_htab)->elements ();
28191 output_macinfo_op (inc);
28192 }
28193 return count;
28194 }
28195
28196 /* Save any strings needed by the macinfo table in the debug str
28197 table. All strings must be collected into the table by the time
28198 index_string is called. */
28199
28200 static void
28201 save_macinfo_strings (void)
28202 {
28203 unsigned len;
28204 unsigned i;
28205 macinfo_entry *ref;
28206
28207 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28208 {
28209 switch (ref->code)
28210 {
28211 /* Match the logic in output_macinfo_op to decide on
28212 indirect strings. */
28213 case DW_MACINFO_define:
28214 case DW_MACINFO_undef:
28215 len = strlen (ref->info) + 1;
28216 if (!dwarf_strict
28217 && len > DWARF_OFFSET_SIZE
28218 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28219 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28220 set_indirect_string (find_AT_string (ref->info));
28221 break;
28222 case DW_MACRO_define_strp:
28223 case DW_MACRO_undef_strp:
28224 set_indirect_string (find_AT_string (ref->info));
28225 break;
28226 default:
28227 break;
28228 }
28229 }
28230 }
28231
28232 /* Output macinfo section(s). */
28233
28234 static void
28235 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28236 {
28237 unsigned i;
28238 unsigned long length = vec_safe_length (macinfo_table);
28239 macinfo_entry *ref;
28240 vec<macinfo_entry, va_gc> *files = NULL;
28241 macinfo_hash_type *macinfo_htab = NULL;
28242 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28243
28244 if (! length)
28245 return;
28246
28247 /* output_macinfo* uses these interchangeably. */
28248 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28249 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28250 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28251 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28252
28253 /* AIX Assembler inserts the length, so adjust the reference to match the
28254 offset expected by debuggers. */
28255 strcpy (dl_section_ref, debug_line_label);
28256 if (XCOFF_DEBUGGING_INFO)
28257 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28258
28259 /* For .debug_macro emit the section header. */
28260 if (!dwarf_strict || dwarf_version >= 5)
28261 {
28262 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28263 "DWARF macro version number");
28264 if (DWARF_OFFSET_SIZE == 8)
28265 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28266 else
28267 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28268 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28269 debug_line_section, NULL);
28270 }
28271
28272 /* The first loop emits the primary .debug_macinfo section and clears
28273 each macinfo_entry after its op has been emitted.
28274 If a longer range of define/undef ops can be optimized using
28275 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28276 the vector entry just before the first define/undef in the range,
28277 while the whole range of define/undef ops is kept but not emitted here. */
28278 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28279 {
28280 switch (ref->code)
28281 {
28282 case DW_MACINFO_start_file:
28283 vec_safe_push (files, *ref);
28284 break;
28285 case DW_MACINFO_end_file:
28286 if (!vec_safe_is_empty (files))
28287 files->pop ();
28288 break;
28289 case DW_MACINFO_define:
28290 case DW_MACINFO_undef:
28291 if ((!dwarf_strict || dwarf_version >= 5)
28292 && HAVE_COMDAT_GROUP
28293 && vec_safe_length (files) != 1
28294 && i > 0
28295 && i + 1 < length
28296 && (*macinfo_table)[i - 1].code == 0)
28297 {
28298 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28299 if (count)
28300 {
28301 i += count - 1;
28302 continue;
28303 }
28304 }
28305 break;
28306 case 0:
28307 /* A dummy entry may be inserted at the beginning to be able
28308 to optimize the whole block of predefined macros. */
28309 if (i == 0)
28310 continue;
28311 default:
28312 break;
28313 }
28314 output_macinfo_op (ref);
28315 ref->info = NULL;
28316 ref->code = 0;
28317 }
28318
28319 if (!macinfo_htab)
28320 return;
28321
28322 /* Save the number of transparent includes so we can adjust the
28323 label number for the fat LTO object DWARF. */
28324 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28325
28326 delete macinfo_htab;
28327 macinfo_htab = NULL;
28328
28329 /* If any DW_MACRO_import ops were used, then at each such entry
28330 terminate the current chain, switch to a new comdat .debug_macinfo
28331 section and emit the define/undef entries within it. */
28332 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28333 switch (ref->code)
28334 {
28335 case 0:
28336 continue;
28337 case DW_MACRO_import:
28338 {
28339 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28340 tree comdat_key = get_identifier (ref->info);
28341 /* Terminate the previous .debug_macinfo section. */
28342 dw2_asm_output_data (1, 0, "End compilation unit");
28343 targetm.asm_out.named_section (debug_macinfo_section_name,
28344 SECTION_DEBUG
28345 | SECTION_LINKONCE
28346 | (early_lto_debug
28347 ? SECTION_EXCLUDE : 0),
28348 comdat_key);
28349 ASM_GENERATE_INTERNAL_LABEL (label,
28350 DEBUG_MACRO_SECTION_LABEL,
28351 ref->lineno + macinfo_label_base);
28352 ASM_OUTPUT_LABEL (asm_out_file, label);
28353 ref->code = 0;
28354 ref->info = NULL;
28355 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28356 "DWARF macro version number");
28357 if (DWARF_OFFSET_SIZE == 8)
28358 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28359 else
28360 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28361 }
28362 break;
28363 case DW_MACINFO_define:
28364 case DW_MACINFO_undef:
28365 output_macinfo_op (ref);
28366 ref->code = 0;
28367 ref->info = NULL;
28368 break;
28369 default:
28370 gcc_unreachable ();
28371 }
28372
28373 macinfo_label_base += macinfo_label_base_adj;
28374 }
28375
28376 /* Initialize the various sections and labels for dwarf output, for either
28377 normal or early LTO debug info depending on EARLY_LTO_DEBUG. Returns the
28378 generation (zero-based number of times the function was called). */
28379
28380 static unsigned
28381 init_sections_and_labels (bool early_lto_debug)
28382 {
28383 /* As we may get called multiple times, keep a generation count for
28384 labels. */
28385 static unsigned generation = 0;
28386
28387 if (early_lto_debug)
28388 {
28389 if (!dwarf_split_debug_info)
28390 {
28391 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28392 SECTION_DEBUG | SECTION_EXCLUDE,
28393 NULL);
28394 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28395 SECTION_DEBUG | SECTION_EXCLUDE,
28396 NULL);
28397 debug_macinfo_section_name
28398 = ((dwarf_strict && dwarf_version < 5)
28399 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28400 debug_macinfo_section = get_section (debug_macinfo_section_name,
28401 SECTION_DEBUG
28402 | SECTION_EXCLUDE, NULL);
28403 /* For macro info we have to refer to a debug_line section, so
28404 similar to split-dwarf emit a skeleton one for early debug. */
28405 debug_skeleton_line_section
28406 = get_section (DEBUG_LTO_LINE_SECTION,
28407 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28408 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28409 DEBUG_SKELETON_LINE_SECTION_LABEL,
28410 generation);
28411 }
28412 else
28413 {
28414 /* ??? Which of the following do we need early? */
28415 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28416 SECTION_DEBUG | SECTION_EXCLUDE,
28417 NULL);
28418 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28419 SECTION_DEBUG | SECTION_EXCLUDE,
28420 NULL);
28421 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28422 SECTION_DEBUG
28423 | SECTION_EXCLUDE, NULL);
28424 debug_skeleton_abbrev_section
28425 = get_section (DEBUG_LTO_ABBREV_SECTION,
28426 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28427 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28428 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28429 generation);
28430
28431 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28432 stay in the main .o, but the skeleton_line goes into the split
28433 off dwo. */
28434 debug_skeleton_line_section
28435 = get_section (DEBUG_LTO_LINE_SECTION,
28436 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28437 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28438 DEBUG_SKELETON_LINE_SECTION_LABEL,
28439 generation);
28440 debug_str_offsets_section
28441 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28442 SECTION_DEBUG | SECTION_EXCLUDE,
28443 NULL);
28444 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28445 DEBUG_SKELETON_INFO_SECTION_LABEL,
28446 generation);
28447 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28448 DEBUG_STR_DWO_SECTION_FLAGS,
28449 NULL);
28450 debug_macinfo_section_name
28451 = ((dwarf_strict && dwarf_version < 5)
28452 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28453 debug_macinfo_section = get_section (debug_macinfo_section_name,
28454 SECTION_DEBUG | SECTION_EXCLUDE,
28455 NULL);
28456 }
28457 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28458 DEBUG_STR_SECTION_FLAGS
28459 | SECTION_EXCLUDE, NULL);
28460 if (!dwarf_split_debug_info && !dwarf2out_as_loc_support)
28461 debug_line_str_section
28462 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28463 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28464 }
28465 else
28466 {
28467 if (!dwarf_split_debug_info)
28468 {
28469 debug_info_section = get_section (DEBUG_INFO_SECTION,
28470 SECTION_DEBUG, NULL);
28471 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28472 SECTION_DEBUG, NULL);
28473 debug_loc_section = get_section (dwarf_version >= 5
28474 ? DEBUG_LOCLISTS_SECTION
28475 : DEBUG_LOC_SECTION,
28476 SECTION_DEBUG, NULL);
28477 debug_macinfo_section_name
28478 = ((dwarf_strict && dwarf_version < 5)
28479 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28480 debug_macinfo_section = get_section (debug_macinfo_section_name,
28481 SECTION_DEBUG, NULL);
28482 }
28483 else
28484 {
28485 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28486 SECTION_DEBUG | SECTION_EXCLUDE,
28487 NULL);
28488 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28489 SECTION_DEBUG | SECTION_EXCLUDE,
28490 NULL);
28491 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28492 SECTION_DEBUG, NULL);
28493 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28494 SECTION_DEBUG, NULL);
28495 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28496 SECTION_DEBUG, NULL);
28497 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28498 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28499 generation);
28500
28501 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28502 stay in the main .o, but the skeleton_line goes into the
28503 split off dwo. */
28504 debug_skeleton_line_section
28505 = get_section (DEBUG_DWO_LINE_SECTION,
28506 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28507 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28508 DEBUG_SKELETON_LINE_SECTION_LABEL,
28509 generation);
28510 debug_str_offsets_section
28511 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28512 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28513 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28514 DEBUG_SKELETON_INFO_SECTION_LABEL,
28515 generation);
28516 debug_loc_section = get_section (dwarf_version >= 5
28517 ? DEBUG_DWO_LOCLISTS_SECTION
28518 : DEBUG_DWO_LOC_SECTION,
28519 SECTION_DEBUG | SECTION_EXCLUDE,
28520 NULL);
28521 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28522 DEBUG_STR_DWO_SECTION_FLAGS,
28523 NULL);
28524 debug_macinfo_section_name
28525 = ((dwarf_strict && dwarf_version < 5)
28526 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28527 debug_macinfo_section = get_section (debug_macinfo_section_name,
28528 SECTION_DEBUG | SECTION_EXCLUDE,
28529 NULL);
28530 }
28531 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28532 SECTION_DEBUG, NULL);
28533 debug_line_section = get_section (DEBUG_LINE_SECTION,
28534 SECTION_DEBUG, NULL);
28535 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28536 SECTION_DEBUG, NULL);
28537 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28538 SECTION_DEBUG, NULL);
28539 debug_str_section = get_section (DEBUG_STR_SECTION,
28540 DEBUG_STR_SECTION_FLAGS, NULL);
28541 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28542 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28543 DEBUG_STR_SECTION_FLAGS, NULL);
28544
28545 debug_ranges_section = get_section (dwarf_version >= 5
28546 ? DEBUG_RNGLISTS_SECTION
28547 : DEBUG_RANGES_SECTION,
28548 SECTION_DEBUG, NULL);
28549 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28550 SECTION_DEBUG, NULL);
28551 }
28552
28553 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28554 DEBUG_ABBREV_SECTION_LABEL, generation);
28555 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28556 DEBUG_INFO_SECTION_LABEL, generation);
28557 info_section_emitted = false;
28558 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28559 DEBUG_LINE_SECTION_LABEL, generation);
28560 /* There are up to 4 unique ranges labels per generation.
28561 See also output_rnglists. */
28562 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28563 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28564 if (dwarf_version >= 5 && dwarf_split_debug_info)
28565 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28566 DEBUG_RANGES_SECTION_LABEL,
28567 1 + generation * 4);
28568 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28569 DEBUG_ADDR_SECTION_LABEL, generation);
28570 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28571 (dwarf_strict && dwarf_version < 5)
28572 ? DEBUG_MACINFO_SECTION_LABEL
28573 : DEBUG_MACRO_SECTION_LABEL, generation);
28574 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28575 generation);
28576
28577 ++generation;
28578 return generation - 1;
28579 }
28580
28581 /* Set up for Dwarf output at the start of compilation. */
28582
28583 static void
28584 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28585 {
28586 /* Allocate the file_table. */
28587 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28588
28589 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28590 /* Allocate the decl_die_table. */
28591 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28592
28593 /* Allocate the decl_loc_table. */
28594 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28595
28596 /* Allocate the cached_dw_loc_list_table. */
28597 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28598
28599 /* Allocate the initial hunk of the decl_scope_table. */
28600 vec_alloc (decl_scope_table, 256);
28601
28602 /* Allocate the initial hunk of the abbrev_die_table. */
28603 vec_alloc (abbrev_die_table, 256);
28604 /* Zero-th entry is allocated, but unused. */
28605 abbrev_die_table->quick_push (NULL);
28606
28607 /* Allocate the dwarf_proc_stack_usage_map. */
28608 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28609
28610 /* Allocate the pubtypes and pubnames vectors. */
28611 vec_alloc (pubname_table, 32);
28612 vec_alloc (pubtype_table, 32);
28613
28614 vec_alloc (incomplete_types, 64);
28615
28616 vec_alloc (used_rtx_array, 32);
28617
28618 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28619 vec_alloc (macinfo_table, 64);
28620 #endif
28621
28622 /* If front-ends already registered a main translation unit but we were not
28623 ready to perform the association, do this now. */
28624 if (main_translation_unit != NULL_TREE)
28625 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28626 }
28627
28628 /* Called before compile () starts outputting functions, variables
28629 and toplevel asms into assembly. */
28630
28631 static void
28632 dwarf2out_assembly_start (void)
28633 {
28634 if (text_section_line_info)
28635 return;
28636
28637 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28638 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28639 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28640 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28641 COLD_TEXT_SECTION_LABEL, 0);
28642 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28643
28644 switch_to_section (text_section);
28645 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28646 #endif
28647
28648 /* Make sure the line number table for .text always exists. */
28649 text_section_line_info = new_line_info_table ();
28650 text_section_line_info->end_label = text_end_label;
28651
28652 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28653 cur_line_info_table = text_section_line_info;
28654 #endif
28655
28656 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28657 && dwarf2out_do_cfi_asm ()
28658 && !dwarf2out_do_eh_frame ())
28659 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28660 }
28661
28662 /* A helper function for dwarf2out_finish called through
28663 htab_traverse. Assign a string its index. All strings must be
28664 collected into the table by the time index_string is called,
28665 because the indexing code relies on htab_traverse to traverse nodes
28666 in the same order for each run. */
28667
28668 int
28669 index_string (indirect_string_node **h, unsigned int *index)
28670 {
28671 indirect_string_node *node = *h;
28672
28673 find_string_form (node);
28674 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28675 {
28676 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28677 node->index = *index;
28678 *index += 1;
28679 }
28680 return 1;
28681 }
28682
28683 /* A helper function for output_indirect_strings called through
28684 htab_traverse. Output the offset to a string and update the
28685 current offset. */
28686
28687 int
28688 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28689 {
28690 indirect_string_node *node = *h;
28691
28692 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28693 {
28694 /* Assert that this node has been assigned an index. */
28695 gcc_assert (node->index != NO_INDEX_ASSIGNED
28696 && node->index != NOT_INDEXED);
28697 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28698 "indexed string 0x%x: %s", node->index, node->str);
28699 *offset += strlen (node->str) + 1;
28700 }
28701 return 1;
28702 }
28703
28704 /* A helper function for dwarf2out_finish called through
28705 htab_traverse. Output the indexed string. */
28706
28707 int
28708 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28709 {
28710 struct indirect_string_node *node = *h;
28711
28712 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28713 {
28714 /* Assert that the strings are output in the same order as their
28715 indexes were assigned. */
28716 gcc_assert (*cur_idx == node->index);
28717 assemble_string (node->str, strlen (node->str) + 1);
28718 *cur_idx += 1;
28719 }
28720 return 1;
28721 }
28722
28723 /* A helper function for dwarf2out_finish called through
28724 htab_traverse. Emit one queued .debug_str string. */
28725
28726 int
28727 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28728 {
28729 struct indirect_string_node *node = *h;
28730
28731 node->form = find_string_form (node);
28732 if (node->form == form && node->refcount > 0)
28733 {
28734 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28735 assemble_string (node->str, strlen (node->str) + 1);
28736 }
28737
28738 return 1;
28739 }
28740
28741 /* Output the indexed string table. */
28742
28743 static void
28744 output_indirect_strings (void)
28745 {
28746 switch_to_section (debug_str_section);
28747 if (!dwarf_split_debug_info)
28748 debug_str_hash->traverse<enum dwarf_form,
28749 output_indirect_string> (DW_FORM_strp);
28750 else
28751 {
28752 unsigned int offset = 0;
28753 unsigned int cur_idx = 0;
28754
28755 if (skeleton_debug_str_hash)
28756 skeleton_debug_str_hash->traverse<enum dwarf_form,
28757 output_indirect_string> (DW_FORM_strp);
28758
28759 switch_to_section (debug_str_offsets_section);
28760 debug_str_hash->traverse_noresize
28761 <unsigned int *, output_index_string_offset> (&offset);
28762 switch_to_section (debug_str_dwo_section);
28763 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28764 (&cur_idx);
28765 }
28766 }
28767
28768 /* Callback for htab_traverse to verify the index assigned to an entry in
28769 the table and to write that entry to the .debug_addr section. */
28770
28771 int
28772 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28773 {
28774 addr_table_entry *entry = *slot;
28775
28776 if (entry->refcount == 0)
28777 {
28778 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28779 || entry->index == NOT_INDEXED);
28780 return 1;
28781 }
28782
28783 gcc_assert (entry->index == *cur_index);
28784 (*cur_index)++;
28785
28786 switch (entry->kind)
28787 {
28788 case ate_kind_rtx:
28789 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28790 "0x%x", entry->index);
28791 break;
28792 case ate_kind_rtx_dtprel:
28793 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28794 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28795 DWARF2_ADDR_SIZE,
28796 entry->addr.rtl);
28797 fputc ('\n', asm_out_file);
28798 break;
28799 case ate_kind_label:
28800 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28801 "0x%x", entry->index);
28802 break;
28803 default:
28804 gcc_unreachable ();
28805 }
28806 return 1;
28807 }
28808
28809 /* Produce the .debug_addr section. */
28810
28811 static void
28812 output_addr_table (void)
28813 {
28814 unsigned int index = 0;
28815 if (addr_index_table == NULL || addr_index_table->size () == 0)
28816 return;
28817
28818 switch_to_section (debug_addr_section);
28819 addr_index_table
28820 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28821 }
28822
28823 #if ENABLE_ASSERT_CHECKING
28824 /* Verify that all marks are clear. */
28825
28826 static void
28827 verify_marks_clear (dw_die_ref die)
28828 {
28829 dw_die_ref c;
28830
28831 gcc_assert (! die->die_mark);
28832 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28833 }
28834 #endif /* ENABLE_ASSERT_CHECKING */
28835
28836 /* Clear the marks for a die and its children.
28837 Be cool if the mark isn't set. */
28838
28839 static void
28840 prune_unmark_dies (dw_die_ref die)
28841 {
28842 dw_die_ref c;
28843
28844 if (die->die_mark)
28845 die->die_mark = 0;
28846 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28847 }
28848
28849 /* Given LOC that is referenced by a DIE we're marking as used, find all
28850 DWARF procedures it references and mark them as used as well. */
28851
28852 static void
28853 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28854 {
28855 for (; loc != NULL; loc = loc->dw_loc_next)
28856 switch (loc->dw_loc_opc)
28857 {
28858 case DW_OP_implicit_pointer:
28859 case DW_OP_convert:
28860 case DW_OP_reinterpret:
28861 case DW_OP_GNU_implicit_pointer:
28862 case DW_OP_GNU_convert:
28863 case DW_OP_GNU_reinterpret:
28864 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28865 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28866 break;
28867 case DW_OP_GNU_variable_value:
28868 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28869 {
28870 dw_die_ref ref
28871 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28872 if (ref == NULL)
28873 break;
28874 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28875 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28876 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28877 }
28878 /* FALLTHRU */
28879 case DW_OP_call2:
28880 case DW_OP_call4:
28881 case DW_OP_call_ref:
28882 case DW_OP_const_type:
28883 case DW_OP_GNU_const_type:
28884 case DW_OP_GNU_parameter_ref:
28885 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28886 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28887 break;
28888 case DW_OP_regval_type:
28889 case DW_OP_deref_type:
28890 case DW_OP_GNU_regval_type:
28891 case DW_OP_GNU_deref_type:
28892 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28893 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28894 break;
28895 case DW_OP_entry_value:
28896 case DW_OP_GNU_entry_value:
28897 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28898 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28899 break;
28900 default:
28901 break;
28902 }
28903 }
28904
28905 /* Given DIE that we're marking as used, find any other dies
28906 it references as attributes and mark them as used. */
28907
28908 static void
28909 prune_unused_types_walk_attribs (dw_die_ref die)
28910 {
28911 dw_attr_node *a;
28912 unsigned ix;
28913
28914 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28915 {
28916 switch (AT_class (a))
28917 {
28918 /* Make sure DWARF procedures referenced by location descriptions will
28919 get emitted. */
28920 case dw_val_class_loc:
28921 prune_unused_types_walk_loc_descr (AT_loc (a));
28922 break;
28923 case dw_val_class_loc_list:
28924 for (dw_loc_list_ref list = AT_loc_list (a);
28925 list != NULL;
28926 list = list->dw_loc_next)
28927 prune_unused_types_walk_loc_descr (list->expr);
28928 break;
28929
28930 case dw_val_class_view_list:
28931 /* This points to a loc_list in another attribute, so it's
28932 already covered. */
28933 break;
28934
28935 case dw_val_class_die_ref:
28936 /* A reference to another DIE.
28937 Make sure that it will get emitted.
28938 If it was broken out into a comdat group, don't follow it. */
28939 if (! AT_ref (a)->comdat_type_p
28940 || a->dw_attr == DW_AT_specification)
28941 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
28942 break;
28943
28944 case dw_val_class_str:
28945 /* Set the string's refcount to 0 so that prune_unused_types_mark
28946 accounts properly for it. */
28947 a->dw_attr_val.v.val_str->refcount = 0;
28948 break;
28949
28950 default:
28951 break;
28952 }
28953 }
28954 }
28955
28956 /* Mark the children DIEs of DIE that describe its generic parms and args. */
28957
28958 static void
28959 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
28960 {
28961 dw_die_ref c;
28962
28963 if (die == NULL || die->die_child == NULL)
28964 return;
28965 c = die->die_child;
28966 do
28967 {
28968 if (is_template_parameter (c))
28969 prune_unused_types_mark (c, 1);
28970 c = c->die_sib;
28971 } while (c && c != die->die_child);
28972 }
28973
28974 /* Mark DIE as being used. If DOKIDS is true, then walk down
28975 to DIE's children. */
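/* DIE->die_mark is effectively a small state machine here:
0 = not yet visited, 1 = marked as used, 2 = marked and its
children have been walked as well. */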
28976
28977 static void
28978 prune_unused_types_mark (dw_die_ref die, int dokids)
28979 {
28980 dw_die_ref c;
28981
28982 if (die->die_mark == 0)
28983 {
28984 /* We haven't done this node yet. Mark it as used. */
28985 die->die_mark = 1;
28986 /* If this is the DIE of a generic type instantiation,
28987 mark the children DIEs that describe its generic parms and
28988 args. */
28989 prune_unused_types_mark_generic_parms_dies (die);
28990
28991 /* We also have to mark its parents as used.
28992 (But we don't want to mark our parent's kids due to this,
28993 unless it is a class.) */
28994 if (die->die_parent)
28995 prune_unused_types_mark (die->die_parent,
28996 class_scope_p (die->die_parent));
28997
28998 /* Mark any referenced nodes. */
28999 prune_unused_types_walk_attribs (die);
29000
29001 /* If this node is a specification,
29002 also mark the definition, if it exists. */
29003 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29004 prune_unused_types_mark (die->die_definition, 1);
29005 }
29006
29007 if (dokids && die->die_mark != 2)
29008 {
29009 /* We need to walk the children, but haven't done so yet.
29010 Remember that we've walked the kids. */
29011 die->die_mark = 2;
29012
29013 /* If this is an array type, we need to make sure our
29014 kids get marked, even if they're types. If we're
29015 breaking out types into comdat sections, do this
29016 for all type definitions. */
29017 if (die->die_tag == DW_TAG_array_type
29018 || (use_debug_types
29019 && is_type_die (die) && ! is_declaration_die (die)))
29020 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29021 else
29022 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29023 }
29024 }
29025
29026 /* For local classes, check whether any static member functions were emitted
29027 and, if so, mark them. */
29028
29029 static void
29030 prune_unused_types_walk_local_classes (dw_die_ref die)
29031 {
29032 dw_die_ref c;
29033
29034 if (die->die_mark == 2)
29035 return;
29036
29037 switch (die->die_tag)
29038 {
29039 case DW_TAG_structure_type:
29040 case DW_TAG_union_type:
29041 case DW_TAG_class_type:
29042 break;
29043
29044 case DW_TAG_subprogram:
29045 if (!get_AT_flag (die, DW_AT_declaration)
29046 || die->die_definition != NULL)
29047 prune_unused_types_mark (die, 1);
29048 return;
29049
29050 default:
29051 return;
29052 }
29053
29054 /* Mark children. */
29055 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29056 }
29057
29058 /* Walk the tree DIE and mark types that we actually use. */
29059
29060 static void
29061 prune_unused_types_walk (dw_die_ref die)
29062 {
29063 dw_die_ref c;
29064
29065 /* Don't do anything if this node is already marked and
29066 children have been marked as well. */
29067 if (die->die_mark == 2)
29068 return;
29069
29070 switch (die->die_tag)
29071 {
29072 case DW_TAG_structure_type:
29073 case DW_TAG_union_type:
29074 case DW_TAG_class_type:
29075 if (die->die_perennial_p)
29076 break;
29077
29078 for (c = die->die_parent; c; c = c->die_parent)
29079 if (c->die_tag == DW_TAG_subprogram)
29080 break;
29081
29082 /* Finding used static member functions inside of classes
29083 is needed just for local classes, because for other classes
29084 static member function DIEs with DW_AT_specification
29085 are emitted outside of the DW_TAG_*_type. If we ever change
29086 it, we'd need to call this even for non-local classes. */
29087 if (c)
29088 prune_unused_types_walk_local_classes (die);
29089
29090 /* It's a type node --- don't mark it. */
29091 return;
29092
29093 case DW_TAG_const_type:
29094 case DW_TAG_packed_type:
29095 case DW_TAG_pointer_type:
29096 case DW_TAG_reference_type:
29097 case DW_TAG_rvalue_reference_type:
29098 case DW_TAG_volatile_type:
29099 case DW_TAG_typedef:
29100 case DW_TAG_array_type:
29101 case DW_TAG_interface_type:
29102 case DW_TAG_friend:
29103 case DW_TAG_enumeration_type:
29104 case DW_TAG_subroutine_type:
29105 case DW_TAG_string_type:
29106 case DW_TAG_set_type:
29107 case DW_TAG_subrange_type:
29108 case DW_TAG_ptr_to_member_type:
29109 case DW_TAG_file_type:
29110 /* Type nodes are useful only when other DIEs reference them --- don't
29111 mark them. */
29112 /* FALLTHROUGH */
29113
29114 case DW_TAG_dwarf_procedure:
29115 /* Likewise for DWARF procedures. */
29116
29117 if (die->die_perennial_p)
29118 break;
29119
29120 return;
29121
29122 default:
29123 /* Mark everything else. */
29124 break;
29125 }
29126
29127 if (die->die_mark == 0)
29128 {
29129 die->die_mark = 1;
29130
29131 /* Now, mark any dies referenced from here. */
29132 prune_unused_types_walk_attribs (die);
29133 }
29134
29135 die->die_mark = 2;
29136
29137 /* Mark children. */
29138 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29139 }
29140
29141 /* Increment the string counts on strings referred to from DIE's
29142 attributes. */
29143
29144 static void
29145 prune_unused_types_update_strings (dw_die_ref die)
29146 {
29147 dw_attr_node *a;
29148 unsigned ix;
29149
29150 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29151 if (AT_class (a) == dw_val_class_str)
29152 {
29153 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29154 s->refcount++;
29155 /* Avoid unnecessarily putting strings that are used less than
29156 twice in the hash table. */
29157 if (s->refcount
29158 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29159 {
29160 indirect_string_node **slot
29161 = debug_str_hash->find_slot_with_hash (s->str,
29162 htab_hash_string (s->str),
29163 INSERT);
29164 gcc_assert (*slot == NULL);
29165 *slot = s;
29166 }
29167 }
29168 }
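
/* Illustrative sketch, not part of the compiler: the refcounting rule
   used above expressed as a single hypothetical predicate.  With a
   mergeable .debug_str section every string is worth interning after
   its first use; without merging, indirection through the string table
   only pays off once a string is referenced at least twice.  Returning
   true exactly at the threshold ensures each string is inserted into
   the hash table only once.  */

static int
toy_string_reaches_intern_threshold (unsigned int refcount,
                                     int section_is_mergeable)
{
  unsigned int threshold = section_is_mergeable ? 1 : 2;
  return refcount == threshold;
}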
29169
29170 /* Mark DIE and its children as removed. */
29171
29172 static void
29173 mark_removed (dw_die_ref die)
29174 {
29175 dw_die_ref c;
29176 die->removed = true;
29177 FOR_EACH_CHILD (die, c, mark_removed (c));
29178 }
29179
29180 /* Remove from the tree DIE any dies that aren't marked. */
29181
29182 static void
29183 prune_unused_types_prune (dw_die_ref die)
29184 {
29185 dw_die_ref c;
29186
29187 gcc_assert (die->die_mark);
29188 prune_unused_types_update_strings (die);
29189
29190 if (! die->die_child)
29191 return;
29192
29193 c = die->die_child;
29194 do {
29195 dw_die_ref prev = c, next;
29196 for (c = c->die_sib; ! c->die_mark; c = next)
29197 if (c == die->die_child)
29198 {
29199 /* No marked children between 'prev' and the end of the list. */
29200 if (prev == c)
29201 /* No marked children at all. */
29202 die->die_child = NULL;
29203 else
29204 {
29205 prev->die_sib = c->die_sib;
29206 die->die_child = prev;
29207 }
29208 c->die_sib = NULL;
29209 mark_removed (c);
29210 return;
29211 }
29212 else
29213 {
29214 next = c->die_sib;
29215 c->die_sib = NULL;
29216 mark_removed (c);
29217 }
29218
29219 if (c != prev->die_sib)
29220 prev->die_sib = c;
29221 prune_unused_types_prune (c);
29222 } while (c != die->die_child);
29223 }
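
/* Illustrative sketch, not part of the compiler: pruning unmarked nodes
   from the kind of child list used above, i.e. a circular singly linked
   list in which the parent points at its last child and the last
   child's sibling pointer wraps around to the first child.  Toy types;
   unlike the real code, unmarked nodes are simply dropped here rather
   than flagged as removed.  */

struct toy_node
{
  int mark;
  struct toy_node *next;        /* Next sibling in the ring.  */
};

struct toy_parent
{
  struct toy_node *last;        /* Last child; last->next is the first.  */
};

static void
toy_prune_ring (struct toy_parent *p)
{
  struct toy_node *first_kept = NULL, *last_kept = NULL;

  if (p->last == NULL)
    return;

  struct toy_node *c = p->last->next;   /* Start at the first child.  */
  struct toy_node *stop = p->last;

  for (;;)
    {
      struct toy_node *next = c->next;
      if (c->mark)
        {
          if (last_kept)
            last_kept->next = c;        /* Chain after the last kept node.  */
          else
            first_kept = c;
          last_kept = c;
        }
      if (c == stop)
        break;
      c = next;
    }

  if (last_kept == NULL)
    p->last = NULL;                     /* No marked children at all.  */
  else
    {
      last_kept->next = first_kept;     /* Close the ring again.  */
      p->last = last_kept;
    }
}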
29224
29225 /* Remove dies representing declarations that we never use. */
29226
29227 static void
29228 prune_unused_types (void)
29229 {
29230 unsigned int i;
29231 limbo_die_node *node;
29232 comdat_type_node *ctnode;
29233 pubname_entry *pub;
29234 dw_die_ref base_type;
29235
29236 #if ENABLE_ASSERT_CHECKING
29237 /* All the marks should already be clear. */
29238 verify_marks_clear (comp_unit_die ());
29239 for (node = limbo_die_list; node; node = node->next)
29240 verify_marks_clear (node->die);
29241 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29242 verify_marks_clear (ctnode->root_die);
29243 #endif /* ENABLE_ASSERT_CHECKING */
29244
29245 /* Mark types that are used in global variables. */
29246 premark_types_used_by_global_vars ();
29247
29248 /* Set the mark on nodes that are actually used. */
29249 prune_unused_types_walk (comp_unit_die ());
29250 for (node = limbo_die_list; node; node = node->next)
29251 prune_unused_types_walk (node->die);
29252 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29253 {
29254 prune_unused_types_walk (ctnode->root_die);
29255 prune_unused_types_mark (ctnode->type_die, 1);
29256 }
29257
29258 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29259 are unusual in that they are pubnames that are the children of pubtypes.
29260 They should only be marked via their parent DW_TAG_enumeration_type die,
29261 not as roots in themselves. */
29262 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29263 if (pub->die->die_tag != DW_TAG_enumerator)
29264 prune_unused_types_mark (pub->die, 1);
29265 for (i = 0; base_types.iterate (i, &base_type); i++)
29266 prune_unused_types_mark (base_type, 1);
29267
29268 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29269 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29270 callees). */
29271 cgraph_node *cnode;
29272 FOR_EACH_FUNCTION (cnode)
29273 if (cnode->referred_to_p (false))
29274 {
29275 dw_die_ref die = lookup_decl_die (cnode->decl);
29276 if (die == NULL || die->die_mark)
29277 continue;
29278 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29279 if (e->caller != cnode
29280 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29281 {
29282 prune_unused_types_mark (die, 1);
29283 break;
29284 }
29285 }
29286
29287 if (debug_str_hash)
29288 debug_str_hash->empty ();
29289 if (skeleton_debug_str_hash)
29290 skeleton_debug_str_hash->empty ();
29291 prune_unused_types_prune (comp_unit_die ());
29292 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29293 {
29294 node = *pnode;
29295 if (!node->die->die_mark)
29296 *pnode = node->next;
29297 else
29298 {
29299 prune_unused_types_prune (node->die);
29300 pnode = &node->next;
29301 }
29302 }
29303 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29304 prune_unused_types_prune (ctnode->root_die);
29305
29306 /* Leave the marks clear. */
29307 prune_unmark_dies (comp_unit_die ());
29308 for (node = limbo_die_list; node; node = node->next)
29309 prune_unmark_dies (node->die);
29310 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29311 prune_unmark_dies (ctnode->root_die);
29312 }
29313
29314 /* Helpers to manipulate hash table of comdat type units. */
29315
29316 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29317 {
29318 static inline hashval_t hash (const comdat_type_node *);
29319 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29320 };
29321
29322 inline hashval_t
29323 comdat_type_hasher::hash (const comdat_type_node *type_node)
29324 {
29325 hashval_t h;
29326 memcpy (&h, type_node->signature, sizeof (h));
29327 return h;
29328 }
29329
29330 inline bool
29331 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29332 const comdat_type_node *type_node_2)
29333 {
29334 return (! memcmp (type_node_1->signature, type_node_2->signature,
29335 DWARF_TYPE_SIGNATURE_SIZE));
29336 }
29337
29338 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to DIE to the
29339 location where it would have been added had we known the decl's
29340 DECL_ASSEMBLER_NAME when the other attributes were added. This will
29341 probably improve the compactness of the debug info by removing
29342 equivalent abbrevs, and hides any differences caused by deferring the
29343 computation of the assembler name, triggered by e.g. PCH. */
29344
29345 static inline void
29346 move_linkage_attr (dw_die_ref die)
29347 {
29348 unsigned ix = vec_safe_length (die->die_attr);
29349 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29350
29351 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29352 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29353
29354 while (--ix > 0)
29355 {
29356 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29357
29358 if (prev->dw_attr == DW_AT_decl_line
29359 || prev->dw_attr == DW_AT_decl_column
29360 || prev->dw_attr == DW_AT_name)
29361 break;
29362 }
29363
29364 if (ix != vec_safe_length (die->die_attr) - 1)
29365 {
29366 die->die_attr->pop ();
29367 die->die_attr->quick_insert (ix, linkage);
29368 }
29369 }
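
/* Illustrative sketch, not part of the compiler: the same reordering as
   move_linkage_attr above, performed on a plain array instead of the
   attribute vec.  The most recently appended element is re-inserted
   just after the nearest "anchor" element found scanning backwards, so
   the final order no longer depends on how late the element was
   appended.  Names here are hypothetical.  */

static void
toy_move_last_after_anchor (int *elems, unsigned int n, int anchor)
{
  if (n < 2)
    return;

  int last = elems[n - 1];
  unsigned int ix = n - 1;

  /* Find the slot just after the anchor (or slot 0 if there is none).  */
  while (ix > 0 && elems[ix - 1] != anchor)
    ix--;

  /* Shift the elements between IX and the end up by one ...  */
  for (unsigned int j = n - 1; j > ix; j--)
    elems[j] = elems[j - 1];

  /* ... and drop the saved element into the hole.  */
  elems[ix] = last;
}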
29370
29371 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29372 referenced from typed stack ops and count how often they are used. */
29373
29374 static void
29375 mark_base_types (dw_loc_descr_ref loc)
29376 {
29377 dw_die_ref base_type = NULL;
29378
29379 for (; loc; loc = loc->dw_loc_next)
29380 {
29381 switch (loc->dw_loc_opc)
29382 {
29383 case DW_OP_regval_type:
29384 case DW_OP_deref_type:
29385 case DW_OP_GNU_regval_type:
29386 case DW_OP_GNU_deref_type:
29387 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29388 break;
29389 case DW_OP_convert:
29390 case DW_OP_reinterpret:
29391 case DW_OP_GNU_convert:
29392 case DW_OP_GNU_reinterpret:
29393 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29394 continue;
29395 /* FALLTHRU */
29396 case DW_OP_const_type:
29397 case DW_OP_GNU_const_type:
29398 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29399 break;
29400 case DW_OP_entry_value:
29401 case DW_OP_GNU_entry_value:
29402 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29403 continue;
29404 default:
29405 continue;
29406 }
29407 gcc_assert (base_type->die_parent == comp_unit_die ());
29408 if (base_type->die_mark)
29409 base_type->die_mark++;
29410 else
29411 {
29412 base_types.safe_push (base_type);
29413 base_type->die_mark = 1;
29414 }
29415 }
29416 }
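
/* Illustrative sketch, not part of the compiler: the counting idiom
   used above, on toy types.  Because the marks are expected to be
   clear at this point, the mark field can double as a use counter, and
   a zero mark also means "not collected into the worklist yet".  */

struct toy_base_type
{
  unsigned int mark;    /* 0 = not collected yet, otherwise use count.  */
};

static void
toy_count_base_type_use (struct toy_base_type *t,
                         struct toy_base_type **worklist,
                         unsigned int *n_worklist)
{
  if (t->mark)
    t->mark++;                          /* Seen before: bump the count.  */
  else
    {
      worklist[(*n_worklist)++] = t;    /* First use: collect it.  */
      t->mark = 1;
    }
}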
29417
29418 /* Comparison function for sorting marked base types. */
29419
29420 static int
29421 base_type_cmp (const void *x, const void *y)
29422 {
29423 dw_die_ref dx = *(const dw_die_ref *) x;
29424 dw_die_ref dy = *(const dw_die_ref *) y;
29425 unsigned int byte_size1, byte_size2;
29426 unsigned int encoding1, encoding2;
29427 unsigned int align1, align2;
29428 if (dx->die_mark > dy->die_mark)
29429 return -1;
29430 if (dx->die_mark < dy->die_mark)
29431 return 1;
29432 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29433 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29434 if (byte_size1 < byte_size2)
29435 return 1;
29436 if (byte_size1 > byte_size2)
29437 return -1;
29438 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29439 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29440 if (encoding1 < encoding2)
29441 return 1;
29442 if (encoding1 > encoding2)
29443 return -1;
29444 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29445 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29446 if (align1 < align2)
29447 return 1;
29448 if (align1 > align2)
29449 return -1;
29450 return 0;
29451 }
29452
29453 /* Move base types marked by mark_base_types as early as possible
29454 in the CU, sorted by decreasing usage count both to make the
29455 uleb128 references as small as possible and to make sure they
29456 will have die_offset already computed by calc_die_sizes when
29457 sizes of typed stack loc ops are computed. */
29458
29459 static void
29460 move_marked_base_types (void)
29461 {
29462 unsigned int i;
29463 dw_die_ref base_type, die, c;
29464
29465 if (base_types.is_empty ())
29466 return;
29467
29468 /* Sort by decreasing usage count, they will be added again in that
29469 order later on. */
29470 base_types.qsort (base_type_cmp);
29471 die = comp_unit_die ();
29472 c = die->die_child;
29473 do
29474 {
29475 dw_die_ref prev = c;
29476 c = c->die_sib;
29477 while (c->die_mark)
29478 {
29479 remove_child_with_prev (c, prev);
29480 /* As base types got marked, there must be at least
29481 one node other than DW_TAG_base_type. */
29482 gcc_assert (die->die_child != NULL);
29483 c = prev->die_sib;
29484 }
29485 }
29486 while (c != die->die_child);
29487 gcc_assert (die->die_child);
29488 c = die->die_child;
29489 for (i = 0; base_types.iterate (i, &base_type); i++)
29490 {
29491 base_type->die_mark = 0;
29492 base_type->die_sib = c->die_sib;
29493 c->die_sib = base_type;
29494 c = base_type;
29495 }
29496 }
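
/* Illustrative sketch, not part of the compiler: standard unsigned
   LEB128 sizing, shown only to make the comment above concrete.  Typed
   stack ops reference their DW_TAG_base_type DIE by a uleb128-encoded
   CU-relative offset, so every additional 7 bits of offset costs one
   more byte; placing the most heavily used base types first keeps
   those offsets, and hence their encodings, small.  */

static unsigned int
toy_size_of_uleb128 (unsigned long value)
{
  unsigned int size = 0;

  do
    {
      value >>= 7;
      size++;
    }
  while (value != 0);

  return size;
}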
29497
29498 /* Helper function for resolve_addr, attempt to resolve
29499 one CONST_STRING, return true if successful. Similarly verify that
29500 SYMBOL_REFs refer to variables emitted in the current CU. */
29501
29502 static bool
29503 resolve_one_addr (rtx *addr)
29504 {
29505 rtx rtl = *addr;
29506
29507 if (GET_CODE (rtl) == CONST_STRING)
29508 {
29509 size_t len = strlen (XSTR (rtl, 0)) + 1;
29510 tree t = build_string (len, XSTR (rtl, 0));
29511 tree tlen = size_int (len - 1);
29512 TREE_TYPE (t)
29513 = build_array_type (char_type_node, build_index_type (tlen));
29514 rtl = lookup_constant_def (t);
29515 if (!rtl || !MEM_P (rtl))
29516 return false;
29517 rtl = XEXP (rtl, 0);
29518 if (GET_CODE (rtl) == SYMBOL_REF
29519 && SYMBOL_REF_DECL (rtl)
29520 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29521 return false;
29522 vec_safe_push (used_rtx_array, rtl);
29523 *addr = rtl;
29524 return true;
29525 }
29526
29527 if (GET_CODE (rtl) == SYMBOL_REF
29528 && SYMBOL_REF_DECL (rtl))
29529 {
29530 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29531 {
29532 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29533 return false;
29534 }
29535 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29536 return false;
29537 }
29538
29539 if (GET_CODE (rtl) == CONST)
29540 {
29541 subrtx_ptr_iterator::array_type array;
29542 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29543 if (!resolve_one_addr (*iter))
29544 return false;
29545 }
29546
29547 return true;
29548 }
29549
29550 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29551 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29552 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29553
29554 static rtx
29555 string_cst_pool_decl (tree t)
29556 {
29557 rtx rtl = output_constant_def (t, 1);
29558 unsigned char *array;
29559 dw_loc_descr_ref l;
29560 tree decl;
29561 size_t len;
29562 dw_die_ref ref;
29563
29564 if (!rtl || !MEM_P (rtl))
29565 return NULL_RTX;
29566 rtl = XEXP (rtl, 0);
29567 if (GET_CODE (rtl) != SYMBOL_REF
29568 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29569 return NULL_RTX;
29570
29571 decl = SYMBOL_REF_DECL (rtl);
29572 if (!lookup_decl_die (decl))
29573 {
29574 len = TREE_STRING_LENGTH (t);
29575 vec_safe_push (used_rtx_array, rtl);
29576 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29577 array = ggc_vec_alloc<unsigned char> (len);
29578 memcpy (array, TREE_STRING_POINTER (t), len);
29579 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29580 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29581 l->dw_loc_oprnd2.v.val_vec.length = len;
29582 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29583 l->dw_loc_oprnd2.v.val_vec.array = array;
29584 add_AT_loc (ref, DW_AT_location, l);
29585 equate_decl_number_to_die (decl, ref);
29586 }
29587 return rtl;
29588 }
29589
29590 /* Helper function of resolve_addr_in_expr. LOC is
29591 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29592 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29593 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29594 with DW_OP_implicit_pointer if possible
29595 and return true; if unsuccessful, return false. */
29596
29597 static bool
29598 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29599 {
29600 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29601 HOST_WIDE_INT offset = 0;
29602 dw_die_ref ref = NULL;
29603 tree decl;
29604
29605 if (GET_CODE (rtl) == CONST
29606 && GET_CODE (XEXP (rtl, 0)) == PLUS
29607 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29608 {
29609 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29610 rtl = XEXP (XEXP (rtl, 0), 0);
29611 }
29612 if (GET_CODE (rtl) == CONST_STRING)
29613 {
29614 size_t len = strlen (XSTR (rtl, 0)) + 1;
29615 tree t = build_string (len, XSTR (rtl, 0));
29616 tree tlen = size_int (len - 1);
29617
29618 TREE_TYPE (t)
29619 = build_array_type (char_type_node, build_index_type (tlen));
29620 rtl = string_cst_pool_decl (t);
29621 if (!rtl)
29622 return false;
29623 }
29624 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29625 {
29626 decl = SYMBOL_REF_DECL (rtl);
29627 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29628 {
29629 ref = lookup_decl_die (decl);
29630 if (ref && (get_AT (ref, DW_AT_location)
29631 || get_AT (ref, DW_AT_const_value)))
29632 {
29633 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29634 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29635 loc->dw_loc_oprnd1.val_entry = NULL;
29636 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29637 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29638 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29639 loc->dw_loc_oprnd2.v.val_int = offset;
29640 return true;
29641 }
29642 }
29643 }
29644 return false;
29645 }
29646
29647 /* Helper function for resolve_addr, handle one location
29648 expression, return false if at least one CONST_STRING or SYMBOL_REF in
29649 the location list couldn't be resolved. */
29650
29651 static bool
29652 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29653 {
29654 dw_loc_descr_ref keep = NULL;
29655 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29656 switch (loc->dw_loc_opc)
29657 {
29658 case DW_OP_addr:
29659 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29660 {
29661 if ((prev == NULL
29662 || prev->dw_loc_opc == DW_OP_piece
29663 || prev->dw_loc_opc == DW_OP_bit_piece)
29664 && loc->dw_loc_next
29665 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29666 && (!dwarf_strict || dwarf_version >= 5)
29667 && optimize_one_addr_into_implicit_ptr (loc))
29668 break;
29669 return false;
29670 }
29671 break;
29672 case DW_OP_GNU_addr_index:
29673 case DW_OP_GNU_const_index:
29674 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
29675 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
29676 {
29677 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29678 if (!resolve_one_addr (&rtl))
29679 return false;
29680 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29681 loc->dw_loc_oprnd1.val_entry
29682 = add_addr_table_entry (rtl, ate_kind_rtx);
29683 }
29684 break;
29685 case DW_OP_const4u:
29686 case DW_OP_const8u:
29687 if (loc->dtprel
29688 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29689 return false;
29690 break;
29691 case DW_OP_plus_uconst:
29692 if (size_of_loc_descr (loc)
29693 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29694 + 1
29695 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29696 {
29697 dw_loc_descr_ref repl
29698 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29699 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29700 add_loc_descr (&repl, loc->dw_loc_next);
29701 *loc = *repl;
29702 }
29703 break;
29704 case DW_OP_implicit_value:
29705 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29706 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29707 return false;
29708 break;
29709 case DW_OP_implicit_pointer:
29710 case DW_OP_GNU_implicit_pointer:
29711 case DW_OP_GNU_parameter_ref:
29712 case DW_OP_GNU_variable_value:
29713 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29714 {
29715 dw_die_ref ref
29716 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29717 if (ref == NULL)
29718 return false;
29719 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29720 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29721 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29722 }
29723 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29724 {
29725 if (prev == NULL
29726 && loc->dw_loc_next == NULL
29727 && AT_class (a) == dw_val_class_loc)
29728 switch (a->dw_attr)
29729 {
29730 /* Following attributes allow both exprloc and reference,
29731 so if the whole expression is DW_OP_GNU_variable_value
29732 alone we could transform it into reference. */
29733 case DW_AT_byte_size:
29734 case DW_AT_bit_size:
29735 case DW_AT_lower_bound:
29736 case DW_AT_upper_bound:
29737 case DW_AT_bit_stride:
29738 case DW_AT_count:
29739 case DW_AT_allocated:
29740 case DW_AT_associated:
29741 case DW_AT_byte_stride:
29742 a->dw_attr_val.val_class = dw_val_class_die_ref;
29743 a->dw_attr_val.val_entry = NULL;
29744 a->dw_attr_val.v.val_die_ref.die
29745 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29746 a->dw_attr_val.v.val_die_ref.external = 0;
29747 return true;
29748 default:
29749 break;
29750 }
29751 if (dwarf_strict)
29752 return false;
29753 }
29754 break;
29755 case DW_OP_const_type:
29756 case DW_OP_regval_type:
29757 case DW_OP_deref_type:
29758 case DW_OP_convert:
29759 case DW_OP_reinterpret:
29760 case DW_OP_GNU_const_type:
29761 case DW_OP_GNU_regval_type:
29762 case DW_OP_GNU_deref_type:
29763 case DW_OP_GNU_convert:
29764 case DW_OP_GNU_reinterpret:
29765 while (loc->dw_loc_next
29766 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29767 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29768 {
29769 dw_die_ref base1, base2;
29770 unsigned enc1, enc2, size1, size2;
29771 if (loc->dw_loc_opc == DW_OP_regval_type
29772 || loc->dw_loc_opc == DW_OP_deref_type
29773 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29774 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29775 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29776 else if (loc->dw_loc_oprnd1.val_class
29777 == dw_val_class_unsigned_const)
29778 break;
29779 else
29780 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29781 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29782 == dw_val_class_unsigned_const)
29783 break;
29784 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29785 gcc_assert (base1->die_tag == DW_TAG_base_type
29786 && base2->die_tag == DW_TAG_base_type);
29787 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29788 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29789 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29790 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29791 if (size1 == size2
29792 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29793 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29794 && loc != keep)
29795 || enc1 == enc2))
29796 {
29797 /* Optimize away next DW_OP_convert after
29798 adjusting LOC's base type die reference. */
29799 if (loc->dw_loc_opc == DW_OP_regval_type
29800 || loc->dw_loc_opc == DW_OP_deref_type
29801 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29802 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29803 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29804 else
29805 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29806 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29807 continue;
29808 }
29809 /* Don't change integer DW_OP_convert after e.g. floating
29810 point typed stack entry. */
29811 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29812 keep = loc->dw_loc_next;
29813 break;
29814 }
29815 break;
29816 default:
29817 break;
29818 }
29819 return true;
29820 }
29821
29822 /* Helper function of resolve_addr. DIE had DW_AT_location of
29823 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand
29824 and DW_OP_addr couldn't be resolved. resolve_addr has already
29825 removed the DW_AT_location attribute. This function attempts to
29826 add a new DW_AT_location attribute with DW_OP_implicit_pointer
29827 to it or DW_AT_const_value attribute, if possible. */
29828
29829 static void
29830 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29831 {
29832 if (!VAR_P (decl)
29833 || lookup_decl_die (decl) != die
29834 || DECL_EXTERNAL (decl)
29835 || !TREE_STATIC (decl)
29836 || DECL_INITIAL (decl) == NULL_TREE
29837 || DECL_P (DECL_INITIAL (decl))
29838 || get_AT (die, DW_AT_const_value))
29839 return;
29840
29841 tree init = DECL_INITIAL (decl);
29842 HOST_WIDE_INT offset = 0;
29843 /* For variables that have been optimized away and thus
29844 don't have a memory location, see if we can emit
29845 DW_AT_const_value instead. */
29846 if (tree_add_const_value_attribute (die, init))
29847 return;
29848 if (dwarf_strict && dwarf_version < 5)
29849 return;
29850 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29851 and ADDR_EXPR refers to a decl that has DW_AT_location or
29852 DW_AT_const_value (but isn't addressable, otherwise
29853 resolving the original DW_OP_addr wouldn't fail), see if
29854 we can add DW_OP_implicit_pointer. */
29855 STRIP_NOPS (init);
29856 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29857 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29858 {
29859 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29860 init = TREE_OPERAND (init, 0);
29861 STRIP_NOPS (init);
29862 }
29863 if (TREE_CODE (init) != ADDR_EXPR)
29864 return;
29865 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29866 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29867 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29868 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29869 && TREE_OPERAND (init, 0) != decl))
29870 {
29871 dw_die_ref ref;
29872 dw_loc_descr_ref l;
29873
29874 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29875 {
29876 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29877 if (!rtl)
29878 return;
29879 decl = SYMBOL_REF_DECL (rtl);
29880 }
29881 else
29882 decl = TREE_OPERAND (init, 0);
29883 ref = lookup_decl_die (decl);
29884 if (ref == NULL
29885 || (!get_AT (ref, DW_AT_location)
29886 && !get_AT (ref, DW_AT_const_value)))
29887 return;
29888 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29889 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29890 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29891 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29892 add_AT_loc (die, DW_AT_location, l);
29893 }
29894 }
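
/* Illustrative sketch, not part of the compiler: peeling "&object plus
   a constant" out of an initializer, the same shape the code above
   looks for with POINTER_PLUS_EXPR/ADDR_EXPR.  The result is a target
   object plus a byte offset, which are exactly the two operands that
   DW_OP_implicit_pointer takes.  The expression representation here is
   a hypothetical toy, not GCC trees.  */

struct toy_expr
{
  enum { TOY_ADDR, TOY_PLUS } code;
  struct toy_expr *base;        /* For TOY_PLUS: the address operand.  */
  long addend;                  /* For TOY_PLUS: the constant offset.  */
  void *object;                 /* For TOY_ADDR: the referenced object.  */
};

/* Return the referenced object and store its byte offset in *POFFSET,
   or return NULL if INIT does not have the expected shape.  */

static void *
toy_peel_address (struct toy_expr *init, long *poffset)
{
  *poffset = 0;
  if (init->code == TOY_PLUS)
    {
      *poffset = init->addend;
      init = init->base;
    }
  return init->code == TOY_ADDR ? init->object : NULL;
}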
29895
29896 /* Return NULL if L is a valid DWARF expression, or otherwise return the
29897 first op in L that is not a valid DWARF expression op. */
29898
29899 static dw_loc_descr_ref
29900 non_dwarf_expression (dw_loc_descr_ref l)
29901 {
29902 while (l)
29903 {
29904 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29905 return l;
29906 switch (l->dw_loc_opc)
29907 {
29908 case DW_OP_regx:
29909 case DW_OP_implicit_value:
29910 case DW_OP_stack_value:
29911 case DW_OP_implicit_pointer:
29912 case DW_OP_GNU_implicit_pointer:
29913 case DW_OP_GNU_parameter_ref:
29914 case DW_OP_piece:
29915 case DW_OP_bit_piece:
29916 return l;
29917 default:
29918 break;
29919 }
29920 l = l->dw_loc_next;
29921 }
29922 return NULL;
29923 }
29924
29925 /* Return an adjusted copy of EXPR:
29926 If it is an empty DWARF expression, return it.
29927 If it is a valid non-empty DWARF expression,
29928 return a copy of EXPR with DW_OP_deref appended to it.
29929 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
29930 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
29931 If it is a DWARF expression followed by DW_OP_stack_value, return a
29932 copy of the DWARF expression without anything appended.
29933 Otherwise, return NULL. */
29934
29935 static dw_loc_descr_ref
29936 copy_deref_exprloc (dw_loc_descr_ref expr)
29937 {
29938 dw_loc_descr_ref tail = NULL;
29939
29940 if (expr == NULL)
29941 return NULL;
29942
29943 dw_loc_descr_ref l = non_dwarf_expression (expr);
29944 if (l && l->dw_loc_next)
29945 return NULL;
29946
29947 if (l)
29948 {
29949 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29950 tail = new_loc_descr ((enum dwarf_location_atom)
29951 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
29952 0, 0);
29953 else
29954 switch (l->dw_loc_opc)
29955 {
29956 case DW_OP_regx:
29957 tail = new_loc_descr (DW_OP_bregx,
29958 l->dw_loc_oprnd1.v.val_unsigned, 0);
29959 break;
29960 case DW_OP_stack_value:
29961 break;
29962 default:
29963 return NULL;
29964 }
29965 }
29966 else
29967 tail = new_loc_descr (DW_OP_deref, 0, 0);
29968
29969 dw_loc_descr_ref ret = NULL, *p = &ret;
29970 while (expr != l)
29971 {
29972 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
29973 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
29974 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
29975 p = &(*p)->dw_loc_next;
29976 expr = expr->dw_loc_next;
29977 }
29978 *p = tail;
29979 return ret;
29980 }
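
/* Illustrative sketch, not part of the compiler: the rewrite table that
   copy_deref_exprloc above implements, expressed as a toy function from
   the trailing opcode of a location description to the op that must be
   appended so the result computes the located object's value.  The
   opcode values are hypothetical stand-ins, not the real DWARF
   encodings.  */

enum toy_opc { TOY_END, TOY_REG, TOY_REGX, TOY_STACK_VALUE,
               TOY_DEREF, TOY_BREG, TOY_BREGX, TOY_INVALID };

static enum toy_opc
toy_deref_tail (enum toy_opc trailing)
{
  switch (trailing)
    {
    case TOY_END:         /* Memory location: load the value through it.  */
      return TOY_DEREF;
    case TOY_REG:         /* Register location: push reg contents + 0.  */
      return TOY_BREG;
    case TOY_REGX:
      return TOY_BREGX;
    case TOY_STACK_VALUE: /* Value already on the stack: append nothing.  */
      return TOY_END;
    default:              /* Implicit or composite locations: give up.  */
      return TOY_INVALID;
    }
}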
29981
29982 /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
29983 reference to a variable or argument, adjust it if needed and return:
29984 -1 if the DW_AT_string_length attribute and, if present, the
29985 DW_AT_{string_length_,}byte_size attribute should be removed,
29986 0 if the attribute should be kept, perhaps with minor modifications
29987 (no need to rescan), or 1 if the attribute has been successfully adjusted. */
29988
29989 static int
29990 optimize_string_length (dw_attr_node *a)
29991 {
29992 dw_loc_descr_ref l = AT_loc (a), lv;
29993 dw_die_ref die;
29994 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29995 {
29996 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
29997 die = lookup_decl_die (decl);
29998 if (die)
29999 {
30000 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30001 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30002 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30003 }
30004 else
30005 return -1;
30006 }
30007 else
30008 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30009
30010 /* DWARF5 allows reference class, so we can then reference the DIE.
30011 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30012 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30013 {
30014 a->dw_attr_val.val_class = dw_val_class_die_ref;
30015 a->dw_attr_val.val_entry = NULL;
30016 a->dw_attr_val.v.val_die_ref.die = die;
30017 a->dw_attr_val.v.val_die_ref.external = 0;
30018 return 0;
30019 }
30020
30021 dw_attr_node *av = get_AT (die, DW_AT_location);
30022 dw_loc_list_ref d;
30023 bool non_dwarf_expr = false;
30024
30025 if (av == NULL)
30026 return dwarf_strict ? -1 : 0;
30027 switch (AT_class (av))
30028 {
30029 case dw_val_class_loc_list:
30030 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30031 if (d->expr && non_dwarf_expression (d->expr))
30032 non_dwarf_expr = true;
30033 break;
30034 case dw_val_class_view_list:
30035 gcc_unreachable ();
30036 case dw_val_class_loc:
30037 lv = AT_loc (av);
30038 if (lv == NULL)
30039 return dwarf_strict ? -1 : 0;
30040 if (non_dwarf_expression (lv))
30041 non_dwarf_expr = true;
30042 break;
30043 default:
30044 return dwarf_strict ? -1 : 0;
30045 }
30046
30047 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30048 into DW_OP_call4 or DW_OP_GNU_variable_value into
30049 DW_OP_call4 DW_OP_deref, do so. */
30050 if (!non_dwarf_expr
30051 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30052 {
30053 l->dw_loc_opc = DW_OP_call4;
30054 if (l->dw_loc_next)
30055 l->dw_loc_next = NULL;
30056 else
30057 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30058 return 0;
30059 }
30060
30061 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30062 copy over the DW_AT_location attribute from die to a. */
30063 if (l->dw_loc_next != NULL)
30064 {
30065 a->dw_attr_val = av->dw_attr_val;
30066 return 1;
30067 }
30068
30069 dw_loc_list_ref list, *p;
30070 switch (AT_class (av))
30071 {
30072 case dw_val_class_loc_list:
30073 p = &list;
30074 list = NULL;
30075 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30076 {
30077 lv = copy_deref_exprloc (d->expr);
30078 if (lv)
30079 {
30080 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30081 p = &(*p)->dw_loc_next;
30082 }
30083 else if (!dwarf_strict && d->expr)
30084 return 0;
30085 }
30086 if (list == NULL)
30087 return dwarf_strict ? -1 : 0;
30088 a->dw_attr_val.val_class = dw_val_class_loc_list;
30089 gen_llsym (list);
30090 *AT_loc_list_ptr (a) = list;
30091 return 1;
30092 case dw_val_class_loc:
30093 lv = copy_deref_exprloc (AT_loc (av));
30094 if (lv == NULL)
30095 return dwarf_strict ? -1 : 0;
30096 a->dw_attr_val.v.val_loc = lv;
30097 return 1;
30098 default:
30099 gcc_unreachable ();
30100 }
30101 }
30102
30103 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30104 an address in .rodata section if the string literal is emitted there,
30105 or remove the containing location list or replace DW_AT_const_value
30106 with DW_AT_location and empty location expression, if it isn't found
30107 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30108 to something that has been emitted in the current CU. */
30109
30110 static void
30111 resolve_addr (dw_die_ref die)
30112 {
30113 dw_die_ref c;
30114 dw_attr_node *a;
30115 dw_loc_list_ref *curr, *start, loc;
30116 unsigned ix;
30117 bool remove_AT_byte_size = false;
30118
30119 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30120 switch (AT_class (a))
30121 {
30122 case dw_val_class_loc_list:
30123 start = curr = AT_loc_list_ptr (a);
30124 loc = *curr;
30125 gcc_assert (loc);
30126 /* The same list can be referenced more than once. See if we have
30127 already recorded the result from a previous pass. */
30128 if (loc->replaced)
30129 *curr = loc->dw_loc_next;
30130 else if (!loc->resolved_addr)
30131 {
30132 /* As things stand, we do not expect or allow one die to
30133 reference a suffix of another die's location list chain.
30134 References must be identical or completely separate.
30135 There is therefore no need to cache the result of this
30136 pass on any list other than the first; doing so
30137 would lead to unnecessary writes. */
30138 while (*curr)
30139 {
30140 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30141 if (!resolve_addr_in_expr (a, (*curr)->expr))
30142 {
30143 dw_loc_list_ref next = (*curr)->dw_loc_next;
30144 dw_loc_descr_ref l = (*curr)->expr;
30145
30146 if (next && (*curr)->ll_symbol)
30147 {
30148 gcc_assert (!next->ll_symbol);
30149 next->ll_symbol = (*curr)->ll_symbol;
30150 next->vl_symbol = (*curr)->vl_symbol;
30151 }
30152 if (dwarf_split_debug_info)
30153 remove_loc_list_addr_table_entries (l);
30154 *curr = next;
30155 }
30156 else
30157 {
30158 mark_base_types ((*curr)->expr);
30159 curr = &(*curr)->dw_loc_next;
30160 }
30161 }
30162 if (loc == *start)
30163 loc->resolved_addr = 1;
30164 else
30165 {
30166 loc->replaced = 1;
30167 loc->dw_loc_next = *start;
30168 }
30169 }
30170 if (!*start)
30171 {
30172 remove_AT (die, a->dw_attr);
30173 ix--;
30174 }
30175 break;
30176 case dw_val_class_view_list:
30177 {
30178 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30179 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30180 dw_val_node *llnode
30181 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30182 /* If we no longer have a loclist, or it no longer needs
30183 views, drop this attribute. */
30184 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30185 {
30186 remove_AT (die, a->dw_attr);
30187 ix--;
30188 }
30189 break;
30190 }
30191 case dw_val_class_loc:
30192 {
30193 dw_loc_descr_ref l = AT_loc (a);
30194 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30195 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30196 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30197 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30198 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30199 with DW_FORM_ref referencing the same DIE as
30200 DW_OP_GNU_variable_value used to reference. */
30201 if (a->dw_attr == DW_AT_string_length
30202 && l
30203 && l->dw_loc_opc == DW_OP_GNU_variable_value
30204 && (l->dw_loc_next == NULL
30205 || (l->dw_loc_next->dw_loc_next == NULL
30206 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30207 {
30208 switch (optimize_string_length (a))
30209 {
30210 case -1:
30211 remove_AT (die, a->dw_attr);
30212 ix--;
30213 /* If we drop DW_AT_string_length, we need to drop also
30214 DW_AT_{string_length_,}byte_size. */
30215 remove_AT_byte_size = true;
30216 continue;
30217 default:
30218 break;
30219 case 1:
30220 /* Even if we keep the optimized DW_AT_string_length,
30221 it might have changed AT_class, so process it again. */
30222 ix--;
30223 continue;
30224 }
30225 }
30226 /* For -gdwarf-2 don't attempt to optimize
30227 DW_AT_data_member_location containing
30228 DW_OP_plus_uconst - older consumers might
30229 rely on it being that op instead of a more complex,
30230 but shorter, location description. */
30231 if ((dwarf_version > 2
30232 || a->dw_attr != DW_AT_data_member_location
30233 || l == NULL
30234 || l->dw_loc_opc != DW_OP_plus_uconst
30235 || l->dw_loc_next != NULL)
30236 && !resolve_addr_in_expr (a, l))
30237 {
30238 if (dwarf_split_debug_info)
30239 remove_loc_list_addr_table_entries (l);
30240 if (l != NULL
30241 && l->dw_loc_next == NULL
30242 && l->dw_loc_opc == DW_OP_addr
30243 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30244 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30245 && a->dw_attr == DW_AT_location)
30246 {
30247 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30248 remove_AT (die, a->dw_attr);
30249 ix--;
30250 optimize_location_into_implicit_ptr (die, decl);
30251 break;
30252 }
30253 if (a->dw_attr == DW_AT_string_length)
30254 /* If we drop DW_AT_string_length, we need to drop also
30255 DW_AT_{string_length_,}byte_size. */
30256 remove_AT_byte_size = true;
30257 remove_AT (die, a->dw_attr);
30258 ix--;
30259 }
30260 else
30261 mark_base_types (l);
30262 }
30263 break;
30264 case dw_val_class_addr:
30265 if (a->dw_attr == DW_AT_const_value
30266 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30267 {
30268 if (AT_index (a) != NOT_INDEXED)
30269 remove_addr_table_entry (a->dw_attr_val.val_entry);
30270 remove_AT (die, a->dw_attr);
30271 ix--;
30272 }
30273 if ((die->die_tag == DW_TAG_call_site
30274 && a->dw_attr == DW_AT_call_origin)
30275 || (die->die_tag == DW_TAG_GNU_call_site
30276 && a->dw_attr == DW_AT_abstract_origin))
30277 {
30278 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30279 dw_die_ref tdie = lookup_decl_die (tdecl);
30280 dw_die_ref cdie;
30281 if (tdie == NULL
30282 && DECL_EXTERNAL (tdecl)
30283 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30284 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30285 {
30286 dw_die_ref pdie = cdie;
30287 /* Make sure we don't add these DIEs into type units.
30288 We could emit skeleton DIEs for context (namespaces,
30289 outer structs/classes) and a skeleton DIE for the
30290 innermost context with DW_AT_signature pointing to the
30291 type unit. See PR78835. */
30292 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30293 pdie = pdie->die_parent;
30294 if (pdie == NULL)
30295 {
30296 /* Creating a full DIE for tdecl is overly expensive and
30297 at this point even wrong when in the LTO phase
30298 as it can end up generating new type DIEs we didn't
30299 output and thus optimize_external_refs will crash. */
30300 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30301 add_AT_flag (tdie, DW_AT_external, 1);
30302 add_AT_flag (tdie, DW_AT_declaration, 1);
30303 add_linkage_attr (tdie, tdecl);
30304 add_name_and_src_coords_attributes (tdie, tdecl, true);
30305 equate_decl_number_to_die (tdecl, tdie);
30306 }
30307 }
30308 if (tdie)
30309 {
30310 a->dw_attr_val.val_class = dw_val_class_die_ref;
30311 a->dw_attr_val.v.val_die_ref.die = tdie;
30312 a->dw_attr_val.v.val_die_ref.external = 0;
30313 }
30314 else
30315 {
30316 if (AT_index (a) != NOT_INDEXED)
30317 remove_addr_table_entry (a->dw_attr_val.val_entry);
30318 remove_AT (die, a->dw_attr);
30319 ix--;
30320 }
30321 }
30322 break;
30323 default:
30324 break;
30325 }
30326
30327 if (remove_AT_byte_size)
30328 remove_AT (die, dwarf_version >= 5
30329 ? DW_AT_string_length_byte_size
30330 : DW_AT_byte_size);
30331
30332 FOR_EACH_CHILD (die, c, resolve_addr (c));
30333 }
30334 \f
30335 /* Helper routines for optimize_location_lists.
30336 This pass tries to share identical location lists in the .debug_loc
30337 section. */
30338
30339 /* Iteratively hash operands of LOC opcode into HSTATE. */
30340
30341 static void
30342 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30343 {
30344 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30345 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30346
30347 switch (loc->dw_loc_opc)
30348 {
30349 case DW_OP_const4u:
30350 case DW_OP_const8u:
30351 if (loc->dtprel)
30352 goto hash_addr;
30353 /* FALLTHRU */
30354 case DW_OP_const1u:
30355 case DW_OP_const1s:
30356 case DW_OP_const2u:
30357 case DW_OP_const2s:
30358 case DW_OP_const4s:
30359 case DW_OP_const8s:
30360 case DW_OP_constu:
30361 case DW_OP_consts:
30362 case DW_OP_pick:
30363 case DW_OP_plus_uconst:
30364 case DW_OP_breg0:
30365 case DW_OP_breg1:
30366 case DW_OP_breg2:
30367 case DW_OP_breg3:
30368 case DW_OP_breg4:
30369 case DW_OP_breg5:
30370 case DW_OP_breg6:
30371 case DW_OP_breg7:
30372 case DW_OP_breg8:
30373 case DW_OP_breg9:
30374 case DW_OP_breg10:
30375 case DW_OP_breg11:
30376 case DW_OP_breg12:
30377 case DW_OP_breg13:
30378 case DW_OP_breg14:
30379 case DW_OP_breg15:
30380 case DW_OP_breg16:
30381 case DW_OP_breg17:
30382 case DW_OP_breg18:
30383 case DW_OP_breg19:
30384 case DW_OP_breg20:
30385 case DW_OP_breg21:
30386 case DW_OP_breg22:
30387 case DW_OP_breg23:
30388 case DW_OP_breg24:
30389 case DW_OP_breg25:
30390 case DW_OP_breg26:
30391 case DW_OP_breg27:
30392 case DW_OP_breg28:
30393 case DW_OP_breg29:
30394 case DW_OP_breg30:
30395 case DW_OP_breg31:
30396 case DW_OP_regx:
30397 case DW_OP_fbreg:
30398 case DW_OP_piece:
30399 case DW_OP_deref_size:
30400 case DW_OP_xderef_size:
30401 hstate.add_object (val1->v.val_int);
30402 break;
30403 case DW_OP_skip:
30404 case DW_OP_bra:
30405 {
30406 int offset;
30407
30408 gcc_assert (val1->val_class == dw_val_class_loc);
30409 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30410 hstate.add_object (offset);
30411 }
30412 break;
30413 case DW_OP_implicit_value:
30414 hstate.add_object (val1->v.val_unsigned);
30415 switch (val2->val_class)
30416 {
30417 case dw_val_class_const:
30418 hstate.add_object (val2->v.val_int);
30419 break;
30420 case dw_val_class_vec:
30421 {
30422 unsigned int elt_size = val2->v.val_vec.elt_size;
30423 unsigned int len = val2->v.val_vec.length;
30424
30425 hstate.add_int (elt_size);
30426 hstate.add_int (len);
30427 hstate.add (val2->v.val_vec.array, len * elt_size);
30428 }
30429 break;
30430 case dw_val_class_const_double:
30431 hstate.add_object (val2->v.val_double.low);
30432 hstate.add_object (val2->v.val_double.high);
30433 break;
30434 case dw_val_class_wide_int:
30435 hstate.add (val2->v.val_wide->get_val (),
30436 get_full_len (*val2->v.val_wide)
30437 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30438 break;
30439 case dw_val_class_addr:
30440 inchash::add_rtx (val2->v.val_addr, hstate);
30441 break;
30442 default:
30443 gcc_unreachable ();
30444 }
30445 break;
30446 case DW_OP_bregx:
30447 case DW_OP_bit_piece:
30448 hstate.add_object (val1->v.val_int);
30449 hstate.add_object (val2->v.val_int);
30450 break;
30451 case DW_OP_addr:
30452 hash_addr:
30453 if (loc->dtprel)
30454 {
30455 unsigned char dtprel = 0xd1;
30456 hstate.add_object (dtprel);
30457 }
30458 inchash::add_rtx (val1->v.val_addr, hstate);
30459 break;
30460 case DW_OP_GNU_addr_index:
30461 case DW_OP_GNU_const_index:
30462 {
30463 if (loc->dtprel)
30464 {
30465 unsigned char dtprel = 0xd1;
30466 hstate.add_object (dtprel);
30467 }
30468 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30469 }
30470 break;
30471 case DW_OP_implicit_pointer:
30472 case DW_OP_GNU_implicit_pointer:
30473 hstate.add_int (val2->v.val_int);
30474 break;
30475 case DW_OP_entry_value:
30476 case DW_OP_GNU_entry_value:
30477 hstate.add_object (val1->v.val_loc);
30478 break;
30479 case DW_OP_regval_type:
30480 case DW_OP_deref_type:
30481 case DW_OP_GNU_regval_type:
30482 case DW_OP_GNU_deref_type:
30483 {
30484 unsigned int byte_size
30485 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30486 unsigned int encoding
30487 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30488 hstate.add_object (val1->v.val_int);
30489 hstate.add_object (byte_size);
30490 hstate.add_object (encoding);
30491 }
30492 break;
30493 case DW_OP_convert:
30494 case DW_OP_reinterpret:
30495 case DW_OP_GNU_convert:
30496 case DW_OP_GNU_reinterpret:
30497 if (val1->val_class == dw_val_class_unsigned_const)
30498 {
30499 hstate.add_object (val1->v.val_unsigned);
30500 break;
30501 }
30502 /* FALLTHRU */
30503 case DW_OP_const_type:
30504 case DW_OP_GNU_const_type:
30505 {
30506 unsigned int byte_size
30507 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30508 unsigned int encoding
30509 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30510 hstate.add_object (byte_size);
30511 hstate.add_object (encoding);
30512 if (loc->dw_loc_opc != DW_OP_const_type
30513 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30514 break;
30515 hstate.add_object (val2->val_class);
30516 switch (val2->val_class)
30517 {
30518 case dw_val_class_const:
30519 hstate.add_object (val2->v.val_int);
30520 break;
30521 case dw_val_class_vec:
30522 {
30523 unsigned int elt_size = val2->v.val_vec.elt_size;
30524 unsigned int len = val2->v.val_vec.length;
30525
30526 hstate.add_object (elt_size);
30527 hstate.add_object (len);
30528 hstate.add (val2->v.val_vec.array, len * elt_size);
30529 }
30530 break;
30531 case dw_val_class_const_double:
30532 hstate.add_object (val2->v.val_double.low);
30533 hstate.add_object (val2->v.val_double.high);
30534 break;
30535 case dw_val_class_wide_int:
30536 hstate.add (val2->v.val_wide->get_val (),
30537 get_full_len (*val2->v.val_wide)
30538 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30539 break;
30540 default:
30541 gcc_unreachable ();
30542 }
30543 }
30544 break;
30545
30546 default:
30547 /* Other codes have no operands. */
30548 break;
30549 }
30550 }
30551
30552 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30553
30554 static inline void
30555 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30556 {
30557 dw_loc_descr_ref l;
30558 bool sizes_computed = false;
30559 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30560 size_of_locs (loc);
30561
30562 for (l = loc; l != NULL; l = l->dw_loc_next)
30563 {
30564 enum dwarf_location_atom opc = l->dw_loc_opc;
30565 hstate.add_object (opc);
30566 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30567 {
30568 size_of_locs (loc);
30569 sizes_computed = true;
30570 }
30571 hash_loc_operands (l, hstate);
30572 }
30573 }
30574
30575 /* Compute hash of the whole location list LIST_HEAD. */
30576
30577 static inline void
30578 hash_loc_list (dw_loc_list_ref list_head)
30579 {
30580 dw_loc_list_ref curr = list_head;
30581 inchash::hash hstate;
30582
30583 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30584 {
30585 hstate.add (curr->begin, strlen (curr->begin) + 1);
30586 hstate.add (curr->end, strlen (curr->end) + 1);
30587 hstate.add_object (curr->vbegin);
30588 hstate.add_object (curr->vend);
30589 if (curr->section)
30590 hstate.add (curr->section, strlen (curr->section) + 1);
30591 hash_locs (curr->expr, hstate);
30592 }
30593 list_head->hash = hstate.end ();
30594 }
30595
30596 /* Return true if X and Y opcodes have the same operands. */
30597
30598 static inline bool
30599 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30600 {
30601 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30602 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30603 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30604 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30605
30606 switch (x->dw_loc_opc)
30607 {
30608 case DW_OP_const4u:
30609 case DW_OP_const8u:
30610 if (x->dtprel)
30611 goto hash_addr;
30612 /* FALLTHRU */
30613 case DW_OP_const1u:
30614 case DW_OP_const1s:
30615 case DW_OP_const2u:
30616 case DW_OP_const2s:
30617 case DW_OP_const4s:
30618 case DW_OP_const8s:
30619 case DW_OP_constu:
30620 case DW_OP_consts:
30621 case DW_OP_pick:
30622 case DW_OP_plus_uconst:
30623 case DW_OP_breg0:
30624 case DW_OP_breg1:
30625 case DW_OP_breg2:
30626 case DW_OP_breg3:
30627 case DW_OP_breg4:
30628 case DW_OP_breg5:
30629 case DW_OP_breg6:
30630 case DW_OP_breg7:
30631 case DW_OP_breg8:
30632 case DW_OP_breg9:
30633 case DW_OP_breg10:
30634 case DW_OP_breg11:
30635 case DW_OP_breg12:
30636 case DW_OP_breg13:
30637 case DW_OP_breg14:
30638 case DW_OP_breg15:
30639 case DW_OP_breg16:
30640 case DW_OP_breg17:
30641 case DW_OP_breg18:
30642 case DW_OP_breg19:
30643 case DW_OP_breg20:
30644 case DW_OP_breg21:
30645 case DW_OP_breg22:
30646 case DW_OP_breg23:
30647 case DW_OP_breg24:
30648 case DW_OP_breg25:
30649 case DW_OP_breg26:
30650 case DW_OP_breg27:
30651 case DW_OP_breg28:
30652 case DW_OP_breg29:
30653 case DW_OP_breg30:
30654 case DW_OP_breg31:
30655 case DW_OP_regx:
30656 case DW_OP_fbreg:
30657 case DW_OP_piece:
30658 case DW_OP_deref_size:
30659 case DW_OP_xderef_size:
30660 return valx1->v.val_int == valy1->v.val_int;
30661 case DW_OP_skip:
30662 case DW_OP_bra:
30663 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30664 can cause irrelevant differences in dw_loc_addr. */
30665 gcc_assert (valx1->val_class == dw_val_class_loc
30666 && valy1->val_class == dw_val_class_loc
30667 && (dwarf_split_debug_info
30668 || x->dw_loc_addr == y->dw_loc_addr));
30669 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30670 case DW_OP_implicit_value:
30671 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30672 || valx2->val_class != valy2->val_class)
30673 return false;
30674 switch (valx2->val_class)
30675 {
30676 case dw_val_class_const:
30677 return valx2->v.val_int == valy2->v.val_int;
30678 case dw_val_class_vec:
30679 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30680 && valx2->v.val_vec.length == valy2->v.val_vec.length
30681 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30682 valx2->v.val_vec.elt_size
30683 * valx2->v.val_vec.length) == 0;
30684 case dw_val_class_const_double:
30685 return valx2->v.val_double.low == valy2->v.val_double.low
30686 && valx2->v.val_double.high == valy2->v.val_double.high;
30687 case dw_val_class_wide_int:
30688 return *valx2->v.val_wide == *valy2->v.val_wide;
30689 case dw_val_class_addr:
30690 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30691 default:
30692 gcc_unreachable ();
30693 }
30694 case DW_OP_bregx:
30695 case DW_OP_bit_piece:
30696 return valx1->v.val_int == valy1->v.val_int
30697 && valx2->v.val_int == valy2->v.val_int;
30698 case DW_OP_addr:
30699 hash_addr:
30700 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30701 case DW_OP_GNU_addr_index:
30702 case DW_OP_GNU_const_index:
30703 {
30704 rtx ax1 = valx1->val_entry->addr.rtl;
30705 rtx ay1 = valy1->val_entry->addr.rtl;
30706 return rtx_equal_p (ax1, ay1);
30707 }
30708 case DW_OP_implicit_pointer:
30709 case DW_OP_GNU_implicit_pointer:
30710 return valx1->val_class == dw_val_class_die_ref
30711 && valx1->val_class == valy1->val_class
30712 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30713 && valx2->v.val_int == valy2->v.val_int;
30714 case DW_OP_entry_value:
30715 case DW_OP_GNU_entry_value:
30716 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30717 case DW_OP_const_type:
30718 case DW_OP_GNU_const_type:
30719 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30720 || valx2->val_class != valy2->val_class)
30721 return false;
30722 switch (valx2->val_class)
30723 {
30724 case dw_val_class_const:
30725 return valx2->v.val_int == valy2->v.val_int;
30726 case dw_val_class_vec:
30727 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30728 && valx2->v.val_vec.length == valy2->v.val_vec.length
30729 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30730 valx2->v.val_vec.elt_size
30731 * valx2->v.val_vec.length) == 0;
30732 case dw_val_class_const_double:
30733 return valx2->v.val_double.low == valy2->v.val_double.low
30734 && valx2->v.val_double.high == valy2->v.val_double.high;
30735 case dw_val_class_wide_int:
30736 return *valx2->v.val_wide == *valy2->v.val_wide;
30737 default:
30738 gcc_unreachable ();
30739 }
30740 case DW_OP_regval_type:
30741 case DW_OP_deref_type:
30742 case DW_OP_GNU_regval_type:
30743 case DW_OP_GNU_deref_type:
30744 return valx1->v.val_int == valy1->v.val_int
30745 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30746 case DW_OP_convert:
30747 case DW_OP_reinterpret:
30748 case DW_OP_GNU_convert:
30749 case DW_OP_GNU_reinterpret:
30750 if (valx1->val_class != valy1->val_class)
30751 return false;
30752 if (valx1->val_class == dw_val_class_unsigned_const)
30753 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30754 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30755 case DW_OP_GNU_parameter_ref:
30756 return valx1->val_class == dw_val_class_die_ref
30757 && valx1->val_class == valy1->val_class
30758 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30759 default:
30760 /* Other codes have no operands. */
30761 return true;
30762 }
30763 }
30764
30765 /* Return true if DWARF location expressions X and Y are the same. */
30766
30767 static inline bool
30768 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30769 {
30770 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30771 if (x->dw_loc_opc != y->dw_loc_opc
30772 || x->dtprel != y->dtprel
30773 || !compare_loc_operands (x, y))
30774 break;
30775 return x == NULL && y == NULL;
30776 }
30777
30778 /* Hashtable helpers. */
30779
30780 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30781 {
30782 static inline hashval_t hash (const dw_loc_list_struct *);
30783 static inline bool equal (const dw_loc_list_struct *,
30784 const dw_loc_list_struct *);
30785 };
30786
30787 /* Return precomputed hash of location list X. */
30788
30789 inline hashval_t
30790 loc_list_hasher::hash (const dw_loc_list_struct *x)
30791 {
30792 return x->hash;
30793 }
30794
30795 /* Return true if location lists A and B are the same. */
30796
30797 inline bool
30798 loc_list_hasher::equal (const dw_loc_list_struct *a,
30799 const dw_loc_list_struct *b)
30800 {
30801 if (a == b)
30802 return 1;
30803 if (a->hash != b->hash)
30804 return 0;
30805 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30806 if (strcmp (a->begin, b->begin) != 0
30807 || strcmp (a->end, b->end) != 0
30808 || (a->section == NULL) != (b->section == NULL)
30809 || (a->section && strcmp (a->section, b->section) != 0)
30810 || a->vbegin != b->vbegin || a->vend != b->vend
30811 || !compare_locs (a->expr, b->expr))
30812 break;
30813 return a == NULL && b == NULL;
30814 }
30815
30816 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30817
30818
30819 /* Recursively optimize location lists referenced from DIE
30820 children and share them whenever possible. */
30821
30822 static void
30823 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30824 {
30825 dw_die_ref c;
30826 dw_attr_node *a;
30827 unsigned ix;
30828 dw_loc_list_struct **slot;
30829 bool drop_locviews = false;
30830 bool has_locviews = false;
30831
30832 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30833 if (AT_class (a) == dw_val_class_loc_list)
30834 {
30835 dw_loc_list_ref list = AT_loc_list (a);
30836 /* TODO: perform some optimizations here, before hashing
30837 it and storing into the hash table. */
30838 hash_loc_list (list);
30839 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30840 if (*slot == NULL)
30841 {
30842 *slot = list;
30843 if (loc_list_has_views (list))
30844 gcc_assert (list->vl_symbol);
30845 else if (list->vl_symbol)
30846 {
30847 drop_locviews = true;
30848 list->vl_symbol = NULL;
30849 }
30850 }
30851 else
30852 {
30853 if (list->vl_symbol && !(*slot)->vl_symbol)
30854 drop_locviews = true;
30855 a->dw_attr_val.v.val_loc_list = *slot;
30856 }
30857 }
30858 else if (AT_class (a) == dw_val_class_view_list)
30859 {
30860 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30861 has_locviews = true;
30862 }
30863
30864
30865 if (drop_locviews && has_locviews)
30866 remove_AT (die, DW_AT_GNU_locviews);
30867
30868 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30869 }
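/* Note: a shared location list must agree about location views; when a
   list's views turn out to be unused, or it is merged with an existing
   copy that carries no view information, the views are dropped and the
   corresponding DW_AT_GNU_locviews attribute is removed from the DIE.  */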
30870
30871
30872 /* Recursively assign each location list a unique index into the debug_addr
30873 section. */
30874
30875 static void
30876 index_location_lists (dw_die_ref die)
30877 {
30878 dw_die_ref c;
30879 dw_attr_node *a;
30880 unsigned ix;
30881
30882 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30883 if (AT_class (a) == dw_val_class_loc_list)
30884 {
30885 dw_loc_list_ref list = AT_loc_list (a);
30886 dw_loc_list_ref curr;
30887 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30888 {
30889 /* Don't index an entry that has already been indexed
30890 or won't be output. */
30891 if (curr->begin_entry != NULL
30892 || skip_loc_list_entry (curr))
30893 continue;
30894
30895 curr->begin_entry
30896 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30897 }
30898 }
30899
30900 FOR_EACH_CHILD (die, c, index_location_lists (c));
30901 }
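/* Note: the entries recorded above end up in the .debug_addr table used
   by -gsplit-dwarf; location list begin labels are then referenced by
   their index into that table (indices are assigned later via
   index_addr_table_entry in dwarf2out_finish) rather than by relocated
   addresses in the .dwo sections.  */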
30902
30903 /* Optimize location lists referenced from DIE
30904 children and share them whenever possible. */
30905
30906 static void
30907 optimize_location_lists (dw_die_ref die)
30908 {
30909 loc_list_hash_type htab (500);
30910 optimize_location_lists_1 (die, &htab);
30911 }
30912 \f
30913 /* Traverse the limbo die list, and add parent/child links. The only
30914 dies without parents that should be here are concrete instances of
30915 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
30916 For concrete instances, we can get the parent die from the abstract
30917 instance. */
30918
30919 static void
30920 flush_limbo_die_list (void)
30921 {
30922 limbo_die_node *node;
30923
30924 /* get_context_die calls force_decl_die, which can put new DIEs on the
30925 limbo list in LTO mode when nested functions are put in a different
30926 partition than that of their parent function. */
30927 while ((node = limbo_die_list))
30928 {
30929 dw_die_ref die = node->die;
30930 limbo_die_list = node->next;
30931
30932 if (die->die_parent == NULL)
30933 {
30934 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
30935
30936 if (origin && origin->die_parent)
30937 add_child_die (origin->die_parent, die);
30938 else if (is_cu_die (die))
30939 ;
30940 else if (seen_error ())
30941 /* It's OK to be confused by errors in the input. */
30942 add_child_die (comp_unit_die (), die);
30943 else
30944 {
30945 /* In certain situations, the lexical block containing a
30946 nested function can be optimized away, which results
30947 in the nested function die being orphaned. Likewise
30948 with the return type of that nested function. Force
30949 this to be a child of the containing function.
30950
30951 It may happen that even the containing function got fully
30952 inlined and optimized out. In that case we are lost and
30953 attach the orphan to the best context DIE we can find. This
30954 should not be a big issue as the function is likely unreachable too. */
30955 gcc_assert (node->created_for);
30956
30957 if (DECL_P (node->created_for))
30958 origin = get_context_die (DECL_CONTEXT (node->created_for));
30959 else if (TYPE_P (node->created_for))
30960 origin = scope_die_for (node->created_for, comp_unit_die ());
30961 else
30962 origin = comp_unit_die ();
30963
30964 add_child_die (origin, die);
30965 }
30966 }
30967 }
30968 }
30969
30970 /* Reset DIEs so we can output them again. */
30971
30972 static void
30973 reset_dies (dw_die_ref die)
30974 {
30975 dw_die_ref c;
30976
30977 /* Remove stuff we re-generate. */
30978 die->die_mark = 0;
30979 die->die_offset = 0;
30980 die->die_abbrev = 0;
30981 remove_AT (die, DW_AT_sibling);
30982
30983 FOR_EACH_CHILD (die, c, reset_dies (c));
30984 }
30985
30986 /* Output stuff that dwarf requires at the end of every file,
30987 and generate the DWARF-2 debugging info. */
30988
30989 static void
30990 dwarf2out_finish (const char *)
30991 {
30992 comdat_type_node *ctnode;
30993 dw_die_ref main_comp_unit_die;
30994 unsigned char checksum[16];
30995 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
30996
30997 /* Flush out any latecomers to the limbo party. */
30998 flush_limbo_die_list ();
30999
31000 if (inline_entry_data_table)
31001 gcc_assert (inline_entry_data_table->elements () == 0);
31002
31003 if (flag_checking)
31004 {
31005 verify_die (comp_unit_die ());
31006 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31007 verify_die (node->die);
31008 }
31009
31010 /* We shouldn't have any symbols with delayed asm names for
31011 DIEs generated after early finish. */
31012 gcc_assert (deferred_asm_name == NULL);
31013
31014 gen_remaining_tmpl_value_param_die_attribute ();
31015
31016 if (flag_generate_lto || flag_generate_offload)
31017 {
31018 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31019
31020 /* Prune stuff so that dwarf2out_finish runs successfully
31021 for the fat part of the object. */
31022 reset_dies (comp_unit_die ());
31023 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31024 reset_dies (node->die);
31025
31026 hash_table<comdat_type_hasher> comdat_type_table (100);
31027 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31028 {
31029 comdat_type_node **slot
31030 = comdat_type_table.find_slot (ctnode, INSERT);
31031
31032 /* Don't reset types twice. */
31033 if (*slot != HTAB_EMPTY_ENTRY)
31034 continue;
31035
31036 /* Reset the comdat type DIE so that it can be output again
31037 for the fat part of the object. */
31039 if (debug_info_level >= DINFO_LEVEL_TERSE)
31040 reset_dies (ctnode->root_die);
31041
31042 *slot = ctnode;
31043 }
31044
31045 /* Reset die CU symbol so we don't output it twice. */
31046 comp_unit_die ()->die_id.die_symbol = NULL;
31047
31048 /* Remove DW_AT_macro from the early output. */
31049 if (have_macinfo)
31050 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31051
31052 /* Remove indirect string decisions. */
31053 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
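/* (reset_indirect_string drops the form and label chosen when the
   strings were emitted for the LTO early debug sections, so the
   direct-vs-.debug_str decision can be remade for the fat object's
   own output below.)  */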
31054 }
31055
31056 #if ENABLE_ASSERT_CHECKING
31057 {
31058 dw_die_ref die = comp_unit_die (), c;
31059 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31060 }
31061 #endif
31062 resolve_addr (comp_unit_die ());
31063 move_marked_base_types ();
31064
31065 /* Initialize sections and labels used for actual assembler output. */
31066 unsigned generation = init_sections_and_labels (false);
31067
31068 /* Traverse the DIE's and add sibling attributes to those DIE's that
31069 have children. */
31070 add_sibling_attributes (comp_unit_die ());
31071 limbo_die_node *node;
31072 for (node = cu_die_list; node; node = node->next)
31073 add_sibling_attributes (node->die);
31074 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31075 add_sibling_attributes (ctnode->root_die);
31076
31077 /* When splitting DWARF info, we put some attributes in the
31078 skeleton compile_unit DIE that remains in the .o, while
31079 most attributes go in the DWO compile_unit_die. */
31080 if (dwarf_split_debug_info)
31081 {
31082 limbo_die_node *cu;
31083 main_comp_unit_die = gen_compile_unit_die (NULL);
31084 if (dwarf_version >= 5)
31085 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31086 cu = limbo_die_list;
31087 gcc_assert (cu->die == main_comp_unit_die);
31088 limbo_die_list = limbo_die_list->next;
31089 cu->next = cu_die_list;
31090 cu_die_list = cu;
31091 }
31092 else
31093 main_comp_unit_die = comp_unit_die ();
31094
31095 /* Output a terminator label for the .text section. */
31096 switch_to_section (text_section);
31097 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31098 if (cold_text_section)
31099 {
31100 switch_to_section (cold_text_section);
31101 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31102 }
31103
31104 /* We can only use the low/high_pc attributes if all of the code was
31105 in .text. */
31106 if (!have_multiple_function_sections
31107 || (dwarf_version < 3 && dwarf_strict))
31108 {
31109 /* Don't add if the CU has no associated code. */
31110 if (text_section_used)
31111 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31112 text_end_label, true);
31113 }
31114 else
31115 {
31116 unsigned fde_idx;
31117 dw_fde_ref fde;
31118 bool range_list_added = false;
31119
31120 if (text_section_used)
31121 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31122 text_end_label, &range_list_added, true);
31123 if (cold_text_section_used)
31124 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31125 cold_end_label, &range_list_added, true);
31126
31127 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31128 {
31129 if (DECL_IGNORED_P (fde->decl))
31130 continue;
31131 if (!fde->in_std_section)
31132 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31133 fde->dw_fde_end, &range_list_added,
31134 true);
31135 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31136 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31137 fde->dw_fde_second_end, &range_list_added,
31138 true);
31139 }
31140
31141 if (range_list_added)
31142 {
31143 /* We need to give .debug_loc and .debug_ranges an appropriate
31144 "base address". Use zero so that these addresses become
31145 absolute. Historically, we've emitted the unexpected
31146 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31147 Emit both to give time for other tools to adapt. */
31148 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31149 if (! dwarf_strict && dwarf_version < 4)
31150 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31151
31152 add_ranges (NULL);
31153 }
31154 }
31155
31156 /* AIX Assembler inserts the length, so adjust the reference to match the
31157 offset expected by debuggers. */
31158 strcpy (dl_section_ref, debug_line_section_label);
31159 if (XCOFF_DEBUGGING_INFO)
31160 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31161
31162 if (debug_info_level >= DINFO_LEVEL_TERSE)
31163 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31164 dl_section_ref);
31165
31166 if (have_macinfo)
31167 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31168 macinfo_section_label);
31169
31170 if (dwarf_split_debug_info)
31171 {
31172 if (have_location_lists)
31173 {
31174 if (dwarf_version >= 5)
31175 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
31176 loc_section_label);
31177 /* optimize_location_lists calculates the size of the lists,
31178 so index them first, and assign indices to the entries.
31179 Although optimize_location_lists will remove entries from
31180 the table, it only does so for duplicates, and therefore
31181 only reduces ref_counts to 1. */
31182 index_location_lists (comp_unit_die ());
31183 }
31184
31185 if (addr_index_table != NULL)
31186 {
31187 unsigned int index = 0;
31188 addr_index_table
31189 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31190 (&index);
31191 }
31192 }
31193
31194 loc_list_idx = 0;
31195 if (have_location_lists)
31196 {
31197 optimize_location_lists (comp_unit_die ());
31198 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31199 if (dwarf_version >= 5 && dwarf_split_debug_info)
31200 assign_location_list_indexes (comp_unit_die ());
31201 }
31202
31203 save_macinfo_strings ();
31204
31205 if (dwarf_split_debug_info)
31206 {
31207 unsigned int index = 0;
31208
31209 /* Add attributes common to skeleton compile_units and
31210 type_units. Because these attributes include strings, this
31211 must be done before freezing the string table. Top-level
31212 skeleton die attrs are added when the skeleton type unit is
31213 created, so ensure it is created by this point. */
31214 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31215 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31216 }
31217
31218 /* Output all of the compilation units. We put the main one last so that
31219 the offsets are available to output_pubnames. */
31220 for (node = cu_die_list; node; node = node->next)
31221 output_comp_unit (node->die, 0, NULL);
31222
31223 hash_table<comdat_type_hasher> comdat_type_table (100);
31224 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31225 {
31226 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31227
31228 /* Don't output duplicate types. */
31229 if (*slot != HTAB_EMPTY_ENTRY)
31230 continue;
31231
31232 /* Add a pointer to the line table for the main compilation unit
31233 so that the debugger can make sense of DW_AT_decl_file
31234 attributes. */
31235 if (debug_info_level >= DINFO_LEVEL_TERSE)
31236 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31237 (!dwarf_split_debug_info
31238 ? dl_section_ref
31239 : debug_skeleton_line_section_label));
31240
31241 output_comdat_type_unit (ctnode);
31242 *slot = ctnode;
31243 }
31244
31245 if (dwarf_split_debug_info)
31246 {
31247 int mark;
31248 struct md5_ctx ctx;
31249
31250 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31251 index_rnglists ();
31252
31253 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31254 md5_init_ctx (&ctx);
31255 mark = 0;
31256 die_checksum (comp_unit_die (), &ctx, &mark);
31257 unmark_all_dies (comp_unit_die ());
31258 md5_finish_ctx (&ctx, checksum);
31259
31260 if (dwarf_version < 5)
31261 {
31262 /* Use the first 8 bytes of the checksum as the dwo_id,
31263 and add it to both comp-unit DIEs. */
31264 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31265 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31266 }
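/* For DWARF 5 the dwo_id is not emitted as an attribute; the checksum
   computed above is instead passed to output_comp_unit and
   output_skeleton_debug_sections below, which place it in the unit
   headers.  */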
31267
31268 /* Add the base offset of the ranges table to the skeleton
31269 comp-unit DIE. */
31270 if (!vec_safe_is_empty (ranges_table))
31271 {
31272 if (dwarf_version >= 5)
31273 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31274 ranges_base_label);
31275 else
31276 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31277 ranges_section_label);
31278 }
31279
31280 switch_to_section (debug_addr_section);
31281 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31282 output_addr_table ();
31283 }
31284
31285 /* Output the main compilation unit if non-empty or if .debug_macinfo
31286 or .debug_macro will be emitted. */
31287 output_comp_unit (comp_unit_die (), have_macinfo,
31288 dwarf_split_debug_info ? checksum : NULL);
31289
31290 if (dwarf_split_debug_info && info_section_emitted)
31291 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31292
31293 /* Output the abbreviation table. */
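/* abbrev_die_table keeps an unused zeroth slot because DWARF
   abbreviation codes start at 1 (code 0 marks the end of a sibling
   chain), so a length of 1 means no abbreviations were generated.  */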
31294 if (vec_safe_length (abbrev_die_table) != 1)
31295 {
31296 switch_to_section (debug_abbrev_section);
31297 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31298 output_abbrev_section ();
31299 }
31300
31301 /* Output location list section if necessary. */
31302 if (have_location_lists)
31303 {
31304 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31305 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31306 /* Output the location lists info. */
31307 switch_to_section (debug_loc_section);
31308 if (dwarf_version >= 5)
31309 {
31310 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31311 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31312 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31313 dw2_asm_output_data (4, 0xffffffff,
31314 "Initial length escape value indicating "
31315 "64-bit DWARF extension");
31316 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31317 "Length of Location Lists");
31318 ASM_OUTPUT_LABEL (asm_out_file, l1);
31319 output_dwarf_version ();
31320 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31321 dw2_asm_output_data (1, 0, "Segment Size");
31322 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31323 "Offset Entry Count");
31324 }
31325 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31326 if (dwarf_version >= 5 && dwarf_split_debug_info)
31327 {
31328 unsigned int save_loc_list_idx = loc_list_idx;
31329 loc_list_idx = 0;
31330 output_loclists_offsets (comp_unit_die ());
31331 gcc_assert (save_loc_list_idx == loc_list_idx);
31332 }
31333 output_location_lists (comp_unit_die ());
31334 if (dwarf_version >= 5)
31335 ASM_OUTPUT_LABEL (asm_out_file, l2);
31336 }
31337
31338 output_pubtables ();
31339
31340 /* Output the address range information if a CU (.debug_info section)
31341 was emitted. We output an empty table even if we had no functions
31342 to put in it. This is because the consumer has no way to tell the
31343 difference between an empty table that we omitted and failure to
31344 generate a table that would have contained data. */
31345 if (info_section_emitted)
31346 {
31347 switch_to_section (debug_aranges_section);
31348 output_aranges ();
31349 }
31350
31351 /* Output ranges section if necessary. */
31352 if (!vec_safe_is_empty (ranges_table))
31353 {
31354 if (dwarf_version >= 5)
31355 output_rnglists (generation);
31356 else
31357 output_ranges ();
31358 }
31359
31360 /* Have to end the macro section. */
31361 if (have_macinfo)
31362 {
31363 switch_to_section (debug_macinfo_section);
31364 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31365 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31366 : debug_skeleton_line_section_label, false);
31367 dw2_asm_output_data (1, 0, "End compilation unit");
31368 }
31369
31370 /* Output the source line correspondence table. We must do this
31371 even if there is no line information. Otherwise, on an empty
31372 translation unit, we will generate a present, but empty,
31373 .debug_info section. IRIX 6.5 `nm' will then complain when
31374 examining the file. This is done late so that any filenames
31375 used by the debug_info section are marked as 'used'. */
31376 switch_to_section (debug_line_section);
31377 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31378 if (! output_asm_line_debug_info ())
31379 output_line_info (false);
31380
31381 if (dwarf_split_debug_info && info_section_emitted)
31382 {
31383 switch_to_section (debug_skeleton_line_section);
31384 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31385 output_line_info (true);
31386 }
31387
31388 /* If we emitted any indirect strings, output the string table too. */
31389 if (debug_str_hash || skeleton_debug_str_hash)
31390 output_indirect_strings ();
31391 if (debug_line_str_hash)
31392 {
31393 switch_to_section (debug_line_str_section);
31394 const enum dwarf_form form = DW_FORM_line_strp;
31395 debug_line_str_hash->traverse<enum dwarf_form,
31396 output_indirect_string> (form);
31397 }
31398
31399 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31400 symview_upper_bound = 0;
31401 if (zero_view_p)
31402 bitmap_clear (zero_view_p);
31403 }
31404
31405 /* Returns a hash value for X (which really is a variable_value_struct). */
31406
31407 inline hashval_t
31408 variable_value_hasher::hash (variable_value_struct *x)
31409 {
31410 return (hashval_t) x->decl_id;
31411 }
31412
31413 /* Return nonzero if decl_id of variable_value_struct X is the same as
31414 UID of decl Y. */
31415
31416 inline bool
31417 variable_value_hasher::equal (variable_value_struct *x, tree y)
31418 {
31419 return x->decl_id == DECL_UID (y);
31420 }
31421
31422 /* Helper function for resolve_variable_value, handle
31423 DW_OP_GNU_variable_value in one location expression.
31424 Return true if exprloc has been changed into loclist. */
31425
31426 static bool
31427 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31428 {
31429 dw_loc_descr_ref next;
31430 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31431 {
31432 next = loc->dw_loc_next;
31433 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31434 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31435 continue;
31436
31437 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31438 if (DECL_CONTEXT (decl) != current_function_decl)
31439 continue;
31440
31441 dw_die_ref ref = lookup_decl_die (decl);
31442 if (ref)
31443 {
31444 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31445 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31446 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31447 continue;
31448 }
31449 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31450 if (l == NULL)
31451 continue;
31452 if (l->dw_loc_next)
31453 {
31454 if (AT_class (a) != dw_val_class_loc)
31455 continue;
31456 switch (a->dw_attr)
31457 {
31458 /* The following attributes allow both exprloc and loclist
31459 classes, so we can change them into a loclist. */
31460 case DW_AT_location:
31461 case DW_AT_string_length:
31462 case DW_AT_return_addr:
31463 case DW_AT_data_member_location:
31464 case DW_AT_frame_base:
31465 case DW_AT_segment:
31466 case DW_AT_static_link:
31467 case DW_AT_use_location:
31468 case DW_AT_vtable_elem_location:
31469 if (prev)
31470 {
31471 prev->dw_loc_next = NULL;
31472 prepend_loc_descr_to_each (l, AT_loc (a));
31473 }
31474 if (next)
31475 add_loc_descr_to_each (l, next);
31476 a->dw_attr_val.val_class = dw_val_class_loc_list;
31477 a->dw_attr_val.val_entry = NULL;
31478 a->dw_attr_val.v.val_loc_list = l;
31479 have_location_lists = true;
31480 return true;
31481 /* The following attributes allow both exprloc and reference
31482 classes, so if the whole expression is DW_OP_GNU_variable_value
31483 alone we could transform it into a reference. */
31484 case DW_AT_byte_size:
31485 case DW_AT_bit_size:
31486 case DW_AT_lower_bound:
31487 case DW_AT_upper_bound:
31488 case DW_AT_bit_stride:
31489 case DW_AT_count:
31490 case DW_AT_allocated:
31491 case DW_AT_associated:
31492 case DW_AT_byte_stride:
31493 if (prev == NULL && next == NULL)
31494 break;
31495 /* FALLTHRU */
31496 default:
31497 if (dwarf_strict)
31498 continue;
31499 break;
31500 }
31501 /* Create DW_TAG_variable that we can refer to. */
31502 gen_decl_die (decl, NULL_TREE, NULL,
31503 lookup_decl_die (current_function_decl));
31504 ref = lookup_decl_die (decl);
31505 if (ref)
31506 {
31507 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31508 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31509 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31510 }
31511 continue;
31512 }
31513 if (prev)
31514 {
31515 prev->dw_loc_next = l->expr;
31516 add_loc_descr (&prev->dw_loc_next, next);
31517 free_loc_descr (loc, NULL);
31518 next = prev->dw_loc_next;
31519 }
31520 else
31521 {
31522 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31523 add_loc_descr (&loc, next);
31524 next = loc;
31525 }
31526 loc = prev;
31527 }
31528 return false;
31529 }
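/* Illustrative sketch (not compiler code) of the rewrite above, using a
   made-up local variable "len":

     before:  DW_AT_upper_bound: DW_OP_GNU_variable_value <decl "len">

     if "len" already has a DIE, only the operand changes:
              DW_AT_upper_bound: DW_OP_GNU_variable_value <DIE of "len">
     if loc_list_from_tree yields a single-entry list, its expression is
              spliced in place of the DW_OP_GNU_variable_value opcode;
     if it yields a multi-entry list and the attribute allows loclists,
              the whole exprloc becomes a location list
              (dw_val_class_loc_list) and the function returns true.  */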
31530
31531 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31532
31533 static void
31534 resolve_variable_value (dw_die_ref die)
31535 {
31536 dw_attr_node *a;
31537 dw_loc_list_ref loc;
31538 unsigned ix;
31539
31540 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31541 switch (AT_class (a))
31542 {
31543 case dw_val_class_loc:
31544 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31545 break;
31546 /* FALLTHRU */
31547 case dw_val_class_loc_list:
31548 loc = AT_loc_list (a);
31549 gcc_assert (loc);
31550 for (; loc; loc = loc->dw_loc_next)
31551 resolve_variable_value_in_expr (a, loc->expr);
31552 break;
31553 default:
31554 break;
31555 }
31556 }
31557
31558 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31559 temporaries in the current function. */
31560
31561 static void
31562 resolve_variable_values (void)
31563 {
31564 if (!variable_value_hash || !current_function_decl)
31565 return;
31566
31567 struct variable_value_struct *node
31568 = variable_value_hash->find_with_hash (current_function_decl,
31569 DECL_UID (current_function_decl));
31570
31571 if (node == NULL)
31572 return;
31573
31574 unsigned int i;
31575 dw_die_ref die;
31576 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31577 resolve_variable_value (die);
31578 }
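/* Note on the overall scheme: note_variable_value below, run from
   dwarf2out_early_finish, records in variable_value_hash which DIEs
   still contain DW_OP_GNU_variable_value references to locals of a
   given function; resolve_variable_values above is then called later,
   once current_function_decl is that function, so the references can be
   retried against real location information.  */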
31579
31580 /* Helper function for note_variable_value, handle one location
31581 expression. */
31582
31583 static void
31584 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31585 {
31586 for (; loc; loc = loc->dw_loc_next)
31587 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31588 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31589 {
31590 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31591 dw_die_ref ref = lookup_decl_die (decl);
31592 if (! ref && (flag_generate_lto || flag_generate_offload))
31593 {
31594 /* ??? This is somewhat a hack because we do not create DIEs
31595 for variables not in BLOCK trees early, but when generating
31596 early LTO output we need the dw_val_class_decl_ref to be
31597 fully resolved. For fat LTO objects we'd also like to
31598 undo this after LTO dwarf output. */
31599 gcc_assert (DECL_CONTEXT (decl));
31600 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31601 gcc_assert (ctx != NULL);
31602 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31603 ref = lookup_decl_die (decl);
31604 gcc_assert (ref != NULL);
31605 }
31606 if (ref)
31607 {
31608 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31609 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31610 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31611 continue;
31612 }
31613 if (VAR_P (decl)
31614 && DECL_CONTEXT (decl)
31615 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31616 && lookup_decl_die (DECL_CONTEXT (decl)))
31617 {
31618 if (!variable_value_hash)
31619 variable_value_hash
31620 = hash_table<variable_value_hasher>::create_ggc (10);
31621
31622 tree fndecl = DECL_CONTEXT (decl);
31623 struct variable_value_struct *node;
31624 struct variable_value_struct **slot
31625 = variable_value_hash->find_slot_with_hash (fndecl,
31626 DECL_UID (fndecl),
31627 INSERT);
31628 if (*slot == NULL)
31629 {
31630 node = ggc_cleared_alloc<variable_value_struct> ();
31631 node->decl_id = DECL_UID (fndecl);
31632 *slot = node;
31633 }
31634 else
31635 node = *slot;
31636
31637 vec_safe_push (node->dies, die);
31638 }
31639 }
31640 }
31641
31642 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31643 with dw_val_class_decl_ref operand. */
31644
31645 static void
31646 note_variable_value (dw_die_ref die)
31647 {
31648 dw_die_ref c;
31649 dw_attr_node *a;
31650 dw_loc_list_ref loc;
31651 unsigned ix;
31652
31653 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31654 switch (AT_class (a))
31655 {
31656 case dw_val_class_loc_list:
31657 loc = AT_loc_list (a);
31658 gcc_assert (loc);
31659 if (!loc->noted_variable_value)
31660 {
31661 loc->noted_variable_value = 1;
31662 for (; loc; loc = loc->dw_loc_next)
31663 note_variable_value_in_expr (die, loc->expr);
31664 }
31665 break;
31666 case dw_val_class_loc:
31667 note_variable_value_in_expr (die, AT_loc (a));
31668 break;
31669 default:
31670 break;
31671 }
31672
31673 /* Mark children. */
31674 FOR_EACH_CHILD (die, c, note_variable_value (c));
31675 }
31676
31677 /* Perform any cleanups needed after the early debug generation pass
31678 has run. */
31679
31680 static void
31681 dwarf2out_early_finish (const char *filename)
31682 {
31683 set_early_dwarf s;
31684
31685 /* PCH might result in DW_AT_producer string being restored from the
31686 header compilation, so always fill it with empty string initially
31687 and overwrite only here. */
31688 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31689 producer_string = gen_producer_string ();
31690 producer->dw_attr_val.v.val_str->refcount--;
31691 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31692
31693 /* Add the name for the main input file now. We delayed this from
31694 dwarf2out_init to avoid complications with PCH. */
31695 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31696 add_comp_dir_attribute (comp_unit_die ());
31697
31698 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31699 DW_AT_comp_dir into .debug_line_str section. */
31700 if (!dwarf2out_as_loc_support
31701 && dwarf_version >= 5
31702 && DWARF5_USE_DEBUG_LINE_STR)
31703 {
31704 for (int i = 0; i < 2; i++)
31705 {
31706 dw_attr_node *a = get_AT (comp_unit_die (),
31707 i ? DW_AT_comp_dir : DW_AT_name);
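/* A DW_FORM_line_strp reference costs DWARF_OFFSET_SIZE bytes in
   .debug_info, so strings no longer than that (including the
   terminating NUL) are cheaper left inline and are skipped below.  */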
31708 if (a == NULL
31709 || AT_class (a) != dw_val_class_str
31710 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31711 continue;
31712
31713 if (! debug_line_str_hash)
31714 debug_line_str_hash
31715 = hash_table<indirect_string_hasher>::create_ggc (10);
31716
31717 struct indirect_string_node *node
31718 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31719 set_indirect_string (node);
31720 node->form = DW_FORM_line_strp;
31721 a->dw_attr_val.v.val_str->refcount--;
31722 a->dw_attr_val.v.val_str = node;
31723 }
31724 }
31725
31726 /* With LTO early dwarf was really finished at compile-time, so make
31727 sure to adjust the phase after annotating the LTRANS CU DIE. */
31728 if (in_lto_p)
31729 {
31730 early_dwarf_finished = true;
31731 return;
31732 }
31733
31734 /* Walk through the list of incomplete types again, trying once more to
31735 emit full debugging info for them. */
31736 retry_incomplete_types ();
31737
31738 /* The point here is to flush out the limbo list so that it is empty
31739 and we don't need to stream it for LTO. */
31740 flush_limbo_die_list ();
31741
31742 gen_scheduled_generic_parms_dies ();
31743 gen_remaining_tmpl_value_param_die_attribute ();
31744
31745 /* Add DW_AT_linkage_name for all deferred DIEs. */
31746 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31747 {
31748 tree decl = node->created_for;
31749 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31750 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31751 ended up in deferred_asm_name before we knew it was
31752 constant and never written to disk. */
31753 && DECL_ASSEMBLER_NAME (decl))
31754 {
31755 add_linkage_attr (node->die, decl);
31756 move_linkage_attr (node->die);
31757 }
31758 }
31759 deferred_asm_name = NULL;
31760
31761 if (flag_eliminate_unused_debug_types)
31762 prune_unused_types ();
31763
31764 /* Generate separate COMDAT sections for type DIEs. */
31765 if (use_debug_types)
31766 {
31767 break_out_comdat_types (comp_unit_die ());
31768
31769 /* Each new type_unit DIE was added to the limbo die list when created.
31770 Since these have all been added to comdat_type_list, clear the
31771 limbo die list. */
31772 limbo_die_list = NULL;
31773
31774 /* For each new comdat type unit, copy declarations for incomplete
31775 types to make the new unit self-contained (i.e., no direct
31776 references to the main compile unit). */
31777 for (comdat_type_node *ctnode = comdat_type_list;
31778 ctnode != NULL; ctnode = ctnode->next)
31779 copy_decls_for_unworthy_types (ctnode->root_die);
31780 copy_decls_for_unworthy_types (comp_unit_die ());
31781
31782 /* In the process of copying declarations from one unit to another,
31783 we may have left some declarations behind that are no longer
31784 referenced. Prune them. */
31785 prune_unused_types ();
31786 }
31787
31788 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31789 with dw_val_class_decl_ref operand. */
31790 note_variable_value (comp_unit_die ());
31791 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31792 note_variable_value (node->die);
31793 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31794 ctnode = ctnode->next)
31795 note_variable_value (ctnode->root_die);
31796 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31797 note_variable_value (node->die);
31798
31799 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31800 both the main_cu and all skeleton TUs. Making this call unconditional
31801 would end up either adding a second copy of the AT_pubnames attribute, or
31802 requiring a special case in add_top_level_skeleton_die_attrs. */
31803 if (!dwarf_split_debug_info)
31804 add_AT_pubnames (comp_unit_die ());
31805
31806 /* The early debug phase is now finished. */
31807 early_dwarf_finished = true;
31808
31809 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31810 if (!flag_generate_lto && !flag_generate_offload)
31811 return;
31812
31813 /* Now that we are going to output for LTO, initialize sections and
31814 labels to the LTO variants. We don't need a random-seed postfix
31815 like other LTO sections, as linking the LTO debug sections into
31816 one in a partial link is fine. */
31817 init_sections_and_labels (true);
31818
31819 /* The output below is modeled after dwarf2out_finish with all
31820 location-related output removed and some LTO-specific changes.
31821 Some refactoring might make both smaller and easier to match up. */
31822
31823 /* Traverse the DIE's and add sibling attributes to those DIE's
31824 that have children. */
31825 add_sibling_attributes (comp_unit_die ());
31826 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31827 add_sibling_attributes (node->die);
31828 for (comdat_type_node *ctnode = comdat_type_list;
31829 ctnode != NULL; ctnode = ctnode->next)
31830 add_sibling_attributes (ctnode->root_die);
31831
31832 if (have_macinfo)
31833 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31834 macinfo_section_label);
31835
31836 save_macinfo_strings ();
31837
31838 if (dwarf_split_debug_info)
31839 {
31840 unsigned int index = 0;
31841 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31842 }
31843
31844 /* Output all of the compilation units. We put the main one last so that
31845 the offsets are available to output_pubnames. */
31846 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31847 output_comp_unit (node->die, 0, NULL);
31848
31849 hash_table<comdat_type_hasher> comdat_type_table (100);
31850 for (comdat_type_node *ctnode = comdat_type_list;
31851 ctnode != NULL; ctnode = ctnode->next)
31852 {
31853 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31854
31855 /* Don't output duplicate types. */
31856 if (*slot != HTAB_EMPTY_ENTRY)
31857 continue;
31858
31859 /* Add a pointer to the line table for the main compilation unit
31860 so that the debugger can make sense of DW_AT_decl_file
31861 attributes. */
31862 if (debug_info_level >= DINFO_LEVEL_TERSE)
31863 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31864 (!dwarf_split_debug_info
31865 ? debug_line_section_label
31866 : debug_skeleton_line_section_label));
31867
31868 output_comdat_type_unit (ctnode);
31869 *slot = ctnode;
31870 }
31871
31872 /* Stick a unique symbol to the main debuginfo section. */
31873 compute_comp_unit_symbol (comp_unit_die ());
31874
31875 /* Output the main compilation unit. We always need it if only for
31876 the CU symbol. */
31877 output_comp_unit (comp_unit_die (), true, NULL);
31878
31879 /* Output the abbreviation table. */
31880 if (vec_safe_length (abbrev_die_table) != 1)
31881 {
31882 switch_to_section (debug_abbrev_section);
31883 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31884 output_abbrev_section ();
31885 }
31886
31887 /* Have to end the macro section. */
31888 if (have_macinfo)
31889 {
31890 /* We have to save macinfo state if we need to output it again
31891 for the FAT part of the object. */
31892 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
31893 if (flag_fat_lto_objects)
31894 macinfo_table = macinfo_table->copy ();
31895
31896 switch_to_section (debug_macinfo_section);
31897 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31898 output_macinfo (debug_skeleton_line_section_label, true);
31899 dw2_asm_output_data (1, 0, "End compilation unit");
31900
31901 /* Emit a skeleton debug_line section. */
31902 switch_to_section (debug_skeleton_line_section);
31903 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31904 output_line_info (true);
31905
31906 if (flag_fat_lto_objects)
31907 {
31908 vec_free (macinfo_table);
31909 macinfo_table = saved_macinfo_table;
31910 }
31911 }
31912
31913
31914 /* If we emitted any indirect strings, output the string table too. */
31915 if (debug_str_hash || skeleton_debug_str_hash)
31916 output_indirect_strings ();
31917
31918 /* Switch back to the text section. */
31919 switch_to_section (text_section);
31920 }
31921
31922 /* Reset all state within dwarf2out.c so that we can rerun the compiler
31923 within the same process. For use by toplev::finalize. */
31924
31925 void
31926 dwarf2out_c_finalize (void)
31927 {
31928 last_var_location_insn = NULL;
31929 cached_next_real_insn = NULL;
31930 used_rtx_array = NULL;
31931 incomplete_types = NULL;
31932 decl_scope_table = NULL;
31933 debug_info_section = NULL;
31934 debug_skeleton_info_section = NULL;
31935 debug_abbrev_section = NULL;
31936 debug_skeleton_abbrev_section = NULL;
31937 debug_aranges_section = NULL;
31938 debug_addr_section = NULL;
31939 debug_macinfo_section = NULL;
31940 debug_line_section = NULL;
31941 debug_skeleton_line_section = NULL;
31942 debug_loc_section = NULL;
31943 debug_pubnames_section = NULL;
31944 debug_pubtypes_section = NULL;
31945 debug_str_section = NULL;
31946 debug_line_str_section = NULL;
31947 debug_str_dwo_section = NULL;
31948 debug_str_offsets_section = NULL;
31949 debug_ranges_section = NULL;
31950 debug_frame_section = NULL;
31951 fde_vec = NULL;
31952 debug_str_hash = NULL;
31953 debug_line_str_hash = NULL;
31954 skeleton_debug_str_hash = NULL;
31955 dw2_string_counter = 0;
31956 have_multiple_function_sections = false;
31957 text_section_used = false;
31958 cold_text_section_used = false;
31959 cold_text_section = NULL;
31960 current_unit_personality = NULL;
31961
31962 early_dwarf = false;
31963 early_dwarf_finished = false;
31964
31965 next_die_offset = 0;
31966 single_comp_unit_die = NULL;
31967 comdat_type_list = NULL;
31968 limbo_die_list = NULL;
31969 file_table = NULL;
31970 decl_die_table = NULL;
31971 common_block_die_table = NULL;
31972 decl_loc_table = NULL;
31973 call_arg_locations = NULL;
31974 call_arg_loc_last = NULL;
31975 call_site_count = -1;
31976 tail_call_site_count = -1;
31977 cached_dw_loc_list_table = NULL;
31978 abbrev_die_table = NULL;
31979 delete dwarf_proc_stack_usage_map;
31980 dwarf_proc_stack_usage_map = NULL;
31981 line_info_label_num = 0;
31982 cur_line_info_table = NULL;
31983 text_section_line_info = NULL;
31984 cold_text_section_line_info = NULL;
31985 separate_line_info = NULL;
31986 info_section_emitted = false;
31987 pubname_table = NULL;
31988 pubtype_table = NULL;
31989 macinfo_table = NULL;
31990 ranges_table = NULL;
31991 ranges_by_label = NULL;
31992 rnglist_idx = 0;
31993 have_location_lists = false;
31994 loclabel_num = 0;
31995 poc_label_num = 0;
31996 last_emitted_file = NULL;
31997 label_num = 0;
31998 tmpl_value_parm_die_table = NULL;
31999 generic_type_instances = NULL;
32000 frame_pointer_fb_offset = 0;
32001 frame_pointer_fb_offset_valid = false;
32002 base_types.release ();
32003 XDELETEVEC (producer_string);
32004 producer_string = NULL;
32005 }
32006
32007 #include "gt-dwarf2out.h"