1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
  24    the file numbers are used by .debug_info.  Alternatively, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
  47    information shared by one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
 148    completed at some later time.  incomplete_types needs to be a
 149    vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
 157    define type declaration DIEs.  */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
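/* For example, DWARF_ROUND (5, 4) == 8 and DWARF_ROUND (8, 4) == 8.  */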
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
 254    Rather than forcing all the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
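
/* For example, with a 64-bit HOST_WIDE_INT, a value whose minimum precision
   is 70 bits needs two HOST_WIDE_INTs, while small constants need only one.  */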
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
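	      /* PAD rounds OFFSET up to the next multiple of PTR_SIZE; e.g.
		 with PTR_SIZE == 8, an offset of 17 gives a pad of 7
		 (17 + 7 == 24).  */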
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697 do it now. This is typically performed to make sure that tables
 698    associated with functions are dragged with them and not discarded by
 699    garbage-collecting links.  We need to do this on a per-function basis to
700 cope with -ffunction-sections. */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
 739 /* Output the call frame information used to record how to calculate
 740    the frame pointer and where the saved registers are located.  */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
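   /* For example, a target whose return-address column is 256 or more cannot
      encode it in the single byte used by CIE version 1, so version 3 (which
      uses a uleb128 for the RA column) is emitted even for -gdwarf-2.  */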
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
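
      /* For example, a unit with a personality routine, LSDA references and a
	 non-absptr FDE encoding ends up with the augmentation string "zPLR",
	 built by the code below.  */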
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942 /* Loop through all of the FDE's. */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
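/* For instance (illustrative only; exact encodings and labels vary by
   target), a PIC x86-64/ELF C++ function would typically get:
	.cfi_startproc
	.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
	.cfi_lsda 0x1b,.LLSDA0
   where 0x9b is DW_EH_PE_indirect|pcrel|sdata4 and 0x1b is
   DW_EH_PE_pcrel|sdata4.  */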
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132   /* Output a label to mark the end of the prologue code generated for
1133      this function.  */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142    for a function epilogue.  This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155   /* Output a label to mark the beginning of the epilogue code generated
1156      for this function.  */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
1294 typedef unsigned int var_loc_view;
1295
1296 /* Location lists are ranges + location descriptions for that range,
1297 so you can track variables that are in different places over
1298 their entire life. */
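/* For example, a variable held in a register before a call and in a stack
   slot afterwards gets one list entry per range, each with its own
   DWARF expression.  */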
1299 typedef struct GTY(()) dw_loc_list_struct {
1300 dw_loc_list_ref dw_loc_next;
1301 const char *begin; /* Label and addr_entry for start of range */
1302 addr_table_entry *begin_entry;
1303 const char *end; /* Label for end of range */
1304 char *ll_symbol; /* Label for beginning of location list.
1305 Only on head of list. */
1306 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1307 const char *section; /* Section this loclist is relative to */
1308 dw_loc_descr_ref expr;
1309 var_loc_view vbegin, vend;
1310 hashval_t hash;
1311 /* True if all addresses in this and subsequent lists are known to be
1312 resolved. */
1313 bool resolved_addr;
1314 /* True if this list has been replaced by dw_loc_next. */
1315 bool replaced;
1316 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1317 section. */
1318 unsigned char emitted : 1;
1319 /* True if hash field is index rather than hash value. */
1320 unsigned char num_assigned : 1;
1321 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1322 unsigned char offset_emitted : 1;
1323 /* True if note_variable_value_in_expr has been called on it. */
1324 unsigned char noted_variable_value : 1;
1325 /* True if the range should be emitted even if begin and end
1326 are the same. */
1327 bool force;
1328 } dw_loc_list_node;
1329
1330 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1331 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1332
1333 /* Convert a DWARF stack opcode into its string name. */
1334
1335 static const char *
1336 dwarf_stack_op_name (unsigned int op)
1337 {
1338 const char *name = get_DW_OP_name (op);
1339
1340 if (name != NULL)
1341 return name;
1342
1343 return "OP_<unknown>";
1344 }
1345
1346 /* Return TRUE iff we're to output location view lists as a separate
1347 attribute next to the location lists, as an extension compatible
1348 with DWARF 2 and above. */
1349
1350 static inline bool
1351 dwarf2out_locviews_in_attribute ()
1352 {
1353 return debug_variable_location_views == 1;
1354 }
1355
1356 /* Return TRUE iff we're to output location view lists as part of the
1357 location lists, as proposed for standardization after DWARF 5. */
1358
1359 static inline bool
1360 dwarf2out_locviews_in_loclist ()
1361 {
1362 #ifndef DW_LLE_view_pair
1363 return false;
1364 #else
1365 return debug_variable_location_views == -1;
1366 #endif
1367 }
1368
1369 /* Return a pointer to a newly allocated location description. Location
1370 descriptions are simple expression terms that can be strung
1371 together to form more complicated location (address) descriptions. */
1372
1373 static inline dw_loc_descr_ref
1374 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1375 unsigned HOST_WIDE_INT oprnd2)
1376 {
1377 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1378
1379 descr->dw_loc_opc = op;
1380 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1381 descr->dw_loc_oprnd1.val_entry = NULL;
1382 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1383 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd2.val_entry = NULL;
1385 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1386
1387 return descr;
1388 }
1389
1390 /* Add a location description term to a location description expression. */
1391
1392 static inline void
1393 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1394 {
1395 dw_loc_descr_ref *d;
1396
1397 /* Find the end of the chain. */
1398 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1399 ;
1400
1401 *d = descr;
1402 }
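
/* Illustrative use only (not from the original source): a frame-relative
   dereference could be built as
     dw_loc_descr_ref l = new_loc_descr (DW_OP_fbreg, 8, 0);
     add_loc_descr (&l, new_loc_descr (DW_OP_deref, 0, 0));
   yielding the two-term expression DW_OP_fbreg 8; DW_OP_deref.  */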
1403
1404 /* Compare two location operands for exact equality. */
1405
1406 static bool
1407 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1408 {
1409 if (a->val_class != b->val_class)
1410 return false;
1411 switch (a->val_class)
1412 {
1413 case dw_val_class_none:
1414 return true;
1415 case dw_val_class_addr:
1416 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1417
1418 case dw_val_class_offset:
1419 case dw_val_class_unsigned_const:
1420 case dw_val_class_const:
1421 case dw_val_class_unsigned_const_implicit:
1422 case dw_val_class_const_implicit:
1423 case dw_val_class_range_list:
1424 /* These are all HOST_WIDE_INT, signed or unsigned. */
1425 return a->v.val_unsigned == b->v.val_unsigned;
1426
1427 case dw_val_class_loc:
1428 return a->v.val_loc == b->v.val_loc;
1429 case dw_val_class_loc_list:
1430 return a->v.val_loc_list == b->v.val_loc_list;
1431 case dw_val_class_view_list:
1432 return a->v.val_view_list == b->v.val_view_list;
1433 case dw_val_class_die_ref:
1434 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1435 case dw_val_class_fde_ref:
1436 return a->v.val_fde_index == b->v.val_fde_index;
1437 case dw_val_class_symview:
1438 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1439 case dw_val_class_lbl_id:
1440 case dw_val_class_lineptr:
1441 case dw_val_class_macptr:
1442 case dw_val_class_loclistsptr:
1443 case dw_val_class_high_pc:
1444 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1445 case dw_val_class_str:
1446 return a->v.val_str == b->v.val_str;
1447 case dw_val_class_flag:
1448 return a->v.val_flag == b->v.val_flag;
1449 case dw_val_class_file:
1450 case dw_val_class_file_implicit:
1451 return a->v.val_file == b->v.val_file;
1452 case dw_val_class_decl_ref:
1453 return a->v.val_decl_ref == b->v.val_decl_ref;
1454
1455 case dw_val_class_const_double:
1456 return (a->v.val_double.high == b->v.val_double.high
1457 && a->v.val_double.low == b->v.val_double.low);
1458
1459 case dw_val_class_wide_int:
1460 return *a->v.val_wide == *b->v.val_wide;
1461
1462 case dw_val_class_vec:
1463 {
1464 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1465 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1466
1467 return (a_len == b_len
1468 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1469 }
1470
1471 case dw_val_class_data8:
1472 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1473
1474 case dw_val_class_vms_delta:
1475 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1476 && !strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1));
1477
1478 case dw_val_class_discr_value:
1479 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1480 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1481 case dw_val_class_discr_list:
1482      /* It makes no sense to compare two discriminant value lists.  */
1483 return false;
1484 }
1485 gcc_unreachable ();
1486 }
1487
1488 /* Compare two location atoms for exact equality. */
1489
1490 static bool
1491 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1492 {
1493 if (a->dw_loc_opc != b->dw_loc_opc)
1494 return false;
1495
1496 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1497 address size, but since we always allocate cleared storage it
1498 should be zero for other types of locations. */
1499 if (a->dtprel != b->dtprel)
1500 return false;
1501
1502 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1503 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1504 }
1505
1506 /* Compare two complete location expressions for exact equality. */
1507
1508 bool
1509 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1510 {
1511 while (1)
1512 {
1513 if (a == b)
1514 return true;
1515 if (a == NULL || b == NULL)
1516 return false;
1517 if (!loc_descr_equal_p_1 (a, b))
1518 return false;
1519
1520 a = a->dw_loc_next;
1521 b = b->dw_loc_next;
1522 }
1523 }
1524
1525
1526 /* Add a constant POLY_OFFSET to a location expression. */
1527
1528 static void
1529 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1530 {
1531 dw_loc_descr_ref loc;
1532 HOST_WIDE_INT *p;
1533
1534 gcc_assert (*list_head != NULL);
1535
1536 if (known_eq (poly_offset, 0))
1537 return;
1538
1539 /* Find the end of the chain. */
1540 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1541 ;
1542
1543 HOST_WIDE_INT offset;
1544 if (!poly_offset.is_constant (&offset))
1545 {
1546 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1547 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1548 return;
1549 }
1550
1551 p = NULL;
1552 if (loc->dw_loc_opc == DW_OP_fbreg
1553 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1554 p = &loc->dw_loc_oprnd1.v.val_int;
1555 else if (loc->dw_loc_opc == DW_OP_bregx)
1556 p = &loc->dw_loc_oprnd2.v.val_int;
1557
1558 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1559    offset.  Don't optimize if a signed integer overflow would happen.  */
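  /* For example, DW_OP_fbreg -16 followed by a constant offset of 8 is folded
     into DW_OP_fbreg -8 instead of appending a DW_OP_plus_uconst term.  */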
1560 if (p != NULL
1561 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1562 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1563 *p += offset;
1564
1565 else if (offset > 0)
1566 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1567
1568 else
1569 {
1570 loc->dw_loc_next
1571 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1572 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1573 }
1574 }
1575
1576 /* Return a pointer to a newly allocated location description for
1577 REG and OFFSET. */
1578
1579 static inline dw_loc_descr_ref
1580 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1581 {
1582 HOST_WIDE_INT const_offset;
1583 if (offset.is_constant (&const_offset))
1584 {
1585 if (reg <= 31)
1586 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1587 const_offset, 0);
1588 else
1589 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1590 }
1591 else
1592 {
1593 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1594 loc_descr_plus_const (&ret, offset);
1595 return ret;
1596 }
1597 }
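
/* For example, new_reg_loc_descr (6, 8) yields DW_OP_breg6 8, while register
   numbers above 31 are expressed with DW_OP_bregx.  */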
1598
1599 /* Add a constant OFFSET to a location list. */
1600
1601 static void
1602 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1603 {
1604 dw_loc_list_ref d;
1605 for (d = list_head; d != NULL; d = d->dw_loc_next)
1606 loc_descr_plus_const (&d->expr, offset);
1607 }
1608
1609 #define DWARF_REF_SIZE \
1610 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1611
1612 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1613 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1614 DW_FORM_data16 with 128 bits. */
1615 #define DWARF_LARGEST_DATA_FORM_BITS \
1616 (dwarf_version >= 5 ? 128 : 64)
1617
1618 /* Utility inline function for construction of ops that were GNU extensions
1619 before DWARF 5. */
1620 static inline enum dwarf_location_atom
1621 dwarf_OP (enum dwarf_location_atom op)
1622 {
1623 switch (op)
1624 {
1625 case DW_OP_implicit_pointer:
1626 if (dwarf_version < 5)
1627 return DW_OP_GNU_implicit_pointer;
1628 break;
1629
1630 case DW_OP_entry_value:
1631 if (dwarf_version < 5)
1632 return DW_OP_GNU_entry_value;
1633 break;
1634
1635 case DW_OP_const_type:
1636 if (dwarf_version < 5)
1637 return DW_OP_GNU_const_type;
1638 break;
1639
1640 case DW_OP_regval_type:
1641 if (dwarf_version < 5)
1642 return DW_OP_GNU_regval_type;
1643 break;
1644
1645 case DW_OP_deref_type:
1646 if (dwarf_version < 5)
1647 return DW_OP_GNU_deref_type;
1648 break;
1649
1650 case DW_OP_convert:
1651 if (dwarf_version < 5)
1652 return DW_OP_GNU_convert;
1653 break;
1654
1655 case DW_OP_reinterpret:
1656 if (dwarf_version < 5)
1657 return DW_OP_GNU_reinterpret;
1658 break;
1659
1660 default:
1661 break;
1662 }
1663 return op;
1664 }
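
/* For example, dwarf_OP (DW_OP_entry_value) maps to DW_OP_GNU_entry_value
   when emitting DWARF 4 or earlier, so callers can always use the DWARF 5
   opcode names.  */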
1665
1666 /* Similarly for attributes. */
1667 static inline enum dwarf_attribute
1668 dwarf_AT (enum dwarf_attribute at)
1669 {
1670 switch (at)
1671 {
1672 case DW_AT_call_return_pc:
1673 if (dwarf_version < 5)
1674 return DW_AT_low_pc;
1675 break;
1676
1677 case DW_AT_call_tail_call:
1678 if (dwarf_version < 5)
1679 return DW_AT_GNU_tail_call;
1680 break;
1681
1682 case DW_AT_call_origin:
1683 if (dwarf_version < 5)
1684 return DW_AT_abstract_origin;
1685 break;
1686
1687 case DW_AT_call_target:
1688 if (dwarf_version < 5)
1689 return DW_AT_GNU_call_site_target;
1690 break;
1691
1692 case DW_AT_call_target_clobbered:
1693 if (dwarf_version < 5)
1694 return DW_AT_GNU_call_site_target_clobbered;
1695 break;
1696
1697 case DW_AT_call_parameter:
1698 if (dwarf_version < 5)
1699 return DW_AT_abstract_origin;
1700 break;
1701
1702 case DW_AT_call_value:
1703 if (dwarf_version < 5)
1704 return DW_AT_GNU_call_site_value;
1705 break;
1706
1707 case DW_AT_call_data_value:
1708 if (dwarf_version < 5)
1709 return DW_AT_GNU_call_site_data_value;
1710 break;
1711
1712 case DW_AT_call_all_calls:
1713 if (dwarf_version < 5)
1714 return DW_AT_GNU_all_call_sites;
1715 break;
1716
1717 case DW_AT_call_all_tail_calls:
1718 if (dwarf_version < 5)
1719 return DW_AT_GNU_all_tail_call_sites;
1720 break;
1721
1722 case DW_AT_dwo_name:
1723 if (dwarf_version < 5)
1724 return DW_AT_GNU_dwo_name;
1725 break;
1726
1727 default:
1728 break;
1729 }
1730 return at;
1731 }
1732
1733 /* And similarly for tags. */
1734 static inline enum dwarf_tag
1735 dwarf_TAG (enum dwarf_tag tag)
1736 {
1737 switch (tag)
1738 {
1739 case DW_TAG_call_site:
1740 if (dwarf_version < 5)
1741 return DW_TAG_GNU_call_site;
1742 break;
1743
1744 case DW_TAG_call_site_parameter:
1745 if (dwarf_version < 5)
1746 return DW_TAG_GNU_call_site_parameter;
1747 break;
1748
1749 default:
1750 break;
1751 }
1752 return tag;
1753 }
1754
1755 static unsigned long int get_base_type_offset (dw_die_ref);
1756
1757 /* Return the size of a location descriptor. */
1758
1759 static unsigned long
1760 size_of_loc_descr (dw_loc_descr_ref loc)
1761 {
1762 unsigned long size = 1;
1763
1764 switch (loc->dw_loc_opc)
1765 {
1766 case DW_OP_addr:
1767 size += DWARF2_ADDR_SIZE;
1768 break;
1769 case DW_OP_GNU_addr_index:
1770 case DW_OP_GNU_const_index:
1771 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1772 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1773 break;
1774 case DW_OP_const1u:
1775 case DW_OP_const1s:
1776 size += 1;
1777 break;
1778 case DW_OP_const2u:
1779 case DW_OP_const2s:
1780 size += 2;
1781 break;
1782 case DW_OP_const4u:
1783 case DW_OP_const4s:
1784 size += 4;
1785 break;
1786 case DW_OP_const8u:
1787 case DW_OP_const8s:
1788 size += 8;
1789 break;
1790 case DW_OP_constu:
1791 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1792 break;
1793 case DW_OP_consts:
1794 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1795 break;
1796 case DW_OP_pick:
1797 size += 1;
1798 break;
1799 case DW_OP_plus_uconst:
1800 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1801 break;
1802 case DW_OP_skip:
1803 case DW_OP_bra:
1804 size += 2;
1805 break;
1806 case DW_OP_breg0:
1807 case DW_OP_breg1:
1808 case DW_OP_breg2:
1809 case DW_OP_breg3:
1810 case DW_OP_breg4:
1811 case DW_OP_breg5:
1812 case DW_OP_breg6:
1813 case DW_OP_breg7:
1814 case DW_OP_breg8:
1815 case DW_OP_breg9:
1816 case DW_OP_breg10:
1817 case DW_OP_breg11:
1818 case DW_OP_breg12:
1819 case DW_OP_breg13:
1820 case DW_OP_breg14:
1821 case DW_OP_breg15:
1822 case DW_OP_breg16:
1823 case DW_OP_breg17:
1824 case DW_OP_breg18:
1825 case DW_OP_breg19:
1826 case DW_OP_breg20:
1827 case DW_OP_breg21:
1828 case DW_OP_breg22:
1829 case DW_OP_breg23:
1830 case DW_OP_breg24:
1831 case DW_OP_breg25:
1832 case DW_OP_breg26:
1833 case DW_OP_breg27:
1834 case DW_OP_breg28:
1835 case DW_OP_breg29:
1836 case DW_OP_breg30:
1837 case DW_OP_breg31:
1838 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1839 break;
1840 case DW_OP_regx:
1841 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1842 break;
1843 case DW_OP_fbreg:
1844 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1845 break;
1846 case DW_OP_bregx:
1847 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1848 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1849 break;
1850 case DW_OP_piece:
1851 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1852 break;
1853 case DW_OP_bit_piece:
1854 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1855 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1856 break;
1857 case DW_OP_deref_size:
1858 case DW_OP_xderef_size:
1859 size += 1;
1860 break;
1861 case DW_OP_call2:
1862 size += 2;
1863 break;
1864 case DW_OP_call4:
1865 size += 4;
1866 break;
1867 case DW_OP_call_ref:
1868 case DW_OP_GNU_variable_value:
1869 size += DWARF_REF_SIZE;
1870 break;
1871 case DW_OP_implicit_value:
1872 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1873 + loc->dw_loc_oprnd1.v.val_unsigned;
1874 break;
1875 case DW_OP_implicit_pointer:
1876 case DW_OP_GNU_implicit_pointer:
1877 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1878 break;
1879 case DW_OP_entry_value:
1880 case DW_OP_GNU_entry_value:
1881 {
1882 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1883 size += size_of_uleb128 (op_size) + op_size;
1884 break;
1885 }
1886 case DW_OP_const_type:
1887 case DW_OP_GNU_const_type:
1888 {
1889 unsigned long o
1890 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1891 size += size_of_uleb128 (o) + 1;
1892 switch (loc->dw_loc_oprnd2.val_class)
1893 {
1894 case dw_val_class_vec:
1895 size += loc->dw_loc_oprnd2.v.val_vec.length
1896 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1897 break;
1898 case dw_val_class_const:
1899 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1900 break;
1901 case dw_val_class_const_double:
1902 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1903 break;
1904 case dw_val_class_wide_int:
1905 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1906 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1907 break;
1908 default:
1909 gcc_unreachable ();
1910 }
1911 break;
1912 }
1913 case DW_OP_regval_type:
1914 case DW_OP_GNU_regval_type:
1915 {
1916 unsigned long o
1917 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1918 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1919 + size_of_uleb128 (o);
1920 }
1921 break;
1922 case DW_OP_deref_type:
1923 case DW_OP_GNU_deref_type:
1924 {
1925 unsigned long o
1926 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1927 size += 1 + size_of_uleb128 (o);
1928 }
1929 break;
1930 case DW_OP_convert:
1931 case DW_OP_reinterpret:
1932 case DW_OP_GNU_convert:
1933 case DW_OP_GNU_reinterpret:
1934 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1935 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1936 else
1937 {
1938 unsigned long o
1939 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1940 size += size_of_uleb128 (o);
1941 }
1942 break;
1943 case DW_OP_GNU_parameter_ref:
1944 size += 4;
1945 break;
1946 default:
1947 break;
1948 }
1949
1950 return size;
1951 }
1952
1953 /* Return the size of a series of location descriptors. */
1954
1955 unsigned long
1956 size_of_locs (dw_loc_descr_ref loc)
1957 {
1958 dw_loc_descr_ref l;
1959 unsigned long size;
1960
1961 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1962 field, to avoid writing to a PCH file. */
1963 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1964 {
1965 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1966 break;
1967 size += size_of_loc_descr (l);
1968 }
1969 if (! l)
1970 return size;
1971
1972 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1973 {
1974 l->dw_loc_addr = size;
1975 size += size_of_loc_descr (l);
1976 }
1977
1978 return size;
1979 }
1980
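/* For instance (a purely illustrative expression), for the three-operation
   sequence  DW_OP_breg6 -8; DW_OP_bra <target>; <target>: DW_OP_lit0  the
   first loop above stops at the DW_OP_bra, and the second loop then records
   dw_loc_addr values of 0, 2 and 5 for the three descriptors and returns a
   total size of 6 bytes (2 + 3 + 1).  */
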
1981 /* Return the size of the value in a DW_AT_discr_value attribute. */
1982
1983 static int
1984 size_of_discr_value (dw_discr_value *discr_value)
1985 {
1986 if (discr_value->pos)
1987 return size_of_uleb128 (discr_value->v.uval);
1988 else
1989 return size_of_sleb128 (discr_value->v.sval);
1990 }
1991
1992 /* Return the size of the value in a DW_AT_discr_list attribute. */
1993
1994 static int
1995 size_of_discr_list (dw_discr_list_ref discr_list)
1996 {
1997 int size = 0;
1998
1999 for (dw_discr_list_ref list = discr_list;
2000 list != NULL;
2001 list = list->dw_discr_next)
2002 {
2003 /* One byte for the discriminant value descriptor, and then one or two
2004 LEB128 numbers, depending on whether it's a single case label or a
2005 range label. */
2006 size += 1;
2007 size += size_of_discr_value (&list->dw_discr_lower_bound);
2008 if (list->dw_discr_range != 0)
2009 size += size_of_discr_value (&list->dw_discr_upper_bound);
2010 }
2011 return size;
2012 }
2013
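/* For example (values purely illustrative), a discriminant list consisting
   of the single case label 2 followed by the range 10 .. 20 occupies
   (1 + 1) + (1 + 1 + 1) = 5 bytes: one descriptor byte per entry plus one
   LEB128 number per bound, each of which fits in a single byte here.  */
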
2014 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2015 static void get_ref_die_offset_label (char *, dw_die_ref);
2016 static unsigned long int get_ref_die_offset (dw_die_ref);
2017
2018 /* Output location description stack opcode's operands (if any).
2019 The for_eh_or_skip parameter controls whether register numbers are
2020 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2021 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2022 info). This should be suppressed for the cases that have not been converted
2023 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2024
2025 static void
2026 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2027 {
2028 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2029 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2030
2031 switch (loc->dw_loc_opc)
2032 {
2033 #ifdef DWARF2_DEBUGGING_INFO
2034 case DW_OP_const2u:
2035 case DW_OP_const2s:
2036 dw2_asm_output_data (2, val1->v.val_int, NULL);
2037 break;
2038 case DW_OP_const4u:
2039 if (loc->dtprel)
2040 {
2041 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2042 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2043 val1->v.val_addr);
2044 fputc ('\n', asm_out_file);
2045 break;
2046 }
2047 /* FALLTHRU */
2048 case DW_OP_const4s:
2049 dw2_asm_output_data (4, val1->v.val_int, NULL);
2050 break;
2051 case DW_OP_const8u:
2052 if (loc->dtprel)
2053 {
2054 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2055 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2056 val1->v.val_addr);
2057 fputc ('\n', asm_out_file);
2058 break;
2059 }
2060 /* FALLTHRU */
2061 case DW_OP_const8s:
2062 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2063 dw2_asm_output_data (8, val1->v.val_int, NULL);
2064 break;
2065 case DW_OP_skip:
2066 case DW_OP_bra:
2067 {
2068 int offset;
2069
2070 gcc_assert (val1->val_class == dw_val_class_loc);
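        /* The operand of DW_OP_skip / DW_OP_bra is a 2-byte offset relative
           to the end of the current operation, i.e. past the 1-byte opcode
           and the 2-byte operand itself; hence the "+ 3" below.  */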
2071 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2072
2073 dw2_asm_output_data (2, offset, NULL);
2074 }
2075 break;
2076 case DW_OP_implicit_value:
2077 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2078 switch (val2->val_class)
2079 {
2080 case dw_val_class_const:
2081 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2082 break;
2083 case dw_val_class_vec:
2084 {
2085 unsigned int elt_size = val2->v.val_vec.elt_size;
2086 unsigned int len = val2->v.val_vec.length;
2087 unsigned int i;
2088 unsigned char *p;
2089
2090 if (elt_size > sizeof (HOST_WIDE_INT))
2091 {
2092 elt_size /= 2;
2093 len *= 2;
2094 }
2095 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2096 i < len;
2097 i++, p += elt_size)
2098 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2099 "fp or vector constant word %u", i);
2100 }
2101 break;
2102 case dw_val_class_const_double:
2103 {
2104 unsigned HOST_WIDE_INT first, second;
2105
2106 if (WORDS_BIG_ENDIAN)
2107 {
2108 first = val2->v.val_double.high;
2109 second = val2->v.val_double.low;
2110 }
2111 else
2112 {
2113 first = val2->v.val_double.low;
2114 second = val2->v.val_double.high;
2115 }
2116 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2117 first, NULL);
2118 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2119 second, NULL);
2120 }
2121 break;
2122 case dw_val_class_wide_int:
2123 {
2124 int i;
2125 int len = get_full_len (*val2->v.val_wide);
2126 if (WORDS_BIG_ENDIAN)
2127 for (i = len - 1; i >= 0; --i)
2128 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2129 val2->v.val_wide->elt (i), NULL);
2130 else
2131 for (i = 0; i < len; ++i)
2132 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2133 val2->v.val_wide->elt (i), NULL);
2134 }
2135 break;
2136 case dw_val_class_addr:
2137 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2138 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2139 break;
2140 default:
2141 gcc_unreachable ();
2142 }
2143 break;
2144 #else
2145 case DW_OP_const2u:
2146 case DW_OP_const2s:
2147 case DW_OP_const4u:
2148 case DW_OP_const4s:
2149 case DW_OP_const8u:
2150 case DW_OP_const8s:
2151 case DW_OP_skip:
2152 case DW_OP_bra:
2153 case DW_OP_implicit_value:
2154 /* We currently don't make any attempt to make sure these are
2155 aligned properly like we do for the main unwind info, so
2156 don't support emitting things larger than a byte if we're
2157 only doing unwinding. */
2158 gcc_unreachable ();
2159 #endif
2160 case DW_OP_const1u:
2161 case DW_OP_const1s:
2162 dw2_asm_output_data (1, val1->v.val_int, NULL);
2163 break;
2164 case DW_OP_constu:
2165 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2166 break;
2167 case DW_OP_consts:
2168 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2169 break;
2170 case DW_OP_pick:
2171 dw2_asm_output_data (1, val1->v.val_int, NULL);
2172 break;
2173 case DW_OP_plus_uconst:
2174 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2175 break;
2176 case DW_OP_breg0:
2177 case DW_OP_breg1:
2178 case DW_OP_breg2:
2179 case DW_OP_breg3:
2180 case DW_OP_breg4:
2181 case DW_OP_breg5:
2182 case DW_OP_breg6:
2183 case DW_OP_breg7:
2184 case DW_OP_breg8:
2185 case DW_OP_breg9:
2186 case DW_OP_breg10:
2187 case DW_OP_breg11:
2188 case DW_OP_breg12:
2189 case DW_OP_breg13:
2190 case DW_OP_breg14:
2191 case DW_OP_breg15:
2192 case DW_OP_breg16:
2193 case DW_OP_breg17:
2194 case DW_OP_breg18:
2195 case DW_OP_breg19:
2196 case DW_OP_breg20:
2197 case DW_OP_breg21:
2198 case DW_OP_breg22:
2199 case DW_OP_breg23:
2200 case DW_OP_breg24:
2201 case DW_OP_breg25:
2202 case DW_OP_breg26:
2203 case DW_OP_breg27:
2204 case DW_OP_breg28:
2205 case DW_OP_breg29:
2206 case DW_OP_breg30:
2207 case DW_OP_breg31:
2208 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2209 break;
2210 case DW_OP_regx:
2211 {
2212 unsigned r = val1->v.val_unsigned;
2213 if (for_eh_or_skip >= 0)
2214 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2215 gcc_assert (size_of_uleb128 (r)
2216 == size_of_uleb128 (val1->v.val_unsigned));
2217 dw2_asm_output_data_uleb128 (r, NULL);
2218 }
2219 break;
2220 case DW_OP_fbreg:
2221 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2222 break;
2223 case DW_OP_bregx:
2224 {
2225 unsigned r = val1->v.val_unsigned;
2226 if (for_eh_or_skip >= 0)
2227 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2228 gcc_assert (size_of_uleb128 (r)
2229 == size_of_uleb128 (val1->v.val_unsigned));
2230 dw2_asm_output_data_uleb128 (r, NULL);
2231 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2232 }
2233 break;
2234 case DW_OP_piece:
2235 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2236 break;
2237 case DW_OP_bit_piece:
2238 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2239 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2240 break;
2241 case DW_OP_deref_size:
2242 case DW_OP_xderef_size:
2243 dw2_asm_output_data (1, val1->v.val_int, NULL);
2244 break;
2245
2246 case DW_OP_addr:
2247 if (loc->dtprel)
2248 {
2249 if (targetm.asm_out.output_dwarf_dtprel)
2250 {
2251 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2252 DWARF2_ADDR_SIZE,
2253 val1->v.val_addr);
2254 fputc ('\n', asm_out_file);
2255 }
2256 else
2257 gcc_unreachable ();
2258 }
2259 else
2260 {
2261 #ifdef DWARF2_DEBUGGING_INFO
2262 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2263 #else
2264 gcc_unreachable ();
2265 #endif
2266 }
2267 break;
2268
2269 case DW_OP_GNU_addr_index:
2270 case DW_OP_GNU_const_index:
2271 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2272 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2273 "(index into .debug_addr)");
2274 break;
2275
2276 case DW_OP_call2:
2277 case DW_OP_call4:
2278 {
2279 unsigned long die_offset
2280 = get_ref_die_offset (val1->v.val_die_ref.die);
2281 /* Make sure the offset has been computed and that we can encode it as
2282 an operand. */
2283 gcc_assert (die_offset > 0
2284 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2285 ? 0xffff
2286 : 0xffffffff));
2287 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2288 die_offset, NULL);
2289 }
2290 break;
2291
2292 case DW_OP_call_ref:
2293 case DW_OP_GNU_variable_value:
2294 {
2295 char label[MAX_ARTIFICIAL_LABEL_BYTES
2296 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2297 gcc_assert (val1->val_class == dw_val_class_die_ref);
2298 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2299 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2300 }
2301 break;
2302
2303 case DW_OP_implicit_pointer:
2304 case DW_OP_GNU_implicit_pointer:
2305 {
2306 char label[MAX_ARTIFICIAL_LABEL_BYTES
2307 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2308 gcc_assert (val1->val_class == dw_val_class_die_ref);
2309 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2310 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2311 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2312 }
2313 break;
2314
2315 case DW_OP_entry_value:
2316 case DW_OP_GNU_entry_value:
2317 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2318 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2319 break;
2320
2321 case DW_OP_const_type:
2322 case DW_OP_GNU_const_type:
2323 {
2324 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2325 gcc_assert (o);
2326 dw2_asm_output_data_uleb128 (o, NULL);
2327 switch (val2->val_class)
2328 {
2329 case dw_val_class_const:
2330 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2331 dw2_asm_output_data (1, l, NULL);
2332 dw2_asm_output_data (l, val2->v.val_int, NULL);
2333 break;
2334 case dw_val_class_vec:
2335 {
2336 unsigned int elt_size = val2->v.val_vec.elt_size;
2337 unsigned int len = val2->v.val_vec.length;
2338 unsigned int i;
2339 unsigned char *p;
2340
2341 l = len * elt_size;
2342 dw2_asm_output_data (1, l, NULL);
2343 if (elt_size > sizeof (HOST_WIDE_INT))
2344 {
2345 elt_size /= 2;
2346 len *= 2;
2347 }
2348 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2349 i < len;
2350 i++, p += elt_size)
2351 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2352 "fp or vector constant word %u", i);
2353 }
2354 break;
2355 case dw_val_class_const_double:
2356 {
2357 unsigned HOST_WIDE_INT first, second;
2358 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2359
2360 dw2_asm_output_data (1, 2 * l, NULL);
2361 if (WORDS_BIG_ENDIAN)
2362 {
2363 first = val2->v.val_double.high;
2364 second = val2->v.val_double.low;
2365 }
2366 else
2367 {
2368 first = val2->v.val_double.low;
2369 second = val2->v.val_double.high;
2370 }
2371 dw2_asm_output_data (l, first, NULL);
2372 dw2_asm_output_data (l, second, NULL);
2373 }
2374 break;
2375 case dw_val_class_wide_int:
2376 {
2377 int i;
2378 int len = get_full_len (*val2->v.val_wide);
2379 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2380
2381 dw2_asm_output_data (1, len * l, NULL);
2382 if (WORDS_BIG_ENDIAN)
2383 for (i = len - 1; i >= 0; --i)
2384 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2385 else
2386 for (i = 0; i < len; ++i)
2387 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2388 }
2389 break;
2390 default:
2391 gcc_unreachable ();
2392 }
2393 }
2394 break;
2395 case DW_OP_regval_type:
2396 case DW_OP_GNU_regval_type:
2397 {
2398 unsigned r = val1->v.val_unsigned;
2399 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2400 gcc_assert (o);
2401 if (for_eh_or_skip >= 0)
2402 {
2403 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2404 gcc_assert (size_of_uleb128 (r)
2405 == size_of_uleb128 (val1->v.val_unsigned));
2406 }
2407 dw2_asm_output_data_uleb128 (r, NULL);
2408 dw2_asm_output_data_uleb128 (o, NULL);
2409 }
2410 break;
2411 case DW_OP_deref_type:
2412 case DW_OP_GNU_deref_type:
2413 {
2414 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2415 gcc_assert (o);
2416 dw2_asm_output_data (1, val1->v.val_int, NULL);
2417 dw2_asm_output_data_uleb128 (o, NULL);
2418 }
2419 break;
2420 case DW_OP_convert:
2421 case DW_OP_reinterpret:
2422 case DW_OP_GNU_convert:
2423 case DW_OP_GNU_reinterpret:
2424 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2425 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2426 else
2427 {
2428 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2429 gcc_assert (o);
2430 dw2_asm_output_data_uleb128 (o, NULL);
2431 }
2432 break;
2433
2434 case DW_OP_GNU_parameter_ref:
2435 {
2436 unsigned long o;
2437 gcc_assert (val1->val_class == dw_val_class_die_ref);
2438 o = get_ref_die_offset (val1->v.val_die_ref.die);
2439 dw2_asm_output_data (4, o, NULL);
2440 }
2441 break;
2442
2443 default:
2444 /* Other codes have no operands. */
2445 break;
2446 }
2447 }
2448
2449 /* Output a sequence of location operations.
2450 The for_eh_or_skip parameter controls whether register numbers are
2451 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2452 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2453 info). This should be suppressed for the cases that have not been converted
2454 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2455
2456 void
2457 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2458 {
2459 for (; loc != NULL; loc = loc->dw_loc_next)
2460 {
2461 enum dwarf_location_atom opc = loc->dw_loc_opc;
2462 /* Output the opcode. */
2463 if (for_eh_or_skip >= 0
2464 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2465 {
2466 unsigned r = (opc - DW_OP_breg0);
2467 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2468 gcc_assert (r <= 31);
2469 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2470 }
2471 else if (for_eh_or_skip >= 0
2472 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2473 {
2474 unsigned r = (opc - DW_OP_reg0);
2475 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2476 gcc_assert (r <= 31);
2477 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2478 }
2479
2480 dw2_asm_output_data (1, opc,
2481 "%s", dwarf_stack_op_name (opc));
2482
2483 /* Output the operand(s) (if any). */
2484 output_loc_operands (loc, for_eh_or_skip);
2485 }
2486 }
2487
2488 /* Output location description stack opcode's operands (if any).
2489 The output is single bytes on a line, suitable for .cfi_escape. */
2490
2491 static void
2492 output_loc_operands_raw (dw_loc_descr_ref loc)
2493 {
2494 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2495 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2496
2497 switch (loc->dw_loc_opc)
2498 {
2499 case DW_OP_addr:
2500 case DW_OP_GNU_addr_index:
2501 case DW_OP_GNU_const_index:
2502 case DW_OP_implicit_value:
2503 /* We cannot output addresses in .cfi_escape, only bytes. */
2504 gcc_unreachable ();
2505
2506 case DW_OP_const1u:
2507 case DW_OP_const1s:
2508 case DW_OP_pick:
2509 case DW_OP_deref_size:
2510 case DW_OP_xderef_size:
2511 fputc (',', asm_out_file);
2512 dw2_asm_output_data_raw (1, val1->v.val_int);
2513 break;
2514
2515 case DW_OP_const2u:
2516 case DW_OP_const2s:
2517 fputc (',', asm_out_file);
2518 dw2_asm_output_data_raw (2, val1->v.val_int);
2519 break;
2520
2521 case DW_OP_const4u:
2522 case DW_OP_const4s:
2523 fputc (',', asm_out_file);
2524 dw2_asm_output_data_raw (4, val1->v.val_int);
2525 break;
2526
2527 case DW_OP_const8u:
2528 case DW_OP_const8s:
2529 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2530 fputc (',', asm_out_file);
2531 dw2_asm_output_data_raw (8, val1->v.val_int);
2532 break;
2533
2534 case DW_OP_skip:
2535 case DW_OP_bra:
2536 {
2537 int offset;
2538
2539 gcc_assert (val1->val_class == dw_val_class_loc);
2540 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2541
2542 fputc (',', asm_out_file);
2543 dw2_asm_output_data_raw (2, offset);
2544 }
2545 break;
2546
2547 case DW_OP_regx:
2548 {
2549 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2550 gcc_assert (size_of_uleb128 (r)
2551 == size_of_uleb128 (val1->v.val_unsigned));
2552 fputc (',', asm_out_file);
2553 dw2_asm_output_data_uleb128_raw (r);
2554 }
2555 break;
2556
2557 case DW_OP_constu:
2558 case DW_OP_plus_uconst:
2559 case DW_OP_piece:
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2562 break;
2563
2564 case DW_OP_bit_piece:
2565 fputc (',', asm_out_file);
2566 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2567 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2568 break;
2569
2570 case DW_OP_consts:
2571 case DW_OP_breg0:
2572 case DW_OP_breg1:
2573 case DW_OP_breg2:
2574 case DW_OP_breg3:
2575 case DW_OP_breg4:
2576 case DW_OP_breg5:
2577 case DW_OP_breg6:
2578 case DW_OP_breg7:
2579 case DW_OP_breg8:
2580 case DW_OP_breg9:
2581 case DW_OP_breg10:
2582 case DW_OP_breg11:
2583 case DW_OP_breg12:
2584 case DW_OP_breg13:
2585 case DW_OP_breg14:
2586 case DW_OP_breg15:
2587 case DW_OP_breg16:
2588 case DW_OP_breg17:
2589 case DW_OP_breg18:
2590 case DW_OP_breg19:
2591 case DW_OP_breg20:
2592 case DW_OP_breg21:
2593 case DW_OP_breg22:
2594 case DW_OP_breg23:
2595 case DW_OP_breg24:
2596 case DW_OP_breg25:
2597 case DW_OP_breg26:
2598 case DW_OP_breg27:
2599 case DW_OP_breg28:
2600 case DW_OP_breg29:
2601 case DW_OP_breg30:
2602 case DW_OP_breg31:
2603 case DW_OP_fbreg:
2604 fputc (',', asm_out_file);
2605 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2606 break;
2607
2608 case DW_OP_bregx:
2609 {
2610 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2611 gcc_assert (size_of_uleb128 (r)
2612 == size_of_uleb128 (val1->v.val_unsigned));
2613 fputc (',', asm_out_file);
2614 dw2_asm_output_data_uleb128_raw (r);
2615 fputc (',', asm_out_file);
2616 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2617 }
2618 break;
2619
2620 case DW_OP_implicit_pointer:
2621 case DW_OP_entry_value:
2622 case DW_OP_const_type:
2623 case DW_OP_regval_type:
2624 case DW_OP_deref_type:
2625 case DW_OP_convert:
2626 case DW_OP_reinterpret:
2627 case DW_OP_GNU_implicit_pointer:
2628 case DW_OP_GNU_entry_value:
2629 case DW_OP_GNU_const_type:
2630 case DW_OP_GNU_regval_type:
2631 case DW_OP_GNU_deref_type:
2632 case DW_OP_GNU_convert:
2633 case DW_OP_GNU_reinterpret:
2634 case DW_OP_GNU_parameter_ref:
2635 gcc_unreachable ();
2636 break;
2637
2638 default:
2639 /* Other codes have no operands. */
2640 break;
2641 }
2642 }
2643
2644 void
2645 output_loc_sequence_raw (dw_loc_descr_ref loc)
2646 {
2647 while (1)
2648 {
2649 enum dwarf_location_atom opc = loc->dw_loc_opc;
2650 /* Output the opcode. */
2651 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2652 {
2653 unsigned r = (opc - DW_OP_breg0);
2654 r = DWARF2_FRAME_REG_OUT (r, 1);
2655 gcc_assert (r <= 31);
2656 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2657 }
2658 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2659 {
2660 unsigned r = (opc - DW_OP_reg0);
2661 r = DWARF2_FRAME_REG_OUT (r, 1);
2662 gcc_assert (r <= 31);
2663 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2664 }
2665 /* Output the opcode. */
2666 fprintf (asm_out_file, "%#x", opc);
2667 output_loc_operands_raw (loc);
2668
2669 if (!loc->dw_loc_next)
2670 break;
2671 loc = loc->dw_loc_next;
2672
2673 fputc (',', asm_out_file);
2674 }
2675 }
2676
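/* As a hypothetical example (register number and offset chosen arbitrarily,
   and assuming the EH frame register mapping leaves the number unchanged),
   a single DW_OP_bregx 33, -8 descriptor is printed as "0x92,0x21,0x78":
   the opcode in hex followed by the comma-separated ULEB128/SLEB128 operand
   bytes, ready for the caller to wrap in a .cfi_escape directive.  */
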
2677 /* This function builds a dwarf location descriptor sequence from a
2678 dw_cfa_location, adding the given OFFSET to the result of the
2679 expression. */
2680
2681 struct dw_loc_descr_node *
2682 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2683 {
2684 struct dw_loc_descr_node *head, *tmp;
2685
2686 offset += cfa->offset;
2687
2688 if (cfa->indirect)
2689 {
2690 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2691 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2692 head->dw_loc_oprnd1.val_entry = NULL;
2693 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2694 add_loc_descr (&head, tmp);
2695 loc_descr_plus_const (&head, offset);
2696 }
2697 else
2698 head = new_reg_loc_descr (cfa->reg, offset);
2699
2700 return head;
2701 }
2702
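/* Roughly, and purely as an illustration: for a CFA recorded as register 6
   with offset 16, indirect, base_offset 0, and an extra OFFSET of 8, the
   code above yields the sequence DW_OP_breg6 0; DW_OP_deref;
   DW_OP_plus_uconst 24 -- load the CFA value from memory at the register
   plus base_offset, then add the combined offset (cfa->offset + OFFSET).  */
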
2703 /* This function builds a dwarf location descriptor sequence for
2704 the address at OFFSET from the CFA when stack is aligned to
2705 ALIGNMENT bytes.  */
2706
2707 struct dw_loc_descr_node *
2708 build_cfa_aligned_loc (dw_cfa_location *cfa,
2709 poly_int64 offset, HOST_WIDE_INT alignment)
2710 {
2711 struct dw_loc_descr_node *head;
2712 unsigned int dwarf_fp
2713 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2714
2715 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2716 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2717 {
2718 head = new_reg_loc_descr (dwarf_fp, 0);
2719 add_loc_descr (&head, int_loc_descriptor (alignment));
2720 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2721 loc_descr_plus_const (&head, offset);
2722 }
2723 else
2724 head = new_reg_loc_descr (dwarf_fp, offset);
2725 return head;
2726 }
2727 \f
2728 /* And now, the support for symbolic debugging information. */
2729
2730 /* .debug_str support. */
2731
2732 static void dwarf2out_init (const char *);
2733 static void dwarf2out_finish (const char *);
2734 static void dwarf2out_early_finish (const char *);
2735 static void dwarf2out_assembly_start (void);
2736 static void dwarf2out_define (unsigned int, const char *);
2737 static void dwarf2out_undef (unsigned int, const char *);
2738 static void dwarf2out_start_source_file (unsigned, const char *);
2739 static void dwarf2out_end_source_file (unsigned);
2740 static void dwarf2out_function_decl (tree);
2741 static void dwarf2out_begin_block (unsigned, unsigned);
2742 static void dwarf2out_end_block (unsigned, unsigned);
2743 static bool dwarf2out_ignore_block (const_tree);
2744 static void dwarf2out_early_global_decl (tree);
2745 static void dwarf2out_late_global_decl (tree);
2746 static void dwarf2out_type_decl (tree, int);
2747 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2748 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2749 dw_die_ref);
2750 static void dwarf2out_abstract_function (tree);
2751 static void dwarf2out_var_location (rtx_insn *);
2752 static void dwarf2out_inline_entry (tree);
2753 static void dwarf2out_size_function (tree);
2754 static void dwarf2out_begin_function (tree);
2755 static void dwarf2out_end_function (unsigned int);
2756 static void dwarf2out_register_main_translation_unit (tree unit);
2757 static void dwarf2out_set_name (tree, tree);
2758 static void dwarf2out_register_external_die (tree decl, const char *sym,
2759 unsigned HOST_WIDE_INT off);
2760 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2761 unsigned HOST_WIDE_INT *off);
2762
2763 /* The debug hooks structure. */
2764
2765 const struct gcc_debug_hooks dwarf2_debug_hooks =
2766 {
2767 dwarf2out_init,
2768 dwarf2out_finish,
2769 dwarf2out_early_finish,
2770 dwarf2out_assembly_start,
2771 dwarf2out_define,
2772 dwarf2out_undef,
2773 dwarf2out_start_source_file,
2774 dwarf2out_end_source_file,
2775 dwarf2out_begin_block,
2776 dwarf2out_end_block,
2777 dwarf2out_ignore_block,
2778 dwarf2out_source_line,
2779 dwarf2out_begin_prologue,
2780 #if VMS_DEBUGGING_INFO
2781 dwarf2out_vms_end_prologue,
2782 dwarf2out_vms_begin_epilogue,
2783 #else
2784 debug_nothing_int_charstar,
2785 debug_nothing_int_charstar,
2786 #endif
2787 dwarf2out_end_epilogue,
2788 dwarf2out_begin_function,
2789 dwarf2out_end_function, /* end_function */
2790 dwarf2out_register_main_translation_unit,
2791 dwarf2out_function_decl, /* function_decl */
2792 dwarf2out_early_global_decl,
2793 dwarf2out_late_global_decl,
2794 dwarf2out_type_decl, /* type_decl */
2795 dwarf2out_imported_module_or_decl,
2796 dwarf2out_die_ref_for_decl,
2797 dwarf2out_register_external_die,
2798 debug_nothing_tree, /* deferred_inline_function */
2799 /* The DWARF 2 backend tries to reduce debugging bloat by not
2800 emitting the abstract description of inline functions until
2801 something tries to reference them. */
2802 dwarf2out_abstract_function, /* outlining_inline_function */
2803 debug_nothing_rtx_code_label, /* label */
2804 debug_nothing_int, /* handle_pch */
2805 dwarf2out_var_location,
2806 dwarf2out_inline_entry, /* inline_entry */
2807 dwarf2out_size_function, /* size_function */
2808 dwarf2out_switch_text_section,
2809 dwarf2out_set_name,
2810 1, /* start_end_main_source_file */
2811 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2812 };
2813
2814 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2815 {
2816 dwarf2out_init,
2817 debug_nothing_charstar,
2818 debug_nothing_charstar,
2819 dwarf2out_assembly_start,
2820 debug_nothing_int_charstar,
2821 debug_nothing_int_charstar,
2822 debug_nothing_int_charstar,
2823 debug_nothing_int,
2824 debug_nothing_int_int, /* begin_block */
2825 debug_nothing_int_int, /* end_block */
2826 debug_true_const_tree, /* ignore_block */
2827 dwarf2out_source_line, /* source_line */
2828 debug_nothing_int_int_charstar, /* begin_prologue */
2829 debug_nothing_int_charstar, /* end_prologue */
2830 debug_nothing_int_charstar, /* begin_epilogue */
2831 debug_nothing_int_charstar, /* end_epilogue */
2832 debug_nothing_tree, /* begin_function */
2833 debug_nothing_int, /* end_function */
2834 debug_nothing_tree, /* register_main_translation_unit */
2835 debug_nothing_tree, /* function_decl */
2836 debug_nothing_tree, /* early_global_decl */
2837 debug_nothing_tree, /* late_global_decl */
2838 debug_nothing_tree_int, /* type_decl */
2839 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2840 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2841 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2842 debug_nothing_tree, /* deferred_inline_function */
2843 debug_nothing_tree, /* outlining_inline_function */
2844 debug_nothing_rtx_code_label, /* label */
2845 debug_nothing_int, /* handle_pch */
2846 debug_nothing_rtx_insn, /* var_location */
2847 debug_nothing_tree, /* inline_entry */
2848 debug_nothing_tree, /* size_function */
2849 debug_nothing_void, /* switch_text_section */
2850 debug_nothing_tree_tree, /* set_name */
2851 0, /* start_end_main_source_file */
2852 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2853 };
2854 \f
2855 /* NOTE: In the comments in this file, many references are made to
2856 "Debugging Information Entries". This term is abbreviated as `DIE'
2857 throughout the remainder of this file. */
2858
2859 /* An internal representation of the DWARF output is built, and then
2860 walked to generate the DWARF debugging info. The walk of the internal
2861 representation is done after the entire program has been compiled.
2862 The types below are used to describe the internal representation. */
2863
2864 /* Whether to put type DIEs into their own section .debug_types instead
2865 of making them part of the .debug_info section. Only supported for
2866 Dwarf V4 or higher, and only if the user didn't disable them through
2867 -fno-debug-types-section.  It is more efficient to put them in
2868 separate comdat sections since the linker will then be able to
2869 remove duplicates.  But not all tools support .debug_types sections
2870 yet.  For Dwarf V5 or higher .debug_types doesn't exist any more;
2871 type units instead use the DW_UT_type unit type in the .debug_info section.  */
2872
2873 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2874
2875 /* Various DIE's use offsets relative to the beginning of the
2876 .debug_info section to refer to each other. */
2877
2878 typedef long int dw_offset;
2879
2880 struct comdat_type_node;
2881
2882 /* The entries in the line_info table more-or-less mirror the opcodes
2883 that are used in the real dwarf line table. Arrays of these entries
2884 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2885 supported. */
2886
2887 enum dw_line_info_opcode {
2888 /* Emit DW_LNE_set_address; the operand is the label index. */
2889 LI_set_address,
2890
2891 /* Emit a row to the matrix with the given line. This may be done
2892 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2893 special opcodes. */
2894 LI_set_line,
2895
2896 /* Emit a DW_LNS_set_file. */
2897 LI_set_file,
2898
2899 /* Emit a DW_LNS_set_column. */
2900 LI_set_column,
2901
2902 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2903 LI_negate_stmt,
2904
2905 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2906 LI_set_prologue_end,
2907 LI_set_epilogue_begin,
2908
2909 /* Emit a DW_LNE_set_discriminator. */
2910 LI_set_discriminator,
2911
2912 /* Output a Fixed Advance PC; the target PC is the label index; the
2913 base PC is the previous LI_adv_address or LI_set_address entry.
2914 We only use this when emitting debug views without assembler
2915 support, at explicit user request. Ideally, we should only use
2916 it when the offset might be zero but we can't tell: it's the only
2917 way to maybe change the PC without resetting the view number. */
2918 LI_adv_address
2919 };
2920
2921 typedef struct GTY(()) dw_line_info_struct {
2922 enum dw_line_info_opcode opcode;
2923 unsigned int val;
2924 } dw_line_info_entry;
2925
2926
2927 struct GTY(()) dw_line_info_table {
2928 /* The label that marks the end of this section. */
2929 const char *end_label;
2930
2931 /* The values for the last row of the matrix, as collected in the table.
2932 These are used to minimize the changes to the next row. */
2933 unsigned int file_num;
2934 unsigned int line_num;
2935 unsigned int column_num;
2936 int discrim_num;
2937 bool is_stmt;
2938 bool in_use;
2939
2940 /* This denotes the NEXT view number.
2941
2942 If it is 0, it is known that the NEXT view will be the first view
2943 at the given PC.
2944
2945 If it is -1, we're forcing the view number to be reset, e.g. at a
2946 function entry.
2947
2948 The meaning of other nonzero values depends on whether we're
2949 computing views internally or leaving it for the assembler to do
2950 so. If we're emitting them internally, view denotes the view
2951 number since the last known advance of PC. If we're leaving it
2952 for the assembler, it denotes the LVU label number that we're
2953 going to ask the assembler to assign. */
2954 var_loc_view view;
2955
2956 /* This counts the number of symbolic views emitted in this table
2957 since the latest view reset. Its max value, over all tables,
2958 sets symview_upper_bound. */
2959 var_loc_view symviews_since_reset;
2960
2961 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2962 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2963 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
2964 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
2965
2966 vec<dw_line_info_entry, va_gc> *entries;
2967 };
2968
2969 /* This is an upper bound for view numbers that the assembler may
2970 assign to symbolic views output in this translation. It is used to
2971 decide how big a field to use to represent view numbers in
2972 symview-classed attributes. */
2973
2974 static var_loc_view symview_upper_bound;
2975
2976 /* If we're keeping track of location views and their reset points, and
2977 INSN is a reset point (i.e., it necessarily advances the PC), mark
2978 the next view in TABLE as reset. */
2979
2980 static void
2981 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
2982 {
2983 if (!debug_internal_reset_location_views)
2984 return;
2985
2986 /* Maybe turn (part of?) this test into a default target hook. */
2987 int reset = 0;
2988
2989 if (targetm.reset_location_view)
2990 reset = targetm.reset_location_view (insn);
2991
2992 if (reset)
2993 ;
2994 else if (JUMP_TABLE_DATA_P (insn))
2995 reset = 1;
2996 else if (GET_CODE (insn) == USE
2997 || GET_CODE (insn) == CLOBBER
2998 || GET_CODE (insn) == ASM_INPUT
2999 || asm_noperands (insn) >= 0)
3000 ;
3001 else if (get_attr_min_length (insn) > 0)
3002 reset = 1;
3003
3004 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3005 RESET_NEXT_VIEW (table->view);
3006 }
3007
3008 /* Each DIE attribute has a field specifying the attribute kind,
3009 a link to the next attribute in the chain, and an attribute value.
3010 Attributes are typically linked below the DIE they modify. */
3011
3012 typedef struct GTY(()) dw_attr_struct {
3013 enum dwarf_attribute dw_attr;
3014 dw_val_node dw_attr_val;
3015 }
3016 dw_attr_node;
3017
3018
3019 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3020 The children of each node form a circular list linked by
3021 die_sib. die_child points to the node *before* the "first" child node. */
3022
3023 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3024 union die_symbol_or_type_node
3025 {
3026 const char * GTY ((tag ("0"))) die_symbol;
3027 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3028 }
3029 GTY ((desc ("%0.comdat_type_p"))) die_id;
3030 vec<dw_attr_node, va_gc> *die_attr;
3031 dw_die_ref die_parent;
3032 dw_die_ref die_child;
3033 dw_die_ref die_sib;
3034 dw_die_ref die_definition; /* ref from a specification to its definition */
3035 dw_offset die_offset;
3036 unsigned long die_abbrev;
3037 int die_mark;
3038 unsigned int decl_id;
3039 enum dwarf_tag die_tag;
3040 /* Die is used and must not be pruned as unused. */
3041 BOOL_BITFIELD die_perennial_p : 1;
3042 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3043 /* For an external ref to die_symbol if die_offset contains an extra
3044 offset to that symbol. */
3045 BOOL_BITFIELD with_offset : 1;
3046 /* Whether this DIE was removed from the DIE tree, for example via
3047 prune_unused_types. We don't consider those present from the
3048 DIE lookup routines. */
3049 BOOL_BITFIELD removed : 1;
3050 /* Lots of spare bits. */
3051 }
3052 die_node;
3053
3054 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3055 static bool early_dwarf;
3056 static bool early_dwarf_finished;
3057 struct set_early_dwarf {
3058 bool saved;
3059 set_early_dwarf () : saved(early_dwarf)
3060 {
3061 gcc_assert (! early_dwarf_finished);
3062 early_dwarf = true;
3063 }
3064 ~set_early_dwarf () { early_dwarf = saved; }
3065 };
3066
3067 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3068 #define FOR_EACH_CHILD(die, c, expr) do { \
3069 c = die->die_child; \
3070 if (c) do { \
3071 c = c->die_sib; \
3072 expr; \
3073 } while (c != die->die_child); \
3074 } while (0)
3075
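/* For example, a typical use (mirroring how the macro is used elsewhere in
   this file) visits every child of DIE exactly once, in sibling order:

     dw_die_ref c;
     FOR_EACH_CHILD (die, c, calc_die_sizes (c));  */
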
3076 /* The pubname structure */
3077
3078 typedef struct GTY(()) pubname_struct {
3079 dw_die_ref die;
3080 const char *name;
3081 }
3082 pubname_entry;
3083
3084
3085 struct GTY(()) dw_ranges {
3086 const char *label;
3087 /* If this is positive, it's a block number, otherwise it's a
3088 bitwise-negated index into dw_ranges_by_label. */
3089 int num;
3090 /* Index for the range list for DW_FORM_rnglistx. */
3091 unsigned int idx : 31;
3092 /* True if this range might be possibly in a different section
3093 from previous entry. */
3094 unsigned int maybe_new_sec : 1;
3095 };
3096
3097 /* A structure to hold a macinfo entry. */
3098
3099 typedef struct GTY(()) macinfo_struct {
3100 unsigned char code;
3101 unsigned HOST_WIDE_INT lineno;
3102 const char *info;
3103 }
3104 macinfo_entry;
3105
3106
3107 struct GTY(()) dw_ranges_by_label {
3108 const char *begin;
3109 const char *end;
3110 };
3111
3112 /* The comdat type node structure. */
3113 struct GTY(()) comdat_type_node
3114 {
3115 dw_die_ref root_die;
3116 dw_die_ref type_die;
3117 dw_die_ref skeleton_die;
3118 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3119 comdat_type_node *next;
3120 };
3121
3122 /* A list of DIEs for which we can't determine ancestry (parent_die
3123 field) just yet. Later in dwarf2out_finish we will fill in the
3124 missing bits. */
3125 typedef struct GTY(()) limbo_die_struct {
3126 dw_die_ref die;
3127 /* The tree for which this DIE was created. We use this to
3128 determine ancestry later. */
3129 tree created_for;
3130 struct limbo_die_struct *next;
3131 }
3132 limbo_die_node;
3133
3134 typedef struct skeleton_chain_struct
3135 {
3136 dw_die_ref old_die;
3137 dw_die_ref new_die;
3138 struct skeleton_chain_struct *parent;
3139 }
3140 skeleton_chain_node;
3141
3142 /* Define a macro which returns nonzero for a TYPE_DECL which was
3143 implicitly generated for a type.
3144
3145 Note that, unlike the C front-end (which generates a NULL-named
3146 TYPE_DECL node for each complete tagged type, each array type,
3147 and each function type node created) the C++ front-end generates
3148 a _named_ TYPE_DECL node for each tagged type node created.
3149 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3150 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3151 front-end, but for each type, tagged or not. */
3152
3153 #define TYPE_DECL_IS_STUB(decl) \
3154 (DECL_NAME (decl) == NULL_TREE \
3155 || (DECL_ARTIFICIAL (decl) \
3156 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3157 /* This is necessary for stub decls that \
3158 appear in nested inline functions. */ \
3159 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3160 && (decl_ultimate_origin (decl) \
3161 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3162
3163 /* Information concerning the compilation unit's programming
3164 language, and compiler version. */
3165
3166 /* Fixed size portion of the DWARF compilation unit header. */
3167 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3168 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3169 + (dwarf_version >= 5 ? 4 : 3))
3170
3171 /* Fixed size portion of the DWARF comdat type unit header. */
3172 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3173 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3174 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3175
3176 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3177 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3178 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3179
3180 /* Fixed size portion of public names info. */
3181 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3182
3183 /* Fixed size portion of the address range info. */
3184 #define DWARF_ARANGES_HEADER_SIZE \
3185 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3186 DWARF2_ADDR_SIZE * 2) \
3187 - DWARF_INITIAL_LENGTH_SIZE)
3188
3189 /* Size of padding portion in the address range info. It must be
3190 aligned to twice the pointer size. */
3191 #define DWARF_ARANGES_PAD_SIZE \
3192 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3193 DWARF2_ADDR_SIZE * 2) \
3194 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3195
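/* As a worked example, with 32-bit DWARF (4-byte initial length and
   offsets) and an 8-byte address size, the unpadded header is
   4 + 4 + 4 = 12 bytes; rounding to twice the address size gives 16, so
   DWARF_ARANGES_HEADER_SIZE is 16 - 4 = 12 and DWARF_ARANGES_PAD_SIZE
   is 16 - 12 = 4.  */
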
3196 /* Use assembler line directives if available. */
3197 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3198 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3199 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3200 #else
3201 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3202 #endif
3203 #endif
3204
3205 /* Use assembler views in line directives if available. */
3206 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3207 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3208 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3209 #else
3210 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3211 #endif
3212 #endif
3213
3214 /* Return true if GCC configure detected assembler support for .loc. */
3215
3216 bool
3217 dwarf2out_default_as_loc_support (void)
3218 {
3219 return DWARF2_ASM_LINE_DEBUG_INFO;
3220 #if (GCC_VERSION >= 3000)
3221 # undef DWARF2_ASM_LINE_DEBUG_INFO
3222 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3223 #endif
3224 }
3225
3226 /* Return true if GCC configure detected assembler support for views
3227 in .loc directives. */
3228
3229 bool
3230 dwarf2out_default_as_locview_support (void)
3231 {
3232 return DWARF2_ASM_VIEW_DEBUG_INFO;
3233 #if (GCC_VERSION >= 3000)
3234 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3235 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3236 #endif
3237 }
3238
3239 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3240 view computation, and it refers to a view identifier for which we
3241 will not emit a label because it is known to map to a view number
3242 zero. We won't allocate the bitmap if we're not using assembler
3243 support for location views, but we have to make the variable
3244 visible for GGC and for code that will be optimized out for lack of
3245 support but that's still parsed and compiled. We could abstract it
3246 out with macros, but it's not worth it. */
3247 static GTY(()) bitmap zero_view_p;
3248
3249 /* Evaluate to TRUE iff N is known to identify the first location view
3250 at its PC. When not using assembler location view computation,
3251 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3252 and views label numbers recorded in it are the ones known to be
3253 zero. */
3254 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3255 || (N) == (var_loc_view)-1 \
3256 || (zero_view_p \
3257 && bitmap_bit_p (zero_view_p, (N))))
3258
3259 /* Return true iff we're to emit .loc directives for the assembler to
3260 generate line number sections.
3261
3262 When we're not emitting views, all we need from the assembler is
3263 support for .loc directives.
3264
3265 If we are emitting views, we can only use the assembler's .loc
3266 support if it also supports views.
3267
3268 When the compiler is emitting the line number programs and
3269 computing view numbers itself, it resets view numbers at known PC
3270 changes and counts from that, and then it emits view numbers as
3271 literal constants in locviewlists. There are cases in which the
3272 compiler is not sure about PC changes, e.g. when extra alignment is
3273 requested for a label. In these cases, the compiler may not reset
3274 the view counter, and the potential PC advance in the line number
3275 program will use an opcode that does not reset the view counter
3276 even if the PC actually changes, so that compiler and debug info
3277 consumer can keep view numbers in sync.
3278
3279 When the compiler defers view computation to the assembler, it
3280 emits symbolic view numbers in locviewlists, with the exception of
3281 views known to be zero (forced resets, or reset after
3282 compiler-visible PC changes): instead of emitting symbols for
3283 these, we emit literal zero and assert the assembler agrees with
3284 the compiler's assessment. We could use symbolic views everywhere,
3285 instead of special-casing zero views, but then we'd be unable to
3286 optimize out locviewlists that contain only zeros. */
3287
3288 static bool
3289 output_asm_line_debug_info (void)
3290 {
3291 return (dwarf2out_as_loc_support
3292 && (dwarf2out_as_locview_support
3293 || !debug_variable_location_views));
3294 }
3295
3296 /* Minimum line offset in a special line info. opcode.
3297 This value was chosen to give a reasonable range of values. */
3298 #define DWARF_LINE_BASE -10
3299
3300 /* First special line opcode - leave room for the standard opcodes. */
3301 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3302
3303 /* Range of line offsets in a special line info. opcode. */
3304 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
3305
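/* With the definitions above, the standard DWARF special-opcode formula

     opcode = (line_delta - DWARF_LINE_BASE)
	      + (DWARF_LINE_RANGE * addr_delta) + DWARF_LINE_OPCODE_BASE

   gives, as a worked example, (2 - (-10)) + 242 * 0 + 13 == 25 for a line
   advance of +2 with no address advance.  Because DWARF_LINE_RANGE is so
   large here, a special opcode can carry almost no address advance, so PC
   advances are generally emitted as separate standard opcodes.  */
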
3306 /* Flag that indicates the initial value of the is_stmt_start flag.
3307 In the present implementation, we do not mark any lines as
3308 the beginning of a source statement, because that information
3309 is not made available by the GCC front-end. */
3310 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3311
3312 /* Maximum number of operations per instruction bundle. */
3313 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3314 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3315 #endif
3316
3317 /* This location is used by calc_die_sizes() to keep track of
3318 the offset of each DIE within the .debug_info section. */
3319 static unsigned long next_die_offset;
3320
3321 /* Record the root of the DIE's built for the current compilation unit. */
3322 static GTY(()) dw_die_ref single_comp_unit_die;
3323
3324 /* A list of type DIEs that have been separated into comdat sections. */
3325 static GTY(()) comdat_type_node *comdat_type_list;
3326
3327 /* A list of CU DIEs that have been separated. */
3328 static GTY(()) limbo_die_node *cu_die_list;
3329
3330 /* A list of DIEs with a NULL parent waiting to be relocated. */
3331 static GTY(()) limbo_die_node *limbo_die_list;
3332
3333 /* A list of DIEs for which we may have to generate
3334 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3335 static GTY(()) limbo_die_node *deferred_asm_name;
3336
3337 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3338 {
3339 typedef const char *compare_type;
3340
3341 static hashval_t hash (dwarf_file_data *);
3342 static bool equal (dwarf_file_data *, const char *);
3343 };
3344
3345 /* Filenames referenced by this compilation unit. */
3346 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3347
3348 struct decl_die_hasher : ggc_ptr_hash<die_node>
3349 {
3350 typedef tree compare_type;
3351
3352 static hashval_t hash (die_node *);
3353 static bool equal (die_node *, tree);
3354 };
3355 /* A hash table of references to DIE's that describe declarations.
3356 The key is a DECL_UID() which is a unique number identifying each decl. */
3357 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3358
3359 struct GTY ((for_user)) variable_value_struct {
3360 unsigned int decl_id;
3361 vec<dw_die_ref, va_gc> *dies;
3362 };
3363
3364 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3365 {
3366 typedef tree compare_type;
3367
3368 static hashval_t hash (variable_value_struct *);
3369 static bool equal (variable_value_struct *, tree);
3370 };
3371 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3372 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is the
3373 DECL_CONTEXT of the referenced VAR_DECLs.  */
3374 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3375
3376 struct block_die_hasher : ggc_ptr_hash<die_struct>
3377 {
3378 static hashval_t hash (die_struct *);
3379 static bool equal (die_struct *, die_struct *);
3380 };
3381
3382 /* A hash table of references to DIE's that describe COMMON blocks.
3383 The key is DECL_UID() ^ die_parent. */
3384 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3385
3386 typedef struct GTY(()) die_arg_entry_struct {
3387 dw_die_ref die;
3388 tree arg;
3389 } die_arg_entry;
3390
3391
3392 /* Node of the variable location list. */
3393 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3394 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3395 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3396 in mode of the EXPR_LIST node and first EXPR_LIST operand
3397 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3398 location or NULL for padding. For larger bitsizes,
3399 mode is 0 and first operand is a CONCAT with bitsize
3400 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3401 NULL as second operand. */
3402 rtx GTY (()) loc;
3403 const char * GTY (()) label;
3404 struct var_loc_node * GTY (()) next;
3405 var_loc_view view;
3406 };
3407
3408 /* Variable location list. */
3409 struct GTY ((for_user)) var_loc_list_def {
3410 struct var_loc_node * GTY (()) first;
3411
3412 /* Pointer to the last but one or last element of the
3413 chained list. If the list is empty, both first and
3414 last are NULL.  If the list contains just one node,
3415 or the last node certainly is not redundant, this points
3416 to the last node; otherwise it points to the last but one.
3417 Do not mark it for GC because it is marked through the chain. */
3418 struct var_loc_node * GTY ((skip ("%h"))) last;
3419
3420 /* Pointer to the last element before section switch;
3421 if NULL, either sections weren't switched or first
3422 is after section switch. */
3423 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3424
3425 /* DECL_UID of the variable decl. */
3426 unsigned int decl_id;
3427 };
3428 typedef struct var_loc_list_def var_loc_list;
3429
3430 /* Call argument location list. */
3431 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3432 rtx GTY (()) call_arg_loc_note;
3433 const char * GTY (()) label;
3434 tree GTY (()) block;
3435 bool tail_call_p;
3436 rtx GTY (()) symbol_ref;
3437 struct call_arg_loc_node * GTY (()) next;
3438 };
3439
3440
3441 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3442 {
3443 typedef const_tree compare_type;
3444
3445 static hashval_t hash (var_loc_list *);
3446 static bool equal (var_loc_list *, const_tree);
3447 };
3448
3449 /* Table of decl location linked lists. */
3450 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3451
3452 /* Head and tail of call_arg_loc chain. */
3453 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3454 static struct call_arg_loc_node *call_arg_loc_last;
3455
3456 /* Number of call sites in the current function. */
3457 static int call_site_count = -1;
3458 /* Number of tail call sites in the current function. */
3459 static int tail_call_site_count = -1;
3460
3461 /* A cached location list. */
3462 struct GTY ((for_user)) cached_dw_loc_list_def {
3463 /* The DECL_UID of the decl that this entry describes. */
3464 unsigned int decl_id;
3465
3466 /* The cached location list. */
3467 dw_loc_list_ref loc_list;
3468 };
3469 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3470
3471 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3472 {
3473
3474 typedef const_tree compare_type;
3475
3476 static hashval_t hash (cached_dw_loc_list *);
3477 static bool equal (cached_dw_loc_list *, const_tree);
3478 };
3479
3480 /* Table of cached location lists. */
3481 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3482
3483 /* A vector of references to DIE's that are uniquely identified by their tag,
3484 presence/absence of children DIE's, and list of attribute/value pairs. */
3485 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3486
3487 /* A hash map to remember the stack usage for DWARF procedures. The value
3488 stored is the stack size difference between before the DWARF procedure
3489 invokation and after it returned. In other words, for a DWARF procedure
3490 that consumes N stack slots and that pushes M ones, this stores M - N. */
3491 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3492
3493 /* A global counter for generating labels for line number data. */
3494 static unsigned int line_info_label_num;
3495
3496 /* The current table to which we should emit line number information
3497 for the current function. This will be set up at the beginning of
3498 assembly for the function. */
3499 static GTY(()) dw_line_info_table *cur_line_info_table;
3500
3501 /* The two default tables of line number info. */
3502 static GTY(()) dw_line_info_table *text_section_line_info;
3503 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3504
3505 /* The set of all non-default tables of line number info. */
3506 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3507
3508 /* A flag to tell pubnames/types export if there is an info section to
3509 refer to. */
3510 static bool info_section_emitted;
3511
3512 /* A pointer to the base of a table that contains a list of publicly
3513 accessible names. */
3514 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3515
3516 /* A pointer to the base of a table that contains a list of publicly
3517 accessible types. */
3518 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3519
3520 /* A pointer to the base of a table that contains a list of macro
3521 defines/undefines (and file start/end markers). */
3522 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3523
3524 /* True if the .debug_macinfo or .debug_macro section is going to be
3525 emitted. */
3526 #define have_macinfo \
3527 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3528 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3529 && !macinfo_table->is_empty ())
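/* In practice this means macro information is emitted only at -g3 (which
   raises debug_info_level to DINFO_LEVEL_VERBOSE) and only when at least
   one macinfo entry has actually been recorded.  */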
3530
3531 /* Vector of dies for which we should generate .debug_ranges info. */
3532 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3533
3534 /* Vector of pairs of labels referenced in ranges_table. */
3535 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3536
3537 /* Whether we have location lists that need outputting. */
3538 static GTY(()) bool have_location_lists;
3539
3540 /* Unique label counter. */
3541 static GTY(()) unsigned int loclabel_num;
3542
3543 /* Unique label counter for point-of-call tables. */
3544 static GTY(()) unsigned int poc_label_num;
3545
3546 /* The last file entry emitted by maybe_emit_file(). */
3547 static GTY(()) struct dwarf_file_data * last_emitted_file;
3548
3549 /* Number of internal labels generated by gen_internal_sym(). */
3550 static GTY(()) int label_num;
3551
3552 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3553
3554 /* Instances of generic types for which we need to generate debug
3555 info that describe their generic parameters and arguments. That
3556 generation needs to happen once all types are properly laid out so
3557 we do it at the end of compilation. */
3558 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3559
3560 /* Offset from the "steady-state frame pointer" to the frame base,
3561 within the current function. */
3562 static poly_int64 frame_pointer_fb_offset;
3563 static bool frame_pointer_fb_offset_valid;
3564
3565 static vec<dw_die_ref> base_types;
3566
3567 /* Flags to represent a set of attribute classes for attributes that represent
3568 a scalar value (bounds, pointers, ...). */
3569 enum dw_scalar_form
3570 {
3571 dw_scalar_form_constant = 0x01,
3572 dw_scalar_form_exprloc = 0x02,
3573 dw_scalar_form_reference = 0x04
3574 };
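/* The enumerators are distinct bits, so callers pass an OR'ed mask
   describing which representations are acceptable, e.g.
   dw_scalar_form_constant | dw_scalar_form_exprloc.  */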
3575
3576 /* Forward declarations for functions defined in this file. */
3577
3578 static int is_pseudo_reg (const_rtx);
3579 static tree type_main_variant (tree);
3580 static int is_tagged_type (const_tree);
3581 static const char *dwarf_tag_name (unsigned);
3582 static const char *dwarf_attr_name (unsigned);
3583 static const char *dwarf_form_name (unsigned);
3584 static tree decl_ultimate_origin (const_tree);
3585 static tree decl_class_context (tree);
3586 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3587 static inline enum dw_val_class AT_class (dw_attr_node *);
3588 static inline unsigned int AT_index (dw_attr_node *);
3589 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3590 static inline unsigned AT_flag (dw_attr_node *);
3591 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3592 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3593 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3594 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3595 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3596 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3597 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3598 unsigned int, unsigned char *);
3599 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3600 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3601 static inline const char *AT_string (dw_attr_node *);
3602 static enum dwarf_form AT_string_form (dw_attr_node *);
3603 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3604 static void add_AT_specification (dw_die_ref, dw_die_ref);
3605 static inline dw_die_ref AT_ref (dw_attr_node *);
3606 static inline int AT_ref_external (dw_attr_node *);
3607 static inline void set_AT_ref_external (dw_attr_node *, int);
3608 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3609 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3610 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3611 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3612 dw_loc_list_ref);
3613 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3614 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3616 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3617 static void remove_addr_table_entry (addr_table_entry *);
3618 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3619 static inline rtx AT_addr (dw_attr_node *);
3620 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3621 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3622 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3623 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3624 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3625 const char *);
3626 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3627 unsigned HOST_WIDE_INT);
3628 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3629 unsigned long, bool);
3630 static inline const char *AT_lbl (dw_attr_node *);
3631 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3632 static const char *get_AT_low_pc (dw_die_ref);
3633 static const char *get_AT_hi_pc (dw_die_ref);
3634 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3635 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3636 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3637 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3638 static bool is_cxx (void);
3639 static bool is_cxx (const_tree);
3640 static bool is_fortran (void);
3641 static bool is_ada (void);
3642 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3643 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3644 static void add_child_die (dw_die_ref, dw_die_ref);
3645 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3646 static dw_die_ref lookup_type_die (tree);
3647 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3648 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3649 static void equate_type_number_to_die (tree, dw_die_ref);
3650 static dw_die_ref lookup_decl_die (tree);
3651 static var_loc_list *lookup_decl_loc (const_tree);
3652 static void equate_decl_number_to_die (tree, dw_die_ref);
3653 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3654 static void print_spaces (FILE *);
3655 static void print_die (dw_die_ref, FILE *);
3656 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3657 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3658 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3659 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3660 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3661 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3662 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3663 struct md5_ctx *, int *);
3664 struct checksum_attributes;
3665 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3666 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3667 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3668 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3669 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3670 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3671 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3672 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3673 static int is_type_die (dw_die_ref);
3674 static int is_comdat_die (dw_die_ref);
3675 static inline bool is_template_instantiation (dw_die_ref);
3676 static int is_declaration_die (dw_die_ref);
3677 static int should_move_die_to_comdat (dw_die_ref);
3678 static dw_die_ref clone_as_declaration (dw_die_ref);
3679 static dw_die_ref clone_die (dw_die_ref);
3680 static dw_die_ref clone_tree (dw_die_ref);
3681 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3682 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3683 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3684 static dw_die_ref generate_skeleton (dw_die_ref);
3685 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3686 dw_die_ref,
3687 dw_die_ref);
3688 static void break_out_comdat_types (dw_die_ref);
3689 static void copy_decls_for_unworthy_types (dw_die_ref);
3690
3691 static void add_sibling_attributes (dw_die_ref);
3692 static void output_location_lists (dw_die_ref);
3693 static int constant_size (unsigned HOST_WIDE_INT);
3694 static unsigned long size_of_die (dw_die_ref);
3695 static void calc_die_sizes (dw_die_ref);
3696 static void calc_base_type_die_sizes (void);
3697 static void mark_dies (dw_die_ref);
3698 static void unmark_dies (dw_die_ref);
3699 static void unmark_all_dies (dw_die_ref);
3700 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3701 static unsigned long size_of_aranges (void);
3702 static enum dwarf_form value_format (dw_attr_node *);
3703 static void output_value_format (dw_attr_node *);
3704 static void output_abbrev_section (void);
3705 static void output_die_abbrevs (unsigned long, dw_die_ref);
3706 static void output_die (dw_die_ref);
3707 static void output_compilation_unit_header (enum dwarf_unit_type);
3708 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3709 static void output_comdat_type_unit (comdat_type_node *);
3710 static const char *dwarf2_name (tree, int);
3711 static void add_pubname (tree, dw_die_ref);
3712 static void add_enumerator_pubname (const char *, dw_die_ref);
3713 static void add_pubname_string (const char *, dw_die_ref);
3714 static void add_pubtype (tree, dw_die_ref);
3715 static void output_pubnames (vec<pubname_entry, va_gc> *);
3716 static void output_aranges (void);
3717 static unsigned int add_ranges (const_tree, bool = false);
3718 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3719 bool *, bool);
3720 static void output_ranges (void);
3721 static dw_line_info_table *new_line_info_table (void);
3722 static void output_line_info (bool);
3723 static void output_file_names (void);
3724 static dw_die_ref base_type_die (tree, bool);
3725 static int is_base_type (tree);
3726 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3727 static int decl_quals (const_tree);
3728 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3729 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3730 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3731 static int type_is_enum (const_tree);
3732 static unsigned int dbx_reg_number (const_rtx);
3733 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3734 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3735 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3736 enum var_init_status);
3737 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3738 enum var_init_status);
3739 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3740 enum var_init_status);
3741 static int is_based_loc (const_rtx);
3742 static bool resolve_one_addr (rtx *);
3743 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3744 enum var_init_status);
3745 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3746 enum var_init_status);
3747 struct loc_descr_context;
3748 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3749 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3750 static dw_loc_list_ref loc_list_from_tree (tree, int,
3751 struct loc_descr_context *);
3752 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3753 struct loc_descr_context *);
3754 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3755 static tree field_type (const_tree);
3756 static unsigned int simple_type_align_in_bits (const_tree);
3757 static unsigned int simple_decl_align_in_bits (const_tree);
3758 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3759 struct vlr_context;
3760 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3761 HOST_WIDE_INT *);
3762 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3763 dw_loc_list_ref);
3764 static void add_data_member_location_attribute (dw_die_ref, tree,
3765 struct vlr_context *);
3766 static bool add_const_value_attribute (dw_die_ref, rtx);
3767 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3768 static void insert_wide_int (const wide_int &, unsigned char *, int);
3769 static void insert_float (const_rtx, unsigned char *);
3770 static rtx rtl_for_decl_location (tree);
3771 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3772 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3773 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3774 static void add_name_attribute (dw_die_ref, const char *);
3775 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3776 static void add_comp_dir_attribute (dw_die_ref);
3777 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3778 struct loc_descr_context *);
3779 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3780 struct loc_descr_context *);
3781 static void add_subscript_info (dw_die_ref, tree, bool);
3782 static void add_byte_size_attribute (dw_die_ref, tree);
3783 static void add_alignment_attribute (dw_die_ref, tree);
3784 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3785 struct vlr_context *);
3786 static void add_bit_size_attribute (dw_die_ref, tree);
3787 static void add_prototyped_attribute (dw_die_ref, tree);
3788 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3789 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3790 static void add_src_coords_attributes (dw_die_ref, tree);
3791 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3792 static void add_discr_value (dw_die_ref, dw_discr_value *);
3793 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3794 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3795 static void push_decl_scope (tree);
3796 static void pop_decl_scope (void);
3797 static dw_die_ref scope_die_for (tree, dw_die_ref);
3798 static inline int local_scope_p (dw_die_ref);
3799 static inline int class_scope_p (dw_die_ref);
3800 static inline int class_or_namespace_scope_p (dw_die_ref);
3801 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3802 static void add_calling_convention_attribute (dw_die_ref, tree);
3803 static const char *type_tag (const_tree);
3804 static tree member_declared_type (const_tree);
3805 #if 0
3806 static const char *decl_start_label (tree);
3807 #endif
3808 static void gen_array_type_die (tree, dw_die_ref);
3809 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3810 #if 0
3811 static void gen_entry_point_die (tree, dw_die_ref);
3812 #endif
3813 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3814 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3815 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3816 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3817 static void gen_formal_types_die (tree, dw_die_ref);
3818 static void gen_subprogram_die (tree, dw_die_ref);
3819 static void gen_variable_die (tree, tree, dw_die_ref);
3820 static void gen_const_die (tree, dw_die_ref);
3821 static void gen_label_die (tree, dw_die_ref);
3822 static void gen_lexical_block_die (tree, dw_die_ref);
3823 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3824 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3825 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3826 static dw_die_ref gen_compile_unit_die (const char *);
3827 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3828 static void gen_member_die (tree, dw_die_ref);
3829 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3830 enum debug_info_usage);
3831 static void gen_subroutine_type_die (tree, dw_die_ref);
3832 static void gen_typedef_die (tree, dw_die_ref);
3833 static void gen_type_die (tree, dw_die_ref);
3834 static void gen_block_die (tree, dw_die_ref);
3835 static void decls_for_scope (tree, dw_die_ref);
3836 static bool is_naming_typedef_decl (const_tree);
3837 static inline dw_die_ref get_context_die (tree);
3838 static void gen_namespace_die (tree, dw_die_ref);
3839 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3840 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3841 static dw_die_ref force_decl_die (tree);
3842 static dw_die_ref force_type_die (tree);
3843 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3844 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3845 static struct dwarf_file_data * lookup_filename (const char *);
3846 static void retry_incomplete_types (void);
3847 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3848 static void gen_generic_params_dies (tree);
3849 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3850 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3851 static void splice_child_die (dw_die_ref, dw_die_ref);
3852 static int file_info_cmp (const void *, const void *);
3853 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3854 const char *, var_loc_view, const char *);
3855 static void output_loc_list (dw_loc_list_ref);
3856 static char *gen_internal_sym (const char *);
3857 static bool want_pubnames (void);
3858
3859 static void prune_unmark_dies (dw_die_ref);
3860 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3861 static void prune_unused_types_mark (dw_die_ref, int);
3862 static void prune_unused_types_walk (dw_die_ref);
3863 static void prune_unused_types_walk_attribs (dw_die_ref);
3864 static void prune_unused_types_prune (dw_die_ref);
3865 static void prune_unused_types (void);
3866 static int maybe_emit_file (struct dwarf_file_data *fd);
3867 static inline const char *AT_vms_delta1 (dw_attr_node *);
3868 static inline const char *AT_vms_delta2 (dw_attr_node *);
3869 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3870 const char *, const char *);
3871 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3872 static void gen_remaining_tmpl_value_param_die_attribute (void);
3873 static bool generic_type_p (tree);
3874 static void schedule_generic_params_dies_gen (tree t);
3875 static void gen_scheduled_generic_parms_dies (void);
3876 static void resolve_variable_values (void);
3877
3878 static const char *comp_dir_string (void);
3879
3880 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3881
3882 /* enum for tracking thread-local variables whose address is really an offset
3883 relative to the TLS pointer, which will need link-time relocation, but will
3884 not need relocation by the DWARF consumer. */
3885
3886 enum dtprel_bool
3887 {
3888 dtprel_false = 0,
3889 dtprel_true = 1
3890 };
3891
3892 /* Return the operator to use for an address of a variable. For dtprel_true, we
3893 use DW_OP_const*. For regular variables, which need both link-time
3894 relocation and consumer-level relocation (e.g., to account for shared objects
3895 loaded at a random address), we use DW_OP_addr*. */
3896
3897 static inline enum dwarf_location_atom
3898 dw_addr_op (enum dtprel_bool dtprel)
3899 {
3900 if (dtprel == dtprel_true)
3901 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3902 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3903 else
3904 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3905 }
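/* For example, with -gsplit-dwarf a thread-local offset (dtprel_true) is
   emitted through DW_OP_GNU_const_index and an ordinary address through
   DW_OP_GNU_addr_index; without split debug info the same cases use
   DW_OP_const4u/DW_OP_const8u (depending on DWARF2_ADDR_SIZE) and
   DW_OP_addr respectively.  */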
3906
3907 /* Return a pointer to a newly allocated address location description. If
3908 dwarf_split_debug_info is true, then record the address with the appropriate
3909 relocation. */
3910 static inline dw_loc_descr_ref
3911 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3912 {
3913 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3914
3915 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3916 ref->dw_loc_oprnd1.v.val_addr = addr;
3917 ref->dtprel = dtprel;
3918 if (dwarf_split_debug_info)
3919 ref->dw_loc_oprnd1.val_entry
3920 = add_addr_table_entry (addr,
3921 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3922 else
3923 ref->dw_loc_oprnd1.val_entry = NULL;
3924
3925 return ref;
3926 }
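/* A typical use (illustrative sketch only) is describing an object whose
   address is a link-time constant, e.g. for a MEM rtx RTL:

     dw_loc_descr_ref descr
       = new_addr_loc_descr (XEXP (rtl, 0), dtprel_false);

   which yields a single DW_OP_addr operation, or DW_OP_GNU_addr_index
   plus a .debug_addr entry when splitting debug info.  */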
3927
3928 /* Section names used to hold DWARF debugging information. */
3929
3930 #ifndef DEBUG_INFO_SECTION
3931 #define DEBUG_INFO_SECTION ".debug_info"
3932 #endif
3933 #ifndef DEBUG_DWO_INFO_SECTION
3934 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3935 #endif
3936 #ifndef DEBUG_LTO_INFO_SECTION
3937 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3938 #endif
3939 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3940 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3941 #endif
3942 #ifndef DEBUG_ABBREV_SECTION
3943 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3944 #endif
3945 #ifndef DEBUG_LTO_ABBREV_SECTION
3946 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3947 #endif
3948 #ifndef DEBUG_DWO_ABBREV_SECTION
3949 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3950 #endif
3951 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3952 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3953 #endif
3954 #ifndef DEBUG_ARANGES_SECTION
3955 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3956 #endif
3957 #ifndef DEBUG_ADDR_SECTION
3958 #define DEBUG_ADDR_SECTION ".debug_addr"
3959 #endif
3960 #ifndef DEBUG_MACINFO_SECTION
3961 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3962 #endif
3963 #ifndef DEBUG_LTO_MACINFO_SECTION
3964 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3965 #endif
3966 #ifndef DEBUG_DWO_MACINFO_SECTION
3967 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3968 #endif
3969 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3970 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3971 #endif
3972 #ifndef DEBUG_MACRO_SECTION
3973 #define DEBUG_MACRO_SECTION ".debug_macro"
3974 #endif
3975 #ifndef DEBUG_LTO_MACRO_SECTION
3976 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3977 #endif
3978 #ifndef DEBUG_DWO_MACRO_SECTION
3979 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3980 #endif
3981 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3982 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3983 #endif
3984 #ifndef DEBUG_LINE_SECTION
3985 #define DEBUG_LINE_SECTION ".debug_line"
3986 #endif
3987 #ifndef DEBUG_LTO_LINE_SECTION
3988 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3989 #endif
3990 #ifndef DEBUG_DWO_LINE_SECTION
3991 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3992 #endif
3993 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3994 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3995 #endif
3996 #ifndef DEBUG_LOC_SECTION
3997 #define DEBUG_LOC_SECTION ".debug_loc"
3998 #endif
3999 #ifndef DEBUG_DWO_LOC_SECTION
4000 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4001 #endif
4002 #ifndef DEBUG_LOCLISTS_SECTION
4003 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4004 #endif
4005 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4006 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4007 #endif
4008 #ifndef DEBUG_PUBNAMES_SECTION
4009 #define DEBUG_PUBNAMES_SECTION \
4010 ((debug_generate_pub_sections == 2) \
4011 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4012 #endif
4013 #ifndef DEBUG_PUBTYPES_SECTION
4014 #define DEBUG_PUBTYPES_SECTION \
4015 ((debug_generate_pub_sections == 2) \
4016 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4017 #endif
4018 #ifndef DEBUG_STR_OFFSETS_SECTION
4019 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4020 #endif
4021 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4022 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4023 #endif
4024 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4025 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4026 #endif
4027 #ifndef DEBUG_STR_SECTION
4028 #define DEBUG_STR_SECTION ".debug_str"
4029 #endif
4030 #ifndef DEBUG_LTO_STR_SECTION
4031 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4032 #endif
4033 #ifndef DEBUG_STR_DWO_SECTION
4034 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4035 #endif
4036 #ifndef DEBUG_LTO_STR_DWO_SECTION
4037 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4038 #endif
4039 #ifndef DEBUG_RANGES_SECTION
4040 #define DEBUG_RANGES_SECTION ".debug_ranges"
4041 #endif
4042 #ifndef DEBUG_RNGLISTS_SECTION
4043 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4044 #endif
4045 #ifndef DEBUG_LINE_STR_SECTION
4046 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4047 #endif
4048 #ifndef DEBUG_LTO_LINE_STR_SECTION
4049 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4050 #endif
4051
4052 /* Standard ELF section names for compiled code and data. */
4053 #ifndef TEXT_SECTION_NAME
4054 #define TEXT_SECTION_NAME ".text"
4055 #endif
4056
4057 /* Section flags for .debug_str section. */
4058 #define DEBUG_STR_SECTION_FLAGS \
4059 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4060 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4061 : SECTION_DEBUG)
4062
4063 /* Section flags for .debug_str.dwo section. */
4064 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4065
4066 /* Attribute used to refer to the macro section. */
4067 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4068 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4069
4070 /* Labels we insert at the beginning of sections so that we can refer to
4071 them instead of the section names themselves. */
4072
4073 #ifndef TEXT_SECTION_LABEL
4074 #define TEXT_SECTION_LABEL "Ltext"
4075 #endif
4076 #ifndef COLD_TEXT_SECTION_LABEL
4077 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4078 #endif
4079 #ifndef DEBUG_LINE_SECTION_LABEL
4080 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4081 #endif
4082 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4083 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4084 #endif
4085 #ifndef DEBUG_INFO_SECTION_LABEL
4086 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4087 #endif
4088 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4089 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4090 #endif
4091 #ifndef DEBUG_ABBREV_SECTION_LABEL
4092 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4093 #endif
4094 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4095 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4096 #endif
4097 #ifndef DEBUG_ADDR_SECTION_LABEL
4098 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4099 #endif
4100 #ifndef DEBUG_LOC_SECTION_LABEL
4101 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4102 #endif
4103 #ifndef DEBUG_RANGES_SECTION_LABEL
4104 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4105 #endif
4106 #ifndef DEBUG_MACINFO_SECTION_LABEL
4107 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4108 #endif
4109 #ifndef DEBUG_MACRO_SECTION_LABEL
4110 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4111 #endif
4112 #define SKELETON_COMP_DIE_ABBREV 1
4113 #define SKELETON_TYPE_DIE_ABBREV 2
4114
4115 /* Definitions of defaults for formats and names of various special
4116 (artificial) labels which may be generated within this file (when the -g
4117 option is used and DWARF2_DEBUGGING_INFO is in effect).
4118 If necessary, these may be overridden from within the tm.h file, but
4119 typically, overriding these defaults is unnecessary. */
4120
4121 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4122 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4123 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4124 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4125 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4126 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4127 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4128 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4129 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4130 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4131 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4132 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4133 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4134 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4135 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4136
4137 #ifndef TEXT_END_LABEL
4138 #define TEXT_END_LABEL "Letext"
4139 #endif
4140 #ifndef COLD_END_LABEL
4141 #define COLD_END_LABEL "Letext_cold"
4142 #endif
4143 #ifndef BLOCK_BEGIN_LABEL
4144 #define BLOCK_BEGIN_LABEL "LBB"
4145 #endif
4146 #ifndef BLOCK_INLINE_ENTRY_LABEL
4147 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4148 #endif
4149 #ifndef BLOCK_END_LABEL
4150 #define BLOCK_END_LABEL "LBE"
4151 #endif
4152 #ifndef LINE_CODE_LABEL
4153 #define LINE_CODE_LABEL "LM"
4154 #endif
4155
4156 \f
4157 /* Return the root of the DIEs built for the current compilation unit. */
4158 static dw_die_ref
4159 comp_unit_die (void)
4160 {
4161 if (!single_comp_unit_die)
4162 single_comp_unit_die = gen_compile_unit_die (NULL);
4163 return single_comp_unit_die;
4164 }
4165
4166 /* We allow a language front-end to designate a function that is to be
4167 called to "demangle" any name before it is put into a DIE. */
4168
4169 static const char *(*demangle_name_func) (const char *);
4170
4171 void
4172 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4173 {
4174 demangle_name_func = func;
4175 }
4176
4177 /* Test if rtl node points to a pseudo register. */
4178
4179 static inline int
4180 is_pseudo_reg (const_rtx rtl)
4181 {
4182 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4183 || (GET_CODE (rtl) == SUBREG
4184 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4185 }
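/* For example, a hard register like (reg:SI 0) is not a pseudo, whereas a
   compiler-generated temporary whose REGNO is at or above
   FIRST_PSEUDO_REGISTER is, as is a SUBREG of such a register.  */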
4186
4187 /* Return a reference to a type, with its const and volatile qualifiers
4188 removed. */
4189
4190 static inline tree
4191 type_main_variant (tree type)
4192 {
4193 type = TYPE_MAIN_VARIANT (type);
4194
4195 /* ??? There really should be only one main variant among any group of
4196 variants of a given type (and all of the MAIN_VARIANT values for all
4197 members of the group should point to that one type) but sometimes the C
4198 front-end messes this up for array types, so we work around that bug
4199 here. */
4200 if (TREE_CODE (type) == ARRAY_TYPE)
4201 while (type != TYPE_MAIN_VARIANT (type))
4202 type = TYPE_MAIN_VARIANT (type);
4203
4204 return type;
4205 }
4206
4207 /* Return nonzero if the given type node represents a tagged type. */
4208
4209 static inline int
4210 is_tagged_type (const_tree type)
4211 {
4212 enum tree_code code = TREE_CODE (type);
4213
4214 return (code == RECORD_TYPE || code == UNION_TYPE
4215 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4216 }
4217
4218 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4219
4220 static void
4221 get_ref_die_offset_label (char *label, dw_die_ref ref)
4222 {
4223 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4224 }
4225
4226 /* Return die_offset of a DIE reference to a base type. */
4227
4228 static unsigned long int
4229 get_base_type_offset (dw_die_ref ref)
4230 {
4231 if (ref->die_offset)
4232 return ref->die_offset;
4233 if (comp_unit_die ()->die_abbrev)
4234 {
4235 calc_base_type_die_sizes ();
4236 gcc_assert (ref->die_offset);
4237 }
4238 return ref->die_offset;
4239 }
4240
4241 /* Return die_offset of a DIE reference other than base type. */
4242
4243 static unsigned long int
4244 get_ref_die_offset (dw_die_ref ref)
4245 {
4246 gcc_assert (ref->die_offset);
4247 return ref->die_offset;
4248 }
4249
4250 /* Convert a DIE tag into its string name. */
4251
4252 static const char *
4253 dwarf_tag_name (unsigned int tag)
4254 {
4255 const char *name = get_DW_TAG_name (tag);
4256
4257 if (name != NULL)
4258 return name;
4259
4260 return "DW_TAG_<unknown>";
4261 }
4262
4263 /* Convert a DWARF attribute code into its string name. */
4264
4265 static const char *
4266 dwarf_attr_name (unsigned int attr)
4267 {
4268 const char *name;
4269
4270 switch (attr)
4271 {
4272 #if VMS_DEBUGGING_INFO
4273 case DW_AT_HP_prologue:
4274 return "DW_AT_HP_prologue";
4275 #else
4276 case DW_AT_MIPS_loop_unroll_factor:
4277 return "DW_AT_MIPS_loop_unroll_factor";
4278 #endif
4279
4280 #if VMS_DEBUGGING_INFO
4281 case DW_AT_HP_epilogue:
4282 return "DW_AT_HP_epilogue";
4283 #else
4284 case DW_AT_MIPS_stride:
4285 return "DW_AT_MIPS_stride";
4286 #endif
4287 }
4288
4289 name = get_DW_AT_name (attr);
4290
4291 if (name != NULL)
4292 return name;
4293
4294 return "DW_AT_<unknown>";
4295 }
4296
4297 /* Convert a DWARF value form code into its string name. */
4298
4299 static const char *
4300 dwarf_form_name (unsigned int form)
4301 {
4302 const char *name = get_DW_FORM_name (form);
4303
4304 if (name != NULL)
4305 return name;
4306
4307 return "DW_FORM_<unknown>";
4308 }
4309 \f
4310 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4311 instance of an inlined instance of a decl which is local to an inline
4312 function, so we have to trace all of the way back through the origin chain
4313 to find out what sort of node actually served as the original seed for the
4314 given block. */
4315
4316 static tree
4317 decl_ultimate_origin (const_tree decl)
4318 {
4319 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4320 return NULL_TREE;
4321
4322 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4323 we're trying to output the abstract instance of this function. */
4324 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4325 return NULL_TREE;
4326
4327 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4328 most distant ancestor, this should never happen. */
4329 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4330
4331 return DECL_ABSTRACT_ORIGIN (decl);
4332 }
4333
4334 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4335 of a virtual function may refer to a base class, so we check the 'this'
4336 parameter. */
4337
4338 static tree
4339 decl_class_context (tree decl)
4340 {
4341 tree context = NULL_TREE;
4342
4343 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4344 context = DECL_CONTEXT (decl);
4345 else
4346 context = TYPE_MAIN_VARIANT
4347 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4348
4349 if (context && !TYPE_P (context))
4350 context = NULL_TREE;
4351
4352 return context;
4353 }
4354 \f
4355 /* Add an attribute/value pair to a DIE. */
4356
4357 static inline void
4358 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4359 {
4360 /* Maybe this should be an assert? */
4361 if (die == NULL)
4362 return;
4363
4364 if (flag_checking)
4365 {
4366 /* Check we do not add duplicate attrs. Can't use get_AT here
4367 because that recurses to the specification/abstract origin DIE. */
4368 dw_attr_node *a;
4369 unsigned ix;
4370 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4371 gcc_assert (a->dw_attr != attr->dw_attr);
4372 }
4373
4374 vec_safe_reserve (die->die_attr, 1);
4375 vec_safe_push (die->die_attr, *attr);
4376 }
4377
4378 static inline enum dw_val_class
4379 AT_class (dw_attr_node *a)
4380 {
4381 return a->dw_attr_val.val_class;
4382 }
4383
4384 /* Return the index for any attribute that will be referenced with a
4385 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4386 are stored in dw_attr_val.v.val_str for reference counting
4387 pruning. */
4388
4389 static inline unsigned int
4390 AT_index (dw_attr_node *a)
4391 {
4392 if (AT_class (a) == dw_val_class_str)
4393 return a->dw_attr_val.v.val_str->index;
4394 else if (a->dw_attr_val.val_entry != NULL)
4395 return a->dw_attr_val.val_entry->index;
4396 return NOT_INDEXED;
4397 }
4398
4399 /* Add a flag value attribute to a DIE. */
4400
4401 static inline void
4402 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4403 {
4404 dw_attr_node attr;
4405
4406 attr.dw_attr = attr_kind;
4407 attr.dw_attr_val.val_class = dw_val_class_flag;
4408 attr.dw_attr_val.val_entry = NULL;
4409 attr.dw_attr_val.v.val_flag = flag;
4410 add_dwarf_attr (die, &attr);
4411 }
4412
4413 static inline unsigned
4414 AT_flag (dw_attr_node *a)
4415 {
4416 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4417 return a->dw_attr_val.v.val_flag;
4418 }
4419
4420 /* Add a signed integer attribute value to a DIE. */
4421
4422 static inline void
4423 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4424 {
4425 dw_attr_node attr;
4426
4427 attr.dw_attr = attr_kind;
4428 attr.dw_attr_val.val_class = dw_val_class_const;
4429 attr.dw_attr_val.val_entry = NULL;
4430 attr.dw_attr_val.v.val_int = int_val;
4431 add_dwarf_attr (die, &attr);
4432 }
4433
4434 static inline HOST_WIDE_INT
4435 AT_int (dw_attr_node *a)
4436 {
4437 gcc_assert (a && (AT_class (a) == dw_val_class_const
4438 || AT_class (a) == dw_val_class_const_implicit));
4439 return a->dw_attr_val.v.val_int;
4440 }
4441
4442 /* Add an unsigned integer attribute value to a DIE. */
4443
4444 static inline void
4445 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4446 unsigned HOST_WIDE_INT unsigned_val)
4447 {
4448 dw_attr_node attr;
4449
4450 attr.dw_attr = attr_kind;
4451 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4452 attr.dw_attr_val.val_entry = NULL;
4453 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4454 add_dwarf_attr (die, &attr);
4455 }
4456
4457 static inline unsigned HOST_WIDE_INT
4458 AT_unsigned (dw_attr_node *a)
4459 {
4460 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4461 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4462 return a->dw_attr_val.v.val_unsigned;
4463 }
4464
4465 /* Add an unsigned wide integer attribute value to a DIE. */
4466
4467 static inline void
4468 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4469 const wide_int& w)
4470 {
4471 dw_attr_node attr;
4472
4473 attr.dw_attr = attr_kind;
4474 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4475 attr.dw_attr_val.val_entry = NULL;
4476 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4477 *attr.dw_attr_val.v.val_wide = w;
4478 add_dwarf_attr (die, &attr);
4479 }
4480
4481 /* Add an unsigned double integer attribute value to a DIE. */
4482
4483 static inline void
4484 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4485 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4486 {
4487 dw_attr_node attr;
4488
4489 attr.dw_attr = attr_kind;
4490 attr.dw_attr_val.val_class = dw_val_class_const_double;
4491 attr.dw_attr_val.val_entry = NULL;
4492 attr.dw_attr_val.v.val_double.high = high;
4493 attr.dw_attr_val.v.val_double.low = low;
4494 add_dwarf_attr (die, &attr);
4495 }
4496
4497 /* Add a vector (LENGTH elements of ELT_SIZE bytes each, e.g. the bytes of a floating point constant) as an attribute value to a DIE. */
4498
4499 static inline void
4500 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4501 unsigned int length, unsigned int elt_size, unsigned char *array)
4502 {
4503 dw_attr_node attr;
4504
4505 attr.dw_attr = attr_kind;
4506 attr.dw_attr_val.val_class = dw_val_class_vec;
4507 attr.dw_attr_val.val_entry = NULL;
4508 attr.dw_attr_val.v.val_vec.length = length;
4509 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4510 attr.dw_attr_val.v.val_vec.array = array;
4511 add_dwarf_attr (die, &attr);
4512 }
4513
4514 /* Add an 8-byte data attribute value to a DIE. */
4515
4516 static inline void
4517 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4518 unsigned char data8[8])
4519 {
4520 dw_attr_node attr;
4521
4522 attr.dw_attr = attr_kind;
4523 attr.dw_attr_val.val_class = dw_val_class_data8;
4524 attr.dw_attr_val.val_entry = NULL;
4525 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4526 add_dwarf_attr (die, &attr);
4527 }
4528
4529 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4530 dwarf_split_debug_info, address attributes in dies destined for the
4531 final executable have force_direct set to avoid using indexed
4532 references. */
4533
4534 static inline void
4535 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4536 bool force_direct)
4537 {
4538 dw_attr_node attr;
4539 char * lbl_id;
4540
4541 lbl_id = xstrdup (lbl_low);
4542 attr.dw_attr = DW_AT_low_pc;
4543 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4544 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4545 if (dwarf_split_debug_info && !force_direct)
4546 attr.dw_attr_val.val_entry
4547 = add_addr_table_entry (lbl_id, ate_kind_label);
4548 else
4549 attr.dw_attr_val.val_entry = NULL;
4550 add_dwarf_attr (die, &attr);
4551
4552 attr.dw_attr = DW_AT_high_pc;
4553 if (dwarf_version < 4)
4554 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4555 else
4556 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4557 lbl_id = xstrdup (lbl_high);
4558 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4559 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4560 && dwarf_split_debug_info && !force_direct)
4561 attr.dw_attr_val.val_entry
4562 = add_addr_table_entry (lbl_id, ate_kind_label);
4563 else
4564 attr.dw_attr_val.val_entry = NULL;
4565 add_dwarf_attr (die, &attr);
4566 }
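/* Note that for DWARF 4 and later DW_AT_high_pc gets class
   dw_val_class_high_pc, which is later output as the difference between
   LBL_HIGH and LBL_LOW (a constant offset from DW_AT_low_pc), as DWARF 4
   allows, instead of as a second address.  */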
4567
4568 /* Hash and equality functions for debug_str_hash. */
4569
4570 hashval_t
4571 indirect_string_hasher::hash (indirect_string_node *x)
4572 {
4573 return htab_hash_string (x->str);
4574 }
4575
4576 bool
4577 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4578 {
4579 return strcmp (x1->str, x2) == 0;
4580 }
4581
4582 /* Add STR to the given string hash table. */
4583
4584 static struct indirect_string_node *
4585 find_AT_string_in_table (const char *str,
4586 hash_table<indirect_string_hasher> *table)
4587 {
4588 struct indirect_string_node *node;
4589
4590 indirect_string_node **slot
4591 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4592 if (*slot == NULL)
4593 {
4594 node = ggc_cleared_alloc<indirect_string_node> ();
4595 node->str = ggc_strdup (str);
4596 *slot = node;
4597 }
4598 else
4599 node = *slot;
4600
4601 node->refcount++;
4602 return node;
4603 }
4604
4605 /* Add STR to the indirect string hash table. */
4606
4607 static struct indirect_string_node *
4608 find_AT_string (const char *str)
4609 {
4610 if (! debug_str_hash)
4611 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4612
4613 return find_AT_string_in_table (str, debug_str_hash);
4614 }
4615
4616 /* Add a string attribute value to a DIE. */
4617
4618 static inline void
4619 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4620 {
4621 dw_attr_node attr;
4622 struct indirect_string_node *node;
4623
4624 node = find_AT_string (str);
4625
4626 attr.dw_attr = attr_kind;
4627 attr.dw_attr_val.val_class = dw_val_class_str;
4628 attr.dw_attr_val.val_entry = NULL;
4629 attr.dw_attr_val.v.val_str = node;
4630 add_dwarf_attr (die, &attr);
4631 }
4632
4633 static inline const char *
4634 AT_string (dw_attr_node *a)
4635 {
4636 gcc_assert (a && AT_class (a) == dw_val_class_str);
4637 return a->dw_attr_val.v.val_str->str;
4638 }
4639
4640 /* Call this function directly to bypass AT_string_form's logic to put
4641 the string inline in the die. */
4642
4643 static void
4644 set_indirect_string (struct indirect_string_node *node)
4645 {
4646 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4647 /* If the string is already indirect, this is a no-op. */
4648 if (node->form == DW_FORM_strp
4649 || node->form == DW_FORM_line_strp
4650 || node->form == DW_FORM_GNU_str_index)
4651 {
4652 gcc_assert (node->label);
4653 return;
4654 }
4655 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4656 ++dw2_string_counter;
4657 node->label = xstrdup (label);
4658
4659 if (!dwarf_split_debug_info)
4660 {
4661 node->form = DW_FORM_strp;
4662 node->index = NOT_INDEXED;
4663 }
4664 else
4665 {
4666 node->form = DW_FORM_GNU_str_index;
4667 node->index = NO_INDEX_ASSIGNED;
4668 }
4669 }
4670
4671 /* A helper function for dwarf2out_finish, called to reset indirect
4672 string decisions done for early LTO dwarf output before fat object
4673 dwarf output. */
4674
4675 int
4676 reset_indirect_string (indirect_string_node **h, void *)
4677 {
4678 struct indirect_string_node *node = *h;
4679 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4680 {
4681 free (node->label);
4682 node->label = NULL;
4683 node->form = (dwarf_form) 0;
4684 node->index = 0;
4685 }
4686 return 1;
4687 }
4688
4689 /* Find out whether a string should be output inline in the DIE
4690 or out-of-line in the .debug_str section. */
4691
4692 static enum dwarf_form
4693 find_string_form (struct indirect_string_node *node)
4694 {
4695 unsigned int len;
4696
4697 if (node->form)
4698 return node->form;
4699
4700 len = strlen (node->str) + 1;
4701
4702 /* If the string is no longer than the size of the reference, it is
4703 always better to put it inline. */
4704 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4705 return node->form = DW_FORM_string;
4706
4707 /* If we cannot expect the linker to merge strings in .debug_str
4708 section, only put it into .debug_str if it is worth even in this
4709 single module. */
4710 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4711 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4712 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4713 return node->form = DW_FORM_string;
4714
4715 set_indirect_string (node);
4716
4717 return node->form;
4718 }
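/* A concrete illustration of the heuristic above: with a 4-byte
   DWARF_OFFSET_SIZE and no linker string merging, a 9-character string
   (LEN == 10 counting the terminating NUL) stays inline when referenced
   once, since (10 - 4) * 1 <= 10, but moves to .debug_str when referenced
   twice, since (10 - 4) * 2 > 10.  */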
4719
4720 /* Find out whether the string referenced from the attribute should be
4721 output inline in the DIE or out-of-line in the .debug_str section. */
4722
4723 static enum dwarf_form
4724 AT_string_form (dw_attr_node *a)
4725 {
4726 gcc_assert (a && AT_class (a) == dw_val_class_str);
4727 return find_string_form (a->dw_attr_val.v.val_str);
4728 }
4729
4730 /* Add a DIE reference attribute value to a DIE. */
4731
4732 static inline void
4733 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4734 {
4735 dw_attr_node attr;
4736 gcc_checking_assert (targ_die != NULL);
4737
4738 /* With LTO we can end up trying to reference something we didn't create
4739 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4740 if (targ_die == NULL)
4741 return;
4742
4743 attr.dw_attr = attr_kind;
4744 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4745 attr.dw_attr_val.val_entry = NULL;
4746 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4747 attr.dw_attr_val.v.val_die_ref.external = 0;
4748 add_dwarf_attr (die, &attr);
4749 }
4750
4751 /* Change DIE reference REF to point to NEW_DIE instead. */
4752
4753 static inline void
4754 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4755 {
4756 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4757 ref->dw_attr_val.v.val_die_ref.die = new_die;
4758 ref->dw_attr_val.v.val_die_ref.external = 0;
4759 }
4760
4761 /* Add an AT_specification attribute to a DIE, and also make the back
4762 pointer from the specification to the definition. */
4763
4764 static inline void
4765 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4766 {
4767 add_AT_die_ref (die, DW_AT_specification, targ_die);
4768 gcc_assert (!targ_die->die_definition);
4769 targ_die->die_definition = die;
4770 }
4771
4772 static inline dw_die_ref
4773 AT_ref (dw_attr_node *a)
4774 {
4775 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4776 return a->dw_attr_val.v.val_die_ref.die;
4777 }
4778
4779 static inline int
4780 AT_ref_external (dw_attr_node *a)
4781 {
4782 if (a && AT_class (a) == dw_val_class_die_ref)
4783 return a->dw_attr_val.v.val_die_ref.external;
4784
4785 return 0;
4786 }
4787
4788 static inline void
4789 set_AT_ref_external (dw_attr_node *a, int i)
4790 {
4791 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4792 a->dw_attr_val.v.val_die_ref.external = i;
4793 }
4794
4795 /* Add an FDE reference attribute value to a DIE. */
4796
4797 static inline void
4798 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4799 {
4800 dw_attr_node attr;
4801
4802 attr.dw_attr = attr_kind;
4803 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4804 attr.dw_attr_val.val_entry = NULL;
4805 attr.dw_attr_val.v.val_fde_index = targ_fde;
4806 add_dwarf_attr (die, &attr);
4807 }
4808
4809 /* Add a location description attribute value to a DIE. */
4810
4811 static inline void
4812 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4813 {
4814 dw_attr_node attr;
4815
4816 attr.dw_attr = attr_kind;
4817 attr.dw_attr_val.val_class = dw_val_class_loc;
4818 attr.dw_attr_val.val_entry = NULL;
4819 attr.dw_attr_val.v.val_loc = loc;
4820 add_dwarf_attr (die, &attr);
4821 }
4822
4823 static inline dw_loc_descr_ref
4824 AT_loc (dw_attr_node *a)
4825 {
4826 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4827 return a->dw_attr_val.v.val_loc;
4828 }
4829
4830 static inline void
4831 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4832 {
4833 dw_attr_node attr;
4834
4835 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4836 return;
4837
4838 attr.dw_attr = attr_kind;
4839 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4840 attr.dw_attr_val.val_entry = NULL;
4841 attr.dw_attr_val.v.val_loc_list = loc_list;
4842 add_dwarf_attr (die, &attr);
4843 have_location_lists = true;
4844 }
4845
4846 static inline dw_loc_list_ref
4847 AT_loc_list (dw_attr_node *a)
4848 {
4849 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4850 return a->dw_attr_val.v.val_loc_list;
4851 }
4852
4853 /* Add a view list attribute to DIE. It must have a DW_AT_location
4854 attribute, because the view list complements the location list. */
4855
4856 static inline void
4857 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4858 {
4859 dw_attr_node attr;
4860
4861 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4862 return;
4863
4864 attr.dw_attr = attr_kind;
4865 attr.dw_attr_val.val_class = dw_val_class_view_list;
4866 attr.dw_attr_val.val_entry = NULL;
4867 attr.dw_attr_val.v.val_view_list = die;
4868 add_dwarf_attr (die, &attr);
4869 gcc_checking_assert (get_AT (die, DW_AT_location));
4870 gcc_assert (have_location_lists);
4871 }
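/* Consumers of the view list (AT_loc_list_ptr and
   view_list_to_loc_list_val_node below) assume the view-list attribute
   sits immediately after the DW_AT_location attribute of the same DIE,
   which is why DW_AT_location must already be present when this is
   called.  */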
4872
4873 /* Return a pointer to the location list referenced by the attribute.
4874 If the named attribute is a view list, look up the corresponding
4875 DW_AT_location attribute and return its location list. */
4876
4877 static inline dw_loc_list_ref *
4878 AT_loc_list_ptr (dw_attr_node *a)
4879 {
4880 gcc_assert (a);
4881 switch (AT_class (a))
4882 {
4883 case dw_val_class_loc_list:
4884 return &a->dw_attr_val.v.val_loc_list;
4885 case dw_val_class_view_list:
4886 {
4887 dw_attr_node *l;
4888 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4889 if (!l)
4890 return NULL;
4891 gcc_checking_assert (l + 1 == a);
4892 return AT_loc_list_ptr (l);
4893 }
4894 default:
4895 gcc_unreachable ();
4896 }
4897 }
4898
4899 /* Return the location attribute value associated with a view list
4900 attribute value. */
4901
4902 static inline dw_val_node *
4903 view_list_to_loc_list_val_node (dw_val_node *val)
4904 {
4905 gcc_assert (val->val_class == dw_val_class_view_list);
4906 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4907 if (!loc)
4908 return NULL;
4909 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4910 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4911 return &loc->dw_attr_val;
4912 }
4913
4914 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4915 {
4916 static hashval_t hash (addr_table_entry *);
4917 static bool equal (addr_table_entry *, addr_table_entry *);
4918 };
4919
4920 /* Table of entries into the .debug_addr section. */
4921
4922 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4923
4924 /* Hash an addr_table_entry. */
4925
4926 hashval_t
4927 addr_hasher::hash (addr_table_entry *a)
4928 {
4929 inchash::hash hstate;
4930 switch (a->kind)
4931 {
4932 case ate_kind_rtx:
4933 hstate.add_int (0);
4934 break;
4935 case ate_kind_rtx_dtprel:
4936 hstate.add_int (1);
4937 break;
4938 case ate_kind_label:
4939 return htab_hash_string (a->addr.label);
4940 default:
4941 gcc_unreachable ();
4942 }
4943 inchash::add_rtx (a->addr.rtl, hstate);
4944 return hstate.end ();
4945 }
4946
4947 /* Determine equality for two addr_table_entries. */
4948
4949 bool
4950 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4951 {
4952 if (a1->kind != a2->kind)
4953 return 0;
4954 switch (a1->kind)
4955 {
4956 case ate_kind_rtx:
4957 case ate_kind_rtx_dtprel:
4958 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4959 case ate_kind_label:
4960 return strcmp (a1->addr.label, a2->addr.label) == 0;
4961 default:
4962 gcc_unreachable ();
4963 }
4964 }
4965
4966 /* Initialize an addr_table_entry. */
4967
4968 void
4969 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4970 {
4971 e->kind = kind;
4972 switch (kind)
4973 {
4974 case ate_kind_rtx:
4975 case ate_kind_rtx_dtprel:
4976 e->addr.rtl = (rtx) addr;
4977 break;
4978 case ate_kind_label:
4979 e->addr.label = (char *) addr;
4980 break;
4981 }
4982 e->refcount = 0;
4983 e->index = NO_INDEX_ASSIGNED;
4984 }
4985
4986 /* Add ADDR to the address table and return its entry, creating a new
4987 entry if necessary. Defer setting an index until output time. */
4988
4989 static addr_table_entry *
4990 add_addr_table_entry (void *addr, enum ate_kind kind)
4991 {
4992 addr_table_entry *node;
4993 addr_table_entry finder;
4994
4995 gcc_assert (dwarf_split_debug_info);
4996 if (! addr_index_table)
4997 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4998 init_addr_table_entry (&finder, kind, addr);
4999 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5000
5001 if (*slot == HTAB_EMPTY_ENTRY)
5002 {
5003 node = ggc_cleared_alloc<addr_table_entry> ();
5004 init_addr_table_entry (node, kind, addr);
5005 *slot = node;
5006 }
5007 else
5008 node = *slot;
5009
5010 node->refcount++;
5011 return node;
5012 }
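/* Entries are shared: adding the same address or label a second time
   returns the existing node with its refcount bumped, so each distinct
   address gets only one slot in .debug_addr.  */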
5013
5014 /* Remove an entry from the addr table by decrementing its refcount.
5015 Strictly, decrementing the refcount would be enough, but the
5016 assertion that the entry is actually in the table has found
5017 bugs. */
5018
5019 static void
5020 remove_addr_table_entry (addr_table_entry *entry)
5021 {
5022 gcc_assert (dwarf_split_debug_info && addr_index_table);
5023 /* After an index is assigned, the table is frozen. */
5024 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5025 entry->refcount--;
5026 }
5027
5028 /* Given a location list, remove all addresses it refers to from the
5029 address_table. */
5030
5031 static void
5032 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5033 {
5034 for (; descr; descr = descr->dw_loc_next)
5035 if (descr->dw_loc_oprnd1.val_entry != NULL)
5036 {
5037 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5038 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5039 }
5040 }
5041
5042 /* A helper function for dwarf2out_finish called through
5043 htab_traverse. Assign an addr_table_entry its index. All entries
5044 must be collected into the table when this function is called,
5045 because the indexing code relies on htab_traverse to traverse nodes
5046 in the same order for each run. */
5047
5048 int
5049 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5050 {
5051 addr_table_entry *node = *h;
5052
5053 /* Don't index unreferenced nodes. */
5054 if (node->refcount == 0)
5055 return 1;
5056
5057 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5058 node->index = *index;
5059 *index += 1;
5060
5061 return 1;
5062 }
5063
5064 /* Add an address constant attribute value to a DIE. When using
5065 dwarf_split_debug_info, address attributes in dies destined for the
5066 final executable should be direct references--setting the parameter
5067 force_direct ensures this behavior. */
5068
5069 static inline void
5070 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5071 bool force_direct)
5072 {
5073 dw_attr_node attr;
5074
5075 attr.dw_attr = attr_kind;
5076 attr.dw_attr_val.val_class = dw_val_class_addr;
5077 attr.dw_attr_val.v.val_addr = addr;
5078 if (dwarf_split_debug_info && !force_direct)
5079 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5080 else
5081 attr.dw_attr_val.val_entry = NULL;
5082 add_dwarf_attr (die, &attr);
5083 }
5084
5085 /* Get the RTX from an address DIE attribute. */
5086
5087 static inline rtx
5088 AT_addr (dw_attr_node *a)
5089 {
5090 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5091 return a->dw_attr_val.v.val_addr;
5092 }
5093
5094 /* Add a file attribute value to a DIE. */
5095
5096 static inline void
5097 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5098 struct dwarf_file_data *fd)
5099 {
5100 dw_attr_node attr;
5101
5102 attr.dw_attr = attr_kind;
5103 attr.dw_attr_val.val_class = dw_val_class_file;
5104 attr.dw_attr_val.val_entry = NULL;
5105 attr.dw_attr_val.v.val_file = fd;
5106 add_dwarf_attr (die, &attr);
5107 }
5108
5109 /* Get the dwarf_file_data from a file DIE attribute. */
5110
5111 static inline struct dwarf_file_data *
5112 AT_file (dw_attr_node *a)
5113 {
5114 gcc_assert (a && (AT_class (a) == dw_val_class_file
5115 || AT_class (a) == dw_val_class_file_implicit));
5116 return a->dw_attr_val.v.val_file;
5117 }
5118
5119 /* Add a vms delta attribute value to a DIE. */
5120
5121 static inline void
5122 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5123 const char *lbl1, const char *lbl2)
5124 {
5125 dw_attr_node attr;
5126
5127 attr.dw_attr = attr_kind;
5128 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5129 attr.dw_attr_val.val_entry = NULL;
5130 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5131 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5132 add_dwarf_attr (die, &attr);
5133 }
5134
5135 /* Add a symbolic view identifier attribute value to a DIE. */
5136
5137 static inline void
5138 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5139 const char *view_label)
5140 {
5141 dw_attr_node attr;
5142
5143 attr.dw_attr = attr_kind;
5144 attr.dw_attr_val.val_class = dw_val_class_symview;
5145 attr.dw_attr_val.val_entry = NULL;
5146 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5147 add_dwarf_attr (die, &attr);
5148 }
5149
5150 /* Add a label identifier attribute value to a DIE. */
5151
5152 static inline void
5153 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5154 const char *lbl_id)
5155 {
5156 dw_attr_node attr;
5157
5158 attr.dw_attr = attr_kind;
5159 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5160 attr.dw_attr_val.val_entry = NULL;
5161 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5162 if (dwarf_split_debug_info)
5163 attr.dw_attr_val.val_entry
5164 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5165 ate_kind_label);
5166 add_dwarf_attr (die, &attr);
5167 }
5168
5169 /* Add a section offset attribute value to a DIE, an offset into the
5170 debug_line section. */
5171
5172 static inline void
5173 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5174 const char *label)
5175 {
5176 dw_attr_node attr;
5177
5178 attr.dw_attr = attr_kind;
5179 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5180 attr.dw_attr_val.val_entry = NULL;
5181 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5182 add_dwarf_attr (die, &attr);
5183 }
5184
5185 /* Add a section offset attribute value to a DIE, an offset into the
5186 debug_loclists section. */
5187
5188 static inline void
5189 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5190 const char *label)
5191 {
5192 dw_attr_node attr;
5193
5194 attr.dw_attr = attr_kind;
5195 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5196 attr.dw_attr_val.val_entry = NULL;
5197 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5198 add_dwarf_attr (die, &attr);
5199 }
5200
5201 /* Add a section offset attribute value to a DIE, an offset into the
5202 debug_macinfo section. */
5203
5204 static inline void
5205 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5206 const char *label)
5207 {
5208 dw_attr_node attr;
5209
5210 attr.dw_attr = attr_kind;
5211 attr.dw_attr_val.val_class = dw_val_class_macptr;
5212 attr.dw_attr_val.val_entry = NULL;
5213 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5214 add_dwarf_attr (die, &attr);
5215 }
5216
5217 /* Add an offset attribute value to a DIE. */
5218
5219 static inline void
5220 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5221 unsigned HOST_WIDE_INT offset)
5222 {
5223 dw_attr_node attr;
5224
5225 attr.dw_attr = attr_kind;
5226 attr.dw_attr_val.val_class = dw_val_class_offset;
5227 attr.dw_attr_val.val_entry = NULL;
5228 attr.dw_attr_val.v.val_offset = offset;
5229 add_dwarf_attr (die, &attr);
5230 }
5231
5232 /* Add a range_list attribute value to a DIE. When using
5233 dwarf_split_debug_info, address attributes in dies destined for the
5234 final executable should be direct references--setting the parameter
5235 force_direct ensures this behavior. */
5236
5237 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5238 #define RELOCATED_OFFSET (NULL)
5239
5240 static void
5241 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5242 long unsigned int offset, bool force_direct)
5243 {
5244 dw_attr_node attr;
5245
5246 attr.dw_attr = attr_kind;
5247 attr.dw_attr_val.val_class = dw_val_class_range_list;
5248 /* For the range_list attribute, use val_entry to store whether the
5249 offset should follow split-debug-info or normal semantics. This
5250 value is read in output_range_list_offset. */
5251 if (dwarf_split_debug_info && !force_direct)
5252 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5253 else
5254 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5255 attr.dw_attr_val.v.val_offset = offset;
5256 add_dwarf_attr (die, &attr);
5257 }
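
/* Clarifying note (an editorial addition): RELOCATED_OFFSET (NULL) tells
   output_range_list_offset to emit the offset as a relocatable reference
   into the range-list section, whereas UNRELOCATED_OFFSET marks offsets
   that are emitted as plain constants, as needed for the relocation-free
   split-debug-info (.dwo) output. */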
5258
5259 /* Return the start label of a delta attribute. */
5260
5261 static inline const char *
5262 AT_vms_delta1 (dw_attr_node *a)
5263 {
5264 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5265 return a->dw_attr_val.v.val_vms_delta.lbl1;
5266 }
5267
5268 /* Return the end label of a delta attribute. */
5269
5270 static inline const char *
5271 AT_vms_delta2 (dw_attr_node *a)
5272 {
5273 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5274 return a->dw_attr_val.v.val_vms_delta.lbl2;
5275 }
5276
5277 static inline const char *
5278 AT_lbl (dw_attr_node *a)
5279 {
5280 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5281 || AT_class (a) == dw_val_class_lineptr
5282 || AT_class (a) == dw_val_class_macptr
5283 || AT_class (a) == dw_val_class_loclistsptr
5284 || AT_class (a) == dw_val_class_high_pc));
5285 return a->dw_attr_val.v.val_lbl_id;
5286 }
5287
5288 /* Get the attribute of type attr_kind. */
5289
5290 static dw_attr_node *
5291 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5292 {
5293 dw_attr_node *a;
5294 unsigned ix;
5295 dw_die_ref spec = NULL;
5296
5297 if (! die)
5298 return NULL;
5299
5300 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5301 if (a->dw_attr == attr_kind)
5302 return a;
5303 else if (a->dw_attr == DW_AT_specification
5304 || a->dw_attr == DW_AT_abstract_origin)
5305 spec = AT_ref (a);
5306
5307 if (spec)
5308 return get_AT (spec, attr_kind);
5309
5310 return NULL;
5311 }
5312
5313 /* Returns the parent of the declaration of DIE. */
5314
5315 static dw_die_ref
5316 get_die_parent (dw_die_ref die)
5317 {
5318 dw_die_ref t;
5319
5320 if (!die)
5321 return NULL;
5322
5323 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5324 || (t = get_AT_ref (die, DW_AT_specification)))
5325 die = t;
5326
5327 return die->die_parent;
5328 }
5329
5330 /* Return the "low pc" attribute value, typically associated with a subprogram
5331 DIE. Return null if the "low pc" attribute is not present or if it
5332 cannot be represented as an assembler label identifier. */
5333
5334 static inline const char *
5335 get_AT_low_pc (dw_die_ref die)
5336 {
5337 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5338
5339 return a ? AT_lbl (a) : NULL;
5340 }
5341
5342 /* Return the "high pc" attribute value, typically associated with a subprogram
5343 DIE. Return null if the "high pc" attribute is not present or if it
5344 cannot be represented as an assembler label identifier. */
5345
5346 static inline const char *
5347 get_AT_hi_pc (dw_die_ref die)
5348 {
5349 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5350
5351 return a ? AT_lbl (a) : NULL;
5352 }
5353
5354 /* Return the value of the string attribute designated by ATTR_KIND, or
5355 NULL if it is not present. */
5356
5357 static inline const char *
5358 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5359 {
5360 dw_attr_node *a = get_AT (die, attr_kind);
5361
5362 return a ? AT_string (a) : NULL;
5363 }
5364
5365 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5366 if it is not present. */
5367
5368 static inline int
5369 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5370 {
5371 dw_attr_node *a = get_AT (die, attr_kind);
5372
5373 return a ? AT_flag (a) : 0;
5374 }
5375
5376 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5377 if it is not present. */
5378
5379 static inline unsigned
5380 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5381 {
5382 dw_attr_node *a = get_AT (die, attr_kind);
5383
5384 return a ? AT_unsigned (a) : 0;
5385 }
5386
5387 static inline dw_die_ref
5388 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5389 {
5390 dw_attr_node *a = get_AT (die, attr_kind);
5391
5392 return a ? AT_ref (a) : NULL;
5393 }
5394
5395 static inline struct dwarf_file_data *
5396 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5397 {
5398 dw_attr_node *a = get_AT (die, attr_kind);
5399
5400 return a ? AT_file (a) : NULL;
5401 }
5402
5403 /* Return TRUE if the language is C++. */
5404
5405 static inline bool
5406 is_cxx (void)
5407 {
5408 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5409
5410 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5411 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5412 }
5413
5414 /* Return TRUE if DECL was created by the C++ frontend. */
5415
5416 static bool
5417 is_cxx (const_tree decl)
5418 {
5419 if (in_lto_p)
5420 {
5421 const_tree context = get_ultimate_context (decl);
5422 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5423 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5424 }
5425 return is_cxx ();
5426 }
5427
5428 /* Return TRUE if the language is Fortran. */
5429
5430 static inline bool
5431 is_fortran (void)
5432 {
5433 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5434
5435 return (lang == DW_LANG_Fortran77
5436 || lang == DW_LANG_Fortran90
5437 || lang == DW_LANG_Fortran95
5438 || lang == DW_LANG_Fortran03
5439 || lang == DW_LANG_Fortran08);
5440 }
5441
5442 static inline bool
5443 is_fortran (const_tree decl)
5444 {
5445 if (in_lto_p)
5446 {
5447 const_tree context = get_ultimate_context (decl);
5448 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5449 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5450 "GNU Fortran", 11) == 0
5451 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5452 "GNU F77") == 0);
5453 }
5454 return is_fortran ();
5455 }
5456
5457 /* Return TRUE if the language is Ada. */
5458
5459 static inline bool
5460 is_ada (void)
5461 {
5462 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5463
5464 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5465 }
5466
5467 /* Remove the specified attribute if present. Return TRUE if removal
5468 was successful. */
5469
5470 static bool
5471 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5472 {
5473 dw_attr_node *a;
5474 unsigned ix;
5475
5476 if (! die)
5477 return false;
5478
5479 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5480 if (a->dw_attr == attr_kind)
5481 {
5482 if (AT_class (a) == dw_val_class_str)
5483 if (a->dw_attr_val.v.val_str->refcount)
5484 a->dw_attr_val.v.val_str->refcount--;
5485
5486 /* vec::ordered_remove should help reduce the number of abbrevs
5487 that are needed. */
5488 die->die_attr->ordered_remove (ix);
5489 return true;
5490 }
5491 return false;
5492 }
5493
5494 /* Remove CHILD from its parent. PREV must have the property that
5495 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5496
5497 static void
5498 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5499 {
5500 gcc_assert (child->die_parent == prev->die_parent);
5501 gcc_assert (prev->die_sib == child);
5502 if (prev == child)
5503 {
5504 gcc_assert (child->die_parent->die_child == child);
5505 prev = NULL;
5506 }
5507 else
5508 prev->die_sib = child->die_sib;
5509 if (child->die_parent->die_child == child)
5510 child->die_parent->die_child = prev;
5511 child->die_sib = NULL;
5512 }
5513
5514 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5515 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5516
5517 static void
5518 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5519 {
5520 dw_die_ref parent = old_child->die_parent;
5521
5522 gcc_assert (parent == prev->die_parent);
5523 gcc_assert (prev->die_sib == old_child);
5524
5525 new_child->die_parent = parent;
5526 if (prev == old_child)
5527 {
5528 gcc_assert (parent->die_child == old_child);
5529 new_child->die_sib = new_child;
5530 }
5531 else
5532 {
5533 prev->die_sib = new_child;
5534 new_child->die_sib = old_child->die_sib;
5535 }
5536 if (old_child->die_parent->die_child == old_child)
5537 old_child->die_parent->die_child = new_child;
5538 old_child->die_sib = NULL;
5539 }
5540
5541 /* Move all children from OLD_PARENT to NEW_PARENT. */
5542
5543 static void
5544 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5545 {
5546 dw_die_ref c;
5547 new_parent->die_child = old_parent->die_child;
5548 old_parent->die_child = NULL;
5549 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5550 }
5551
5552 /* Remove any children of DIE whose die_tag is TAG. Do nothing if no child
5553 matches TAG. */
5554
5555 static void
5556 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5557 {
5558 dw_die_ref c;
5559
5560 c = die->die_child;
5561 if (c) do {
5562 dw_die_ref prev = c;
5563 c = c->die_sib;
5564 while (c->die_tag == tag)
5565 {
5566 remove_child_with_prev (c, prev);
5567 c->die_parent = NULL;
5568 /* Might have removed every child. */
5569 if (die->die_child == NULL)
5570 return;
5571 c = prev->die_sib;
5572 }
5573 } while (c != die->die_child);
5574 }
5575
5576 /* Add a CHILD_DIE as the last child of DIE. */
5577
5578 static void
5579 add_child_die (dw_die_ref die, dw_die_ref child_die)
5580 {
5581 /* FIXME this should probably be an assert. */
5582 if (! die || ! child_die)
5583 return;
5584 gcc_assert (die != child_die);
5585
5586 child_die->die_parent = die;
5587 if (die->die_child)
5588 {
5589 child_die->die_sib = die->die_child->die_sib;
5590 die->die_child->die_sib = child_die;
5591 }
5592 else
5593 child_die->die_sib = child_die;
5594 die->die_child = child_die;
5595 }
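
/* Illustration, added for clarity (not original text): children form a
   circular singly linked list in which DIE->die_child points at the
   *last* child and that child's die_sib wraps around to the first one.
   Adding A, then B, then C to an empty parent P therefore yields

     P->die_child == C, C->die_sib == A, A->die_sib == B, B->die_sib == C

   which is why add_child_die above can append in constant time. */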
5596
5597 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5598
5599 static void
5600 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5601 dw_die_ref after_die)
5602 {
5603 gcc_assert (die
5604 && child_die
5605 && after_die
5606 && die->die_child
5607 && die != child_die);
5608
5609 child_die->die_parent = die;
5610 child_die->die_sib = after_die->die_sib;
5611 after_die->die_sib = child_die;
5612 if (die->die_child == after_die)
5613 die->die_child = child_die;
5614 }
5615
5616 /* Unassociate CHILD from its parent, and make its parent be
5617 NEW_PARENT. */
5618
5619 static void
5620 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5621 {
5622 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5623 if (p->die_sib == child)
5624 {
5625 remove_child_with_prev (child, p);
5626 break;
5627 }
5628 add_child_die (new_parent, child);
5629 }
5630
5631 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5632 is the specification, to the end of PARENT's list of children.
5633 This is done by removing and re-adding it. */
5634
5635 static void
5636 splice_child_die (dw_die_ref parent, dw_die_ref child)
5637 {
5638 /* We want the declaration DIE from inside the class, not the
5639 specification DIE at toplevel. */
5640 if (child->die_parent != parent)
5641 {
5642 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5643
5644 if (tmp)
5645 child = tmp;
5646 }
5647
5648 gcc_assert (child->die_parent == parent
5649 || (child->die_parent
5650 == get_AT_ref (parent, DW_AT_specification)));
5651
5652 reparent_child (child, parent);
5653 }
5654
5655 /* Create and return a new die with TAG_VALUE as tag. */
5656
5657 static inline dw_die_ref
5658 new_die_raw (enum dwarf_tag tag_value)
5659 {
5660 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5661 die->die_tag = tag_value;
5662 return die;
5663 }
5664
5665 /* Create and return a new die with a parent of PARENT_DIE. If
5666 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5667 associated tree T must be supplied to determine parenthood
5668 later. */
5669
5670 static inline dw_die_ref
5671 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5672 {
5673 dw_die_ref die = new_die_raw (tag_value);
5674
5675 if (parent_die != NULL)
5676 add_child_die (parent_die, die);
5677 else
5678 {
5679 limbo_die_node *limbo_node;
5680
5681 /* No DIEs created after early dwarf should end up in limbo,
5682 because the limbo list should not persist past LTO
5683 streaming. */
5684 if (tag_value != DW_TAG_compile_unit
5685 /* These are allowed because they're generated while
5686 breaking out COMDAT units late. */
5687 && tag_value != DW_TAG_type_unit
5688 && tag_value != DW_TAG_skeleton_unit
5689 && !early_dwarf
5690 /* Allow nested functions to live in limbo because they will
5691 only temporarily live there, as decls_for_scope will fix
5692 them up. */
5693 && (TREE_CODE (t) != FUNCTION_DECL
5694 || !decl_function_context (t))
5695 /* Same as nested functions above but for types. Types that
5696 are local to a function will be fixed in
5697 decls_for_scope. */
5698 && (!RECORD_OR_UNION_TYPE_P (t)
5699 || !TYPE_CONTEXT (t)
5700 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5701 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5702 especially in the ltrans stage, but once we implement LTO
5703 dwarf streaming, we should remove this exception. */
5704 && !in_lto_p)
5705 {
5706 fprintf (stderr, "symbol ended up in limbo too late:");
5707 debug_generic_stmt (t);
5708 gcc_unreachable ();
5709 }
5710
5711 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5712 limbo_node->die = die;
5713 limbo_node->created_for = t;
5714 limbo_node->next = limbo_die_list;
5715 limbo_die_list = limbo_node;
5716 }
5717
5718 return die;
5719 }
5720
5721 /* Return the DIE associated with the given type specifier. */
5722
5723 static inline dw_die_ref
5724 lookup_type_die (tree type)
5725 {
5726 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5727 if (die && die->removed)
5728 {
5729 TYPE_SYMTAB_DIE (type) = NULL;
5730 return NULL;
5731 }
5732 return die;
5733 }
5734
5735 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5736 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5737 anonymous type instead of the one of the naming typedef. */
5738
5739 static inline dw_die_ref
5740 strip_naming_typedef (tree type, dw_die_ref type_die)
5741 {
5742 if (type
5743 && TREE_CODE (type) == RECORD_TYPE
5744 && type_die
5745 && type_die->die_tag == DW_TAG_typedef
5746 && is_naming_typedef_decl (TYPE_NAME (type)))
5747 type_die = get_AT_ref (type_die, DW_AT_type);
5748 return type_die;
5749 }
5750
5751 /* Like lookup_type_die, but if type is an anonymous type named by a
5752 typedef[1], return the DIE of the anonymous type instead of the one of
5753 the naming typedef. This is because in gen_typedef_die, we equated
5754 the anonymous struct named by the typedef with the DIE of
5755 the naming typedef. So by default, lookup_type_die on an anonymous
5756 struct yields the DIE of the naming typedef.
5757
5758 [1]: Read the comment of is_naming_typedef_decl to learn about what
5759 a naming typedef is. */
5760
5761 static inline dw_die_ref
5762 lookup_type_die_strip_naming_typedef (tree type)
5763 {
5764 dw_die_ref die = lookup_type_die (type);
5765 return strip_naming_typedef (type, die);
5766 }
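
/* Example, added for clarity: for the C source

     typedef struct { int i; } foo_t;

   gen_typedef_die equates the anonymous struct with the DW_TAG_typedef
   DIE for foo_t, so a plain lookup_type_die on that struct returns the
   typedef DIE; this wrapper follows its DW_AT_type attribute back to the
   anonymous DW_TAG_structure_type DIE instead. */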
5767
5768 /* Equate a DIE to a given type specifier. */
5769
5770 static inline void
5771 equate_type_number_to_die (tree type, dw_die_ref type_die)
5772 {
5773 TYPE_SYMTAB_DIE (type) = type_die;
5774 }
5775
5776 /* Returns a hash value for X (which really is a die_struct). */
5777
5778 inline hashval_t
5779 decl_die_hasher::hash (die_node *x)
5780 {
5781 return (hashval_t) x->decl_id;
5782 }
5783
5784 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5785
5786 inline bool
5787 decl_die_hasher::equal (die_node *x, tree y)
5788 {
5789 return (x->decl_id == DECL_UID (y));
5790 }
5791
5792 /* Return the DIE associated with a given declaration. */
5793
5794 static inline dw_die_ref
5795 lookup_decl_die (tree decl)
5796 {
5797 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5798 NO_INSERT);
5799 if (!die)
5800 return NULL;
5801 if ((*die)->removed)
5802 {
5803 decl_die_table->clear_slot (die);
5804 return NULL;
5805 }
5806 return *die;
5807 }
5808
5809
5810 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5811 style reference. Return true if we found one referring to a DIE for
5812 DECL, otherwise return false. */
5813
5814 static bool
5815 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5816 unsigned HOST_WIDE_INT *off)
5817 {
5818 dw_die_ref die;
5819
5820 if (flag_wpa && !decl_die_table)
5821 return false;
5822
5823 if (TREE_CODE (decl) == BLOCK)
5824 die = BLOCK_DIE (decl);
5825 else
5826 die = lookup_decl_die (decl);
5827 if (!die)
5828 return false;
5829
5830 /* During WPA stage we currently use DIEs to store the
5831 decl <-> label + offset map. That's quite inefficient but it
5832 works for now. */
5833 if (flag_wpa)
5834 {
5835 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5836 if (!ref)
5837 {
5838 gcc_assert (die == comp_unit_die ());
5839 return false;
5840 }
5841 *off = ref->die_offset;
5842 *sym = ref->die_id.die_symbol;
5843 return true;
5844 }
5845
5846 /* Similar to get_ref_die_offset_label, but using the "correct"
5847 label. */
5848 *off = die->die_offset;
5849 while (die->die_parent)
5850 die = die->die_parent;
5851 /* For the containing CU DIE we compute a die_symbol in
5852 compute_comp_unit_symbol. */
5853 gcc_assert (die->die_tag == DW_TAG_compile_unit
5854 && die->die_id.die_symbol != NULL);
5855 *sym = die->die_id.die_symbol;
5856 return true;
5857 }
5858
5859 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5860
5861 static void
5862 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5863 const char *symbol, HOST_WIDE_INT offset)
5864 {
5865 /* Create a fake DIE that contains the reference. Don't use
5866 new_die because we don't want to end up in the limbo list. */
5867 dw_die_ref ref = new_die_raw (die->die_tag);
5868 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5869 ref->die_offset = offset;
5870 ref->with_offset = 1;
5871 add_AT_die_ref (die, attr_kind, ref);
5872 }
5873
5874 /* Create a DIE for DECL if required and add a reference to a DIE
5875 at SYMBOL + OFFSET which contains attributes dumped early. */
5876
5877 static void
5878 dwarf2out_register_external_die (tree decl, const char *sym,
5879 unsigned HOST_WIDE_INT off)
5880 {
5881 if (debug_info_level == DINFO_LEVEL_NONE)
5882 return;
5883
5884 if (flag_wpa && !decl_die_table)
5885 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5886
5887 dw_die_ref die
5888 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5889 gcc_assert (!die);
5890
5891 tree ctx;
5892 dw_die_ref parent = NULL;
5893 /* Need to lookup a DIE for the decls context - the containing
5894 function or translation unit. */
5895 if (TREE_CODE (decl) == BLOCK)
5896 {
5897 ctx = BLOCK_SUPERCONTEXT (decl);
5898 /* ??? We do not output DIEs for all scopes, thus skip as
5899 many DIEs as needed. */
5900 while (TREE_CODE (ctx) == BLOCK
5901 && !BLOCK_DIE (ctx))
5902 ctx = BLOCK_SUPERCONTEXT (ctx);
5903 }
5904 else
5905 ctx = DECL_CONTEXT (decl);
5906 /* Peel types in the context stack. */
5907 while (ctx && TYPE_P (ctx))
5908 ctx = TYPE_CONTEXT (ctx);
5909 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5910 if (debug_info_level <= DINFO_LEVEL_TERSE)
5911 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5912 ctx = DECL_CONTEXT (ctx);
5913 if (ctx)
5914 {
5915 if (TREE_CODE (ctx) == BLOCK)
5916 parent = BLOCK_DIE (ctx);
5917 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5918 /* Keep the 1:1 association during WPA. */
5919 && !flag_wpa)
5920 /* Otherwise all late annotations go to the main CU which
5921 imports the original CUs. */
5922 parent = comp_unit_die ();
5923 else if (TREE_CODE (ctx) == FUNCTION_DECL
5924 && TREE_CODE (decl) != PARM_DECL
5925 && TREE_CODE (decl) != BLOCK)
5926 /* Leave function local entities parent determination to when
5927 we process scope vars. */
5928 ;
5929 else
5930 parent = lookup_decl_die (ctx);
5931 }
5932 else
5933 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5934 Handle this case gracefully by globalizing stuff. */
5935 parent = comp_unit_die ();
5936 /* Create a DIE "stub". */
5937 switch (TREE_CODE (decl))
5938 {
5939 case TRANSLATION_UNIT_DECL:
5940 if (! flag_wpa)
5941 {
5942 die = comp_unit_die ();
5943 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5944 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5945 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5946 to create a DIE for the original CUs. */
5947 return;
5948 }
5949 /* Keep the 1:1 association during WPA. */
5950 die = new_die (DW_TAG_compile_unit, NULL, decl);
5951 break;
5952 case NAMESPACE_DECL:
5953 if (is_fortran (decl))
5954 die = new_die (DW_TAG_module, parent, decl);
5955 else
5956 die = new_die (DW_TAG_namespace, parent, decl);
5957 break;
5958 case FUNCTION_DECL:
5959 die = new_die (DW_TAG_subprogram, parent, decl);
5960 break;
5961 case VAR_DECL:
5962 die = new_die (DW_TAG_variable, parent, decl);
5963 break;
5964 case RESULT_DECL:
5965 die = new_die (DW_TAG_variable, parent, decl);
5966 break;
5967 case PARM_DECL:
5968 die = new_die (DW_TAG_formal_parameter, parent, decl);
5969 break;
5970 case CONST_DECL:
5971 die = new_die (DW_TAG_constant, parent, decl);
5972 break;
5973 case LABEL_DECL:
5974 die = new_die (DW_TAG_label, parent, decl);
5975 break;
5976 case BLOCK:
5977 die = new_die (DW_TAG_lexical_block, parent, decl);
5978 break;
5979 default:
5980 gcc_unreachable ();
5981 }
5982 if (TREE_CODE (decl) == BLOCK)
5983 BLOCK_DIE (decl) = die;
5984 else
5985 equate_decl_number_to_die (decl, die);
5986
5987 /* Add a reference to the DIE providing early debug at $sym + off. */
5988 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5989 }
5990
5991 /* Returns a hash value for X (which really is a var_loc_list). */
5992
5993 inline hashval_t
5994 decl_loc_hasher::hash (var_loc_list *x)
5995 {
5996 return (hashval_t) x->decl_id;
5997 }
5998
5999 /* Return nonzero if decl_id of var_loc_list X is the same as
6000 UID of decl *Y. */
6001
6002 inline bool
6003 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6004 {
6005 return (x->decl_id == DECL_UID (y));
6006 }
6007
6008 /* Return the var_loc list associated with a given declaration. */
6009
6010 static inline var_loc_list *
6011 lookup_decl_loc (const_tree decl)
6012 {
6013 if (!decl_loc_table)
6014 return NULL;
6015 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6016 }
6017
6018 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6019
6020 inline hashval_t
6021 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6022 {
6023 return (hashval_t) x->decl_id;
6024 }
6025
6026 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6027 UID of decl *Y. */
6028
6029 inline bool
6030 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6031 {
6032 return (x->decl_id == DECL_UID (y));
6033 }
6034
6035 /* Equate a DIE to a particular declaration. */
6036
6037 static void
6038 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6039 {
6040 unsigned int decl_id = DECL_UID (decl);
6041
6042 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6043 decl_die->decl_id = decl_id;
6044 }
6045
6046 /* Return how many bits the PIECE EXPR_LIST covers. */
6047
6048 static HOST_WIDE_INT
6049 decl_piece_bitsize (rtx piece)
6050 {
6051 int ret = (int) GET_MODE (piece);
6052 if (ret)
6053 return ret;
6054 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6055 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6056 return INTVAL (XEXP (XEXP (piece, 0), 0));
6057 }
6058
6059 /* Return a pointer to the slot holding the location note in the PIECE EXPR_LIST. */
6060
6061 static rtx *
6062 decl_piece_varloc_ptr (rtx piece)
6063 {
6064 if ((int) GET_MODE (piece))
6065 return &XEXP (piece, 0);
6066 else
6067 return &XEXP (XEXP (piece, 0), 1);
6068 }
6069
6070 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6071 NEXT is the chain of following piece nodes. */
6072
6073 static rtx_expr_list *
6074 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6075 {
6076 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6077 return alloc_EXPR_LIST (bitsize, loc_note, next);
6078 else
6079 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6080 GEN_INT (bitsize),
6081 loc_note), next);
6082 }
6083
6084 /* Return the rtx that should be stored into the loc field for
6085 LOC_NOTE and BITPOS/BITSIZE. */
6086
6087 static rtx
6088 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6089 HOST_WIDE_INT bitsize)
6090 {
6091 if (bitsize != -1)
6092 {
6093 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6094 if (bitpos != 0)
6095 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6096 }
6097 return loc_note;
6098 }
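
/* Worked example, added by the editor: for a variable scalarized by SRA
   whose bits 32-63 get a location note, construct_piece_list (note, 32, 32)
   builds the two-element piece list

     [32-bit piece, no note]  ->  [32-bit piece, note]

   i.e. an empty padding piece for bits 0-31 followed by the piece that
   actually carries the note. */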
6099
6100 /* This function either modifies the location piece list *DEST in
6101 place (if SRC and INNER are NULL), or copies the location piece list
6102 *SRC to *DEST while modifying it. The location at BITPOS is changed
6103 to contain LOC_NOTE; any pieces overlapping it are removed (or,
6104 when copying, simply not copied) and some padding is added around
6105 it if needed. When modifying in place, DEST should point to the
6106 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when copying,
6107 SRC points to the start of the whole list and INNER points to the
6108 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
6109
6110 static void
6111 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6112 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6113 HOST_WIDE_INT bitsize, rtx loc_note)
6114 {
6115 HOST_WIDE_INT diff;
6116 bool copy = inner != NULL;
6117
6118 if (copy)
6119 {
6120 /* First copy all nodes preceding the current bitpos. */
6121 while (src != inner)
6122 {
6123 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6124 decl_piece_bitsize (*src), NULL_RTX);
6125 dest = &XEXP (*dest, 1);
6126 src = &XEXP (*src, 1);
6127 }
6128 }
6129 /* Add padding if needed. */
6130 if (bitpos != piece_bitpos)
6131 {
6132 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6133 copy ? NULL_RTX : *dest);
6134 dest = &XEXP (*dest, 1);
6135 }
6136 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6137 {
6138 gcc_assert (!copy);
6139 /* A piece with the correct bitpos and bitsize already exists;
6140 just update its location and return. */
6141 *decl_piece_varloc_ptr (*dest) = loc_note;
6142 return;
6143 }
6144 /* Add the piece that changed. */
6145 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6146 dest = &XEXP (*dest, 1);
6147 /* Skip over pieces that overlap it. */
6148 diff = bitpos - piece_bitpos + bitsize;
6149 if (!copy)
6150 src = dest;
6151 while (diff > 0 && *src)
6152 {
6153 rtx piece = *src;
6154 diff -= decl_piece_bitsize (piece);
6155 if (copy)
6156 src = &XEXP (piece, 1);
6157 else
6158 {
6159 *src = XEXP (piece, 1);
6160 free_EXPR_LIST_node (piece);
6161 }
6162 }
6163 /* Add padding if needed. */
6164 if (diff < 0 && *src)
6165 {
6166 if (!copy)
6167 dest = src;
6168 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6169 dest = &XEXP (*dest, 1);
6170 }
6171 if (!copy)
6172 return;
6173 /* Finally copy all nodes following it. */
6174 while (*src)
6175 {
6176 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6177 decl_piece_bitsize (*src), NULL_RTX);
6178 dest = &XEXP (*dest, 1);
6179 src = &XEXP (*src, 1);
6180 }
6181 }
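
/* Editorial note continuing the example after construct_piece_list: if a
   later note covers only bits 32-47 of the same variable, adjust_piece_list
   keeps the leading 32-bit padding piece, inserts a 16-bit piece carrying
   the new note, and drops the overlapped 32-bit piece; trailing padding is
   only re-inserted when further pieces follow the overlap. */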
6182
6183 /* Add a variable location node to the linked list for DECL. */
6184
6185 static struct var_loc_node *
6186 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6187 {
6188 unsigned int decl_id;
6189 var_loc_list *temp;
6190 struct var_loc_node *loc = NULL;
6191 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6192
6193 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6194 {
6195 tree realdecl = DECL_DEBUG_EXPR (decl);
6196 if (handled_component_p (realdecl)
6197 || (TREE_CODE (realdecl) == MEM_REF
6198 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6199 {
6200 bool reverse;
6201 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6202 &bitsize, &reverse);
6203 if (!innerdecl
6204 || !DECL_P (innerdecl)
6205 || DECL_IGNORED_P (innerdecl)
6206 || TREE_STATIC (innerdecl)
6207 || bitsize == 0
6208 || bitpos + bitsize > 256)
6209 return NULL;
6210 decl = innerdecl;
6211 }
6212 }
6213
6214 decl_id = DECL_UID (decl);
6215 var_loc_list **slot
6216 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6217 if (*slot == NULL)
6218 {
6219 temp = ggc_cleared_alloc<var_loc_list> ();
6220 temp->decl_id = decl_id;
6221 *slot = temp;
6222 }
6223 else
6224 temp = *slot;
6225
6226 /* For PARM_DECLs try to keep around the original incoming value,
6227 even if that means we'll emit a zero-range .debug_loc entry. */
6228 if (temp->last
6229 && temp->first == temp->last
6230 && TREE_CODE (decl) == PARM_DECL
6231 && NOTE_P (temp->first->loc)
6232 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6233 && DECL_INCOMING_RTL (decl)
6234 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6235 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6236 == GET_CODE (DECL_INCOMING_RTL (decl))
6237 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6238 && (bitsize != -1
6239 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6240 NOTE_VAR_LOCATION_LOC (loc_note))
6241 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6242 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6243 {
6244 loc = ggc_cleared_alloc<var_loc_node> ();
6245 temp->first->next = loc;
6246 temp->last = loc;
6247 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6248 }
6249 else if (temp->last)
6250 {
6251 struct var_loc_node *last = temp->last, *unused = NULL;
6252 rtx *piece_loc = NULL, last_loc_note;
6253 HOST_WIDE_INT piece_bitpos = 0;
6254 if (last->next)
6255 {
6256 last = last->next;
6257 gcc_assert (last->next == NULL);
6258 }
6259 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6260 {
6261 piece_loc = &last->loc;
6262 do
6263 {
6264 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6265 if (piece_bitpos + cur_bitsize > bitpos)
6266 break;
6267 piece_bitpos += cur_bitsize;
6268 piece_loc = &XEXP (*piece_loc, 1);
6269 }
6270 while (*piece_loc);
6271 }
6272 /* TEMP->LAST here points either to the last-but-one or to the
6273 last element in the chained list, while LAST points to the
6274 last element. */
6275 if (label && strcmp (last->label, label) == 0 && last->view == view)
6276 {
6277 /* For SRA-optimized variables, if there weren't any real
6278 insns since the last note, just modify the last node. */
6279 if (piece_loc != NULL)
6280 {
6281 adjust_piece_list (piece_loc, NULL, NULL,
6282 bitpos, piece_bitpos, bitsize, loc_note);
6283 return NULL;
6284 }
6285 /* If the last note doesn't cover any instructions, remove it. */
6286 if (temp->last != last)
6287 {
6288 temp->last->next = NULL;
6289 unused = last;
6290 last = temp->last;
6291 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6292 }
6293 else
6294 {
6295 gcc_assert (temp->first == temp->last
6296 || (temp->first->next == temp->last
6297 && TREE_CODE (decl) == PARM_DECL));
6298 memset (temp->last, '\0', sizeof (*temp->last));
6299 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6300 return temp->last;
6301 }
6302 }
6303 if (bitsize == -1 && NOTE_P (last->loc))
6304 last_loc_note = last->loc;
6305 else if (piece_loc != NULL
6306 && *piece_loc != NULL_RTX
6307 && piece_bitpos == bitpos
6308 && decl_piece_bitsize (*piece_loc) == bitsize)
6309 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6310 else
6311 last_loc_note = NULL_RTX;
6312 /* If the current location is the same as the end of the list,
6313 and either both or neither of the locations is uninitialized,
6314 we have nothing to do. */
6315 if (last_loc_note == NULL_RTX
6316 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6317 NOTE_VAR_LOCATION_LOC (loc_note)))
6318 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6319 != NOTE_VAR_LOCATION_STATUS (loc_note))
6320 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6321 == VAR_INIT_STATUS_UNINITIALIZED)
6322 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6323 == VAR_INIT_STATUS_UNINITIALIZED))))
6324 {
6325 /* Add LOC to the end of list and update LAST. If the last
6326 element of the list has been removed above, reuse its
6327 memory for the new node, otherwise allocate a new one. */
6328 if (unused)
6329 {
6330 loc = unused;
6331 memset (loc, '\0', sizeof (*loc));
6332 }
6333 else
6334 loc = ggc_cleared_alloc<var_loc_node> ();
6335 if (bitsize == -1 || piece_loc == NULL)
6336 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6337 else
6338 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6339 bitpos, piece_bitpos, bitsize, loc_note);
6340 last->next = loc;
6341 /* Ensure TEMP->LAST will point either to the new last but one
6342 element of the chain, or to the last element in it. */
6343 if (last != temp->last)
6344 temp->last = last;
6345 }
6346 else if (unused)
6347 ggc_free (unused);
6348 }
6349 else
6350 {
6351 loc = ggc_cleared_alloc<var_loc_node> ();
6352 temp->first = loc;
6353 temp->last = loc;
6354 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6355 }
6356 return loc;
6357 }
6358 \f
6359 /* Keep track of the number of spaces used to indent the
6360 output of the debugging routines that print the structure of
6361 the DIE internal representation. */
6362 static int print_indent;
6363
6364 /* Indent the line the number of spaces given by print_indent. */
6365
6366 static inline void
6367 print_spaces (FILE *outfile)
6368 {
6369 fprintf (outfile, "%*s", print_indent, "");
6370 }
6371
6372 /* Print a type signature in hex. */
6373
6374 static inline void
6375 print_signature (FILE *outfile, char *sig)
6376 {
6377 int i;
6378
6379 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6380 fprintf (outfile, "%02x", sig[i] & 0xff);
6381 }
6382
6383 static inline void
6384 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6385 {
6386 if (discr_value->pos)
6387 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6388 else
6389 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6390 }
6391
6392 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6393
6394 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6395 RECURSE, output location descriptor operations. */
6396
6397 static void
6398 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6399 {
6400 switch (val->val_class)
6401 {
6402 case dw_val_class_addr:
6403 fprintf (outfile, "address");
6404 break;
6405 case dw_val_class_offset:
6406 fprintf (outfile, "offset");
6407 break;
6408 case dw_val_class_loc:
6409 fprintf (outfile, "location descriptor");
6410 if (val->v.val_loc == NULL)
6411 fprintf (outfile, " -> <null>\n");
6412 else if (recurse)
6413 {
6414 fprintf (outfile, ":\n");
6415 print_indent += 4;
6416 print_loc_descr (val->v.val_loc, outfile);
6417 print_indent -= 4;
6418 }
6419 else
6420 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6421 break;
6422 case dw_val_class_loc_list:
6423 fprintf (outfile, "location list -> label:%s",
6424 val->v.val_loc_list->ll_symbol);
6425 break;
6426 case dw_val_class_view_list:
6427 val = view_list_to_loc_list_val_node (val);
6428 fprintf (outfile, "location list with views -> labels:%s and %s",
6429 val->v.val_loc_list->ll_symbol,
6430 val->v.val_loc_list->vl_symbol);
6431 break;
6432 case dw_val_class_range_list:
6433 fprintf (outfile, "range list");
6434 break;
6435 case dw_val_class_const:
6436 case dw_val_class_const_implicit:
6437 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6438 break;
6439 case dw_val_class_unsigned_const:
6440 case dw_val_class_unsigned_const_implicit:
6441 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6442 break;
6443 case dw_val_class_const_double:
6444 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6445 HOST_WIDE_INT_PRINT_UNSIGNED")",
6446 val->v.val_double.high,
6447 val->v.val_double.low);
6448 break;
6449 case dw_val_class_wide_int:
6450 {
6451 int i = val->v.val_wide->get_len ();
6452 fprintf (outfile, "constant (");
6453 gcc_assert (i > 0);
6454 if (val->v.val_wide->elt (i - 1) == 0)
6455 fprintf (outfile, "0x");
6456 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6457 val->v.val_wide->elt (--i));
6458 while (--i >= 0)
6459 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6460 val->v.val_wide->elt (i));
6461 fprintf (outfile, ")");
6462 break;
6463 }
6464 case dw_val_class_vec:
6465 fprintf (outfile, "floating-point or vector constant");
6466 break;
6467 case dw_val_class_flag:
6468 fprintf (outfile, "%u", val->v.val_flag);
6469 break;
6470 case dw_val_class_die_ref:
6471 if (val->v.val_die_ref.die != NULL)
6472 {
6473 dw_die_ref die = val->v.val_die_ref.die;
6474
6475 if (die->comdat_type_p)
6476 {
6477 fprintf (outfile, "die -> signature: ");
6478 print_signature (outfile,
6479 die->die_id.die_type_node->signature);
6480 }
6481 else if (die->die_id.die_symbol)
6482 {
6483 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6484 if (die->with_offset)
6485 fprintf (outfile, " + %ld", die->die_offset);
6486 }
6487 else
6488 fprintf (outfile, "die -> %ld", die->die_offset);
6489 fprintf (outfile, " (%p)", (void *) die);
6490 }
6491 else
6492 fprintf (outfile, "die -> <null>");
6493 break;
6494 case dw_val_class_vms_delta:
6495 fprintf (outfile, "delta: @slotcount(%s-%s)",
6496 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6497 break;
6498 case dw_val_class_symview:
6499 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6500 break;
6501 case dw_val_class_lbl_id:
6502 case dw_val_class_lineptr:
6503 case dw_val_class_macptr:
6504 case dw_val_class_loclistsptr:
6505 case dw_val_class_high_pc:
6506 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6507 break;
6508 case dw_val_class_str:
6509 if (val->v.val_str->str != NULL)
6510 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6511 else
6512 fprintf (outfile, "<null>");
6513 break;
6514 case dw_val_class_file:
6515 case dw_val_class_file_implicit:
6516 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6517 val->v.val_file->emitted_number);
6518 break;
6519 case dw_val_class_data8:
6520 {
6521 int i;
6522
6523 for (i = 0; i < 8; i++)
6524 fprintf (outfile, "%02x", val->v.val_data8[i]);
6525 break;
6526 }
6527 case dw_val_class_discr_value:
6528 print_discr_value (outfile, &val->v.val_discr_value);
6529 break;
6530 case dw_val_class_discr_list:
6531 for (dw_discr_list_ref node = val->v.val_discr_list;
6532 node != NULL;
6533 node = node->dw_discr_next)
6534 {
6535 if (node->dw_discr_range)
6536 {
6537 print_discr_value (outfile, &node->dw_discr_lower_bound);
6538 fprintf (outfile, " .. ");
6539 print_discr_value (outfile, &node->dw_discr_upper_bound);
6540 }
6541 else
6542 print_discr_value (outfile, &node->dw_discr_lower_bound);
6543
6544 if (node->dw_discr_next != NULL)
6545 fprintf (outfile, " | ");
6546 }
6547 default:
6548 break;
6549 }
6550 }
6551
6552 /* Likewise, for a DIE attribute. */
6553
6554 static void
6555 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6556 {
6557 print_dw_val (&a->dw_attr_val, recurse, outfile);
6558 }
6559
6560
6561 /* Print the list of operands in the LOC location description to OUTFILE. This
6562 routine is a debugging aid only. */
6563
6564 static void
6565 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6566 {
6567 dw_loc_descr_ref l = loc;
6568
6569 if (loc == NULL)
6570 {
6571 print_spaces (outfile);
6572 fprintf (outfile, "<null>\n");
6573 return;
6574 }
6575
6576 for (l = loc; l != NULL; l = l->dw_loc_next)
6577 {
6578 print_spaces (outfile);
6579 fprintf (outfile, "(%p) %s",
6580 (void *) l,
6581 dwarf_stack_op_name (l->dw_loc_opc));
6582 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6583 {
6584 fprintf (outfile, " ");
6585 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6586 }
6587 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6588 {
6589 fprintf (outfile, ", ");
6590 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6591 }
6592 fprintf (outfile, "\n");
6593 }
6594 }
6595
6596 /* Print the information associated with a given DIE, and its children.
6597 This routine is a debugging aid only. */
6598
6599 static void
6600 print_die (dw_die_ref die, FILE *outfile)
6601 {
6602 dw_attr_node *a;
6603 dw_die_ref c;
6604 unsigned ix;
6605
6606 print_spaces (outfile);
6607 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6608 die->die_offset, dwarf_tag_name (die->die_tag),
6609 (void*) die);
6610 print_spaces (outfile);
6611 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6612 fprintf (outfile, " offset: %ld", die->die_offset);
6613 fprintf (outfile, " mark: %d\n", die->die_mark);
6614
6615 if (die->comdat_type_p)
6616 {
6617 print_spaces (outfile);
6618 fprintf (outfile, " signature: ");
6619 print_signature (outfile, die->die_id.die_type_node->signature);
6620 fprintf (outfile, "\n");
6621 }
6622
6623 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6624 {
6625 print_spaces (outfile);
6626 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6627
6628 print_attribute (a, true, outfile);
6629 fprintf (outfile, "\n");
6630 }
6631
6632 if (die->die_child != NULL)
6633 {
6634 print_indent += 4;
6635 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6636 print_indent -= 4;
6637 }
6638 if (print_indent == 0)
6639 fprintf (outfile, "\n");
6640 }
6641
6642 /* Print the list of operations in the LOC location description. */
6643
6644 DEBUG_FUNCTION void
6645 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6646 {
6647 print_loc_descr (loc, stderr);
6648 }
6649
6650 /* Print the information collected for a given DIE. */
6651
6652 DEBUG_FUNCTION void
6653 debug_dwarf_die (dw_die_ref die)
6654 {
6655 print_die (die, stderr);
6656 }
6657
6658 DEBUG_FUNCTION void
6659 debug (die_struct &ref)
6660 {
6661 print_die (&ref, stderr);
6662 }
6663
6664 DEBUG_FUNCTION void
6665 debug (die_struct *ptr)
6666 {
6667 if (ptr)
6668 debug (*ptr);
6669 else
6670 fprintf (stderr, "<nil>\n");
6671 }
6672
6673
6674 /* Print all DWARF information collected for the compilation unit.
6675 This routine is a debugging aid only. */
6676
6677 DEBUG_FUNCTION void
6678 debug_dwarf (void)
6679 {
6680 print_indent = 0;
6681 print_die (comp_unit_die (), stderr);
6682 }
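
/* Usage hint, added by the editor: these DEBUG_FUNCTION helpers are meant
   to be called by hand from a debugger while GCC is stopped inside
   dwarf2out, e.g. "(gdb) call debug_dwarf ()" or
   "(gdb) call debug_dwarf_die (die)". */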
6683
6684 /* Verify the DIE tree structure. */
6685
6686 DEBUG_FUNCTION void
6687 verify_die (dw_die_ref die)
6688 {
6689 gcc_assert (!die->die_mark);
6690 if (die->die_parent == NULL
6691 && die->die_sib == NULL)
6692 return;
6693 /* Verify the die_sib list is cyclic. */
6694 dw_die_ref x = die;
6695 do
6696 {
6697 x->die_mark = 1;
6698 x = x->die_sib;
6699 }
6700 while (x && !x->die_mark);
6701 gcc_assert (x == die);
6702 x = die;
6703 do
6704 {
6705 /* Verify all dies have the same parent. */
6706 gcc_assert (x->die_parent == die->die_parent);
6707 if (x->die_child)
6708 {
6709 /* Verify the child has the proper parent and recurse. */
6710 gcc_assert (x->die_child->die_parent == x);
6711 verify_die (x->die_child);
6712 }
6713 x->die_mark = 0;
6714 x = x->die_sib;
6715 }
6716 while (x && x->die_mark);
6717 }
6718
6719 /* Sanity checks on DIEs. */
6720
6721 static void
6722 check_die (dw_die_ref die)
6723 {
6724 unsigned ix;
6725 dw_attr_node *a;
6726 bool inline_found = false;
6727 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6728 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6729 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6730 {
6731 switch (a->dw_attr)
6732 {
6733 case DW_AT_inline:
6734 if (a->dw_attr_val.v.val_unsigned)
6735 inline_found = true;
6736 break;
6737 case DW_AT_location:
6738 ++n_location;
6739 break;
6740 case DW_AT_low_pc:
6741 ++n_low_pc;
6742 break;
6743 case DW_AT_high_pc:
6744 ++n_high_pc;
6745 break;
6746 case DW_AT_artificial:
6747 ++n_artificial;
6748 break;
6749 case DW_AT_decl_column:
6750 ++n_decl_column;
6751 break;
6752 case DW_AT_decl_line:
6753 ++n_decl_line;
6754 break;
6755 case DW_AT_decl_file:
6756 ++n_decl_file;
6757 break;
6758 default:
6759 break;
6760 }
6761 }
6762 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6763 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6764 {
6765 fprintf (stderr, "Duplicate attributes in DIE:\n");
6766 debug_dwarf_die (die);
6767 gcc_unreachable ();
6768 }
6769 if (inline_found)
6770 {
6771 /* A debugging information entry that is a member of an abstract
6772 instance tree [that has DW_AT_inline] should not contain any
6773 attributes which describe aspects of the subroutine which vary
6774 between distinct inlined expansions or distinct out-of-line
6775 expansions. */
6776 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6777 gcc_assert (a->dw_attr != DW_AT_low_pc
6778 && a->dw_attr != DW_AT_high_pc
6779 && a->dw_attr != DW_AT_location
6780 && a->dw_attr != DW_AT_frame_base
6781 && a->dw_attr != DW_AT_call_all_calls
6782 && a->dw_attr != DW_AT_GNU_all_call_sites);
6783 }
6784 }
6785 \f
6786 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6787 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6788 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6789
6790 /* Calculate the checksum of a location expression. */
6791
6792 static inline void
6793 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6794 {
6795 int tem;
6796 inchash::hash hstate;
6797 hashval_t hash;
6798
6799 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6800 CHECKSUM (tem);
6801 hash_loc_operands (loc, hstate);
6802 hash = hstate.end();
6803 CHECKSUM (hash);
6804 }
6805
6806 /* Calculate the checksum of an attribute. */
6807
6808 static void
6809 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6810 {
6811 dw_loc_descr_ref loc;
6812 rtx r;
6813
6814 CHECKSUM (at->dw_attr);
6815
6816 /* We don't care that this was compiled with a different compiler
6817 snapshot; if the output is the same, that's what matters. */
6818 if (at->dw_attr == DW_AT_producer)
6819 return;
6820
6821 switch (AT_class (at))
6822 {
6823 case dw_val_class_const:
6824 case dw_val_class_const_implicit:
6825 CHECKSUM (at->dw_attr_val.v.val_int);
6826 break;
6827 case dw_val_class_unsigned_const:
6828 case dw_val_class_unsigned_const_implicit:
6829 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6830 break;
6831 case dw_val_class_const_double:
6832 CHECKSUM (at->dw_attr_val.v.val_double);
6833 break;
6834 case dw_val_class_wide_int:
6835 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6836 get_full_len (*at->dw_attr_val.v.val_wide)
6837 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6838 break;
6839 case dw_val_class_vec:
6840 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6841 (at->dw_attr_val.v.val_vec.length
6842 * at->dw_attr_val.v.val_vec.elt_size));
6843 break;
6844 case dw_val_class_flag:
6845 CHECKSUM (at->dw_attr_val.v.val_flag);
6846 break;
6847 case dw_val_class_str:
6848 CHECKSUM_STRING (AT_string (at));
6849 break;
6850
6851 case dw_val_class_addr:
6852 r = AT_addr (at);
6853 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6854 CHECKSUM_STRING (XSTR (r, 0));
6855 break;
6856
6857 case dw_val_class_offset:
6858 CHECKSUM (at->dw_attr_val.v.val_offset);
6859 break;
6860
6861 case dw_val_class_loc:
6862 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6863 loc_checksum (loc, ctx);
6864 break;
6865
6866 case dw_val_class_die_ref:
6867 die_checksum (AT_ref (at), ctx, mark);
6868 break;
6869
6870 case dw_val_class_fde_ref:
6871 case dw_val_class_vms_delta:
6872 case dw_val_class_symview:
6873 case dw_val_class_lbl_id:
6874 case dw_val_class_lineptr:
6875 case dw_val_class_macptr:
6876 case dw_val_class_loclistsptr:
6877 case dw_val_class_high_pc:
6878 break;
6879
6880 case dw_val_class_file:
6881 case dw_val_class_file_implicit:
6882 CHECKSUM_STRING (AT_file (at)->filename);
6883 break;
6884
6885 case dw_val_class_data8:
6886 CHECKSUM (at->dw_attr_val.v.val_data8);
6887 break;
6888
6889 default:
6890 break;
6891 }
6892 }
6893
6894 /* Calculate the checksum of a DIE. */
6895
6896 static void
6897 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6898 {
6899 dw_die_ref c;
6900 dw_attr_node *a;
6901 unsigned ix;
6902
6903 /* To avoid infinite recursion. */
6904 if (die->die_mark)
6905 {
6906 CHECKSUM (die->die_mark);
6907 return;
6908 }
6909 die->die_mark = ++(*mark);
6910
6911 CHECKSUM (die->die_tag);
6912
6913 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6914 attr_checksum (a, ctx, mark);
6915
6916 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6917 }
6918
6919 #undef CHECKSUM
6920 #undef CHECKSUM_BLOCK
6921 #undef CHECKSUM_STRING
6922
6923 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6924 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6925 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6926 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6927 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6928 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6929 #define CHECKSUM_ATTR(FOO) \
6930 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6931
6932 /* Calculate the checksum of a number in signed LEB128 format. */
6933
6934 static void
6935 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6936 {
6937 unsigned char byte;
6938 bool more;
6939
6940 while (1)
6941 {
6942 byte = (value & 0x7f);
6943 value >>= 7;
6944 more = !((value == 0 && (byte & 0x40) == 0)
6945 || (value == -1 && (byte & 0x40) != 0));
6946 if (more)
6947 byte |= 0x80;
6948 CHECKSUM (byte);
6949 if (!more)
6950 break;
6951 }
6952 }
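
/* Example, added for clarity: checksum_sleb128 (-2, ctx) feeds the single
   byte 0x7e into the MD5 context, the low seven bits of -2 with the sign
   bit (0x40) set and no continuation bit, which is exactly the signed
   LEB128 encoding DWARF itself uses. */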
6953
6954 /* Calculate the checksum of a number in unsigned LEB128 format. */
6955
6956 static void
6957 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6958 {
6959 while (1)
6960 {
6961 unsigned char byte = (value & 0x7f);
6962 value >>= 7;
6963 if (value != 0)
6964 /* More bytes to follow. */
6965 byte |= 0x80;
6966 CHECKSUM (byte);
6967 if (value == 0)
6968 break;
6969 }
6970 }
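
/* Illustrative only: the two helpers above produce the standard LEB128
   byte sequences and feed them into the MD5 context, e.g.

     checksum_uleb128 (624485)   ->  0xe5 0x8e 0x26
     checksum_sleb128 (2)        ->  0x02
     checksum_sleb128 (-2)       ->  0x7e
     checksum_sleb128 (127)      ->  0xff 0x00

   matching the LEB128 examples given in the DWARF specification.  */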
6971
6972 /* Checksum the context of the DIE. This adds the names of any
6973 surrounding namespaces or structures to the checksum. */
6974
6975 static void
6976 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6977 {
6978 const char *name;
6979 dw_die_ref spec;
6980 int tag = die->die_tag;
6981
6982 if (tag != DW_TAG_namespace
6983 && tag != DW_TAG_structure_type
6984 && tag != DW_TAG_class_type)
6985 return;
6986
6987 name = get_AT_string (die, DW_AT_name);
6988
6989 spec = get_AT_ref (die, DW_AT_specification);
6990 if (spec != NULL)
6991 die = spec;
6992
6993 if (die->die_parent != NULL)
6994 checksum_die_context (die->die_parent, ctx);
6995
6996 CHECKSUM_ULEB128 ('C');
6997 CHECKSUM_ULEB128 (tag);
6998 if (name != NULL)
6999 CHECKSUM_STRING (name);
7000 }
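
/* A sketch of the stream produced above: for a DIE describing 'struct S'
   nested in 'namespace N', checksum_die_context feeds, outermost first,

     'C', DW_TAG_namespace, "N"        (strings include the trailing NUL)
     'C', DW_TAG_structure_type, "S"

   while an enclosing compile unit or subprogram DIE contributes nothing,
   since only namespace, structure and class tags are considered.  */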
7001
7002 /* Calculate the checksum of a location expression. */
7003
7004 static inline void
7005 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7006 {
7007 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7008 were emitted as a DW_FORM_sdata instead of a location expression. */
7009 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7010 {
7011 CHECKSUM_ULEB128 (DW_FORM_sdata);
7012 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7013 return;
7014 }
7015
7016 /* Otherwise, just checksum the raw location expression. */
7017 while (loc != NULL)
7018 {
7019 inchash::hash hstate;
7020 hashval_t hash;
7021
7022 CHECKSUM_ULEB128 (loc->dtprel);
7023 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7024 hash_loc_operands (loc, hstate);
7025 hash = hstate.end ();
7026 CHECKSUM (hash);
7027 loc = loc->dw_loc_next;
7028 }
7029 }
7030
7031 /* Calculate the checksum of an attribute. */
7032
7033 static void
7034 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7035 struct md5_ctx *ctx, int *mark)
7036 {
7037 dw_loc_descr_ref loc;
7038 rtx r;
7039
7040 if (AT_class (at) == dw_val_class_die_ref)
7041 {
7042 dw_die_ref target_die = AT_ref (at);
7043
7044 /* For pointer and reference types, we checksum only the (qualified)
7045 name of the target type (if there is a name). For friend entries,
7046 we checksum only the (qualified) name of the target type or function.
7047 This allows the checksum to remain the same whether the target type
7048 is complete or not. */
7049 if ((at->dw_attr == DW_AT_type
7050 && (tag == DW_TAG_pointer_type
7051 || tag == DW_TAG_reference_type
7052 || tag == DW_TAG_rvalue_reference_type
7053 || tag == DW_TAG_ptr_to_member_type))
7054 || (at->dw_attr == DW_AT_friend
7055 && tag == DW_TAG_friend))
7056 {
7057 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7058
7059 if (name_attr != NULL)
7060 {
7061 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7062
7063 if (decl == NULL)
7064 decl = target_die;
7065 CHECKSUM_ULEB128 ('N');
7066 CHECKSUM_ULEB128 (at->dw_attr);
7067 if (decl->die_parent != NULL)
7068 checksum_die_context (decl->die_parent, ctx);
7069 CHECKSUM_ULEB128 ('E');
7070 CHECKSUM_STRING (AT_string (name_attr));
7071 return;
7072 }
7073 }
7074
7075 /* For all other references to another DIE, we check to see if the
7076 target DIE has already been visited. If it has, we emit a
7077 backward reference; if not, we descend recursively. */
7078 if (target_die->die_mark > 0)
7079 {
7080 CHECKSUM_ULEB128 ('R');
7081 CHECKSUM_ULEB128 (at->dw_attr);
7082 CHECKSUM_ULEB128 (target_die->die_mark);
7083 }
7084 else
7085 {
7086 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7087
7088 if (decl == NULL)
7089 decl = target_die;
7090 target_die->die_mark = ++(*mark);
7091 CHECKSUM_ULEB128 ('T');
7092 CHECKSUM_ULEB128 (at->dw_attr);
7093 if (decl->die_parent != NULL)
7094 checksum_die_context (decl->die_parent, ctx);
7095 die_checksum_ordered (target_die, ctx, mark);
7096 }
7097 return;
7098 }
7099
7100 CHECKSUM_ULEB128 ('A');
7101 CHECKSUM_ULEB128 (at->dw_attr);
7102
7103 switch (AT_class (at))
7104 {
7105 case dw_val_class_const:
7106 case dw_val_class_const_implicit:
7107 CHECKSUM_ULEB128 (DW_FORM_sdata);
7108 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7109 break;
7110
7111 case dw_val_class_unsigned_const:
7112 case dw_val_class_unsigned_const_implicit:
7113 CHECKSUM_ULEB128 (DW_FORM_sdata);
7114 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7115 break;
7116
7117 case dw_val_class_const_double:
7118 CHECKSUM_ULEB128 (DW_FORM_block);
7119 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7120 CHECKSUM (at->dw_attr_val.v.val_double);
7121 break;
7122
7123 case dw_val_class_wide_int:
7124 CHECKSUM_ULEB128 (DW_FORM_block);
7125 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7126 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7127 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7128 get_full_len (*at->dw_attr_val.v.val_wide)
7129 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7130 break;
7131
7132 case dw_val_class_vec:
7133 CHECKSUM_ULEB128 (DW_FORM_block);
7134 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7135 * at->dw_attr_val.v.val_vec.elt_size);
7136 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7137 (at->dw_attr_val.v.val_vec.length
7138 * at->dw_attr_val.v.val_vec.elt_size));
7139 break;
7140
7141 case dw_val_class_flag:
7142 CHECKSUM_ULEB128 (DW_FORM_flag);
7143 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7144 break;
7145
7146 case dw_val_class_str:
7147 CHECKSUM_ULEB128 (DW_FORM_string);
7148 CHECKSUM_STRING (AT_string (at));
7149 break;
7150
7151 case dw_val_class_addr:
7152 r = AT_addr (at);
7153 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7154 CHECKSUM_ULEB128 (DW_FORM_string);
7155 CHECKSUM_STRING (XSTR (r, 0));
7156 break;
7157
7158 case dw_val_class_offset:
7159 CHECKSUM_ULEB128 (DW_FORM_sdata);
7160 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7161 break;
7162
7163 case dw_val_class_loc:
7164 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7165 loc_checksum_ordered (loc, ctx);
7166 break;
7167
7168 case dw_val_class_fde_ref:
7169 case dw_val_class_symview:
7170 case dw_val_class_lbl_id:
7171 case dw_val_class_lineptr:
7172 case dw_val_class_macptr:
7173 case dw_val_class_loclistsptr:
7174 case dw_val_class_high_pc:
7175 break;
7176
7177 case dw_val_class_file:
7178 case dw_val_class_file_implicit:
7179 CHECKSUM_ULEB128 (DW_FORM_string);
7180 CHECKSUM_STRING (AT_file (at)->filename);
7181 break;
7182
7183 case dw_val_class_data8:
7184 CHECKSUM (at->dw_attr_val.v.val_data8);
7185 break;
7186
7187 default:
7188 break;
7189 }
7190 }
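
/* To illustrate the 'N' special case above: a DW_TAG_pointer_type whose
   DW_AT_type refers to 'struct S' checksums that attribute as

     'N', DW_AT_type, <context of S's parent>, 'E', "S"

   whether or not the definition of S is available, which is what keeps
   the resulting type signature stable across units that only see a
   forward declaration of S.  */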
7191
7192 struct checksum_attributes
7193 {
7194 dw_attr_node *at_name;
7195 dw_attr_node *at_type;
7196 dw_attr_node *at_friend;
7197 dw_attr_node *at_accessibility;
7198 dw_attr_node *at_address_class;
7199 dw_attr_node *at_alignment;
7200 dw_attr_node *at_allocated;
7201 dw_attr_node *at_artificial;
7202 dw_attr_node *at_associated;
7203 dw_attr_node *at_binary_scale;
7204 dw_attr_node *at_bit_offset;
7205 dw_attr_node *at_bit_size;
7206 dw_attr_node *at_bit_stride;
7207 dw_attr_node *at_byte_size;
7208 dw_attr_node *at_byte_stride;
7209 dw_attr_node *at_const_value;
7210 dw_attr_node *at_containing_type;
7211 dw_attr_node *at_count;
7212 dw_attr_node *at_data_location;
7213 dw_attr_node *at_data_member_location;
7214 dw_attr_node *at_decimal_scale;
7215 dw_attr_node *at_decimal_sign;
7216 dw_attr_node *at_default_value;
7217 dw_attr_node *at_digit_count;
7218 dw_attr_node *at_discr;
7219 dw_attr_node *at_discr_list;
7220 dw_attr_node *at_discr_value;
7221 dw_attr_node *at_encoding;
7222 dw_attr_node *at_endianity;
7223 dw_attr_node *at_explicit;
7224 dw_attr_node *at_is_optional;
7225 dw_attr_node *at_location;
7226 dw_attr_node *at_lower_bound;
7227 dw_attr_node *at_mutable;
7228 dw_attr_node *at_ordering;
7229 dw_attr_node *at_picture_string;
7230 dw_attr_node *at_prototyped;
7231 dw_attr_node *at_small;
7232 dw_attr_node *at_segment;
7233 dw_attr_node *at_string_length;
7234 dw_attr_node *at_string_length_bit_size;
7235 dw_attr_node *at_string_length_byte_size;
7236 dw_attr_node *at_threads_scaled;
7237 dw_attr_node *at_upper_bound;
7238 dw_attr_node *at_use_location;
7239 dw_attr_node *at_use_UTF8;
7240 dw_attr_node *at_variable_parameter;
7241 dw_attr_node *at_virtuality;
7242 dw_attr_node *at_visibility;
7243 dw_attr_node *at_vtable_elem_location;
7244 };
7245
7246 /* Collect the attributes that we will want to use for the checksum. */
7247
7248 static void
7249 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7250 {
7251 dw_attr_node *a;
7252 unsigned ix;
7253
7254 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7255 {
7256 switch (a->dw_attr)
7257 {
7258 case DW_AT_name:
7259 attrs->at_name = a;
7260 break;
7261 case DW_AT_type:
7262 attrs->at_type = a;
7263 break;
7264 case DW_AT_friend:
7265 attrs->at_friend = a;
7266 break;
7267 case DW_AT_accessibility:
7268 attrs->at_accessibility = a;
7269 break;
7270 case DW_AT_address_class:
7271 attrs->at_address_class = a;
7272 break;
7273 case DW_AT_alignment:
7274 attrs->at_alignment = a;
7275 break;
7276 case DW_AT_allocated:
7277 attrs->at_allocated = a;
7278 break;
7279 case DW_AT_artificial:
7280 attrs->at_artificial = a;
7281 break;
7282 case DW_AT_associated:
7283 attrs->at_associated = a;
7284 break;
7285 case DW_AT_binary_scale:
7286 attrs->at_binary_scale = a;
7287 break;
7288 case DW_AT_bit_offset:
7289 attrs->at_bit_offset = a;
7290 break;
7291 case DW_AT_bit_size:
7292 attrs->at_bit_size = a;
7293 break;
7294 case DW_AT_bit_stride:
7295 attrs->at_bit_stride = a;
7296 break;
7297 case DW_AT_byte_size:
7298 attrs->at_byte_size = a;
7299 break;
7300 case DW_AT_byte_stride:
7301 attrs->at_byte_stride = a;
7302 break;
7303 case DW_AT_const_value:
7304 attrs->at_const_value = a;
7305 break;
7306 case DW_AT_containing_type:
7307 attrs->at_containing_type = a;
7308 break;
7309 case DW_AT_count:
7310 attrs->at_count = a;
7311 break;
7312 case DW_AT_data_location:
7313 attrs->at_data_location = a;
7314 break;
7315 case DW_AT_data_member_location:
7316 attrs->at_data_member_location = a;
7317 break;
7318 case DW_AT_decimal_scale:
7319 attrs->at_decimal_scale = a;
7320 break;
7321 case DW_AT_decimal_sign:
7322 attrs->at_decimal_sign = a;
7323 break;
7324 case DW_AT_default_value:
7325 attrs->at_default_value = a;
7326 break;
7327 case DW_AT_digit_count:
7328 attrs->at_digit_count = a;
7329 break;
7330 case DW_AT_discr:
7331 attrs->at_discr = a;
7332 break;
7333 case DW_AT_discr_list:
7334 attrs->at_discr_list = a;
7335 break;
7336 case DW_AT_discr_value:
7337 attrs->at_discr_value = a;
7338 break;
7339 case DW_AT_encoding:
7340 attrs->at_encoding = a;
7341 break;
7342 case DW_AT_endianity:
7343 attrs->at_endianity = a;
7344 break;
7345 case DW_AT_explicit:
7346 attrs->at_explicit = a;
7347 break;
7348 case DW_AT_is_optional:
7349 attrs->at_is_optional = a;
7350 break;
7351 case DW_AT_location:
7352 attrs->at_location = a;
7353 break;
7354 case DW_AT_lower_bound:
7355 attrs->at_lower_bound = a;
7356 break;
7357 case DW_AT_mutable:
7358 attrs->at_mutable = a;
7359 break;
7360 case DW_AT_ordering:
7361 attrs->at_ordering = a;
7362 break;
7363 case DW_AT_picture_string:
7364 attrs->at_picture_string = a;
7365 break;
7366 case DW_AT_prototyped:
7367 attrs->at_prototyped = a;
7368 break;
7369 case DW_AT_small:
7370 attrs->at_small = a;
7371 break;
7372 case DW_AT_segment:
7373 attrs->at_segment = a;
7374 break;
7375 case DW_AT_string_length:
7376 attrs->at_string_length = a;
7377 break;
7378 case DW_AT_string_length_bit_size:
7379 attrs->at_string_length_bit_size = a;
7380 break;
7381 case DW_AT_string_length_byte_size:
7382 attrs->at_string_length_byte_size = a;
7383 break;
7384 case DW_AT_threads_scaled:
7385 attrs->at_threads_scaled = a;
7386 break;
7387 case DW_AT_upper_bound:
7388 attrs->at_upper_bound = a;
7389 break;
7390 case DW_AT_use_location:
7391 attrs->at_use_location = a;
7392 break;
7393 case DW_AT_use_UTF8:
7394 attrs->at_use_UTF8 = a;
7395 break;
7396 case DW_AT_variable_parameter:
7397 attrs->at_variable_parameter = a;
7398 break;
7399 case DW_AT_virtuality:
7400 attrs->at_virtuality = a;
7401 break;
7402 case DW_AT_visibility:
7403 attrs->at_visibility = a;
7404 break;
7405 case DW_AT_vtable_elem_location:
7406 attrs->at_vtable_elem_location = a;
7407 break;
7408 default:
7409 break;
7410 }
7411 }
7412 }
7413
7414 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7415
7416 static void
7417 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7418 {
7419 dw_die_ref c;
7420 dw_die_ref decl;
7421 struct checksum_attributes attrs;
7422
7423 CHECKSUM_ULEB128 ('D');
7424 CHECKSUM_ULEB128 (die->die_tag);
7425
7426 memset (&attrs, 0, sizeof (attrs));
7427
7428 decl = get_AT_ref (die, DW_AT_specification);
7429 if (decl != NULL)
7430 collect_checksum_attributes (&attrs, decl);
7431 collect_checksum_attributes (&attrs, die);
7432
7433 CHECKSUM_ATTR (attrs.at_name);
7434 CHECKSUM_ATTR (attrs.at_accessibility);
7435 CHECKSUM_ATTR (attrs.at_address_class);
7436 CHECKSUM_ATTR (attrs.at_allocated);
7437 CHECKSUM_ATTR (attrs.at_artificial);
7438 CHECKSUM_ATTR (attrs.at_associated);
7439 CHECKSUM_ATTR (attrs.at_binary_scale);
7440 CHECKSUM_ATTR (attrs.at_bit_offset);
7441 CHECKSUM_ATTR (attrs.at_bit_size);
7442 CHECKSUM_ATTR (attrs.at_bit_stride);
7443 CHECKSUM_ATTR (attrs.at_byte_size);
7444 CHECKSUM_ATTR (attrs.at_byte_stride);
7445 CHECKSUM_ATTR (attrs.at_const_value);
7446 CHECKSUM_ATTR (attrs.at_containing_type);
7447 CHECKSUM_ATTR (attrs.at_count);
7448 CHECKSUM_ATTR (attrs.at_data_location);
7449 CHECKSUM_ATTR (attrs.at_data_member_location);
7450 CHECKSUM_ATTR (attrs.at_decimal_scale);
7451 CHECKSUM_ATTR (attrs.at_decimal_sign);
7452 CHECKSUM_ATTR (attrs.at_default_value);
7453 CHECKSUM_ATTR (attrs.at_digit_count);
7454 CHECKSUM_ATTR (attrs.at_discr);
7455 CHECKSUM_ATTR (attrs.at_discr_list);
7456 CHECKSUM_ATTR (attrs.at_discr_value);
7457 CHECKSUM_ATTR (attrs.at_encoding);
7458 CHECKSUM_ATTR (attrs.at_endianity);
7459 CHECKSUM_ATTR (attrs.at_explicit);
7460 CHECKSUM_ATTR (attrs.at_is_optional);
7461 CHECKSUM_ATTR (attrs.at_location);
7462 CHECKSUM_ATTR (attrs.at_lower_bound);
7463 CHECKSUM_ATTR (attrs.at_mutable);
7464 CHECKSUM_ATTR (attrs.at_ordering);
7465 CHECKSUM_ATTR (attrs.at_picture_string);
7466 CHECKSUM_ATTR (attrs.at_prototyped);
7467 CHECKSUM_ATTR (attrs.at_small);
7468 CHECKSUM_ATTR (attrs.at_segment);
7469 CHECKSUM_ATTR (attrs.at_string_length);
7470 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7471 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7472 CHECKSUM_ATTR (attrs.at_threads_scaled);
7473 CHECKSUM_ATTR (attrs.at_upper_bound);
7474 CHECKSUM_ATTR (attrs.at_use_location);
7475 CHECKSUM_ATTR (attrs.at_use_UTF8);
7476 CHECKSUM_ATTR (attrs.at_variable_parameter);
7477 CHECKSUM_ATTR (attrs.at_virtuality);
7478 CHECKSUM_ATTR (attrs.at_visibility);
7479 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7480 CHECKSUM_ATTR (attrs.at_type);
7481 CHECKSUM_ATTR (attrs.at_friend);
7482 CHECKSUM_ATTR (attrs.at_alignment);
7483
7484 /* Checksum the child DIEs. */
7485 c = die->die_child;
7486 if (c) do {
7487 dw_attr_node *name_attr;
7488
7489 c = c->die_sib;
7490 name_attr = get_AT (c, DW_AT_name);
7491 if (is_template_instantiation (c))
7492 {
7493 /* Ignore instantiations of member type and function templates. */
7494 }
7495 else if (name_attr != NULL
7496 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7497 {
7498 /* Use a shallow checksum for named nested types and member
7499 functions. */
7500 CHECKSUM_ULEB128 ('S');
7501 CHECKSUM_ULEB128 (c->die_tag);
7502 CHECKSUM_STRING (AT_string (name_attr));
7503 }
7504 else
7505 {
7506 /* Use a deep checksum for other children. */
7507 /* Mark this DIE so it gets processed when unmarking. */
7508 if (c->die_mark == 0)
7509 c->die_mark = -1;
7510 die_checksum_ordered (c, ctx, mark);
7511 }
7512 } while (c != die->die_child);
7513
7514 CHECKSUM_ULEB128 (0);
7515 }
7516
7517 /* Add a type name and tag to a hash. */
7518 static void
7519 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7520 {
7521 CHECKSUM_ULEB128 (tag);
7522 CHECKSUM_STRING (name);
7523 }
7524
7525 #undef CHECKSUM
7526 #undef CHECKSUM_STRING
7527 #undef CHECKSUM_ATTR
7528 #undef CHECKSUM_SLEB128
7529 #undef CHECKSUM_ULEB128
7530
7531 /* Generate the type signature for DIE. This is computed by generating an
7532 MD5 checksum over the DIE's tag, its relevant attributes, and its
7533 children. Attributes that are references to other DIEs are processed
7534 by recursion, using the MARK field to prevent infinite recursion.
7535 If the DIE is nested inside a namespace or another type, we also
7536 need to include that context in the signature. The lower 64 bits
7537 of the resulting MD5 checksum comprise the signature. */
7538
7539 static void
7540 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7541 {
7542 int mark;
7543 const char *name;
7544 unsigned char checksum[16];
7545 struct md5_ctx ctx;
7546 dw_die_ref decl;
7547 dw_die_ref parent;
7548
7549 name = get_AT_string (die, DW_AT_name);
7550 decl = get_AT_ref (die, DW_AT_specification);
7551 parent = get_die_parent (die);
7552
7553 /* First, compute a signature for just the type name (and its surrounding
7554 context, if any). This is stored in the type unit DIE for link-time
7555 ODR (one-definition rule) checking. */
7556
7557 if (is_cxx () && name != NULL)
7558 {
7559 md5_init_ctx (&ctx);
7560
7561 /* Checksum the names of surrounding namespaces and structures. */
7562 if (parent != NULL)
7563 checksum_die_context (parent, &ctx);
7564
7565 /* Checksum the current DIE. */
7566 die_odr_checksum (die->die_tag, name, &ctx);
7567 md5_finish_ctx (&ctx, checksum);
7568
7569 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7570 }
7571
7572 /* Next, compute the complete type signature. */
7573
7574 md5_init_ctx (&ctx);
7575 mark = 1;
7576 die->die_mark = mark;
7577
7578 /* Checksum the names of surrounding namespaces and structures. */
7579 if (parent != NULL)
7580 checksum_die_context (parent, &ctx);
7581
7582 /* Checksum the DIE and its children. */
7583 die_checksum_ordered (die, &ctx, &mark);
7584 unmark_all_dies (die);
7585 md5_finish_ctx (&ctx, checksum);
7586
7587 /* Store the signature in the type node and link the type DIE and the
7588 type node together. */
7589 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7590 DWARF_TYPE_SIGNATURE_SIZE);
7591 die->comdat_type_p = true;
7592 die->die_id.die_type_node = type_node;
7593 type_node->type_die = die;
7594
7595 /* If the DIE is a specification, link its declaration to the type node
7596 as well. */
7597 if (decl != NULL)
7598 {
7599 decl->comdat_type_p = true;
7600 decl->die_id.die_type_node = type_node;
7601 }
7602 }
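
/* Purely as an illustration of the slicing above (the digest shown is
   the well-known MD5 of "The quick brown fox jumps over the lazy dog",
   not of any DIE): if md5_finish_ctx produced

     9e 10 7d 9d 37 2b b6 82 6b d8 1d 35 42 a4 19 d6

   then, assuming the usual DWARF_TYPE_SIGNATURE_SIZE of 8, the comdat
   type signature stored in TYPE_NODE would be the trailing eight bytes
   6b d8 1d 35 42 a4 19 d6; the name-only DW_AT_GNU_odr_signature added
   above likewise uses bytes 8..15 of its own, separately computed,
   digest.  */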
7603
7604 /* Do the location expressions look the same? */
7605 static inline int
7606 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7607 {
7608 return loc1->dw_loc_opc == loc2->dw_loc_opc
7609 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7610 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7611 }
7612
7613 /* Do the values look the same? */
7614 static int
7615 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7616 {
7617 dw_loc_descr_ref loc1, loc2;
7618 rtx r1, r2;
7619
7620 if (v1->val_class != v2->val_class)
7621 return 0;
7622
7623 switch (v1->val_class)
7624 {
7625 case dw_val_class_const:
7626 case dw_val_class_const_implicit:
7627 return v1->v.val_int == v2->v.val_int;
7628 case dw_val_class_unsigned_const:
7629 case dw_val_class_unsigned_const_implicit:
7630 return v1->v.val_unsigned == v2->v.val_unsigned;
7631 case dw_val_class_const_double:
7632 return v1->v.val_double.high == v2->v.val_double.high
7633 && v1->v.val_double.low == v2->v.val_double.low;
7634 case dw_val_class_wide_int:
7635 return *v1->v.val_wide == *v2->v.val_wide;
7636 case dw_val_class_vec:
7637 if (v1->v.val_vec.length != v2->v.val_vec.length
7638 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7639 return 0;
7640 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7641 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7642 return 0;
7643 return 1;
7644 case dw_val_class_flag:
7645 return v1->v.val_flag == v2->v.val_flag;
7646 case dw_val_class_str:
7647 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7648
7649 case dw_val_class_addr:
7650 r1 = v1->v.val_addr;
7651 r2 = v2->v.val_addr;
7652 if (GET_CODE (r1) != GET_CODE (r2))
7653 return 0;
7654 return rtx_equal_p (r1, r2);
7655
7656 case dw_val_class_offset:
7657 return v1->v.val_offset == v2->v.val_offset;
7658
7659 case dw_val_class_loc:
7660 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7661 loc1 && loc2;
7662 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7663 if (!same_loc_p (loc1, loc2, mark))
7664 return 0;
7665 return !loc1 && !loc2;
7666
7667 case dw_val_class_die_ref:
7668 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7669
7670 case dw_val_class_symview:
7671 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7672
7673 case dw_val_class_fde_ref:
7674 case dw_val_class_vms_delta:
7675 case dw_val_class_lbl_id:
7676 case dw_val_class_lineptr:
7677 case dw_val_class_macptr:
7678 case dw_val_class_loclistsptr:
7679 case dw_val_class_high_pc:
7680 return 1;
7681
7682 case dw_val_class_file:
7683 case dw_val_class_file_implicit:
7684 return v1->v.val_file == v2->v.val_file;
7685
7686 case dw_val_class_data8:
7687 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7688
7689 default:
7690 return 1;
7691 }
7692 }
7693
7694 /* Do the attributes look the same? */
7695
7696 static int
7697 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7698 {
7699 if (at1->dw_attr != at2->dw_attr)
7700 return 0;
7701
7702 /* We don't care that this was compiled with a different compiler
7703 snapshot; if the output is the same, that's what matters. */
7704 if (at1->dw_attr == DW_AT_producer)
7705 return 1;
7706
7707 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7708 }
7709
7710 /* Do the DIEs look the same? */
7711
7712 static int
7713 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7714 {
7715 dw_die_ref c1, c2;
7716 dw_attr_node *a1;
7717 unsigned ix;
7718
7719 /* To avoid infinite recursion. */
7720 if (die1->die_mark)
7721 return die1->die_mark == die2->die_mark;
7722 die1->die_mark = die2->die_mark = ++(*mark);
7723
7724 if (die1->die_tag != die2->die_tag)
7725 return 0;
7726
7727 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7728 return 0;
7729
7730 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7731 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7732 return 0;
7733
7734 c1 = die1->die_child;
7735 c2 = die2->die_child;
7736 if (! c1)
7737 {
7738 if (c2)
7739 return 0;
7740 }
7741 else
7742 for (;;)
7743 {
7744 if (!same_die_p (c1, c2, mark))
7745 return 0;
7746 c1 = c1->die_sib;
7747 c2 = c2->die_sib;
7748 if (c1 == die1->die_child)
7749 {
7750 if (c2 == die2->die_child)
7751 break;
7752 else
7753 return 0;
7754 }
7755 }
7756
7757 return 1;
7758 }
7759
7760 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7761 children, and set die_symbol. */
7762
7763 static void
7764 compute_comp_unit_symbol (dw_die_ref unit_die)
7765 {
7766 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7767 const char *base = die_name ? lbasename (die_name) : "anonymous";
7768 char *name = XALLOCAVEC (char, strlen (base) + 64);
7769 char *p;
7770 int i, mark;
7771 unsigned char checksum[16];
7772 struct md5_ctx ctx;
7773
7774 /* Compute the checksum of the DIE, then append part of it as hex digits to
7775 the name (filename) of the unit. */
7776
7777 md5_init_ctx (&ctx);
7778 mark = 0;
7779 die_checksum (unit_die, &ctx, &mark);
7780 unmark_all_dies (unit_die);
7781 md5_finish_ctx (&ctx, checksum);
7782
7783 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7784 not start with a letter but with anything valid for filenames, and
7785 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7786 character is not a letter. */
7787 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7788 clean_symbol_name (name);
7789
7790 p = name + strlen (name);
7791 for (i = 0; i < 4; i++)
7792 {
7793 sprintf (p, "%.2x", checksum[i]);
7794 p += 2;
7795 }
7796
7797 unit_die->die_id.die_symbol = xstrdup (name);
7798 }
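
/* For example, a unit whose DW_AT_name is "/tmp/foo.c" gets a die_symbol
   roughly of the form "foo_c_xxxxxxxx", where the eight hex digits are
   the first four checksum bytes; the exact prefix depends on how
   clean_symbol_name munges characters that are not valid in symbols, so
   treat this only as a sketch.  */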
7799
7800 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7801
7802 static int
7803 is_type_die (dw_die_ref die)
7804 {
7805 switch (die->die_tag)
7806 {
7807 case DW_TAG_array_type:
7808 case DW_TAG_class_type:
7809 case DW_TAG_interface_type:
7810 case DW_TAG_enumeration_type:
7811 case DW_TAG_pointer_type:
7812 case DW_TAG_reference_type:
7813 case DW_TAG_rvalue_reference_type:
7814 case DW_TAG_string_type:
7815 case DW_TAG_structure_type:
7816 case DW_TAG_subroutine_type:
7817 case DW_TAG_union_type:
7818 case DW_TAG_ptr_to_member_type:
7819 case DW_TAG_set_type:
7820 case DW_TAG_subrange_type:
7821 case DW_TAG_base_type:
7822 case DW_TAG_const_type:
7823 case DW_TAG_file_type:
7824 case DW_TAG_packed_type:
7825 case DW_TAG_volatile_type:
7826 case DW_TAG_typedef:
7827 return 1;
7828 default:
7829 return 0;
7830 }
7831 }
7832
7833 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7834 Basically, we want to choose the bits that are likely to be shared between
7835 compilations (types) and leave out the bits that are specific to individual
7836 compilations (functions). */
7837
7838 static int
7839 is_comdat_die (dw_die_ref c)
7840 {
7841 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7842 we do for stabs. The advantage is a greater likelihood of sharing between
7843 objects that don't include headers in the same order (and therefore would
7844 put the base types in a different comdat). jason 8/28/00 */
7845
7846 if (c->die_tag == DW_TAG_base_type)
7847 return 0;
7848
7849 if (c->die_tag == DW_TAG_pointer_type
7850 || c->die_tag == DW_TAG_reference_type
7851 || c->die_tag == DW_TAG_rvalue_reference_type
7852 || c->die_tag == DW_TAG_const_type
7853 || c->die_tag == DW_TAG_volatile_type)
7854 {
7855 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7856
7857 return t ? is_comdat_die (t) : 0;
7858 }
7859
7860 return is_type_die (c);
7861 }
7862
7863 /* Returns true iff C is a compile-unit DIE. */
7864
7865 static inline bool
7866 is_cu_die (dw_die_ref c)
7867 {
7868 return c && (c->die_tag == DW_TAG_compile_unit
7869 || c->die_tag == DW_TAG_skeleton_unit);
7870 }
7871
7872 /* Returns true iff C is a unit DIE of some sort. */
7873
7874 static inline bool
7875 is_unit_die (dw_die_ref c)
7876 {
7877 return c && (c->die_tag == DW_TAG_compile_unit
7878 || c->die_tag == DW_TAG_partial_unit
7879 || c->die_tag == DW_TAG_type_unit
7880 || c->die_tag == DW_TAG_skeleton_unit);
7881 }
7882
7883 /* Returns true iff C is a namespace DIE. */
7884
7885 static inline bool
7886 is_namespace_die (dw_die_ref c)
7887 {
7888 return c && c->die_tag == DW_TAG_namespace;
7889 }
7890
7891 /* Returns true iff C is a class or structure DIE. */
7892
7893 static inline bool
7894 is_class_die (dw_die_ref c)
7895 {
7896 return c && (c->die_tag == DW_TAG_class_type
7897 || c->die_tag == DW_TAG_structure_type);
7898 }
7899
7900 /* Return non-zero if this DIE is a template parameter. */
7901
7902 static inline bool
7903 is_template_parameter (dw_die_ref die)
7904 {
7905 switch (die->die_tag)
7906 {
7907 case DW_TAG_template_type_param:
7908 case DW_TAG_template_value_param:
7909 case DW_TAG_GNU_template_template_param:
7910 case DW_TAG_GNU_template_parameter_pack:
7911 return true;
7912 default:
7913 return false;
7914 }
7915 }
7916
7917 /* Return non-zero if this DIE represents a template instantiation. */
7918
7919 static inline bool
7920 is_template_instantiation (dw_die_ref die)
7921 {
7922 dw_die_ref c;
7923
7924 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7925 return false;
7926 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7927 return false;
7928 }
7929
7930 static char *
7931 gen_internal_sym (const char *prefix)
7932 {
7933 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7934
7935 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7936 return xstrdup (buf);
7937 }
7938
7939 /* Return non-zero if this DIE is a declaration. */
7940
7941 static int
7942 is_declaration_die (dw_die_ref die)
7943 {
7944 dw_attr_node *a;
7945 unsigned ix;
7946
7947 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7948 if (a->dw_attr == DW_AT_declaration)
7949 return 1;
7950
7951 return 0;
7952 }
7953
7954 /* Return non-zero if this DIE is nested inside a subprogram. */
7955
7956 static int
7957 is_nested_in_subprogram (dw_die_ref die)
7958 {
7959 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7960
7961 if (decl == NULL)
7962 decl = die;
7963 return local_scope_p (decl);
7964 }
7965
7966 /* Return non-zero if this DIE contains a defining declaration of a
7967 subprogram. */
7968
7969 static int
7970 contains_subprogram_definition (dw_die_ref die)
7971 {
7972 dw_die_ref c;
7973
7974 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7975 return 1;
7976 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7977 return 0;
7978 }
7979
7980 /* Return non-zero if this is a type DIE that should be moved to a
7981 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7982 unit type. */
7983
7984 static int
7985 should_move_die_to_comdat (dw_die_ref die)
7986 {
7987 switch (die->die_tag)
7988 {
7989 case DW_TAG_class_type:
7990 case DW_TAG_structure_type:
7991 case DW_TAG_enumeration_type:
7992 case DW_TAG_union_type:
7993 /* Don't move declarations, inlined instances, types nested in a
7994 subprogram, or types that contain subprogram definitions. */
7995 if (is_declaration_die (die)
7996 || get_AT (die, DW_AT_abstract_origin)
7997 || is_nested_in_subprogram (die)
7998 || contains_subprogram_definition (die))
7999 return 0;
8000 return 1;
8001 case DW_TAG_array_type:
8002 case DW_TAG_interface_type:
8003 case DW_TAG_pointer_type:
8004 case DW_TAG_reference_type:
8005 case DW_TAG_rvalue_reference_type:
8006 case DW_TAG_string_type:
8007 case DW_TAG_subroutine_type:
8008 case DW_TAG_ptr_to_member_type:
8009 case DW_TAG_set_type:
8010 case DW_TAG_subrange_type:
8011 case DW_TAG_base_type:
8012 case DW_TAG_const_type:
8013 case DW_TAG_file_type:
8014 case DW_TAG_packed_type:
8015 case DW_TAG_volatile_type:
8016 case DW_TAG_typedef:
8017 default:
8018 return 0;
8019 }
8020 }
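
/* For instance, a namespace-scope 'struct S { int i; };' definition is a
   candidate for its own type unit, whereas a bare 'struct S;'
   declaration, a type local to a function body, or an inlined instance
   stays in the main CU, as do typedefs, pointer types and the other
   tags that fall through to the default case above.  */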
8021
8022 /* Make a clone of DIE. */
8023
8024 static dw_die_ref
8025 clone_die (dw_die_ref die)
8026 {
8027 dw_die_ref clone = new_die_raw (die->die_tag);
8028 dw_attr_node *a;
8029 unsigned ix;
8030
8031 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8032 add_dwarf_attr (clone, a);
8033
8034 return clone;
8035 }
8036
8037 /* Make a clone of the tree rooted at DIE. */
8038
8039 static dw_die_ref
8040 clone_tree (dw_die_ref die)
8041 {
8042 dw_die_ref c;
8043 dw_die_ref clone = clone_die (die);
8044
8045 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8046
8047 return clone;
8048 }
8049
8050 /* Make a clone of DIE as a declaration. */
8051
8052 static dw_die_ref
8053 clone_as_declaration (dw_die_ref die)
8054 {
8055 dw_die_ref clone;
8056 dw_die_ref decl;
8057 dw_attr_node *a;
8058 unsigned ix;
8059
8060 /* If the DIE is already a declaration, just clone it. */
8061 if (is_declaration_die (die))
8062 return clone_die (die);
8063
8064 /* If the DIE is a specification, just clone its declaration DIE. */
8065 decl = get_AT_ref (die, DW_AT_specification);
8066 if (decl != NULL)
8067 {
8068 clone = clone_die (decl);
8069 if (die->comdat_type_p)
8070 add_AT_die_ref (clone, DW_AT_signature, die);
8071 return clone;
8072 }
8073
8074 clone = new_die_raw (die->die_tag);
8075
8076 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8077 {
8078 /* We don't want to copy over all attributes.
8079 For example we don't want DW_AT_byte_size because otherwise we will no
8080 longer have a declaration and GDB will treat it as a definition. */
8081
8082 switch (a->dw_attr)
8083 {
8084 case DW_AT_abstract_origin:
8085 case DW_AT_artificial:
8086 case DW_AT_containing_type:
8087 case DW_AT_external:
8088 case DW_AT_name:
8089 case DW_AT_type:
8090 case DW_AT_virtuality:
8091 case DW_AT_linkage_name:
8092 case DW_AT_MIPS_linkage_name:
8093 add_dwarf_attr (clone, a);
8094 break;
8095 case DW_AT_byte_size:
8096 case DW_AT_alignment:
8097 default:
8098 break;
8099 }
8100 }
8101
8102 if (die->comdat_type_p)
8103 add_AT_die_ref (clone, DW_AT_signature, die);
8104
8105 add_AT_flag (clone, DW_AT_declaration, 1);
8106 return clone;
8107 }
8108
8109
8110 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8111
8112 struct decl_table_entry
8113 {
8114 dw_die_ref orig;
8115 dw_die_ref copy;
8116 };
8117
8118 /* Helpers to manipulate hash table of copied declarations. */
8119
8120 /* Hashtable helpers. */
8121
8122 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8123 {
8124 typedef die_struct *compare_type;
8125 static inline hashval_t hash (const decl_table_entry *);
8126 static inline bool equal (const decl_table_entry *, const die_struct *);
8127 };
8128
8129 inline hashval_t
8130 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8131 {
8132 return htab_hash_pointer (entry->orig);
8133 }
8134
8135 inline bool
8136 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8137 const die_struct *entry2)
8138 {
8139 return entry1->orig == entry2;
8140 }
8141
8142 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8143
8144 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8145 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8146 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8147 to check if the ancestor has already been copied into UNIT. */
8148
8149 static dw_die_ref
8150 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8151 decl_hash_type *decl_table)
8152 {
8153 dw_die_ref parent = die->die_parent;
8154 dw_die_ref new_parent = unit;
8155 dw_die_ref copy;
8156 decl_table_entry **slot = NULL;
8157 struct decl_table_entry *entry = NULL;
8158
8159 if (decl_table)
8160 {
8161 /* Check if the entry has already been copied to UNIT. */
8162 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8163 INSERT);
8164 if (*slot != HTAB_EMPTY_ENTRY)
8165 {
8166 entry = *slot;
8167 return entry->copy;
8168 }
8169
8170 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8171 entry = XCNEW (struct decl_table_entry);
8172 entry->orig = die;
8173 entry->copy = NULL;
8174 *slot = entry;
8175 }
8176
8177 if (parent != NULL)
8178 {
8179 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8180 if (spec != NULL)
8181 parent = spec;
8182 if (!is_unit_die (parent))
8183 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8184 }
8185
8186 copy = clone_as_declaration (die);
8187 add_child_die (new_parent, copy);
8188
8189 if (decl_table)
8190 {
8191 /* Record the pointer to the copy. */
8192 entry->copy = copy;
8193 }
8194
8195 return copy;
8196 }
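
/* A small example of the above: if DIE is a member function N::C::f,
   copying its ancestor tree into UNIT produces

     UNIT
       DW_TAG_namespace "N"         (declaration-only clone)
         DW_TAG_class_type "C"      (declaration-only clone)
           <copy of f as a declaration>

   and the copy of f is returned.  When DECL_TABLE is provided, a later
   call for another member of C finds N and C in the table instead of
   cloning them again.  */
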
8197 /* Copy the declaration context to the new type unit DIE. This includes
8198 any surrounding namespace or type declarations. If the DIE has an
8199 AT_specification attribute, it also includes attributes and children
8200 attached to the specification, and returns a pointer to the original
8201 parent of the declaration DIE. Returns NULL otherwise. */
8202
8203 static dw_die_ref
8204 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8205 {
8206 dw_die_ref decl;
8207 dw_die_ref new_decl;
8208 dw_die_ref orig_parent = NULL;
8209
8210 decl = get_AT_ref (die, DW_AT_specification);
8211 if (decl == NULL)
8212 decl = die;
8213 else
8214 {
8215 unsigned ix;
8216 dw_die_ref c;
8217 dw_attr_node *a;
8218
8219 /* The original DIE will be changed to a declaration, and must
8220 be moved to be a child of the original declaration DIE. */
8221 orig_parent = decl->die_parent;
8222
8223 /* Copy the type node pointer from the new DIE to the original
8224 declaration DIE so we can forward references later. */
8225 decl->comdat_type_p = true;
8226 decl->die_id.die_type_node = die->die_id.die_type_node;
8227
8228 remove_AT (die, DW_AT_specification);
8229
8230 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8231 {
8232 if (a->dw_attr != DW_AT_name
8233 && a->dw_attr != DW_AT_declaration
8234 && a->dw_attr != DW_AT_external)
8235 add_dwarf_attr (die, a);
8236 }
8237
8238 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8239 }
8240
8241 if (decl->die_parent != NULL
8242 && !is_unit_die (decl->die_parent))
8243 {
8244 new_decl = copy_ancestor_tree (unit, decl, NULL);
8245 if (new_decl != NULL)
8246 {
8247 remove_AT (new_decl, DW_AT_signature);
8248 add_AT_specification (die, new_decl);
8249 }
8250 }
8251
8252 return orig_parent;
8253 }
8254
8255 /* Generate the skeleton ancestor tree for the given NODE, then clone
8256 the DIE and add the clone into the tree. */
8257
8258 static void
8259 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8260 {
8261 if (node->new_die != NULL)
8262 return;
8263
8264 node->new_die = clone_as_declaration (node->old_die);
8265
8266 if (node->parent != NULL)
8267 {
8268 generate_skeleton_ancestor_tree (node->parent);
8269 add_child_die (node->parent->new_die, node->new_die);
8270 }
8271 }
8272
8273 /* Generate a skeleton tree of DIEs containing any declarations that are
8274 found in the original tree. We traverse the tree looking for declaration
8275 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8276
8277 static void
8278 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8279 {
8280 skeleton_chain_node node;
8281 dw_die_ref c;
8282 dw_die_ref first;
8283 dw_die_ref prev = NULL;
8284 dw_die_ref next = NULL;
8285
8286 node.parent = parent;
8287
8288 first = c = parent->old_die->die_child;
8289 if (c)
8290 next = c->die_sib;
8291 if (c) do {
8292 if (prev == NULL || prev->die_sib == c)
8293 prev = c;
8294 c = next;
8295 next = (c == first ? NULL : c->die_sib);
8296 node.old_die = c;
8297 node.new_die = NULL;
8298 if (is_declaration_die (c))
8299 {
8300 if (is_template_instantiation (c))
8301 {
8302 /* Instantiated templates do not need to be cloned into the
8303 type unit. Just move the DIE and its children back to
8304 the skeleton tree (in the main CU). */
8305 remove_child_with_prev (c, prev);
8306 add_child_die (parent->new_die, c);
8307 c = prev;
8308 }
8309 else if (c->comdat_type_p)
8310 {
8311 /* This is the skeleton of a type broken out by an earlier
8312 break_out_comdat_types call. Clone the existing DIE, but
8313 keep the children under the original (which is in the main CU). */
8314 dw_die_ref clone = clone_die (c);
8315
8316 replace_child (c, clone, prev);
8317 generate_skeleton_ancestor_tree (parent);
8318 add_child_die (parent->new_die, c);
8319 c = clone;
8320 continue;
8321 }
8322 else
8323 {
8324 /* Clone the existing DIE, move the original to the skeleton
8325 tree (which is in the main CU), and put the clone, with
8326 all the original's children, where the original came from
8327 (which is about to be moved to the type unit). */
8328 dw_die_ref clone = clone_die (c);
8329 move_all_children (c, clone);
8330
8331 /* If the original has a DW_AT_object_pointer attribute,
8332 it would now point to a child DIE just moved to the
8333 cloned tree, so we need to remove that attribute from
8334 the original. */
8335 remove_AT (c, DW_AT_object_pointer);
8336
8337 replace_child (c, clone, prev);
8338 generate_skeleton_ancestor_tree (parent);
8339 add_child_die (parent->new_die, c);
8340 node.old_die = clone;
8341 node.new_die = c;
8342 c = clone;
8343 }
8344 }
8345 generate_skeleton_bottom_up (&node);
8346 } while (next != NULL);
8347 }
8348
8349 /* Wrapper function for generate_skeleton_bottom_up. */
8350
8351 static dw_die_ref
8352 generate_skeleton (dw_die_ref die)
8353 {
8354 skeleton_chain_node node;
8355
8356 node.old_die = die;
8357 node.new_die = NULL;
8358 node.parent = NULL;
8359
8360 /* If this type definition is nested inside another type,
8361 and is not an instantiation of a template, always leave
8362 at least a declaration in its place. */
8363 if (die->die_parent != NULL
8364 && is_type_die (die->die_parent)
8365 && !is_template_instantiation (die))
8366 node.new_die = clone_as_declaration (die);
8367
8368 generate_skeleton_bottom_up (&node);
8369 return node.new_die;
8370 }
8371
8372 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8373 declaration. The original DIE is moved to a new compile unit so that
8374 existing references to it follow it to the new location. If any of the
8375 original DIE's descendants is a declaration, we need to replace the
8376 original DIE with a skeleton tree and move the declarations back into the
8377 skeleton tree. */
8378
8379 static dw_die_ref
8380 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8381 dw_die_ref prev)
8382 {
8383 dw_die_ref skeleton, orig_parent;
8384
8385 /* Copy the declaration context to the type unit DIE. If the returned
8386 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8387 that DIE. */
8388 orig_parent = copy_declaration_context (unit, child);
8389
8390 skeleton = generate_skeleton (child);
8391 if (skeleton == NULL)
8392 remove_child_with_prev (child, prev);
8393 else
8394 {
8395 skeleton->comdat_type_p = true;
8396 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8397
8398 /* If the original DIE was a specification, we need to put
8399 the skeleton under the parent DIE of the declaration.
8400 This leaves the original declaration in the tree, but
8401 it will be pruned later since there are no longer any
8402 references to it. */
8403 if (orig_parent != NULL)
8404 {
8405 remove_child_with_prev (child, prev);
8406 add_child_die (orig_parent, skeleton);
8407 }
8408 else
8409 replace_child (child, skeleton, prev);
8410 }
8411
8412 return skeleton;
8413 }
8414
8415 static void
8416 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8417 comdat_type_node *type_node,
8418 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8419
8420 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8421 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8422 DWARF procedure references in the DW_AT_location attribute. */
8423
8424 static dw_die_ref
8425 copy_dwarf_procedure (dw_die_ref die,
8426 comdat_type_node *type_node,
8427 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8428 {
8429 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8430
8431 /* DWARF procedures are not supposed to have children... */
8432 gcc_assert (die->die_child == NULL);
8433
8434 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8435 gcc_assert (vec_safe_length (die->die_attr) == 1
8436 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8437
8438 /* Do not copy DWARF procedures more than once. */
8439 bool existed;
8440 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8441 if (existed)
8442 return die_copy;
8443
8444 die_copy = clone_die (die);
8445 add_child_die (type_node->root_die, die_copy);
8446 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8447 return die_copy;
8448 }
8449
8450 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8451 procedures in DIE's attributes. */
8452
8453 static void
8454 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8455 comdat_type_node *type_node,
8456 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8457 {
8458 dw_attr_node *a;
8459 unsigned i;
8460
8461 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8462 {
8463 dw_loc_descr_ref loc;
8464
8465 if (a->dw_attr_val.val_class != dw_val_class_loc)
8466 continue;
8467
8468 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8469 {
8470 switch (loc->dw_loc_opc)
8471 {
8472 case DW_OP_call2:
8473 case DW_OP_call4:
8474 case DW_OP_call_ref:
8475 gcc_assert (loc->dw_loc_oprnd1.val_class
8476 == dw_val_class_die_ref);
8477 loc->dw_loc_oprnd1.v.val_die_ref.die
8478 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8479 type_node,
8480 copied_dwarf_procs);
8481 break;

8482 default:
8483 break;
8484 }
8485 }
8486 }
8487 }
8488
8489 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8490 rewrite references to point to the copies.
8491
8492 References are looked for in DIE's attributes and, recursively, in the
8493 attributes of all its children that are location descriptions.
8494 COPIED_DWARF_PROCS is a mapping from old DWARF procedures to their copies;
8495 it is used to avoid copying the same DWARF procedure twice under TYPE_NODE. */
8496
8497 static void
8498 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8499 comdat_type_node *type_node,
8500 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8501 {
8502 dw_die_ref c;
8503
8504 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8505 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8506 type_node,
8507 copied_dwarf_procs));
8508 }
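
/* As an example of why this is needed: an array type may have its
   DW_AT_byte_size given as a location expression containing
   DW_OP_call4 <DW_TAG_dwarf_procedure>.  Once the array type is moved
   into its own type unit, that call could no longer reach a procedure
   left behind in the original unit, so the procedure DIE is cloned into
   the type unit and the DW_OP_call* operand is redirected to the clone
   by the walk above.  */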
8509
8510 /* Traverse the DIE and set up additional .debug_types or .debug_info
8511 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8512 section. */
8513
8514 static void
8515 break_out_comdat_types (dw_die_ref die)
8516 {
8517 dw_die_ref c;
8518 dw_die_ref first;
8519 dw_die_ref prev = NULL;
8520 dw_die_ref next = NULL;
8521 dw_die_ref unit = NULL;
8522
8523 first = c = die->die_child;
8524 if (c)
8525 next = c->die_sib;
8526 if (c) do {
8527 if (prev == NULL || prev->die_sib == c)
8528 prev = c;
8529 c = next;
8530 next = (c == first ? NULL : c->die_sib);
8531 if (should_move_die_to_comdat (c))
8532 {
8533 dw_die_ref replacement;
8534 comdat_type_node *type_node;
8535
8536 /* Break out nested types into their own type units. */
8537 break_out_comdat_types (c);
8538
8539 /* Create a new type unit DIE as the root for the new tree, and
8540 add it to the list of comdat types. */
8541 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8542 add_AT_unsigned (unit, DW_AT_language,
8543 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8544 type_node = ggc_cleared_alloc<comdat_type_node> ();
8545 type_node->root_die = unit;
8546 type_node->next = comdat_type_list;
8547 comdat_type_list = type_node;
8548
8549 /* Generate the type signature. */
8550 generate_type_signature (c, type_node);
8551
8552 /* Copy the declaration context, attributes, and children of the
8553 declaration into the new type unit DIE, then remove this DIE
8554 from the main CU (or replace it with a skeleton if necessary). */
8555 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8556 type_node->skeleton_die = replacement;
8557
8558 /* Add the DIE to the new compunit. */
8559 add_child_die (unit, c);
8560
8561 /* Types can reference DWARF procedures for type size or data location
8562 expressions. Calls in DWARF expressions cannot target procedures
8563 that are not in the same section. So we must copy DWARF procedures
8564 along with this type and then rewrite references to them. */
8565 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8566 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8567
8568 if (replacement != NULL)
8569 c = replacement;
8570 }
8571 else if (c->die_tag == DW_TAG_namespace
8572 || c->die_tag == DW_TAG_class_type
8573 || c->die_tag == DW_TAG_structure_type
8574 || c->die_tag == DW_TAG_union_type)
8575 {
8576 /* Look for nested types that can be broken out. */
8577 break_out_comdat_types (c);
8578 }
8579 } while (next != NULL);
8580 }
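
/* The net effect for a single type: before the transformation the main
   CU directly contains the full 'struct S' tree; afterwards the main CU
   holds at most a skeleton DIE for S carrying a DW_AT_signature
   reference, while a new DW_TAG_type_unit rooted at UNIT holds the
   complete definition together with any DWARF procedures it uses.  */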
8581
8582 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8583 Enter all the cloned children into the hash table decl_table. */
8584
8585 static dw_die_ref
8586 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8587 {
8588 dw_die_ref c;
8589 dw_die_ref clone;
8590 struct decl_table_entry *entry;
8591 decl_table_entry **slot;
8592
8593 if (die->die_tag == DW_TAG_subprogram)
8594 clone = clone_as_declaration (die);
8595 else
8596 clone = clone_die (die);
8597
8598 slot = decl_table->find_slot_with_hash (die,
8599 htab_hash_pointer (die), INSERT);
8600
8601 /* Assert that DIE isn't in the hash table yet. If it were already
8602 there, its ancestors would necessarily be there as well, and
8603 clone_tree_partial wouldn't have been called for it. */
8604 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8605
8606 entry = XCNEW (struct decl_table_entry);
8607 entry->orig = die;
8608 entry->copy = clone;
8609 *slot = entry;
8610
8611 if (die->die_tag != DW_TAG_subprogram)
8612 FOR_EACH_CHILD (die, c,
8613 add_child_die (clone, clone_tree_partial (c, decl_table)));
8614
8615 return clone;
8616 }
8617
8618 /* Walk the DIE and its children, looking for references to incomplete
8619 or trivial types that are unmarked (i.e., that are not in the current
8620 type_unit). */
8621
8622 static void
8623 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8624 {
8625 dw_die_ref c;
8626 dw_attr_node *a;
8627 unsigned ix;
8628
8629 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8630 {
8631 if (AT_class (a) == dw_val_class_die_ref)
8632 {
8633 dw_die_ref targ = AT_ref (a);
8634 decl_table_entry **slot;
8635 struct decl_table_entry *entry;
8636
8637 if (targ->die_mark != 0 || targ->comdat_type_p)
8638 continue;
8639
8640 slot = decl_table->find_slot_with_hash (targ,
8641 htab_hash_pointer (targ),
8642 INSERT);
8643
8644 if (*slot != HTAB_EMPTY_ENTRY)
8645 {
8646 /* TARG has already been copied, so we just need to
8647 modify the reference to point to the copy. */
8648 entry = *slot;
8649 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8650 }
8651 else
8652 {
8653 dw_die_ref parent = unit;
8654 dw_die_ref copy = clone_die (targ);
8655
8656 /* Record in DECL_TABLE that TARG has been copied.
8657 Need to do this now, before the recursive call,
8658 because DECL_TABLE may be expanded and SLOT
8659 would no longer be a valid pointer. */
8660 entry = XCNEW (struct decl_table_entry);
8661 entry->orig = targ;
8662 entry->copy = copy;
8663 *slot = entry;
8664
8665 /* If TARG is not a declaration DIE, we need to copy its
8666 children. */
8667 if (!is_declaration_die (targ))
8668 {
8669 FOR_EACH_CHILD (
8670 targ, c,
8671 add_child_die (copy,
8672 clone_tree_partial (c, decl_table)));
8673 }
8674
8675 /* Make sure the cloned tree is marked as part of the
8676 type unit. */
8677 mark_dies (copy);
8678
8679 /* If TARG has surrounding context, copy its ancestor tree
8680 into the new type unit. */
8681 if (targ->die_parent != NULL
8682 && !is_unit_die (targ->die_parent))
8683 parent = copy_ancestor_tree (unit, targ->die_parent,
8684 decl_table);
8685
8686 add_child_die (parent, copy);
8687 a->dw_attr_val.v.val_die_ref.die = copy;
8688
8689 /* Make sure the newly-copied DIE is walked. If it was
8690 installed in a previously-added context, it won't
8691 get visited otherwise. */
8692 if (parent != unit)
8693 {
8694 /* Find the highest point of the newly-added tree,
8695 mark each node along the way, and walk from there. */
8696 parent->die_mark = 1;
8697 while (parent->die_parent
8698 && parent->die_parent->die_mark == 0)
8699 {
8700 parent = parent->die_parent;
8701 parent->die_mark = 1;
8702 }
8703 copy_decls_walk (unit, parent, decl_table);
8704 }
8705 }
8706 }
8707 }
8708
8709 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8710 }
8711
8712 /* Copy declarations for "unworthy" types into the new comdat section.
8713 Incomplete types, modified types, and certain other types aren't broken
8714 out into comdat sections of their own, so they don't have a signature,
8715 and we need to copy the declaration into the same section so that we
8716 don't have an external reference. */
8717
8718 static void
8719 copy_decls_for_unworthy_types (dw_die_ref unit)
8720 {
8721 mark_dies (unit);
8722 decl_hash_type decl_table (10);
8723 copy_decls_walk (unit, unit, &decl_table);
8724 unmark_dies (unit);
8725 }
8726
8727 /* Traverse the DIE and add a sibling attribute if it may have the
8728 effect of speeding up access to siblings. To save some space,
8729 avoid generating sibling attributes for DIEs without children. */
8730
8731 static void
8732 add_sibling_attributes (dw_die_ref die)
8733 {
8734 dw_die_ref c;
8735
8736 if (! die->die_child)
8737 return;
8738
8739 if (die->die_parent && die != die->die_parent->die_child)
8740 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8741
8742 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8743 }
8744
8745 /* Output all location lists for the DIE and its children. */
8746
8747 static void
8748 output_location_lists (dw_die_ref die)
8749 {
8750 dw_die_ref c;
8751 dw_attr_node *a;
8752 unsigned ix;
8753
8754 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8755 if (AT_class (a) == dw_val_class_loc_list)
8756 output_loc_list (AT_loc_list (a));
8757
8758 FOR_EACH_CHILD (die, c, output_location_lists (c));
8759 }
8760
8761 /* During assign_location_list_indexes and output_loclists_offsets this is
8762 the current index; afterwards it is the number of assigned indexes (i.e. how
8763 large the .debug_loclists* offset table should be). */
8764 static unsigned int loc_list_idx;
8765
8766 /* Output all location list offsets for the DIE and its children. */
8767
8768 static void
8769 output_loclists_offsets (dw_die_ref die)
8770 {
8771 dw_die_ref c;
8772 dw_attr_node *a;
8773 unsigned ix;
8774
8775 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8776 if (AT_class (a) == dw_val_class_loc_list)
8777 {
8778 dw_loc_list_ref l = AT_loc_list (a);
8779 if (l->offset_emitted)
8780 continue;
8781 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8782 loc_section_label, NULL);
8783 gcc_assert (l->hash == loc_list_idx);
8784 loc_list_idx++;
8785 l->offset_emitted = true;
8786 }
8787
8788 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8789 }
8790
8791 /* Recursively set indexes of location lists. */
8792
8793 static void
8794 assign_location_list_indexes (dw_die_ref die)
8795 {
8796 dw_die_ref c;
8797 dw_attr_node *a;
8798 unsigned ix;
8799
8800 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8801 if (AT_class (a) == dw_val_class_loc_list)
8802 {
8803 dw_loc_list_ref list = AT_loc_list (a);
8804 if (!list->num_assigned)
8805 {
8806 list->num_assigned = true;
8807 list->hash = loc_list_idx++;
8808 }
8809 }
8810
8811 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8812 }
8813
8814 /* We want to limit the number of external references, because they are
8815 larger than local references: a relocation takes multiple words, and
8816 even a sig8 reference is always eight bytes, whereas a local reference
8817 can be as small as one byte (though GCC usually emits the 4-byte DW_FORM_ref4).
8818 So if we encounter multiple external references to the same type DIE, we
8819 make a local typedef stub for it and redirect all references there.
8820
8821 This is the element of the hash table for keeping track of these
8822 references. */
8823
8824 struct external_ref
8825 {
8826 dw_die_ref type;
8827 dw_die_ref stub;
8828 unsigned n_refs;
8829 };
8830
8831 /* Hashtable helpers. */
8832
8833 struct external_ref_hasher : free_ptr_hash <external_ref>
8834 {
8835 static inline hashval_t hash (const external_ref *);
8836 static inline bool equal (const external_ref *, const external_ref *);
8837 };
8838
8839 inline hashval_t
8840 external_ref_hasher::hash (const external_ref *r)
8841 {
8842 dw_die_ref die = r->type;
8843 hashval_t h = 0;
8844
8845 /* We can't use the address of the DIE for hashing, because
8846 that will make the order of the stub DIEs non-deterministic. */
8847 if (! die->comdat_type_p)
8848 /* We have a symbol; use it to compute a hash. */
8849 h = htab_hash_string (die->die_id.die_symbol);
8850 else
8851 {
8852 /* We have a type signature; use a subset of the bits as the hash.
8853 The 8-byte signature is at least as large as hashval_t. */
8854 comdat_type_node *type_node = die->die_id.die_type_node;
8855 memcpy (&h, type_node->signature, sizeof (h));
8856 }
8857 return h;
8858 }
8859
8860 inline bool
8861 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8862 {
8863 return r1->type == r2->type;
8864 }
8865
8866 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8867
8868 /* Return a pointer to the external_ref for references to DIE. */
8869
8870 static struct external_ref *
8871 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8872 {
8873 struct external_ref ref, *ref_p;
8874 external_ref **slot;
8875
8876 ref.type = die;
8877 slot = map->find_slot (&ref, INSERT);
8878 if (*slot != HTAB_EMPTY_ENTRY)
8879 return *slot;
8880
8881 ref_p = XCNEW (struct external_ref);
8882 ref_p->type = die;
8883 *slot = ref_p;
8884 return ref_p;
8885 }
8886
8887 /* Subroutine of optimize_external_refs, below.
8888
8889 If we see a type skeleton, record it as our stub. If we see external
8890 references, remember how many we've seen. */
8891
8892 static void
8893 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8894 {
8895 dw_die_ref c;
8896 dw_attr_node *a;
8897 unsigned ix;
8898 struct external_ref *ref_p;
8899
8900 if (is_type_die (die)
8901 && (c = get_AT_ref (die, DW_AT_signature)))
8902 {
8903 /* This is a local skeleton; use it for local references. */
8904 ref_p = lookup_external_ref (map, c);
8905 ref_p->stub = die;
8906 }
8907
8908 /* Scan the DIE references, and remember any that refer to DIEs from
8909 other CUs (i.e. those which are not marked). */
8910 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8911 if (AT_class (a) == dw_val_class_die_ref
8912 && (c = AT_ref (a))->die_mark == 0
8913 && is_type_die (c))
8914 {
8915 ref_p = lookup_external_ref (map, c);
8916 ref_p->n_refs++;
8917 }
8918
8919 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8920 }
8921
8922 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8923 points to an external_ref, DATA is the CU we're processing. If we don't
8924 already have a local stub, and we have multiple refs, build a stub. */
8925
8926 int
8927 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8928 {
8929 struct external_ref *ref_p = *slot;
8930
8931 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8932 {
8933 /* We have multiple references to this type, so build a small stub.
8934 Both of these forms are a bit dodgy from the perspective of the
8935 DWARF standard, since technically they should have names. */
8936 dw_die_ref cu = data;
8937 dw_die_ref type = ref_p->type;
8938 dw_die_ref stub = NULL;
8939
8940 if (type->comdat_type_p)
8941 {
8942 /* If we refer to this type via sig8, use AT_signature. */
8943 stub = new_die (type->die_tag, cu, NULL_TREE);
8944 add_AT_die_ref (stub, DW_AT_signature, type);
8945 }
8946 else
8947 {
8948 /* Otherwise, use a typedef with no name. */
8949 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8950 add_AT_die_ref (stub, DW_AT_type, type);
8951 }
8952
8953 stub->die_mark++;
8954 ref_p->stub = stub;
8955 }
8956 return 1;
8957 }
8958
8959 /* DIE is a unit; look through all the DIE references to see if there are
8960 any external references to types, and if so, create local stubs for
8961 them which will be applied in build_abbrev_table. This is useful because
8962 references to local DIEs are smaller. */
8963
8964 static external_ref_hash_type *
8965 optimize_external_refs (dw_die_ref die)
8966 {
8967 external_ref_hash_type *map = new external_ref_hash_type (10);
8968 optimize_external_refs_1 (die, map);
8969 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8970 return map;
8971 }
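/* A rough illustration of the saving: a type DIE from another unit that is
   referenced from, say, five attributes here would otherwise need five
   DW_FORM_ref_sig8 values (5 * 8 = 40 bytes) or five relocated
   DW_FORM_ref_addr values; with a stub, the external form is emitted once
   in the stub and the five references become small local refs. */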
8972
8973 /* The following 3 variables are temporaries that are computed only during the
8974 build_abbrev_table call and used and released during the following
8975 optimize_abbrev_table call. */
8976
8977 /* First abbrev_id that can be optimized based on usage. */
8978 static unsigned int abbrev_opt_start;
8979
8980 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8981 abbrev_id smaller than this, because they must be already sized
8982 during build_abbrev_table). */
8983 static unsigned int abbrev_opt_base_type_end;
8984
8985 /* Vector of usage counts during build_abbrev_table. Indexed by
8986 abbrev_id - abbrev_opt_start. */
8987 static vec<unsigned int> abbrev_usage_count;
8988
8989 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8990 static vec<dw_die_ref> sorted_abbrev_dies;
8991
8992 /* The format of each DIE (and its attribute value pairs) is encoded in an
8993 abbreviation table. This routine builds the abbreviation table and assigns
8994 a unique abbreviation id for each abbreviation entry. The children of each
8995 die are visited recursively. */
8996
8997 static void
8998 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8999 {
9000 unsigned int abbrev_id = 0;
9001 dw_die_ref c;
9002 dw_attr_node *a;
9003 unsigned ix;
9004 dw_die_ref abbrev;
9005
9006 /* Scan the DIE references, and replace any that refer to
9007 DIEs from other CUs (i.e. those which are not marked) with
9008 the local stubs we built in optimize_external_refs. */
9009 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9010 if (AT_class (a) == dw_val_class_die_ref
9011 && (c = AT_ref (a))->die_mark == 0)
9012 {
9013 struct external_ref *ref_p;
9014 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9015
9016 ref_p = lookup_external_ref (extern_map, c);
9017 if (ref_p->stub && ref_p->stub != die)
9018 change_AT_die_ref (a, ref_p->stub);
9019 else
9020 /* We aren't changing this reference, so mark it external. */
9021 set_AT_ref_external (a, 1);
9022 }
9023
9024 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9025 {
9026 dw_attr_node *die_a, *abbrev_a;
9027 unsigned ix;
9028 bool ok = true;
9029
9030 if (abbrev_id == 0)
9031 continue;
9032 if (abbrev->die_tag != die->die_tag)
9033 continue;
9034 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9035 continue;
9036
9037 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9038 continue;
9039
9040 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9041 {
9042 abbrev_a = &(*abbrev->die_attr)[ix];
9043 if ((abbrev_a->dw_attr != die_a->dw_attr)
9044 || (value_format (abbrev_a) != value_format (die_a)))
9045 {
9046 ok = false;
9047 break;
9048 }
9049 }
9050 if (ok)
9051 break;
9052 }
9053
9054 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9055 {
9056 vec_safe_push (abbrev_die_table, die);
9057 if (abbrev_opt_start)
9058 abbrev_usage_count.safe_push (0);
9059 }
9060 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9061 {
9062 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9063 sorted_abbrev_dies.safe_push (die);
9064 }
9065
9066 die->die_abbrev = abbrev_id;
9067 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9068 }
9069
9070 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9071 by die_abbrev's usage count, from the most commonly used
9072 abbreviation to the least. */
9073
9074 static int
9075 die_abbrev_cmp (const void *p1, const void *p2)
9076 {
9077 dw_die_ref die1 = *(const dw_die_ref *) p1;
9078 dw_die_ref die2 = *(const dw_die_ref *) p2;
9079
9080 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9081 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9082
9083 if (die1->die_abbrev >= abbrev_opt_base_type_end
9084 && die2->die_abbrev >= abbrev_opt_base_type_end)
9085 {
9086 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9087 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9088 return -1;
9089 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9090 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9091 return 1;
9092 }
9093
9094 /* Stabilize the sort. */
9095 if (die1->die_abbrev < die2->die_abbrev)
9096 return -1;
9097 if (die1->die_abbrev > die2->die_abbrev)
9098 return 1;
9099
9100 return 0;
9101 }
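/* The motivation for this ordering: every DIE starts with its abbrev code
   encoded as a uleb128, so codes 1..127 cost a single byte while larger
   codes cost two or more.  Handing the smallest codes to the most heavily
   used abbreviations (e.g. one used by 10000 DIEs moving from a two-byte
   to a one-byte code saves 10000 bytes of .debug_info) is where
   optimize_abbrev_table gets its benefit. */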
9102
9103 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9104 of DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9105 into dw_val_class_const_implicit or
9106 dw_val_class_unsigned_const_implicit. */
9107
9108 static void
9109 optimize_implicit_const (unsigned int first_id, unsigned int end,
9110 vec<bool> &implicit_consts)
9111 {
9112 /* It never makes sense if there is just one DIE using the abbreviation. */
9113 if (end < first_id + 2)
9114 return;
9115
9116 dw_attr_node *a;
9117 unsigned ix, i;
9118 dw_die_ref die = sorted_abbrev_dies[first_id];
9119 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9120 if (implicit_consts[ix])
9121 {
9122 enum dw_val_class new_class = dw_val_class_none;
9123 switch (AT_class (a))
9124 {
9125 case dw_val_class_unsigned_const:
9126 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9127 continue;
9128
9129 /* The .debug_abbrev section will grow by
9130 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9131 in all the DIEs using that abbreviation. */
9132 if (constant_size (AT_unsigned (a)) * (end - first_id)
9133 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9134 continue;
9135
9136 new_class = dw_val_class_unsigned_const_implicit;
9137 break;
9138
9139 case dw_val_class_const:
9140 new_class = dw_val_class_const_implicit;
9141 break;
9142
9143 case dw_val_class_file:
9144 new_class = dw_val_class_file_implicit;
9145 break;
9146
9147 default:
9148 continue;
9149 }
9150 for (i = first_id; i < end; i++)
9151 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9152 = new_class;
9153 }
9154 }
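/* An example of the tradeoff tested above: if 100 DIEs share an abbreviation
   whose DW_AT_byte_size is always 4, emitting the value as DW_FORM_data1
   costs 100 bytes of .debug_info, whereas DW_FORM_implicit_const stores it
   once in .debug_abbrev as a 1-byte sleb128 and the DIEs themselves carry
   nothing for that attribute. */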
9155
9156 /* Attempt to optimize the abbreviation table for abbreviations with ids
9157 of abbrev_opt_start and above. */
9158
9159 static void
9160 optimize_abbrev_table (void)
9161 {
9162 if (abbrev_opt_start
9163 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9164 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9165 {
9166 auto_vec<bool, 32> implicit_consts;
9167 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9168
9169 unsigned int abbrev_id = abbrev_opt_start - 1;
9170 unsigned int first_id = ~0U;
9171 unsigned int last_abbrev_id = 0;
9172 unsigned int i;
9173 dw_die_ref die;
9174 if (abbrev_opt_base_type_end > abbrev_opt_start)
9175 abbrev_id = abbrev_opt_base_type_end - 1;
9176 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9177 most commonly used abbreviations come first. */
9178 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9179 {
9180 dw_attr_node *a;
9181 unsigned ix;
9182
9183 /* If calc_base_type_die_sizes has been called, the CU and
9184 base types after it can't be optimized, because we've already
9185 calculated their DIE offsets. We've sorted them first. */
9186 if (die->die_abbrev < abbrev_opt_base_type_end)
9187 continue;
9188 if (die->die_abbrev != last_abbrev_id)
9189 {
9190 last_abbrev_id = die->die_abbrev;
9191 if (dwarf_version >= 5 && first_id != ~0U)
9192 optimize_implicit_const (first_id, i, implicit_consts);
9193 abbrev_id++;
9194 (*abbrev_die_table)[abbrev_id] = die;
9195 if (dwarf_version >= 5)
9196 {
9197 first_id = i;
9198 implicit_consts.truncate (0);
9199
9200 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9201 switch (AT_class (a))
9202 {
9203 case dw_val_class_const:
9204 case dw_val_class_unsigned_const:
9205 case dw_val_class_file:
9206 implicit_consts.safe_push (true);
9207 break;
9208 default:
9209 implicit_consts.safe_push (false);
9210 break;
9211 }
9212 }
9213 }
9214 else if (dwarf_version >= 5)
9215 {
9216 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9217 if (!implicit_consts[ix])
9218 continue;
9219 else
9220 {
9221 dw_attr_node *other_a
9222 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9223 if (!dw_val_equal_p (&a->dw_attr_val,
9224 &other_a->dw_attr_val))
9225 implicit_consts[ix] = false;
9226 }
9227 }
9228 die->die_abbrev = abbrev_id;
9229 }
9230 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9231 if (dwarf_version >= 5 && first_id != ~0U)
9232 optimize_implicit_const (first_id, i, implicit_consts);
9233 }
9234
9235 abbrev_opt_start = 0;
9236 abbrev_opt_base_type_end = 0;
9237 abbrev_usage_count.release ();
9238 sorted_abbrev_dies.release ();
9239 }
9240 \f
9241 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9242
9243 static int
9244 constant_size (unsigned HOST_WIDE_INT value)
9245 {
9246 int log;
9247
9248 if (value == 0)
9249 log = 0;
9250 else
9251 log = floor_log2 (value);
9252
9253 log = log / 8;
9254 log = 1 << (floor_log2 (log) + 1);
9255
9256 return log;
9257 }
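/* For example, constant_size (0) and constant_size (200) return 1,
   constant_size (0x1234) returns 2, constant_size (0x12345678) returns 4,
   and anything needing more than 32 bits returns 8; the result always maps
   onto one of the DW_FORM_data1/2/4/8 forms chosen in value_format. */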
9258
9259 /* Return the size of a DIE as it is represented in the
9260 .debug_info section. */
9261
9262 static unsigned long
9263 size_of_die (dw_die_ref die)
9264 {
9265 unsigned long size = 0;
9266 dw_attr_node *a;
9267 unsigned ix;
9268 enum dwarf_form form;
9269
9270 size += size_of_uleb128 (die->die_abbrev);
9271 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9272 {
9273 switch (AT_class (a))
9274 {
9275 case dw_val_class_addr:
9276 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9277 {
9278 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9279 size += size_of_uleb128 (AT_index (a));
9280 }
9281 else
9282 size += DWARF2_ADDR_SIZE;
9283 break;
9284 case dw_val_class_offset:
9285 size += DWARF_OFFSET_SIZE;
9286 break;
9287 case dw_val_class_loc:
9288 {
9289 unsigned long lsize = size_of_locs (AT_loc (a));
9290
9291 /* Block length. */
9292 if (dwarf_version >= 4)
9293 size += size_of_uleb128 (lsize);
9294 else
9295 size += constant_size (lsize);
9296 size += lsize;
9297 }
9298 break;
9299 case dw_val_class_loc_list:
9300 case dw_val_class_view_list:
9301 if (dwarf_split_debug_info && dwarf_version >= 5)
9302 {
9303 gcc_assert (AT_loc_list (a)->num_assigned);
9304 size += size_of_uleb128 (AT_loc_list (a)->hash);
9305 }
9306 else
9307 size += DWARF_OFFSET_SIZE;
9308 break;
9309 case dw_val_class_range_list:
9310 if (value_format (a) == DW_FORM_rnglistx)
9311 {
9312 gcc_assert (rnglist_idx);
9313 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9314 size += size_of_uleb128 (r->idx);
9315 }
9316 else
9317 size += DWARF_OFFSET_SIZE;
9318 break;
9319 case dw_val_class_const:
9320 size += size_of_sleb128 (AT_int (a));
9321 break;
9322 case dw_val_class_unsigned_const:
9323 {
9324 int csize = constant_size (AT_unsigned (a));
9325 if (dwarf_version == 3
9326 && a->dw_attr == DW_AT_data_member_location
9327 && csize >= 4)
9328 size += size_of_uleb128 (AT_unsigned (a));
9329 else
9330 size += csize;
9331 }
9332 break;
9333 case dw_val_class_symview:
9334 if (symview_upper_bound <= 0xff)
9335 size += 1;
9336 else if (symview_upper_bound <= 0xffff)
9337 size += 2;
9338 else if (symview_upper_bound <= 0xffffffff)
9339 size += 4;
9340 else
9341 size += 8;
9342 break;
9343 case dw_val_class_const_implicit:
9344 case dw_val_class_unsigned_const_implicit:
9345 case dw_val_class_file_implicit:
9346 /* These occupy no size in the DIE, just an extra sleb128 in
9347 .debug_abbrev. */
9348 break;
9349 case dw_val_class_const_double:
9350 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9351 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9352 size++; /* block */
9353 break;
9354 case dw_val_class_wide_int:
9355 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9356 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9357 if (get_full_len (*a->dw_attr_val.v.val_wide)
9358 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9359 size++; /* block */
9360 break;
9361 case dw_val_class_vec:
9362 size += constant_size (a->dw_attr_val.v.val_vec.length
9363 * a->dw_attr_val.v.val_vec.elt_size)
9364 + a->dw_attr_val.v.val_vec.length
9365 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9366 break;
9367 case dw_val_class_flag:
9368 if (dwarf_version >= 4)
9369 /* Currently all add_AT_flag calls pass in 1 as last argument,
9370 so DW_FORM_flag_present can be used. If that ever changes,
9371 we'll need to use DW_FORM_flag and have some optimization
9372 in build_abbrev_table that will change those to
9373 DW_FORM_flag_present if it is set to 1 in all DIEs using
9374 the same abbrev entry. */
9375 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9376 else
9377 size += 1;
9378 break;
9379 case dw_val_class_die_ref:
9380 if (AT_ref_external (a))
9381 {
9382 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9383 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9384 is sized by target address length, whereas in DWARF3
9385 it's always sized as an offset. */
9386 if (use_debug_types)
9387 size += DWARF_TYPE_SIGNATURE_SIZE;
9388 else if (dwarf_version == 2)
9389 size += DWARF2_ADDR_SIZE;
9390 else
9391 size += DWARF_OFFSET_SIZE;
9392 }
9393 else
9394 size += DWARF_OFFSET_SIZE;
9395 break;
9396 case dw_val_class_fde_ref:
9397 size += DWARF_OFFSET_SIZE;
9398 break;
9399 case dw_val_class_lbl_id:
9400 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9401 {
9402 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9403 size += size_of_uleb128 (AT_index (a));
9404 }
9405 else
9406 size += DWARF2_ADDR_SIZE;
9407 break;
9408 case dw_val_class_lineptr:
9409 case dw_val_class_macptr:
9410 case dw_val_class_loclistsptr:
9411 size += DWARF_OFFSET_SIZE;
9412 break;
9413 case dw_val_class_str:
9414 form = AT_string_form (a);
9415 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9416 size += DWARF_OFFSET_SIZE;
9417 else if (form == DW_FORM_GNU_str_index)
9418 size += size_of_uleb128 (AT_index (a));
9419 else
9420 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9421 break;
9422 case dw_val_class_file:
9423 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9424 break;
9425 case dw_val_class_data8:
9426 size += 8;
9427 break;
9428 case dw_val_class_vms_delta:
9429 size += DWARF_OFFSET_SIZE;
9430 break;
9431 case dw_val_class_high_pc:
9432 size += DWARF2_ADDR_SIZE;
9433 break;
9434 case dw_val_class_discr_value:
9435 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9436 break;
9437 case dw_val_class_discr_list:
9438 {
9439 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9440
9441 /* This is a block, so we have the block length and then its
9442 data. */
9443 size += constant_size (block_size) + block_size;
9444 }
9445 break;
9446 default:
9447 gcc_unreachable ();
9448 }
9449 }
9450
9451 return size;
9452 }
9453
9454 /* Size the debugging information associated with a given DIE. Visits the
9455 DIE's children recursively. Updates the global variable next_die_offset
9456 on each pass, and uses its current value to set the die_offset field
9457 in each DIE. */
9458
9459 static void
9460 calc_die_sizes (dw_die_ref die)
9461 {
9462 dw_die_ref c;
9463
9464 gcc_assert (die->die_offset == 0
9465 || (unsigned long int) die->die_offset == next_die_offset);
9466 die->die_offset = next_die_offset;
9467 next_die_offset += size_of_die (die);
9468
9469 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9470
9471 if (die->die_child != NULL)
9472 /* Count the null byte used to terminate sibling lists. */
9473 next_die_offset += 1;
9474 }
9475
9476 /* Size just the base type children at the start of the CU.
9477 This is needed because build_abbrev_table needs to size locs,
9478 and sizing of type-based stack ops needs to know die_offset
9479 values for the base types. */
9480
9481 static void
9482 calc_base_type_die_sizes (void)
9483 {
9484 unsigned long die_offset = (dwarf_split_debug_info
9485 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9486 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9487 unsigned int i;
9488 dw_die_ref base_type;
9489 #if ENABLE_ASSERT_CHECKING
9490 dw_die_ref prev = comp_unit_die ()->die_child;
9491 #endif
9492
9493 die_offset += size_of_die (comp_unit_die ());
9494 for (i = 0; base_types.iterate (i, &base_type); i++)
9495 {
9496 #if ENABLE_ASSERT_CHECKING
9497 gcc_assert (base_type->die_offset == 0
9498 && prev->die_sib == base_type
9499 && base_type->die_child == NULL
9500 && base_type->die_abbrev);
9501 prev = base_type;
9502 #endif
9503 if (abbrev_opt_start
9504 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9505 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9506 base_type->die_offset = die_offset;
9507 die_offset += size_of_die (base_type);
9508 }
9509 }
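/* The "type-based stack ops" mentioned above are DW_OP_const_type,
   DW_OP_regval_type, DW_OP_deref_type, DW_OP_convert and friends, which
   encode a uleb128 offset to a base type DIE inside the location
   expression; size_of_locs therefore cannot size such expressions until
   these base type DIEs have been given offsets, which is why they are
   laid out here ahead of the rest of the CU. */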
9510
9511 /* Set the marks for a die and its children. We do this so
9512 that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9513 DIEs in the same CU will be marked. We used to clear out the offset
9514 and use that as the flag, but ran into ordering problems. */
9515
9516 static void
9517 mark_dies (dw_die_ref die)
9518 {
9519 dw_die_ref c;
9520
9521 gcc_assert (!die->die_mark);
9522
9523 die->die_mark = 1;
9524 FOR_EACH_CHILD (die, c, mark_dies (c));
9525 }
9526
9527 /* Clear the marks for a die and its children. */
9528
9529 static void
9530 unmark_dies (dw_die_ref die)
9531 {
9532 dw_die_ref c;
9533
9534 if (! use_debug_types)
9535 gcc_assert (die->die_mark);
9536
9537 die->die_mark = 0;
9538 FOR_EACH_CHILD (die, c, unmark_dies (c));
9539 }
9540
9541 /* Clear the marks for a die, its children and referred dies. */
9542
9543 static void
9544 unmark_all_dies (dw_die_ref die)
9545 {
9546 dw_die_ref c;
9547 dw_attr_node *a;
9548 unsigned ix;
9549
9550 if (!die->die_mark)
9551 return;
9552 die->die_mark = 0;
9553
9554 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9555
9556 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9557 if (AT_class (a) == dw_val_class_die_ref)
9558 unmark_all_dies (AT_ref (a));
9559 }
9560
9561 /* Calculate if the entry should appear in the final output file. It may be
9562 from a pruned type. */
9563
9564 static bool
9565 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9566 {
9567 /* By limiting gnu pubnames to definitions only, gold can generate a
9568 gdb index without entries for declarations, which don't include
9569 enough information to be useful. */
9570 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9571 return false;
9572
9573 if (table == pubname_table)
9574 {
9575 /* Enumerator names are part of the pubname table, but the
9576 parent DW_TAG_enumeration_type die may have been pruned.
9577 Don't output them if that is the case. */
9578 if (p->die->die_tag == DW_TAG_enumerator &&
9579 (p->die->die_parent == NULL
9580 || !p->die->die_parent->die_perennial_p))
9581 return false;
9582
9583 /* Everything else in the pubname table is included. */
9584 return true;
9585 }
9586
9587 /* The pubtypes table shouldn't include types that have been
9588 pruned. */
9589 return (p->die->die_offset != 0
9590 || !flag_eliminate_unused_debug_types);
9591 }
9592
9593 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9594 generated for the compilation unit. */
9595
9596 static unsigned long
9597 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9598 {
9599 unsigned long size;
9600 unsigned i;
9601 pubname_entry *p;
9602 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9603
9604 size = DWARF_PUBNAMES_HEADER_SIZE;
9605 FOR_EACH_VEC_ELT (*names, i, p)
9606 if (include_pubname_in_output (names, p))
9607 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9608
9609 size += DWARF_OFFSET_SIZE;
9610 return size;
9611 }
9612
9613 /* Return the size of the information in the .debug_aranges section. */
9614
9615 static unsigned long
9616 size_of_aranges (void)
9617 {
9618 unsigned long size;
9619
9620 size = DWARF_ARANGES_HEADER_SIZE;
9621
9622 /* Count the address/length pair for this compilation unit. */
9623 if (text_section_used)
9624 size += 2 * DWARF2_ADDR_SIZE;
9625 if (cold_text_section_used)
9626 size += 2 * DWARF2_ADDR_SIZE;
9627 if (have_multiple_function_sections)
9628 {
9629 unsigned fde_idx;
9630 dw_fde_ref fde;
9631
9632 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9633 {
9634 if (DECL_IGNORED_P (fde->decl))
9635 continue;
9636 if (!fde->in_std_section)
9637 size += 2 * DWARF2_ADDR_SIZE;
9638 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9639 size += 2 * DWARF2_ADDR_SIZE;
9640 }
9641 }
9642
9643 /* Count the two zero words used to terminate the address range table. */
9644 size += 2 * DWARF2_ADDR_SIZE;
9645 return size;
9646 }
9647 \f
9648 /* Select the encoding of an attribute value. */
9649
9650 static enum dwarf_form
9651 value_format (dw_attr_node *a)
9652 {
9653 switch (AT_class (a))
9654 {
9655 case dw_val_class_addr:
9656 /* Only very few attributes allow DW_FORM_addr. */
9657 switch (a->dw_attr)
9658 {
9659 case DW_AT_low_pc:
9660 case DW_AT_high_pc:
9661 case DW_AT_entry_pc:
9662 case DW_AT_trampoline:
9663 return (AT_index (a) == NOT_INDEXED
9664 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9665 default:
9666 break;
9667 }
9668 switch (DWARF2_ADDR_SIZE)
9669 {
9670 case 1:
9671 return DW_FORM_data1;
9672 case 2:
9673 return DW_FORM_data2;
9674 case 4:
9675 return DW_FORM_data4;
9676 case 8:
9677 return DW_FORM_data8;
9678 default:
9679 gcc_unreachable ();
9680 }
9681 case dw_val_class_loc_list:
9682 case dw_val_class_view_list:
9683 if (dwarf_split_debug_info
9684 && dwarf_version >= 5
9685 && AT_loc_list (a)->num_assigned)
9686 return DW_FORM_loclistx;
9687 /* FALLTHRU */
9688 case dw_val_class_range_list:
9689 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo,
9690 but in .debug_info use DW_FORM_sec_offset, which is shorter if we only
9691 care about sizes of .debug* sections in shared libraries and executables
9692 and ignore relocations that affect just relocatable objects - for
9693 DW_FORM_rnglistx we'd have to emit an offset table in the
9694 .debug_rnglists section. */
9695 if (dwarf_split_debug_info
9696 && dwarf_version >= 5
9697 && AT_class (a) == dw_val_class_range_list
9698 && rnglist_idx
9699 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9700 return DW_FORM_rnglistx;
9701 if (dwarf_version >= 4)
9702 return DW_FORM_sec_offset;
9703 /* FALLTHRU */
9704 case dw_val_class_vms_delta:
9705 case dw_val_class_offset:
9706 switch (DWARF_OFFSET_SIZE)
9707 {
9708 case 4:
9709 return DW_FORM_data4;
9710 case 8:
9711 return DW_FORM_data8;
9712 default:
9713 gcc_unreachable ();
9714 }
9715 case dw_val_class_loc:
9716 if (dwarf_version >= 4)
9717 return DW_FORM_exprloc;
9718 switch (constant_size (size_of_locs (AT_loc (a))))
9719 {
9720 case 1:
9721 return DW_FORM_block1;
9722 case 2:
9723 return DW_FORM_block2;
9724 case 4:
9725 return DW_FORM_block4;
9726 default:
9727 gcc_unreachable ();
9728 }
9729 case dw_val_class_const:
9730 return DW_FORM_sdata;
9731 case dw_val_class_unsigned_const:
9732 switch (constant_size (AT_unsigned (a)))
9733 {
9734 case 1:
9735 return DW_FORM_data1;
9736 case 2:
9737 return DW_FORM_data2;
9738 case 4:
9739 /* In DWARF3 DW_AT_data_member_location with
9740 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9741 constant, so we need to use DW_FORM_udata if we need
9742 a large constant. */
9743 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9744 return DW_FORM_udata;
9745 return DW_FORM_data4;
9746 case 8:
9747 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9748 return DW_FORM_udata;
9749 return DW_FORM_data8;
9750 default:
9751 gcc_unreachable ();
9752 }
9753 case dw_val_class_const_implicit:
9754 case dw_val_class_unsigned_const_implicit:
9755 case dw_val_class_file_implicit:
9756 return DW_FORM_implicit_const;
9757 case dw_val_class_const_double:
9758 switch (HOST_BITS_PER_WIDE_INT)
9759 {
9760 case 8:
9761 return DW_FORM_data2;
9762 case 16:
9763 return DW_FORM_data4;
9764 case 32:
9765 return DW_FORM_data8;
9766 case 64:
9767 if (dwarf_version >= 5)
9768 return DW_FORM_data16;
9769 /* FALLTHRU */
9770 default:
9771 return DW_FORM_block1;
9772 }
9773 case dw_val_class_wide_int:
9774 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9775 {
9776 case 8:
9777 return DW_FORM_data1;
9778 case 16:
9779 return DW_FORM_data2;
9780 case 32:
9781 return DW_FORM_data4;
9782 case 64:
9783 return DW_FORM_data8;
9784 case 128:
9785 if (dwarf_version >= 5)
9786 return DW_FORM_data16;
9787 /* FALLTHRU */
9788 default:
9789 return DW_FORM_block1;
9790 }
9791 case dw_val_class_symview:
9792 /* ??? We might use uleb128, but then we'd have to compute
9793 .debug_info offsets in the assembler. */
9794 if (symview_upper_bound <= 0xff)
9795 return DW_FORM_data1;
9796 else if (symview_upper_bound <= 0xffff)
9797 return DW_FORM_data2;
9798 else if (symview_upper_bound <= 0xffffffff)
9799 return DW_FORM_data4;
9800 else
9801 return DW_FORM_data8;
9802 case dw_val_class_vec:
9803 switch (constant_size (a->dw_attr_val.v.val_vec.length
9804 * a->dw_attr_val.v.val_vec.elt_size))
9805 {
9806 case 1:
9807 return DW_FORM_block1;
9808 case 2:
9809 return DW_FORM_block2;
9810 case 4:
9811 return DW_FORM_block4;
9812 default:
9813 gcc_unreachable ();
9814 }
9815 case dw_val_class_flag:
9816 if (dwarf_version >= 4)
9817 {
9818 /* Currently all add_AT_flag calls pass in 1 as last argument,
9819 so DW_FORM_flag_present can be used. If that ever changes,
9820 we'll need to use DW_FORM_flag and have some optimization
9821 in build_abbrev_table that will change those to
9822 DW_FORM_flag_present if it is set to 1 in all DIEs using
9823 the same abbrev entry. */
9824 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9825 return DW_FORM_flag_present;
9826 }
9827 return DW_FORM_flag;
9828 case dw_val_class_die_ref:
9829 if (AT_ref_external (a))
9830 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9831 else
9832 return DW_FORM_ref;
9833 case dw_val_class_fde_ref:
9834 return DW_FORM_data;
9835 case dw_val_class_lbl_id:
9836 return (AT_index (a) == NOT_INDEXED
9837 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9838 case dw_val_class_lineptr:
9839 case dw_val_class_macptr:
9840 case dw_val_class_loclistsptr:
9841 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9842 case dw_val_class_str:
9843 return AT_string_form (a);
9844 case dw_val_class_file:
9845 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9846 {
9847 case 1:
9848 return DW_FORM_data1;
9849 case 2:
9850 return DW_FORM_data2;
9851 case 4:
9852 return DW_FORM_data4;
9853 default:
9854 gcc_unreachable ();
9855 }
9856
9857 case dw_val_class_data8:
9858 return DW_FORM_data8;
9859
9860 case dw_val_class_high_pc:
9861 switch (DWARF2_ADDR_SIZE)
9862 {
9863 case 1:
9864 return DW_FORM_data1;
9865 case 2:
9866 return DW_FORM_data2;
9867 case 4:
9868 return DW_FORM_data4;
9869 case 8:
9870 return DW_FORM_data8;
9871 default:
9872 gcc_unreachable ();
9873 }
9874
9875 case dw_val_class_discr_value:
9876 return (a->dw_attr_val.v.val_discr_value.pos
9877 ? DW_FORM_udata
9878 : DW_FORM_sdata);
9879 case dw_val_class_discr_list:
9880 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9881 {
9882 case 1:
9883 return DW_FORM_block1;
9884 case 2:
9885 return DW_FORM_block2;
9886 case 4:
9887 return DW_FORM_block4;
9888 default:
9889 gcc_unreachable ();
9890 }
9891
9892 default:
9893 gcc_unreachable ();
9894 }
9895 }
9896
9897 /* Output the encoding of an attribute value. */
9898
9899 static void
9900 output_value_format (dw_attr_node *a)
9901 {
9902 enum dwarf_form form = value_format (a);
9903
9904 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9905 }
9906
9907 /* Given a die and id, produce the appropriate abbreviations. */
9908
9909 static void
9910 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9911 {
9912 unsigned ix;
9913 dw_attr_node *a_attr;
9914
9915 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9916 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9917 dwarf_tag_name (abbrev->die_tag));
9918
9919 if (abbrev->die_child != NULL)
9920 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9921 else
9922 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9923
9924 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9925 {
9926 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9927 dwarf_attr_name (a_attr->dw_attr));
9928 output_value_format (a_attr);
9929 if (value_format (a_attr) == DW_FORM_implicit_const)
9930 {
9931 if (AT_class (a_attr) == dw_val_class_file_implicit)
9932 {
9933 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9934 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9935 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9936 }
9937 else
9938 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9939 }
9940 }
9941
9942 dw2_asm_output_data (1, 0, NULL);
9943 dw2_asm_output_data (1, 0, NULL);
9944 }
9945
9946
9947 /* Output the .debug_abbrev section which defines the DIE abbreviation
9948 table. */
9949
9950 static void
9951 output_abbrev_section (void)
9952 {
9953 unsigned int abbrev_id;
9954 dw_die_ref abbrev;
9955
9956 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9957 if (abbrev_id != 0)
9958 output_die_abbrevs (abbrev_id, abbrev);
9959
9960 /* Terminate the table. */
9961 dw2_asm_output_data (1, 0, NULL);
9962 }
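/* For illustration (the values and tag are just an example), a single entry
   emitted by output_die_abbrevs comes out roughly as:
     .uleb128 0x2    (abbrev code)
     .uleb128 0x24   (TAG: DW_TAG_base_type)
     .byte    0      (DW_children_no)
     .uleb128 0xb    (DW_AT_byte_size)
     .uleb128 0xb    (DW_FORM_data1)
     .byte    0
     .byte    0
   where the two trailing zero bytes terminate the attribute list, and the
   final zero byte emitted above terminates the whole table. */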
9963
9964 /* Return a new location list, given the begin and end range, and the
9965 expression. */
9966
9967 static inline dw_loc_list_ref
9968 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9969 const char *end, var_loc_view vend,
9970 const char *section)
9971 {
9972 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9973
9974 retlist->begin = begin;
9975 retlist->begin_entry = NULL;
9976 retlist->end = end;
9977 retlist->expr = expr;
9978 retlist->section = section;
9979 retlist->vbegin = vbegin;
9980 retlist->vend = vend;
9981
9982 return retlist;
9983 }
9984
9985 /* Return true iff there's any nonzero view number in the loc list. */
9986
9987 static bool
9988 loc_list_has_views (dw_loc_list_ref list)
9989 {
9990 if (!debug_variable_location_views)
9991 return false;
9992
9993 for (dw_loc_list_ref loc = list;
9994 loc != NULL; loc = loc->dw_loc_next)
9995 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
9996 return true;
9997
9998 return false;
9999 }
10000
10001 /* Generate a new internal symbol for this location list node, if it
10002 hasn't got one yet. */
10003
10004 static inline void
10005 gen_llsym (dw_loc_list_ref list)
10006 {
10007 gcc_assert (!list->ll_symbol);
10008 list->ll_symbol = gen_internal_sym ("LLST");
10009
10010 if (!loc_list_has_views (list))
10011 return;
10012
10013 if (dwarf2out_locviews_in_attribute ())
10014 {
10015 /* Use the same label_num for the view list. */
10016 label_num--;
10017 list->vl_symbol = gen_internal_sym ("LVUS");
10018 }
10019 else
10020 list->vl_symbol = list->ll_symbol;
10021 }
10022
10023 /* Generate a symbol for the list, but only if we really want to emit
10024 it as a list. */
10025
10026 static inline void
10027 maybe_gen_llsym (dw_loc_list_ref list)
10028 {
10029 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10030 return;
10031
10032 gen_llsym (list);
10033 }
10034
10035 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10036 NULL, don't consider the size of the location expression. If we're not
10037 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10038 representation in *SIZEP. */
10039
10040 static bool
10041 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10042 {
10043 /* Don't output an entry that starts and ends at the same address. */
10044 if (strcmp (curr->begin, curr->end) == 0
10045 && curr->vbegin == curr->vend && !curr->force)
10046 return true;
10047
10048 if (!sizep)
10049 return false;
10050
10051 unsigned long size = size_of_locs (curr->expr);
10052
10053 /* If the expression is too large, drop it on the floor. We could
10054 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10055 in the expression, but >= 64KB expressions for a single value
10056 in a single range are unlikely to be very useful. */
10057 if (dwarf_version < 5 && size > 0xffff)
10058 return true;
10059
10060 *sizep = size;
10061
10062 return false;
10063 }
10064
10065 /* Output a view pair loclist entry for CURR, if it requires one. */
10066
10067 static void
10068 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10069 {
10070 if (!dwarf2out_locviews_in_loclist ())
10071 return;
10072
10073 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10074 return;
10075
10076 #ifdef DW_LLE_view_pair
10077 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10078
10079 if (dwarf2out_as_locview_support)
10080 {
10081 if (ZERO_VIEW_P (curr->vbegin))
10082 dw2_asm_output_data_uleb128 (0, "Location view begin");
10083 else
10084 {
10085 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10086 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10087 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10088 }
10089
10090 if (ZERO_VIEW_P (curr->vend))
10091 dw2_asm_output_data_uleb128 (0, "Location view end");
10092 else
10093 {
10094 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10095 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10096 dw2_asm_output_symname_uleb128 (label, "Location view end");
10097 }
10098 }
10099 else
10100 {
10101 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10102 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10103 }
10104 #endif /* DW_LLE_view_pair */
10105
10106 return;
10107 }
10108
10109 /* Output the location list given to us. */
10110
10111 static void
10112 output_loc_list (dw_loc_list_ref list_head)
10113 {
10114 int vcount = 0, lcount = 0;
10115
10116 if (list_head->emitted)
10117 return;
10118 list_head->emitted = true;
10119
10120 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10121 {
10122 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10123
10124 for (dw_loc_list_ref curr = list_head; curr != NULL;
10125 curr = curr->dw_loc_next)
10126 {
10127 unsigned long size;
10128
10129 if (skip_loc_list_entry (curr, &size))
10130 continue;
10131
10132 vcount++;
10133
10134 /* ?? dwarf_split_debug_info? */
10135 if (dwarf2out_as_locview_support)
10136 {
10137 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10138
10139 if (!ZERO_VIEW_P (curr->vbegin))
10140 {
10141 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10142 dw2_asm_output_symname_uleb128 (label,
10143 "View list begin (%s)",
10144 list_head->vl_symbol);
10145 }
10146 else
10147 dw2_asm_output_data_uleb128 (0,
10148 "View list begin (%s)",
10149 list_head->vl_symbol);
10150
10151 if (!ZERO_VIEW_P (curr->vend))
10152 {
10153 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10154 dw2_asm_output_symname_uleb128 (label,
10155 "View list end (%s)",
10156 list_head->vl_symbol);
10157 }
10158 else
10159 dw2_asm_output_data_uleb128 (0,
10160 "View list end (%s)",
10161 list_head->vl_symbol);
10162 }
10163 else
10164 {
10165 dw2_asm_output_data_uleb128 (curr->vbegin,
10166 "View list begin (%s)",
10167 list_head->vl_symbol);
10168 dw2_asm_output_data_uleb128 (curr->vend,
10169 "View list end (%s)",
10170 list_head->vl_symbol);
10171 }
10172 }
10173 }
10174
10175 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10176
10177 const char *last_section = NULL;
10178 const char *base_label = NULL;
10179
10180 /* Walk the location list, and output each range + expression. */
10181 for (dw_loc_list_ref curr = list_head; curr != NULL;
10182 curr = curr->dw_loc_next)
10183 {
10184 unsigned long size;
10185
10186 /* Skip this entry? If we skip it here, we must skip it in the
10187 view list above as well. */
10188 if (skip_loc_list_entry (curr, &size))
10189 continue;
10190
10191 lcount++;
10192
10193 if (dwarf_version >= 5)
10194 {
10195 if (dwarf_split_debug_info)
10196 {
10197 dwarf2out_maybe_output_loclist_view_pair (curr);
10198 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10199 uleb128 index into .debug_addr and uleb128 length. */
10200 dw2_asm_output_data (1, DW_LLE_startx_length,
10201 "DW_LLE_startx_length (%s)",
10202 list_head->ll_symbol);
10203 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10204 "Location list range start index "
10205 "(%s)", curr->begin);
10206 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10207 For that case we probably need to emit DW_LLE_startx_endx,
10208 but we'd need 2 .debug_addr entries rather than just one. */
10209 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10210 "Location list length (%s)",
10211 list_head->ll_symbol);
10212 }
10213 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10214 {
10215 dwarf2out_maybe_output_loclist_view_pair (curr);
10216 /* If all code is in .text section, the base address is
10217 already provided by the CU attributes. Use
10218 DW_LLE_offset_pair where both addresses are uleb128 encoded
10219 offsets against that base. */
10220 dw2_asm_output_data (1, DW_LLE_offset_pair,
10221 "DW_LLE_offset_pair (%s)",
10222 list_head->ll_symbol);
10223 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10224 "Location list begin address (%s)",
10225 list_head->ll_symbol);
10226 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10227 "Location list end address (%s)",
10228 list_head->ll_symbol);
10229 }
10230 else if (HAVE_AS_LEB128)
10231 {
10232 /* Otherwise, find out how many consecutive entries could share
10233 the same base entry. If just one, emit DW_LLE_start_length,
10234 otherwise emit DW_LLE_base_address for the base address
10235 followed by a series of DW_LLE_offset_pair. */
10236 if (last_section == NULL || curr->section != last_section)
10237 {
10238 dw_loc_list_ref curr2;
10239 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10240 curr2 = curr2->dw_loc_next)
10241 {
10242 if (strcmp (curr2->begin, curr2->end) == 0
10243 && !curr2->force)
10244 continue;
10245 break;
10246 }
10247 if (curr2 == NULL || curr->section != curr2->section)
10248 last_section = NULL;
10249 else
10250 {
10251 last_section = curr->section;
10252 base_label = curr->begin;
10253 dw2_asm_output_data (1, DW_LLE_base_address,
10254 "DW_LLE_base_address (%s)",
10255 list_head->ll_symbol);
10256 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10257 "Base address (%s)",
10258 list_head->ll_symbol);
10259 }
10260 }
10261 /* Only one entry with the same base address. Use
10262 DW_LLE_start_length with absolute address and uleb128
10263 length. */
10264 if (last_section == NULL)
10265 {
10266 dwarf2out_maybe_output_loclist_view_pair (curr);
10267 dw2_asm_output_data (1, DW_LLE_start_length,
10268 "DW_LLE_start_length (%s)",
10269 list_head->ll_symbol);
10270 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10271 "Location list begin address (%s)",
10272 list_head->ll_symbol);
10273 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10274 "Location list length "
10275 "(%s)", list_head->ll_symbol);
10276 }
10277 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10278 DW_LLE_base_address. */
10279 else
10280 {
10281 dwarf2out_maybe_output_loclist_view_pair (curr);
10282 dw2_asm_output_data (1, DW_LLE_offset_pair,
10283 "DW_LLE_offset_pair (%s)",
10284 list_head->ll_symbol);
10285 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10286 "Location list begin address "
10287 "(%s)", list_head->ll_symbol);
10288 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10289 "Location list end address "
10290 "(%s)", list_head->ll_symbol);
10291 }
10292 }
10293 /* The assembler does not support the .uleb128 directive. Emit
10294 DW_LLE_start_end with a pair of absolute addresses. */
10295 else
10296 {
10297 dwarf2out_maybe_output_loclist_view_pair (curr);
10298 dw2_asm_output_data (1, DW_LLE_start_end,
10299 "DW_LLE_start_end (%s)",
10300 list_head->ll_symbol);
10301 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10302 "Location list begin address (%s)",
10303 list_head->ll_symbol);
10304 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10305 "Location list end address (%s)",
10306 list_head->ll_symbol);
10307 }
10308 }
10309 else if (dwarf_split_debug_info)
10310 {
10311 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10312 and 4 byte length. */
10313 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10314 "Location list start/length entry (%s)",
10315 list_head->ll_symbol);
10316 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10317 "Location list range start index (%s)",
10318 curr->begin);
10319 /* The length field is 4 bytes. If we ever need to support
10320 an 8-byte length, we can add a new DW_LLE code or fall back
10321 to DW_LLE_GNU_start_end_entry. */
10322 dw2_asm_output_delta (4, curr->end, curr->begin,
10323 "Location list range length (%s)",
10324 list_head->ll_symbol);
10325 }
10326 else if (!have_multiple_function_sections)
10327 {
10328 /* Pair of relative addresses against start of text section. */
10329 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10330 "Location list begin address (%s)",
10331 list_head->ll_symbol);
10332 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10333 "Location list end address (%s)",
10334 list_head->ll_symbol);
10335 }
10336 else
10337 {
10338 /* Pair of absolute addresses. */
10339 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10340 "Location list begin address (%s)",
10341 list_head->ll_symbol);
10342 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10343 "Location list end address (%s)",
10344 list_head->ll_symbol);
10345 }
10346
10347 /* Output the block length for this list of location operations. */
10348 if (dwarf_version >= 5)
10349 dw2_asm_output_data_uleb128 (size, "Location expression size");
10350 else
10351 {
10352 gcc_assert (size <= 0xffff);
10353 dw2_asm_output_data (2, size, "Location expression size");
10354 }
10355
10356 output_loc_sequence (curr->expr, -1);
10357 }
10358
10359 /* And finally list termination. */
10360 if (dwarf_version >= 5)
10361 dw2_asm_output_data (1, DW_LLE_end_of_list,
10362 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10363 else if (dwarf_split_debug_info)
10364 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10365 "Location list terminator (%s)",
10366 list_head->ll_symbol);
10367 else
10368 {
10369 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10370 "Location list terminator begin (%s)",
10371 list_head->ll_symbol);
10372 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10373 "Location list terminator end (%s)",
10374 list_head->ll_symbol);
10375 }
10376
10377 gcc_assert (!list_head->vl_symbol
10378 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10379 }
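/* As a concrete picture of the common case (DWARF 5, everything in one text
   section, assembler leb128 support), each surviving range above comes out
   roughly as (labels illustrative):
     .byte    0x4                 DW_LLE_offset_pair
     .uleb128 .LVL3-.Ltext0       begin address, relative to the CU base
     .uleb128 .LVL5-.Ltext0       end address, relative to the CU base
     .uleb128 <size>              location expression size
     ...location expression bytes...
   and the list ends with a single DW_LLE_end_of_list (0) byte. */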
10380
10381 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10382 section. Emit a relocated reference if val_entry is NULL, otherwise,
10383 emit an indirect reference. */
10384
10385 static void
10386 output_range_list_offset (dw_attr_node *a)
10387 {
10388 const char *name = dwarf_attr_name (a->dw_attr);
10389
10390 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10391 {
10392 if (dwarf_version >= 5)
10393 {
10394 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10395 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10396 debug_ranges_section, "%s", name);
10397 }
10398 else
10399 {
10400 char *p = strchr (ranges_section_label, '\0');
10401 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10402 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10403 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10404 debug_ranges_section, "%s", name);
10405 *p = '\0';
10406 }
10407 }
10408 else if (dwarf_version >= 5)
10409 {
10410 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10411 gcc_assert (rnglist_idx);
10412 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10413 }
10414 else
10415 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10416 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10417 "%s (offset from %s)", name, ranges_section_label);
10418 }
10419
10420 /* Output the offset into the debug_loc section. */
10421
10422 static void
10423 output_loc_list_offset (dw_attr_node *a)
10424 {
10425 char *sym = AT_loc_list (a)->ll_symbol;
10426
10427 gcc_assert (sym);
10428 if (!dwarf_split_debug_info)
10429 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10430 "%s", dwarf_attr_name (a->dw_attr));
10431 else if (dwarf_version >= 5)
10432 {
10433 gcc_assert (AT_loc_list (a)->num_assigned);
10434 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10435 dwarf_attr_name (a->dw_attr),
10436 sym);
10437 }
10438 else
10439 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10440 "%s", dwarf_attr_name (a->dw_attr));
10441 }
10442
10443 /* Output the view list offset into the debug_loc section. */
10444
10445 static void
10446 output_view_list_offset (dw_attr_node *a)
10447 {
10448 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10449
10450 gcc_assert (sym);
10451 if (dwarf_split_debug_info)
10452 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10453 "%s", dwarf_attr_name (a->dw_attr));
10454 else
10455 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10456 "%s", dwarf_attr_name (a->dw_attr));
10457 }
10458
10459 /* Output an attribute's index or value appropriately. */
10460
10461 static void
10462 output_attr_index_or_value (dw_attr_node *a)
10463 {
10464 const char *name = dwarf_attr_name (a->dw_attr);
10465
10466 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10467 {
10468 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10469 return;
10470 }
10471 switch (AT_class (a))
10472 {
10473 case dw_val_class_addr:
10474 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10475 break;
10476 case dw_val_class_high_pc:
10477 case dw_val_class_lbl_id:
10478 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10479 break;
10480 default:
10481 gcc_unreachable ();
10482 }
10483 }
10484
10485 /* Output a type signature. */
10486
10487 static inline void
10488 output_signature (const char *sig, const char *name)
10489 {
10490 int i;
10491
10492 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10493 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10494 }
10495
10496 /* Output a discriminant value. */
10497
10498 static inline void
10499 output_discr_value (dw_discr_value *discr_value, const char *name)
10500 {
10501 if (discr_value->pos)
10502 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10503 else
10504 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10505 }
10506
10507 /* Output the DIE and its attributes. Called recursively to generate
10508 the definitions of each child DIE. */
10509
10510 static void
10511 output_die (dw_die_ref die)
10512 {
10513 dw_attr_node *a;
10514 dw_die_ref c;
10515 unsigned long size;
10516 unsigned ix;
10517
10518 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10519 (unsigned long)die->die_offset,
10520 dwarf_tag_name (die->die_tag));
10521
10522 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10523 {
10524 const char *name = dwarf_attr_name (a->dw_attr);
10525
10526 switch (AT_class (a))
10527 {
10528 case dw_val_class_addr:
10529 output_attr_index_or_value (a);
10530 break;
10531
10532 case dw_val_class_offset:
10533 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10534 "%s", name);
10535 break;
10536
10537 case dw_val_class_range_list:
10538 output_range_list_offset (a);
10539 break;
10540
10541 case dw_val_class_loc:
10542 size = size_of_locs (AT_loc (a));
10543
10544 /* Output the block length for this list of location operations. */
10545 if (dwarf_version >= 4)
10546 dw2_asm_output_data_uleb128 (size, "%s", name);
10547 else
10548 dw2_asm_output_data (constant_size (size), size, "%s", name);
10549
10550 output_loc_sequence (AT_loc (a), -1);
10551 break;
10552
10553 case dw_val_class_const:
10554 /* ??? It would be slightly more efficient to use a scheme like the one
10555 used for unsigned constants below, but gdb 4.x does not sign
10556 extend. Gdb 5.x does sign extend. */
10557 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10558 break;
10559
10560 case dw_val_class_unsigned_const:
10561 {
10562 int csize = constant_size (AT_unsigned (a));
10563 if (dwarf_version == 3
10564 && a->dw_attr == DW_AT_data_member_location
10565 && csize >= 4)
10566 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10567 else
10568 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10569 }
10570 break;
10571
10572 case dw_val_class_symview:
10573 {
10574 int vsize;
10575 if (symview_upper_bound <= 0xff)
10576 vsize = 1;
10577 else if (symview_upper_bound <= 0xffff)
10578 vsize = 2;
10579 else if (symview_upper_bound <= 0xffffffff)
10580 vsize = 4;
10581 else
10582 vsize = 8;
10583 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10584 "%s", name);
10585 }
10586 break;
10587
10588 case dw_val_class_const_implicit:
10589 if (flag_debug_asm)
10590 fprintf (asm_out_file, "\t\t\t%s %s ("
10591 HOST_WIDE_INT_PRINT_DEC ")\n",
10592 ASM_COMMENT_START, name, AT_int (a));
10593 break;
10594
10595 case dw_val_class_unsigned_const_implicit:
10596 if (flag_debug_asm)
10597 fprintf (asm_out_file, "\t\t\t%s %s ("
10598 HOST_WIDE_INT_PRINT_HEX ")\n",
10599 ASM_COMMENT_START, name, AT_unsigned (a));
10600 break;
10601
10602 case dw_val_class_const_double:
10603 {
10604 unsigned HOST_WIDE_INT first, second;
10605
10606 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10607 dw2_asm_output_data (1,
10608 HOST_BITS_PER_DOUBLE_INT
10609 / HOST_BITS_PER_CHAR,
10610 NULL);
10611
10612 if (WORDS_BIG_ENDIAN)
10613 {
10614 first = a->dw_attr_val.v.val_double.high;
10615 second = a->dw_attr_val.v.val_double.low;
10616 }
10617 else
10618 {
10619 first = a->dw_attr_val.v.val_double.low;
10620 second = a->dw_attr_val.v.val_double.high;
10621 }
10622
10623 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10624 first, "%s", name);
10625 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10626 second, NULL);
10627 }
10628 break;
10629
10630 case dw_val_class_wide_int:
10631 {
10632 int i;
10633 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10634 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10635 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10636 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10637 * l, NULL);
10638
10639 if (WORDS_BIG_ENDIAN)
10640 for (i = len - 1; i >= 0; --i)
10641 {
10642 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10643 "%s", name);
10644 name = "";
10645 }
10646 else
10647 for (i = 0; i < len; ++i)
10648 {
10649 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10650 "%s", name);
10651 name = "";
10652 }
10653 }
10654 break;
10655
10656 case dw_val_class_vec:
10657 {
10658 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10659 unsigned int len = a->dw_attr_val.v.val_vec.length;
10660 unsigned int i;
10661 unsigned char *p;
10662
10663 dw2_asm_output_data (constant_size (len * elt_size),
10664 len * elt_size, "%s", name);
10665 if (elt_size > sizeof (HOST_WIDE_INT))
10666 {
10667 elt_size /= 2;
10668 len *= 2;
10669 }
10670 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10671 i < len;
10672 i++, p += elt_size)
10673 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10674 "fp or vector constant word %u", i);
10675 break;
10676 }
10677
10678 case dw_val_class_flag:
10679 if (dwarf_version >= 4)
10680 {
10681 /* Currently all add_AT_flag calls pass in 1 as last argument,
10682 so DW_FORM_flag_present can be used. If that ever changes,
10683 we'll need to use DW_FORM_flag and have some optimization
10684 in build_abbrev_table that will change those to
10685 DW_FORM_flag_present if it is set to 1 in all DIEs using
10686 the same abbrev entry. */
10687 gcc_assert (AT_flag (a) == 1);
10688 if (flag_debug_asm)
10689 fprintf (asm_out_file, "\t\t\t%s %s\n",
10690 ASM_COMMENT_START, name);
10691 break;
10692 }
10693 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10694 break;
10695
10696 case dw_val_class_loc_list:
10697 output_loc_list_offset (a);
10698 break;
10699
10700 case dw_val_class_view_list:
10701 output_view_list_offset (a);
10702 break;
10703
10704 case dw_val_class_die_ref:
10705 if (AT_ref_external (a))
10706 {
10707 if (AT_ref (a)->comdat_type_p)
10708 {
10709 comdat_type_node *type_node
10710 = AT_ref (a)->die_id.die_type_node;
10711
10712 gcc_assert (type_node);
10713 output_signature (type_node->signature, name);
10714 }
10715 else
10716 {
10717 const char *sym = AT_ref (a)->die_id.die_symbol;
10718 int size;
10719
10720 gcc_assert (sym);
10721 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10722 length, whereas in DWARF3 it's always sized as an
10723 offset. */
10724 if (dwarf_version == 2)
10725 size = DWARF2_ADDR_SIZE;
10726 else
10727 size = DWARF_OFFSET_SIZE;
10728 /* ??? We cannot unconditionally output die_offset if
10729 non-zero - others might create references to those
10730 DIEs via symbols.
10731 And we do not clear its DIE offset after outputting it
10732 (and the label refers to the actual DIEs, not the
10733 DWARF CU unit header, which would be the correct anchor
10734 when using label + offset).
10735 ??? This is the reason for the with_offset flag. */
10736 if (AT_ref (a)->with_offset)
10737 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10738 debug_info_section, "%s", name);
10739 else
10740 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10741 name);
10742 }
10743 }
10744 else
10745 {
10746 gcc_assert (AT_ref (a)->die_offset);
10747 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10748 "%s", name);
10749 }
10750 break;
10751
10752 case dw_val_class_fde_ref:
10753 {
10754 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10755
10756 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10757 a->dw_attr_val.v.val_fde_index * 2);
10758 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10759 "%s", name);
10760 }
10761 break;
10762
10763 case dw_val_class_vms_delta:
10764 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10765 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10766 AT_vms_delta2 (a), AT_vms_delta1 (a),
10767 "%s", name);
10768 #else
10769 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10770 AT_vms_delta2 (a), AT_vms_delta1 (a),
10771 "%s", name);
10772 #endif
10773 break;
10774
10775 case dw_val_class_lbl_id:
10776 output_attr_index_or_value (a);
10777 break;
10778
10779 case dw_val_class_lineptr:
10780 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10781 debug_line_section, "%s", name);
10782 break;
10783
10784 case dw_val_class_macptr:
10785 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10786 debug_macinfo_section, "%s", name);
10787 break;
10788
10789 case dw_val_class_loclistsptr:
10790 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10791 debug_loc_section, "%s", name);
10792 break;
10793
10794 case dw_val_class_str:
10795 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10796 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10797 a->dw_attr_val.v.val_str->label,
10798 debug_str_section,
10799 "%s: \"%s\"", name, AT_string (a));
10800 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10801 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10802 a->dw_attr_val.v.val_str->label,
10803 debug_line_str_section,
10804 "%s: \"%s\"", name, AT_string (a));
10805 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10806 dw2_asm_output_data_uleb128 (AT_index (a),
10807 "%s: \"%s\"", name, AT_string (a));
10808 else
10809 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10810 break;
10811
10812 case dw_val_class_file:
10813 {
10814 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10815
10816 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10817 a->dw_attr_val.v.val_file->filename);
10818 break;
10819 }
10820
10821 case dw_val_class_file_implicit:
10822 if (flag_debug_asm)
10823 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10824 ASM_COMMENT_START, name,
10825 maybe_emit_file (a->dw_attr_val.v.val_file),
10826 a->dw_attr_val.v.val_file->filename);
10827 break;
10828
10829 case dw_val_class_data8:
10830 {
10831 int i;
10832
10833 for (i = 0; i < 8; i++)
10834 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10835 i == 0 ? "%s" : NULL, name);
10836 break;
10837 }
10838
10839 case dw_val_class_high_pc:
10840 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10841 get_AT_low_pc (die), "DW_AT_high_pc");
10842 break;
10843
10844 case dw_val_class_discr_value:
10845 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10846 break;
10847
10848 case dw_val_class_discr_list:
10849 {
10850 dw_discr_list_ref list = AT_discr_list (a);
10851 const int size = size_of_discr_list (list);
10852
10853 /* This is a block, so output its length first. */
10854 dw2_asm_output_data (constant_size (size), size,
10855 "%s: block size", name);
10856
10857 for (; list != NULL; list = list->dw_discr_next)
10858 {
10859 /* One byte for the discriminant value descriptor, and then as
10860 many LEB128 numbers as required. */
10861 if (list->dw_discr_range)
10862 dw2_asm_output_data (1, DW_DSC_range,
10863 "%s: DW_DSC_range", name);
10864 else
10865 dw2_asm_output_data (1, DW_DSC_label,
10866 "%s: DW_DSC_label", name);
10867
10868 output_discr_value (&list->dw_discr_lower_bound, name);
10869 if (list->dw_discr_range)
10870 output_discr_value (&list->dw_discr_upper_bound, name);
10871 }
10872 break;
10873 }
10874
10875 default:
10876 gcc_unreachable ();
10877 }
10878 }
10879
10880 FOR_EACH_CHILD (die, c, output_die (c));
10881
10882 /* Add null byte to terminate sibling list. */
10883 if (die->die_child != NULL)
10884 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10885 (unsigned long) die->die_offset);
10886 }
10887
10888 /* Output the dwarf version number. */
10889
10890 static void
10891 output_dwarf_version ()
10892 {
10893 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10894 views in loclist. That will change eventually. */
10895 if (dwarf_version == 6)
10896 {
10897 static bool once;
10898 if (!once)
10899 {
10900 warning (0,
10901 "-gdwarf-6 is output as version 5 with incompatibilities");
10902 once = true;
10903 }
10904 dw2_asm_output_data (2, 5, "DWARF version number");
10905 }
10906 else
10907 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10908 }
10909
10910 /* Output the compilation unit that appears at the beginning of the
10911 .debug_info section, and precedes the DIE descriptions. */
10912
10913 static void
10914 output_compilation_unit_header (enum dwarf_unit_type ut)
10915 {
10916 if (!XCOFF_DEBUGGING_INFO)
10917 {
10918 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10919 dw2_asm_output_data (4, 0xffffffff,
10920 "Initial length escape value indicating 64-bit DWARF extension");
10921 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10922 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10923 "Length of Compilation Unit Info");
10924 }
10925
10926 output_dwarf_version ();
10927 if (dwarf_version >= 5)
10928 {
10929 const char *name;
10930 switch (ut)
10931 {
10932 case DW_UT_compile: name = "DW_UT_compile"; break;
10933 case DW_UT_type: name = "DW_UT_type"; break;
10934 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10935 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10936 default: gcc_unreachable ();
10937 }
10938 dw2_asm_output_data (1, ut, "%s", name);
10939 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10940 }
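  /* In DWARF 5 the unit type and address size were emitted above, before
     the abbrev offset; for earlier versions the address size follows the
     abbrev offset below.  */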
10941 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10942 debug_abbrev_section,
10943 "Offset Into Abbrev. Section");
10944 if (dwarf_version < 5)
10945 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10946 }
10947
10948 /* Output the compilation unit DIE and its children. */
10949
10950 static void
10951 output_comp_unit (dw_die_ref die, int output_if_empty,
10952 const unsigned char *dwo_id)
10953 {
10954 const char *secname, *oldsym;
10955 char *tmp;
10956
10957 /* Unless we are outputting main CU, we may throw away empty ones. */
10958 if (!output_if_empty && die->die_child == NULL)
10959 return;
10960
10961 /* Even if there are no children of this DIE, we must output the information
10962 about the compilation unit. Otherwise, on an empty translation unit, we
10963 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10964 will then complain when examining the file. First mark all the DIEs in
10965 this CU so we know which get local refs. */
10966 mark_dies (die);
10967
10968 external_ref_hash_type *extern_map = optimize_external_refs (die);
10969
10970 /* For now, optimize only the main CU, in order to optimize the rest
10971 we'd need to see all of them earlier. Leave the rest for post-linking
10972 tools like DWZ. */
10973 if (die == comp_unit_die ())
10974 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10975
10976 build_abbrev_table (die, extern_map);
10977
10978 optimize_abbrev_table ();
10979
10980 delete extern_map;
10981
10982 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10983 next_die_offset = (dwo_id
10984 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10985 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10986 calc_die_sizes (die);
10987
10988 oldsym = die->die_id.die_symbol;
10989 if (oldsym && die->comdat_type_p)
10990 {
10991 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10992
10993 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10994 secname = tmp;
10995 die->die_id.die_symbol = NULL;
10996 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10997 }
10998 else
10999 {
11000 switch_to_section (debug_info_section);
11001 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11002 info_section_emitted = true;
11003 }
11004
11005 /* For LTO cross unit DIE refs we want a symbol on the start of the
11006 debuginfo section, not on the CU DIE. */
11007 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11008 {
11009 /* ??? No way to get visibility assembled without a decl. */
11010 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11011 get_identifier (oldsym), char_type_node);
11012 TREE_PUBLIC (decl) = true;
11013 TREE_STATIC (decl) = true;
11014 DECL_ARTIFICIAL (decl) = true;
11015 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11016 DECL_VISIBILITY_SPECIFIED (decl) = true;
11017 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11018 #ifdef ASM_WEAKEN_LABEL
11019 /* We prefer a .weak because that handles duplicates from duplicate
11020 archive members in a graceful way. */
11021 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11022 #else
11023 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11024 #endif
11025 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11026 }
11027
11028 /* Output debugging information. */
11029 output_compilation_unit_header (dwo_id
11030 ? DW_UT_split_compile : DW_UT_compile);
11031 if (dwarf_version >= 5)
11032 {
11033 if (dwo_id != NULL)
11034 for (int i = 0; i < 8; i++)
11035 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11036 }
11037 output_die (die);
11038
11039 /* Leave the marks on the main CU, so we can check them in
11040 output_pubnames. */
11041 if (oldsym)
11042 {
11043 unmark_dies (die);
11044 die->die_id.die_symbol = oldsym;
11045 }
11046 }
11047
11048 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11049 and .debug_pubtypes. This is configured per-target, but can be
11050 overridden by the -gpubnames or -gno-pubnames options. */
11051
11052 static inline bool
11053 want_pubnames (void)
11054 {
11055 if (debug_info_level <= DINFO_LEVEL_TERSE)
11056 return false;
11057 if (debug_generate_pub_sections != -1)
11058 return debug_generate_pub_sections;
11059 return targetm.want_debug_pub_sections;
11060 }
11061
11062 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11063
11064 static void
11065 add_AT_pubnames (dw_die_ref die)
11066 {
11067 if (want_pubnames ())
11068 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11069 }
11070
11071 /* Add a string attribute value to a skeleton DIE. */
11072
11073 static inline void
11074 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11075 const char *str)
11076 {
11077 dw_attr_node attr;
11078 struct indirect_string_node *node;
11079
11080 if (! skeleton_debug_str_hash)
11081 skeleton_debug_str_hash
11082 = hash_table<indirect_string_hasher>::create_ggc (10);
11083
11084 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11085 find_string_form (node);
11086 if (node->form == DW_FORM_GNU_str_index)
11087 node->form = DW_FORM_strp;
11088
11089 attr.dw_attr = attr_kind;
11090 attr.dw_attr_val.val_class = dw_val_class_str;
11091 attr.dw_attr_val.val_entry = NULL;
11092 attr.dw_attr_val.v.val_str = node;
11093 add_dwarf_attr (die, &attr);
11094 }
11095
11096 /* Helper function to generate top-level dies for skeleton debug_info and
11097 debug_types. */
11098
11099 static void
11100 add_top_level_skeleton_die_attrs (dw_die_ref die)
11101 {
11102 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11103 const char *comp_dir = comp_dir_string ();
11104
11105 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11106 if (comp_dir != NULL)
11107 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11108 add_AT_pubnames (die);
11109 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
11110 }
11111
11112 /* Output skeleton debug sections that point to the dwo file. */
11113
11114 static void
11115 output_skeleton_debug_sections (dw_die_ref comp_unit,
11116 const unsigned char *dwo_id)
11117 {
11118 /* These attributes will be found in the full debug_info section. */
11119 remove_AT (comp_unit, DW_AT_producer);
11120 remove_AT (comp_unit, DW_AT_language);
11121
11122 switch_to_section (debug_skeleton_info_section);
11123 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11124
11125	  /* Produce the skeleton compilation-unit header.  This one differs
11126	     enough from a normal CU header that it's better not to call
11127	     output_compilation_unit_header ().  */
11128 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11129 dw2_asm_output_data (4, 0xffffffff,
11130 "Initial length escape value indicating 64-bit "
11131 "DWARF extension");
11132
11133 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11134 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11135 - DWARF_INITIAL_LENGTH_SIZE
11136 + size_of_die (comp_unit),
11137 "Length of Compilation Unit Info");
11138 output_dwarf_version ();
11139 if (dwarf_version >= 5)
11140 {
11141 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11142 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11143 }
11144 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11145 debug_skeleton_abbrev_section,
11146 "Offset Into Abbrev. Section");
11147 if (dwarf_version < 5)
11148 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11149 else
11150 for (int i = 0; i < 8; i++)
11151 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11152
11153 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11154 output_die (comp_unit);
11155
11156 /* Build the skeleton debug_abbrev section. */
11157 switch_to_section (debug_skeleton_abbrev_section);
11158 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11159
11160 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11161
11162 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11163 }
11164
11165 /* Output a comdat type unit DIE and its children. */
11166
11167 static void
11168 output_comdat_type_unit (comdat_type_node *node)
11169 {
11170 const char *secname;
11171 char *tmp;
11172 int i;
11173 #if defined (OBJECT_FORMAT_ELF)
11174 tree comdat_key;
11175 #endif
11176
11177 /* First mark all the DIEs in this CU so we know which get local refs. */
11178 mark_dies (node->root_die);
11179
11180 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11181
11182 build_abbrev_table (node->root_die, extern_map);
11183
11184 delete extern_map;
11185 extern_map = NULL;
11186
11187 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11188 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11189 calc_die_sizes (node->root_die);
11190
11191 #if defined (OBJECT_FORMAT_ELF)
11192 if (dwarf_version >= 5)
11193 {
11194 if (!dwarf_split_debug_info)
11195 secname = ".debug_info";
11196 else
11197 secname = ".debug_info.dwo";
11198 }
11199 else if (!dwarf_split_debug_info)
11200 secname = ".debug_types";
11201 else
11202 secname = ".debug_types.dwo";
11203
11204 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11205 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11206 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11207 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11208 comdat_key = get_identifier (tmp);
11209 targetm.asm_out.named_section (secname,
11210 SECTION_DEBUG | SECTION_LINKONCE,
11211 comdat_key);
11212 #else
11213 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11214 sprintf (tmp, (dwarf_version >= 5
11215 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11216 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11217 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11218 secname = tmp;
11219 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11220 #endif
11221
11222 /* Output debugging information. */
11223 output_compilation_unit_header (dwarf_split_debug_info
11224 ? DW_UT_split_type : DW_UT_type);
11225 output_signature (node->signature, "Type Signature");
11226 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11227 "Offset to Type DIE");
11228 output_die (node->root_die);
11229
11230 unmark_dies (node->root_die);
11231 }
11232
11233 /* Return the DWARF2/3 pubname associated with a decl. */
11234
11235 static const char *
11236 dwarf2_name (tree decl, int scope)
11237 {
11238 if (DECL_NAMELESS (decl))
11239 return NULL;
11240 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11241 }
11242
11243 /* Add a new entry to .debug_pubnames if appropriate. */
11244
11245 static void
11246 add_pubname_string (const char *str, dw_die_ref die)
11247 {
11248 pubname_entry e;
11249
11250 e.die = die;
11251 e.name = xstrdup (str);
11252 vec_safe_push (pubname_table, e);
11253 }
11254
11255 static void
11256 add_pubname (tree decl, dw_die_ref die)
11257 {
11258 if (!want_pubnames ())
11259 return;
11260
11261 /* Don't add items to the table when we expect that the consumer will have
11262 just read the enclosing die. For example, if the consumer is looking at a
11263 class_member, it will either be inside the class already, or will have just
11264 looked up the class to find the member. Either way, searching the class is
11265 faster than searching the index. */
11266 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11267 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11268 {
11269 const char *name = dwarf2_name (decl, 1);
11270
11271 if (name)
11272 add_pubname_string (name, die);
11273 }
11274 }
11275
11276 /* Add an enumerator to the pubnames section. */
11277
11278 static void
11279 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11280 {
11281 pubname_entry e;
11282
11283 gcc_assert (scope_name);
11284 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11285 e.die = die;
11286 vec_safe_push (pubname_table, e);
11287 }
11288
11289 /* Add a new entry to .debug_pubtypes if appropriate. */
11290
11291 static void
11292 add_pubtype (tree decl, dw_die_ref die)
11293 {
11294 pubname_entry e;
11295
11296 if (!want_pubnames ())
11297 return;
11298
11299 if ((TREE_PUBLIC (decl)
11300 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11301 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11302 {
11303 tree scope = NULL;
11304 const char *scope_name = "";
11305 const char *sep = is_cxx () ? "::" : ".";
11306 const char *name;
11307
11308 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11309 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11310 {
11311 scope_name = lang_hooks.dwarf_name (scope, 1);
11312 if (scope_name != NULL && scope_name[0] != '\0')
11313 scope_name = concat (scope_name, sep, NULL);
11314 else
11315 scope_name = "";
11316 }
11317
11318 if (TYPE_P (decl))
11319 name = type_tag (decl);
11320 else
11321 name = lang_hooks.dwarf_name (decl, 1);
11322
11323 /* If we don't have a name for the type, there's no point in adding
11324 it to the table. */
11325 if (name != NULL && name[0] != '\0')
11326 {
11327 e.die = die;
11328 e.name = concat (scope_name, name, NULL);
11329 vec_safe_push (pubtype_table, e);
11330 }
11331
11332 /* Although it might be more consistent to add the pubinfo for the
11333 enumerators as their dies are created, they should only be added if the
11334 enum type meets the criteria above. So rather than re-check the parent
11335 enum type whenever an enumerator die is created, just output them all
11336 here. This isn't protected by the name conditional because anonymous
11337 enums don't have names. */
11338 if (die->die_tag == DW_TAG_enumeration_type)
11339 {
11340 dw_die_ref c;
11341
11342 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11343 }
11344 }
11345 }
11346
11347 /* Output a single entry in the pubnames table. */
11348
11349 static void
11350 output_pubname (dw_offset die_offset, pubname_entry *entry)
11351 {
11352 dw_die_ref die = entry->die;
11353 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11354
11355 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11356
11357 if (debug_generate_pub_sections == 2)
11358 {
11359 /* This logic follows gdb's method for determining the value of the flag
11360 byte. */
11361 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11362 switch (die->die_tag)
11363 {
11364 case DW_TAG_typedef:
11365 case DW_TAG_base_type:
11366 case DW_TAG_subrange_type:
11367 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11368 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11369 break;
11370 case DW_TAG_enumerator:
11371 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11372 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11373 if (!is_cxx ())
11374 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11375 break;
11376 case DW_TAG_subprogram:
11377 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11378 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11379 if (!is_ada ())
11380 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11381 break;
11382 case DW_TAG_constant:
11383 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11384 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11385 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11386 break;
11387 case DW_TAG_variable:
11388 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11389 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11390 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11391 break;
11392 case DW_TAG_namespace:
11393 case DW_TAG_imported_declaration:
11394 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11395 break;
11396 case DW_TAG_class_type:
11397 case DW_TAG_interface_type:
11398 case DW_TAG_structure_type:
11399 case DW_TAG_union_type:
11400 case DW_TAG_enumeration_type:
11401 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11402 if (!is_cxx ())
11403 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11404 break;
11405 default:
11406 /* An unusual tag. Leave the flag-byte empty. */
11407 break;
11408 }
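      /* The kind and static bits set above live above the CU-index bits of
	 the 32-bit gdb-index word, so shifting by GDB_INDEX_CU_BITSIZE
	 leaves just the one flag byte emitted here.  */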
11409 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11410 "GDB-index flags");
11411 }
11412
11413 dw2_asm_output_nstring (entry->name, -1, "external name");
11414 }
11415
11416
11417 /* Output the public names table used to speed up access to externally
11418 visible names; or the public types table used to find type definitions. */
11419
11420 static void
11421 output_pubnames (vec<pubname_entry, va_gc> *names)
11422 {
11423 unsigned i;
11424 unsigned long pubnames_length = size_of_pubnames (names);
11425 pubname_entry *pub;
11426
11427 if (!XCOFF_DEBUGGING_INFO)
11428 {
11429 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11430 dw2_asm_output_data (4, 0xffffffff,
11431 "Initial length escape value indicating 64-bit DWARF extension");
11432 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11433 "Pub Info Length");
11434 }
11435
11436 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11437 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11438
11439 if (dwarf_split_debug_info)
11440 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11441 debug_skeleton_info_section,
11442 "Offset of Compilation Unit Info");
11443 else
11444 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11445 debug_info_section,
11446 "Offset of Compilation Unit Info");
11447 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11448 "Compilation Unit Length");
11449
11450 FOR_EACH_VEC_ELT (*names, i, pub)
11451 {
11452 if (include_pubname_in_output (names, pub))
11453 {
11454 dw_offset die_offset = pub->die->die_offset;
11455
11456 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11457 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11458 gcc_assert (pub->die->die_mark);
11459
11460 /* If we're putting types in their own .debug_types sections,
11461 the .debug_pubtypes table will still point to the compile
11462 unit (not the type unit), so we want to use the offset of
11463 the skeleton DIE (if there is one). */
11464 if (pub->die->comdat_type_p && names == pubtype_table)
11465 {
11466 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11467
11468 if (type_node != NULL)
11469 die_offset = (type_node->skeleton_die != NULL
11470 ? type_node->skeleton_die->die_offset
11471 : comp_unit_die ()->die_offset);
11472 }
11473
11474 output_pubname (die_offset, pub);
11475 }
11476 }
11477
11478 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11479 }
11480
11481 /* Output public names and types tables if necessary. */
11482
11483 static void
11484 output_pubtables (void)
11485 {
11486 if (!want_pubnames () || !info_section_emitted)
11487 return;
11488
11489 switch_to_section (debug_pubnames_section);
11490 output_pubnames (pubname_table);
11491 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11492 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11493 simply won't look for the section. */
11494 switch_to_section (debug_pubtypes_section);
11495 output_pubnames (pubtype_table);
11496 }
11497
11498
11499 /* Output the information that goes into the .debug_aranges table.
11500 Namely, define the beginning and ending address range of the
11501 text section generated for this compilation unit. */
11502
11503 static void
11504 output_aranges (void)
11505 {
11506 unsigned i;
11507 unsigned long aranges_length = size_of_aranges ();
11508
11509 if (!XCOFF_DEBUGGING_INFO)
11510 {
11511 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11512 dw2_asm_output_data (4, 0xffffffff,
11513 "Initial length escape value indicating 64-bit DWARF extension");
11514 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11515 "Length of Address Ranges Info");
11516 }
11517
11518 /* Version number for aranges is still 2, even up to DWARF5. */
11519 dw2_asm_output_data (2, 2, "DWARF aranges version");
11520 if (dwarf_split_debug_info)
11521 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11522 debug_skeleton_info_section,
11523 "Offset of Compilation Unit Info");
11524 else
11525 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11526 debug_info_section,
11527 "Offset of Compilation Unit Info");
11528 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11529 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11530
11531 /* We need to align to twice the pointer size here. */
11532 if (DWARF_ARANGES_PAD_SIZE)
11533 {
11534      /* Pad using 2-byte words so that padding is correct for any
11535	 pointer size.  */
11536 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11537 2 * DWARF2_ADDR_SIZE);
11538 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11539 dw2_asm_output_data (2, 0, NULL);
11540 }
11541
11542 /* It is necessary not to output these entries if the sections were
11543 not used; if the sections were not used, the length will be 0 and
11544 the address may end up as 0 if the section is discarded by ld
11545 --gc-sections, leaving an invalid (0, 0) entry that can be
11546 confused with the terminator. */
11547 if (text_section_used)
11548 {
11549 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11550 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11551 text_section_label, "Length");
11552 }
11553 if (cold_text_section_used)
11554 {
11555 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11556 "Address");
11557 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11558 cold_text_section_label, "Length");
11559 }
11560
11561 if (have_multiple_function_sections)
11562 {
11563 unsigned fde_idx;
11564 dw_fde_ref fde;
11565
11566 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11567 {
11568 if (DECL_IGNORED_P (fde->decl))
11569 continue;
11570 if (!fde->in_std_section)
11571 {
11572 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11573 "Address");
11574 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11575 fde->dw_fde_begin, "Length");
11576 }
11577 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11578 {
11579 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11580 "Address");
11581 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11582 fde->dw_fde_second_begin, "Length");
11583 }
11584 }
11585 }
11586
11587 /* Output the terminator words. */
11588 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11589 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11590 }
11591
11592 /* Add a new entry to .debug_ranges. Return its index into
11593 ranges_table vector. */
11594
11595 static unsigned int
11596 add_ranges_num (int num, bool maybe_new_sec)
11597 {
11598 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11599 vec_safe_push (ranges_table, r);
11600 return vec_safe_length (ranges_table) - 1;
11601 }
11602
11603 /* Add a new entry to .debug_ranges corresponding to a block, or a
11604 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11605 this entry might be in a different section from previous range. */
11606
11607 static unsigned int
11608 add_ranges (const_tree block, bool maybe_new_sec)
11609 {
11610 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11611 }
11612
11613 /* Note that (*rnglist_table)[offset] is either a head of a rnglist
11614 chain, or middle entry of a chain that will be directly referred to. */
11615
11616 static void
11617 note_rnglist_head (unsigned int offset)
11618 {
11619 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11620 return;
11621 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11622 }
11623
11624 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11625 When using dwarf_split_debug_info, address attributes in dies destined
11626 for the final executable should be direct references--setting the
11627 parameter force_direct ensures this behavior. */
11628
11629 static void
11630 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11631 bool *added, bool force_direct)
11632 {
11633 unsigned int in_use = vec_safe_length (ranges_by_label);
11634 unsigned int offset;
11635 dw_ranges_by_label rbl = { begin, end };
11636 vec_safe_push (ranges_by_label, rbl);
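  /* Label pairs are encoded as negative numbers: the entry just pushed at
     index IN_USE becomes -IN_USE - 1, and output_ranges/output_rnglists
     recover the index as -num - 1.  */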
11637 offset = add_ranges_num (-(int)in_use - 1, true);
11638 if (!*added)
11639 {
11640 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11641 *added = true;
11642 note_rnglist_head (offset);
11643 }
11644 }
11645
11646 /* Emit .debug_ranges section. */
11647
11648 static void
11649 output_ranges (void)
11650 {
11651 unsigned i;
11652 static const char *const start_fmt = "Offset %#x";
11653 const char *fmt = start_fmt;
11654 dw_ranges *r;
11655
11656 switch_to_section (debug_ranges_section);
11657 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11658 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11659 {
11660 int block_num = r->num;
11661
11662 if (block_num > 0)
11663 {
11664 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11665 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11666
11667 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11668 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11669
11670 /* If all code is in the text section, then the compilation
11671 unit base address defaults to DW_AT_low_pc, which is the
11672 base of the text section. */
11673 if (!have_multiple_function_sections)
11674 {
11675 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11676 text_section_label,
11677 fmt, i * 2 * DWARF2_ADDR_SIZE);
11678 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11679 text_section_label, NULL);
11680 }
11681
11682 /* Otherwise, the compilation unit base address is zero,
11683 which allows us to use absolute addresses, and not worry
11684 about whether the target supports cross-section
11685 arithmetic. */
11686 else
11687 {
11688 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11689 fmt, i * 2 * DWARF2_ADDR_SIZE);
11690 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11691 }
11692
11693 fmt = NULL;
11694 }
11695
11696 /* Negative block_num stands for an index into ranges_by_label. */
11697 else if (block_num < 0)
11698 {
11699 int lab_idx = - block_num - 1;
11700
11701 if (!have_multiple_function_sections)
11702 {
11703 gcc_unreachable ();
11704 #if 0
11705 /* If we ever use add_ranges_by_labels () for a single
11706 function section, all we have to do is to take out
11707 the #if 0 above. */
11708 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11709 (*ranges_by_label)[lab_idx].begin,
11710 text_section_label,
11711 fmt, i * 2 * DWARF2_ADDR_SIZE);
11712 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11713 (*ranges_by_label)[lab_idx].end,
11714 text_section_label, NULL);
11715 #endif
11716 }
11717 else
11718 {
11719 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11720 (*ranges_by_label)[lab_idx].begin,
11721 fmt, i * 2 * DWARF2_ADDR_SIZE);
11722 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11723 (*ranges_by_label)[lab_idx].end,
11724 NULL);
11725 }
11726 }
11727 else
11728 {
11729 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11730 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11731 fmt = start_fmt;
11732 }
11733 }
11734 }
11735
11736 /* Non-zero if .debug_line_str should be used for .debug_line section
11737 strings or strings that are likely shareable with those. */
11738 #define DWARF5_USE_DEBUG_LINE_STR \
11739 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11740 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11741 /* FIXME: there is no .debug_line_str.dwo section, \
11742 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11743 && !dwarf_split_debug_info)
11744
11745 /* Assign .debug_rnglists indexes. */
11746
11747 static void
11748 index_rnglists (void)
11749 {
11750 unsigned i;
11751 dw_ranges *r;
11752
11753 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11754 if (r->label)
11755 r->idx = rnglist_idx++;
11756 }
11757
11758 /* Emit .debug_rnglists section. */
11759
11760 static void
11761 output_rnglists (unsigned generation)
11762 {
11763 unsigned i;
11764 dw_ranges *r;
11765 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11766 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11767 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11768
11769 switch_to_section (debug_ranges_section);
11770 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11771 /* There are up to 4 unique ranges labels per generation.
11772 See also init_sections_and_labels. */
11773 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11774 2 + generation * 4);
11775 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11776 3 + generation * 4);
11777 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11778 dw2_asm_output_data (4, 0xffffffff,
11779 "Initial length escape value indicating "
11780 "64-bit DWARF extension");
11781 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11782 "Length of Range Lists");
11783 ASM_OUTPUT_LABEL (asm_out_file, l1);
11784 output_dwarf_version ();
11785 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11786 dw2_asm_output_data (1, 0, "Segment Size");
11787 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11788 about relocation sizes and primarily care about the size of .debug*
11789 sections in linked shared libraries and executables, then
11790 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11791 into it are usually larger than just DW_FORM_sec_offset offsets
11792 into the .debug_rnglists section. */
11793 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11794 "Offset Entry Count");
11795 if (dwarf_split_debug_info)
11796 {
11797 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11798 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11799 if (r->label)
11800 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11801 ranges_base_label, NULL);
11802 }
11803
11804 const char *lab = "";
11805 unsigned int len = vec_safe_length (ranges_table);
11806 const char *base = NULL;
11807 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11808 {
11809 int block_num = r->num;
11810
11811 if (r->label)
11812 {
11813 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11814 lab = r->label;
11815 }
11816 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11817 base = NULL;
11818 if (block_num > 0)
11819 {
11820 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11821 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11822
11823 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11824 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11825
11826 if (HAVE_AS_LEB128)
11827 {
11828 /* If all code is in the text section, then the compilation
11829 unit base address defaults to DW_AT_low_pc, which is the
11830 base of the text section. */
11831 if (!have_multiple_function_sections)
11832 {
11833 dw2_asm_output_data (1, DW_RLE_offset_pair,
11834 "DW_RLE_offset_pair (%s)", lab);
11835 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11836 "Range begin address (%s)", lab);
11837 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11838 "Range end address (%s)", lab);
11839 continue;
11840 }
11841 if (base == NULL)
11842 {
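		  /* Peek at the following entry: emitting DW_RLE_base_address
		     only pays off if at least the next range stays in the
		     same section and can reuse the base for offset pairs.  */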
11843 dw_ranges *r2 = NULL;
11844 if (i < len - 1)
11845 r2 = &(*ranges_table)[i + 1];
11846 if (r2
11847 && r2->num != 0
11848 && r2->label == NULL
11849 && !r2->maybe_new_sec)
11850 {
11851 dw2_asm_output_data (1, DW_RLE_base_address,
11852 "DW_RLE_base_address (%s)", lab);
11853 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11854 "Base address (%s)", lab);
11855 strcpy (basebuf, blabel);
11856 base = basebuf;
11857 }
11858 }
11859 if (base)
11860 {
11861 dw2_asm_output_data (1, DW_RLE_offset_pair,
11862 "DW_RLE_offset_pair (%s)", lab);
11863 dw2_asm_output_delta_uleb128 (blabel, base,
11864 "Range begin address (%s)", lab);
11865 dw2_asm_output_delta_uleb128 (elabel, base,
11866 "Range end address (%s)", lab);
11867 continue;
11868 }
11869 dw2_asm_output_data (1, DW_RLE_start_length,
11870 "DW_RLE_start_length (%s)", lab);
11871 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11872 "Range begin address (%s)", lab);
11873 dw2_asm_output_delta_uleb128 (elabel, blabel,
11874 "Range length (%s)", lab);
11875 }
11876 else
11877 {
11878 dw2_asm_output_data (1, DW_RLE_start_end,
11879 "DW_RLE_start_end (%s)", lab);
11880 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11881 "Range begin address (%s)", lab);
11882 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11883 "Range end address (%s)", lab);
11884 }
11885 }
11886
11887 /* Negative block_num stands for an index into ranges_by_label. */
11888 else if (block_num < 0)
11889 {
11890 int lab_idx = - block_num - 1;
11891 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11892 const char *elabel = (*ranges_by_label)[lab_idx].end;
11893
11894 if (!have_multiple_function_sections)
11895 gcc_unreachable ();
11896 if (HAVE_AS_LEB128)
11897 {
11898 dw2_asm_output_data (1, DW_RLE_start_length,
11899 "DW_RLE_start_length (%s)", lab);
11900 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11901 "Range begin address (%s)", lab);
11902 dw2_asm_output_delta_uleb128 (elabel, blabel,
11903 "Range length (%s)", lab);
11904 }
11905 else
11906 {
11907 dw2_asm_output_data (1, DW_RLE_start_end,
11908 "DW_RLE_start_end (%s)", lab);
11909 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11910 "Range begin address (%s)", lab);
11911 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11912 "Range end address (%s)", lab);
11913 }
11914 }
11915 else
11916 dw2_asm_output_data (1, DW_RLE_end_of_list,
11917 "DW_RLE_end_of_list (%s)", lab);
11918 }
11919 ASM_OUTPUT_LABEL (asm_out_file, l2);
11920 }
11921
11922 /* Data structure containing information about input files. */
11923 struct file_info
11924 {
11925 const char *path; /* Complete file name. */
11926 const char *fname; /* File name part. */
11927 int length; /* Length of entire string. */
11928 struct dwarf_file_data * file_idx; /* Index in input file table. */
11929 int dir_idx; /* Index in directory table. */
11930 };
11931
11932 /* Data structure containing information about directories with source
11933 files. */
11934 struct dir_info
11935 {
11936 const char *path; /* Path including directory name. */
11937 int length; /* Path length. */
11938 int prefix; /* Index of directory entry which is a prefix. */
11939 int count; /* Number of files in this directory. */
11940 int dir_idx; /* Index of directory used as base. */
11941 };
11942
11943 /* Callback function for file_info comparison. We sort by looking at
11944 the directories in the path. */
11945
11946 static int
11947 file_info_cmp (const void *p1, const void *p2)
11948 {
11949 const struct file_info *const s1 = (const struct file_info *) p1;
11950 const struct file_info *const s2 = (const struct file_info *) p2;
11951 const unsigned char *cp1;
11952 const unsigned char *cp2;
11953
11954  /* Take care of file names without directories.  We need to return
11955     consistent values to qsort: some implementations get confused if we
11956     return the same non-zero value when the same operands are passed in
11957     opposite orders.  So if neither has a directory, return 0; otherwise
11958     return 1 or -1 depending on which one has the directory.  We want the
11959     one with a directory to sort after the one without, so all files without
11960     a directory are at the start (normally only the compilation unit file).  */
11961 if ((s1->path == s1->fname || s2->path == s2->fname))
11962 return (s2->path == s2->fname) - (s1->path == s1->fname);
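  /* The expression above is (s2 has no directory) - (s1 has no directory):
     -1 if only s1 lacks one (s1 sorts first), 1 if only s2 lacks one, and
     0 if neither has a directory.  */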
11963
11964 cp1 = (const unsigned char *) s1->path;
11965 cp2 = (const unsigned char *) s2->path;
11966
11967 while (1)
11968 {
11969 ++cp1;
11970 ++cp2;
11971 /* Reached the end of the first path? If so, handle like above,
11972 but now we want longer directory prefixes before shorter ones. */
11973 if ((cp1 == (const unsigned char *) s1->fname)
11974 || (cp2 == (const unsigned char *) s2->fname))
11975 return ((cp1 == (const unsigned char *) s1->fname)
11976 - (cp2 == (const unsigned char *) s2->fname));
11977
11978 /* Character of current path component the same? */
11979 else if (*cp1 != *cp2)
11980 return *cp1 - *cp2;
11981 }
11982 }
11983
11984 struct file_name_acquire_data
11985 {
11986 struct file_info *files;
11987 int used_files;
11988 int max_files;
11989 };
11990
11991 /* Traversal function for the hash table. */
11992
11993 int
11994 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11995 {
11996 struct dwarf_file_data *d = *slot;
11997 struct file_info *fi;
11998 const char *f;
11999
12000 gcc_assert (fnad->max_files >= d->emitted_number);
12001
12002 if (! d->emitted_number)
12003 return 1;
12004
12005 gcc_assert (fnad->max_files != fnad->used_files);
12006
12007 fi = fnad->files + fnad->used_files++;
12008
12009 /* Skip all leading "./". */
12010 f = d->filename;
12011 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12012 f += 2;
12013
12014 /* Create a new array entry. */
12015 fi->path = f;
12016 fi->length = strlen (f);
12017 fi->file_idx = d;
12018
12019 /* Search for the file name part. */
12020 f = strrchr (f, DIR_SEPARATOR);
12021 #if defined (DIR_SEPARATOR_2)
12022 {
12023 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12024
12025 if (g != NULL)
12026 {
12027 if (f == NULL || f < g)
12028 f = g;
12029 }
12030 }
12031 #endif
12032
12033 fi->fname = f == NULL ? fi->path : f + 1;
12034 return 1;
12035 }
12036
12037 /* Helper function for output_file_names. Emit a FORM encoded
12038 string STR, with assembly comment start ENTRY_KIND and
12039 index IDX */
12040
12041 static void
12042 output_line_string (enum dwarf_form form, const char *str,
12043 const char *entry_kind, unsigned int idx)
12044 {
12045 switch (form)
12046 {
12047 case DW_FORM_string:
12048 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12049 break;
12050 case DW_FORM_line_strp:
12051 if (!debug_line_str_hash)
12052 debug_line_str_hash
12053 = hash_table<indirect_string_hasher>::create_ggc (10);
12054
12055 struct indirect_string_node *node;
12056 node = find_AT_string_in_table (str, debug_line_str_hash);
12057 set_indirect_string (node);
12058 node->form = form;
12059 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12060 debug_line_str_section, "%s: %#x: \"%s\"",
12061 entry_kind, 0, node->str);
12062 break;
12063 default:
12064 gcc_unreachable ();
12065 }
12066 }
12067
12068 /* Output the directory table and the file name table. We try to minimize
12069 the total amount of memory needed. A heuristic is used to avoid large
12070 slowdowns with many input files. */
12071
12072 static void
12073 output_file_names (void)
12074 {
12075 struct file_name_acquire_data fnad;
12076 int numfiles;
12077 struct file_info *files;
12078 struct dir_info *dirs;
12079 int *saved;
12080 int *savehere;
12081 int *backmap;
12082 int ndirs;
12083 int idx_offset;
12084 int i;
12085
12086 if (!last_emitted_file)
12087 {
12088 if (dwarf_version >= 5)
12089 {
12090 dw2_asm_output_data (1, 0, "Directory entry format count");
12091 dw2_asm_output_data_uleb128 (0, "Directories count");
12092 dw2_asm_output_data (1, 0, "File name entry format count");
12093 dw2_asm_output_data_uleb128 (0, "File names count");
12094 }
12095 else
12096 {
12097 dw2_asm_output_data (1, 0, "End directory table");
12098 dw2_asm_output_data (1, 0, "End file name table");
12099 }
12100 return;
12101 }
12102
12103 numfiles = last_emitted_file->emitted_number;
12104
12105 /* Allocate the various arrays we need. */
12106 files = XALLOCAVEC (struct file_info, numfiles);
12107 dirs = XALLOCAVEC (struct dir_info, numfiles);
12108
12109 fnad.files = files;
12110 fnad.used_files = 0;
12111 fnad.max_files = numfiles;
12112 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12113 gcc_assert (fnad.used_files == fnad.max_files);
12114
12115 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12116
12117 /* Find all the different directories used. */
12118 dirs[0].path = files[0].path;
12119 dirs[0].length = files[0].fname - files[0].path;
12120 dirs[0].prefix = -1;
12121 dirs[0].count = 1;
12122 dirs[0].dir_idx = 0;
12123 files[0].dir_idx = 0;
12124 ndirs = 1;
12125
12126 for (i = 1; i < numfiles; i++)
12127 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12128 && memcmp (dirs[ndirs - 1].path, files[i].path,
12129 dirs[ndirs - 1].length) == 0)
12130 {
12131 /* Same directory as last entry. */
12132 files[i].dir_idx = ndirs - 1;
12133 ++dirs[ndirs - 1].count;
12134 }
12135 else
12136 {
12137 int j;
12138
12139 /* This is a new directory. */
12140 dirs[ndirs].path = files[i].path;
12141 dirs[ndirs].length = files[i].fname - files[i].path;
12142 dirs[ndirs].count = 1;
12143 dirs[ndirs].dir_idx = ndirs;
12144 files[i].dir_idx = ndirs;
12145
12146 /* Search for a prefix. */
12147 dirs[ndirs].prefix = -1;
12148 for (j = 0; j < ndirs; j++)
12149 if (dirs[j].length < dirs[ndirs].length
12150 && dirs[j].length > 1
12151 && (dirs[ndirs].prefix == -1
12152 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12153 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12154 dirs[ndirs].prefix = j;
12155
12156 ++ndirs;
12157 }
12158
12159  /* Now to the actual work.  We have to find a subset of the directories
12160     which allows expressing each file name using references to the directory
12161     table with the fewest characters.  We do not do an exhaustive search
12162     where we would have to check every combination of every possible prefix.
12163     Instead we use a heuristic which provides nearly optimal results in most
12164     cases and is never far off.  */
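  /* For example, a directory "/usr/local/include/" (19 chars) shared by
     three files costs dirs[i].length + 1 = 20 bytes to emit in the directory
     table but saves 3 * 19 = 57 bytes of file-name prefixes, so it passes
     the total > dirs[i].length + 1 test below.  */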
12165 saved = XALLOCAVEC (int, ndirs);
12166 savehere = XALLOCAVEC (int, ndirs);
12167
12168 memset (saved, '\0', ndirs * sizeof (saved[0]));
12169 for (i = 0; i < ndirs; i++)
12170 {
12171 int j;
12172 int total;
12173
12174 /* We can always save some space for the current directory. But this
12175 does not mean it will be enough to justify adding the directory. */
12176 savehere[i] = dirs[i].length;
12177 total = (savehere[i] - saved[i]) * dirs[i].count;
12178
12179 for (j = i + 1; j < ndirs; j++)
12180 {
12181 savehere[j] = 0;
12182 if (saved[j] < dirs[i].length)
12183 {
12184 /* Determine whether the dirs[i] path is a prefix of the
12185 dirs[j] path. */
12186 int k;
12187
12188 k = dirs[j].prefix;
12189 while (k != -1 && k != (int) i)
12190 k = dirs[k].prefix;
12191
12192 if (k == (int) i)
12193 {
12194 /* Yes it is. We can possibly save some memory by
12195 writing the filenames in dirs[j] relative to
12196 dirs[i]. */
12197 savehere[j] = dirs[i].length;
12198 total += (savehere[j] - saved[j]) * dirs[j].count;
12199 }
12200 }
12201 }
12202
12203 /* Check whether we can save enough to justify adding the dirs[i]
12204 directory. */
12205 if (total > dirs[i].length + 1)
12206 {
12207 /* It's worthwhile adding. */
12208 for (j = i; j < ndirs; j++)
12209 if (savehere[j] > 0)
12210 {
12211 /* Remember how much we saved for this directory so far. */
12212 saved[j] = savehere[j];
12213
12214 /* Remember the prefix directory. */
12215 dirs[j].dir_idx = i;
12216 }
12217 }
12218 }
12219
12220 /* Emit the directory name table. */
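  /* When there are files without a directory component they sort first and
     become dirs[0] with length 0.  Otherwise dirs[0].length is non-zero,
     idx_offset is 1, and emitted index 0 is reserved for the compilation
     directory, so every dirs[] index is shifted up by one on output.  */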
12221 idx_offset = dirs[0].length > 0 ? 1 : 0;
12222 enum dwarf_form str_form = DW_FORM_string;
12223 enum dwarf_form idx_form = DW_FORM_udata;
12224 if (dwarf_version >= 5)
12225 {
12226 const char *comp_dir = comp_dir_string ();
12227 if (comp_dir == NULL)
12228 comp_dir = "";
12229 dw2_asm_output_data (1, 1, "Directory entry format count");
12230 if (DWARF5_USE_DEBUG_LINE_STR)
12231 str_form = DW_FORM_line_strp;
12232 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12233 dw2_asm_output_data_uleb128 (str_form, "%s",
12234 get_DW_FORM_name (str_form));
12235 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12236 if (str_form == DW_FORM_string)
12237 {
12238 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12239 for (i = 1 - idx_offset; i < ndirs; i++)
12240 dw2_asm_output_nstring (dirs[i].path,
12241 dirs[i].length
12242 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12243 "Directory Entry: %#x", i + idx_offset);
12244 }
12245 else
12246 {
12247 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12248 for (i = 1 - idx_offset; i < ndirs; i++)
12249 {
12250 const char *str
12251 = ggc_alloc_string (dirs[i].path,
12252 dirs[i].length
12253 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12254 output_line_string (str_form, str, "Directory Entry",
12255 (unsigned) i + idx_offset);
12256 }
12257 }
12258 }
12259 else
12260 {
12261 for (i = 1 - idx_offset; i < ndirs; i++)
12262 dw2_asm_output_nstring (dirs[i].path,
12263 dirs[i].length
12264 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12265 "Directory Entry: %#x", i + idx_offset);
12266
12267 dw2_asm_output_data (1, 0, "End directory table");
12268 }
12269
12270 /* We have to emit them in the order of emitted_number since that's
12271 used in the debug info generation. To do this efficiently we
12272 generate a back-mapping of the indices first. */
12273 backmap = XALLOCAVEC (int, numfiles);
12274 for (i = 0; i < numfiles; i++)
12275 backmap[files[i].file_idx->emitted_number - 1] = i;
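  /* backmap[k] is now the position in the sorted files[] array of the file
     whose emitted_number is k + 1.  */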
12276
12277 if (dwarf_version >= 5)
12278 {
12279 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12280 if (filename0 == NULL)
12281 filename0 = "";
12282	  /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12283	     DW_FORM_data2.  Choose one based on the number of directories
12284	     and how much space they would occupy in each encoding.
12285	     If we have at most 256 directories, all indexes fit into
12286	     a single byte, so DW_FORM_data1 is most compact (with at most
12287	     128 directories DW_FORM_udata would be equally compact, but
12288	     no shorter and slower to decode).  */
12289 if (ndirs + idx_offset <= 256)
12290 idx_form = DW_FORM_data1;
12291	  /* If there are more than 65536 directories, we have to use
12292	     DW_FORM_udata; DW_FORM_data2 can't refer to them.
12293	     Otherwise, compute how much space the indexes would occupy if
12294	     they all used DW_FORM_udata (sum), compare that with the size of
12295	     the DW_FORM_data2 encoding, and pick the more efficient one.  */
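	  /* E.g. with 300 directories a uleb128 index takes 1 byte for
	     directories 0-127 and 2 bytes above that, while DW_FORM_data2 is
	     always 2 bytes, so udata usually wins unless most files live in
	     high-numbered directories.  */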
12296 else if (ndirs + idx_offset <= 65536)
12297 {
12298 unsigned HOST_WIDE_INT sum = 1;
12299 for (i = 0; i < numfiles; i++)
12300 {
12301 int file_idx = backmap[i];
12302 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12303 sum += size_of_uleb128 (dir_idx);
12304 }
12305 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12306 idx_form = DW_FORM_data2;
12307 }
12308 #ifdef VMS_DEBUGGING_INFO
12309 dw2_asm_output_data (1, 4, "File name entry format count");
12310 #else
12311 dw2_asm_output_data (1, 2, "File name entry format count");
12312 #endif
12313 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12314 dw2_asm_output_data_uleb128 (str_form, "%s",
12315 get_DW_FORM_name (str_form));
12316 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12317 "DW_LNCT_directory_index");
12318 dw2_asm_output_data_uleb128 (idx_form, "%s",
12319 get_DW_FORM_name (idx_form));
12320 #ifdef VMS_DEBUGGING_INFO
12321 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12322 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12323 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12324 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12325 #endif
12326 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12327
12328 output_line_string (str_form, filename0, "File Entry", 0);
12329
12330 /* Include directory index. */
12331 if (idx_form != DW_FORM_udata)
12332 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12333 0, NULL);
12334 else
12335 dw2_asm_output_data_uleb128 (0, NULL);
12336
12337 #ifdef VMS_DEBUGGING_INFO
12338 dw2_asm_output_data_uleb128 (0, NULL);
12339 dw2_asm_output_data_uleb128 (0, NULL);
12340 #endif
12341 }
12342
12343 /* Now write all the file names. */
12344 for (i = 0; i < numfiles; i++)
12345 {
12346 int file_idx = backmap[i];
12347 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12348
12349 #ifdef VMS_DEBUGGING_INFO
12350 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12351
12352 /* Setting these fields can lead to debugger miscomparisons,
12353 but VMS Debug requires them to be set correctly. */
12354
12355 int ver;
12356 long long cdt;
12357 long siz;
12358 int maxfilelen = (strlen (files[file_idx].path)
12359 + dirs[dir_idx].length
12360 + MAX_VMS_VERSION_LEN + 1);
12361 char *filebuf = XALLOCAVEC (char, maxfilelen);
12362
12363 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12364 snprintf (filebuf, maxfilelen, "%s;%d",
12365 files[file_idx].path + dirs[dir_idx].length, ver);
12366
12367 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12368
12369 /* Include directory index. */
12370 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12371 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12372 dir_idx + idx_offset, NULL);
12373 else
12374 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12375
12376 /* Modification time. */
12377 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12378 &cdt, 0, 0, 0) == 0)
12379 ? cdt : 0, NULL);
12380
12381 /* File length in bytes. */
12382 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12383 0, &siz, 0, 0) == 0)
12384 ? siz : 0, NULL);
12385 #else
12386 output_line_string (str_form,
12387 files[file_idx].path + dirs[dir_idx].length,
12388 "File Entry", (unsigned) i + 1);
12389
12390 /* Include directory index. */
12391 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12392 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12393 dir_idx + idx_offset, NULL);
12394 else
12395 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12396
12397 if (dwarf_version >= 5)
12398 continue;
12399
12400 /* Modification time. */
12401 dw2_asm_output_data_uleb128 (0, NULL);
12402
12403 /* File length in bytes. */
12404 dw2_asm_output_data_uleb128 (0, NULL);
12405 #endif /* VMS_DEBUGGING_INFO */
12406 }
12407
12408 if (dwarf_version < 5)
12409 dw2_asm_output_data (1, 0, "End file name table");
12410 }
12411
12412
12413 /* Output one line number table into the .debug_line section. */
12414
12415 static void
12416 output_one_line_info_table (dw_line_info_table *table)
12417 {
12418 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12419 unsigned int current_line = 1;
12420 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12421 dw_line_info_entry *ent, *prev_addr;
12422 size_t i;
12423 unsigned int view;
12424
12425 view = 0;
12426
12427 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12428 {
12429 switch (ent->opcode)
12430 {
12431 case LI_set_address:
12432 /* ??? Unfortunately, we have little choice here currently, and
12433 must always use the most general form. GCC does not know the
12434 address delta itself, so we can't use DW_LNS_advance_pc. Many
12435 ports do have length attributes which will give an upper bound
12436 on the address range. We could perhaps use length attributes
12437 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12438 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12439
12440 view = 0;
12441
12442 /* This can handle any delta. This takes
12443 3+DWARF2_ADDR_SIZE bytes. */
12444 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12445 debug_variable_location_views
12446 ? ", reset view to 0" : "");
12447 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12448 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12449 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12450
12451 prev_addr = ent;
12452 break;
12453
12454 case LI_adv_address:
12455 {
12456 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12457 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12458 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12459
12460 view++;
12461
12462 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12463 dw2_asm_output_delta (2, line_label, prev_label,
12464 "from %s to %s", prev_label, line_label);
12465
12466 prev_addr = ent;
12467 break;
12468 }
12469
12470 case LI_set_line:
12471 if (ent->val == current_line)
12472 {
12473 /* We still need to start a new row, so output a copy insn. */
12474 dw2_asm_output_data (1, DW_LNS_copy,
12475 "copy line %u", current_line);
12476 }
12477 else
12478 {
12479 int line_offset = ent->val - current_line;
12480 int line_delta = line_offset - DWARF_LINE_BASE;
12481
12482 current_line = ent->val;
12483 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12484 {
12485 /* This can handle deltas from -10 to 234, using the current
12486 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12487 This takes 1 byte. */
12488 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12489 "line %u", current_line);
12490 }
12491 else
12492 {
12493 /* This can handle any delta. This takes at least 4 bytes,
12494 depending on the value being encoded. */
12495 dw2_asm_output_data (1, DW_LNS_advance_line,
12496 "advance to line %u", current_line);
12497 dw2_asm_output_data_sleb128 (line_offset, NULL);
12498 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12499 }
12500 }
12501 break;
12502
12503 case LI_set_file:
12504 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12505 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12506 break;
12507
12508 case LI_set_column:
12509 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12510 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12511 break;
12512
12513 case LI_negate_stmt:
12514 current_is_stmt = !current_is_stmt;
12515 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12516 "is_stmt %d", current_is_stmt);
12517 break;
12518
12519 case LI_set_prologue_end:
12520 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12521 "set prologue end");
12522 break;
12523
12524 case LI_set_epilogue_begin:
12525 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12526 "set epilogue begin");
12527 break;
12528
12529 case LI_set_discriminator:
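	  /* Extended opcode: a zero byte, the size of the rest of the
	     instruction as a uleb128, the DW_LNE_set_discriminator
	     sub-opcode, and finally the discriminator value itself.  */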
12530 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12531 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12532 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12533 dw2_asm_output_data_uleb128 (ent->val, NULL);
12534 break;
12535 }
12536 }
12537
12538 /* Emit debug info for the address of the end of the table. */
12539 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12540 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12541 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12542 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12543
12544 dw2_asm_output_data (1, 0, "end sequence");
12545 dw2_asm_output_data_uleb128 (1, NULL);
12546 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12547 }
12548
12549 /* Output the source line number correspondence information. This
12550 information goes into the .debug_line section. */
12551
12552 static void
12553 output_line_info (bool prologue_only)
12554 {
12555 static unsigned int generation;
12556 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12557 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12558 bool saw_one = false;
12559 int opc;
12560
12561 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12562 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12563 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12564 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12565
12566 if (!XCOFF_DEBUGGING_INFO)
12567 {
12568 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12569 dw2_asm_output_data (4, 0xffffffff,
12570 "Initial length escape value indicating 64-bit DWARF extension");
12571 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12572 "Length of Source Line Info");
12573 }
12574
12575 ASM_OUTPUT_LABEL (asm_out_file, l1);
12576
12577 output_dwarf_version ();
12578 if (dwarf_version >= 5)
12579 {
12580 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12581 dw2_asm_output_data (1, 0, "Segment Size");
12582 }
12583 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12584 ASM_OUTPUT_LABEL (asm_out_file, p1);
12585
12586 /* Define the architecture-dependent minimum instruction length (in bytes).
12587 In this implementation of DWARF, this field is used for information
12588 purposes only. Since GCC generates assembly language, we have no
12589 a priori knowledge of how many instruction bytes are generated for each
12590 source line, and therefore can use only the DW_LNE_set_address and
12591 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12592 this as '1', which is "correct enough" for all architectures,
12593 and don't let the target override. */
12594 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12595
12596 if (dwarf_version >= 4)
12597 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12598 "Maximum Operations Per Instruction");
12599 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12600 "Default is_stmt_start flag");
12601 dw2_asm_output_data (1, DWARF_LINE_BASE,
12602 "Line Base Value (Special Opcodes)");
12603 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12604 "Line Range Value (Special Opcodes)");
12605 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12606 "Special Opcode Base");
12607
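  /* Emit the standard_opcode_lengths array: for each standard opcode below
     DWARF_LINE_OPCODE_BASE, the number of uleb128 operands it takes.  */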
12608 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12609 {
12610 int n_op_args;
12611 switch (opc)
12612 {
12613 case DW_LNS_advance_pc:
12614 case DW_LNS_advance_line:
12615 case DW_LNS_set_file:
12616 case DW_LNS_set_column:
12617 case DW_LNS_fixed_advance_pc:
12618 case DW_LNS_set_isa:
12619 n_op_args = 1;
12620 break;
12621 default:
12622 n_op_args = 0;
12623 break;
12624 }
12625
12626 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12627 opc, n_op_args);
12628 }
12629
12630 /* Write out the information about the files we use. */
12631 output_file_names ();
12632 ASM_OUTPUT_LABEL (asm_out_file, p2);
12633 if (prologue_only)
12634 {
12635 /* Output the marker for the end of the line number info. */
12636 ASM_OUTPUT_LABEL (asm_out_file, l2);
12637 return;
12638 }
12639
12640 if (separate_line_info)
12641 {
12642 dw_line_info_table *table;
12643 size_t i;
12644
12645 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12646 if (table->in_use)
12647 {
12648 output_one_line_info_table (table);
12649 saw_one = true;
12650 }
12651 }
12652 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12653 {
12654 output_one_line_info_table (cold_text_section_line_info);
12655 saw_one = true;
12656 }
12657
12658 /* ??? Some Darwin linkers crash on a .debug_line section with no
12659 sequences. Further, merely a DW_LNE_end_sequence entry is not
12660 sufficient -- the address column must also be initialized.
12661 Make sure to output at least one set_address/end_sequence pair,
12662 choosing .text since that section is always present. */
12663 if (text_section_line_info->in_use || !saw_one)
12664 output_one_line_info_table (text_section_line_info);
12665
12666 /* Output the marker for the end of the line number info. */
12667 ASM_OUTPUT_LABEL (asm_out_file, l2);
12668 }
12669 \f
12670 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12671
12672 static inline bool
12673 need_endianity_attribute_p (bool reverse)
12674 {
12675 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12676 }
12677
12678 /* Given a pointer to a tree node for some base type, return a pointer to
12679 a DIE that describes the given type. REVERSE is true if the type is
12680 to be interpreted in the reverse storage order wrt the target order.
12681
12682 This routine must only be called for GCC type nodes that correspond to
12683 Dwarf base (fundamental) types. */
12684
12685 static dw_die_ref
12686 base_type_die (tree type, bool reverse)
12687 {
12688 dw_die_ref base_type_result;
12689 enum dwarf_type encoding;
12690 bool fpt_used = false;
12691 struct fixed_point_type_info fpt_info;
12692 tree type_bias = NULL_TREE;
12693
12694 /* If this is a subtype that should not be emitted as a subrange type,
12695 use the base type. See subrange_type_for_debug_p. */
12696 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12697 type = TREE_TYPE (type);
12698
12699 switch (TREE_CODE (type))
12700 {
12701 case INTEGER_TYPE:
12702 if ((dwarf_version >= 4 || !dwarf_strict)
12703 && TYPE_NAME (type)
12704 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12705 && DECL_IS_BUILTIN (TYPE_NAME (type))
12706 && DECL_NAME (TYPE_NAME (type)))
12707 {
12708 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12709 if (strcmp (name, "char16_t") == 0
12710 || strcmp (name, "char32_t") == 0)
12711 {
12712 encoding = DW_ATE_UTF;
12713 break;
12714 }
12715 }
12716 if ((dwarf_version >= 3 || !dwarf_strict)
12717 && lang_hooks.types.get_fixed_point_type_info)
12718 {
12719 memset (&fpt_info, 0, sizeof (fpt_info));
12720 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12721 {
12722 fpt_used = true;
12723 encoding = ((TYPE_UNSIGNED (type))
12724 ? DW_ATE_unsigned_fixed
12725 : DW_ATE_signed_fixed);
12726 break;
12727 }
12728 }
12729 if (TYPE_STRING_FLAG (type))
12730 {
12731 if (TYPE_UNSIGNED (type))
12732 encoding = DW_ATE_unsigned_char;
12733 else
12734 encoding = DW_ATE_signed_char;
12735 }
12736 else if (TYPE_UNSIGNED (type))
12737 encoding = DW_ATE_unsigned;
12738 else
12739 encoding = DW_ATE_signed;
12740
12741 if (!dwarf_strict
12742 && lang_hooks.types.get_type_bias)
12743 type_bias = lang_hooks.types.get_type_bias (type);
12744 break;
12745
12746 case REAL_TYPE:
12747 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12748 {
12749 if (dwarf_version >= 3 || !dwarf_strict)
12750 encoding = DW_ATE_decimal_float;
12751 else
12752 encoding = DW_ATE_lo_user;
12753 }
12754 else
12755 encoding = DW_ATE_float;
12756 break;
12757
12758 case FIXED_POINT_TYPE:
12759 if (!(dwarf_version >= 3 || !dwarf_strict))
12760 encoding = DW_ATE_lo_user;
12761 else if (TYPE_UNSIGNED (type))
12762 encoding = DW_ATE_unsigned_fixed;
12763 else
12764 encoding = DW_ATE_signed_fixed;
12765 break;
12766
12767 /* Dwarf2 doesn't know anything about complex ints, so use
12768 a user defined type for it. */
12769 case COMPLEX_TYPE:
12770 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12771 encoding = DW_ATE_complex_float;
12772 else
12773 encoding = DW_ATE_lo_user;
12774 break;
12775
12776 case BOOLEAN_TYPE:
12777 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12778 encoding = DW_ATE_boolean;
12779 break;
12780
12781 default:
12782 /* No other TREE_CODEs are Dwarf fundamental types. */
12783 gcc_unreachable ();
12784 }
12785
12786 base_type_result = new_die_raw (DW_TAG_base_type);
12787
12788 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12789 int_size_in_bytes (type));
12790 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12791
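  /* A type in the reverse storage order is read with the byte order opposite
     to that of the target, hence the inverted DW_END_* value.  */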
12792 if (need_endianity_attribute_p (reverse))
12793 add_AT_unsigned (base_type_result, DW_AT_endianity,
12794 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12795
12796 add_alignment_attribute (base_type_result, type);
12797
12798 if (fpt_used)
12799 {
12800 switch (fpt_info.scale_factor_kind)
12801 {
12802 case fixed_point_scale_factor_binary:
12803 add_AT_int (base_type_result, DW_AT_binary_scale,
12804 fpt_info.scale_factor.binary);
12805 break;
12806
12807 case fixed_point_scale_factor_decimal:
12808 add_AT_int (base_type_result, DW_AT_decimal_scale,
12809 fpt_info.scale_factor.decimal);
12810 break;
12811
12812 case fixed_point_scale_factor_arbitrary:
12813 /* Arbitrary scale factors cannot be described in standard DWARF,
12814 yet. */
12815 if (!dwarf_strict)
12816 {
12817 /* Describe the scale factor as a rational constant. */
12818 const dw_die_ref scale_factor
12819 = new_die (DW_TAG_constant, comp_unit_die (), type);
12820
12821 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12822 fpt_info.scale_factor.arbitrary.numerator);
12823 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12824 fpt_info.scale_factor.arbitrary.denominator);
12825
12826 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12827 }
12828 break;
12829
12830 default:
12831 gcc_unreachable ();
12832 }
12833 }
12834
12835 if (type_bias)
12836 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12837 dw_scalar_form_constant
12838 | dw_scalar_form_exprloc
12839 | dw_scalar_form_reference,
12840 NULL);
12841
12842 return base_type_result;
12843 }
12844
12845 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12846 named 'auto' in its type: return true for it, false otherwise. */
12847
12848 static inline bool
12849 is_cxx_auto (tree type)
12850 {
12851 if (is_cxx ())
12852 {
12853 tree name = TYPE_IDENTIFIER (type);
12854 if (name == get_identifier ("auto")
12855 || name == get_identifier ("decltype(auto)"))
12856 return true;
12857 }
12858 return false;
12859 }
12860
12861 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12862 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12863
12864 static inline int
12865 is_base_type (tree type)
12866 {
12867 switch (TREE_CODE (type))
12868 {
12869 case INTEGER_TYPE:
12870 case REAL_TYPE:
12871 case FIXED_POINT_TYPE:
12872 case COMPLEX_TYPE:
12873 case BOOLEAN_TYPE:
12874 case POINTER_BOUNDS_TYPE:
12875 return 1;
12876
12877 case VOID_TYPE:
12878 case ARRAY_TYPE:
12879 case RECORD_TYPE:
12880 case UNION_TYPE:
12881 case QUAL_UNION_TYPE:
12882 case ENUMERAL_TYPE:
12883 case FUNCTION_TYPE:
12884 case METHOD_TYPE:
12885 case POINTER_TYPE:
12886 case REFERENCE_TYPE:
12887 case NULLPTR_TYPE:
12888 case OFFSET_TYPE:
12889 case LANG_TYPE:
12890 case VECTOR_TYPE:
12891 return 0;
12892
12893 default:
12894 if (is_cxx_auto (type))
12895 return 0;
12896 gcc_unreachable ();
12897 }
12898
12899 return 0;
12900 }
12901
12902 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12903 node, return the size in bits for the type if it is a constant, the
12904 alignment for the type if its size is not constant, BITS_PER_WORD if
12905 the type turns out to be an ERROR_MARK node, or 0 if the type's size
12906 has not been determined. */
12907
12908 static inline unsigned HOST_WIDE_INT
12909 simple_type_size_in_bits (const_tree type)
12910 {
12911 if (TREE_CODE (type) == ERROR_MARK)
12912 return BITS_PER_WORD;
12913 else if (TYPE_SIZE (type) == NULL_TREE)
12914 return 0;
12915 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12916 return tree_to_uhwi (TYPE_SIZE (type));
12917 else
12918 return TYPE_ALIGN (type);
12919 }
12920
12921 /* Similarly, but return an offset_int instead of UHWI. */
12922
12923 static inline offset_int
12924 offset_int_type_size_in_bits (const_tree type)
12925 {
12926 if (TREE_CODE (type) == ERROR_MARK)
12927 return BITS_PER_WORD;
12928 else if (TYPE_SIZE (type) == NULL_TREE)
12929 return 0;
12930 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12931 return wi::to_offset (TYPE_SIZE (type));
12932 else
12933 return TYPE_ALIGN (type);
12934 }
12935
12936 /* Given a pointer to a tree node for a subrange type, return a pointer
12937 to a DIE that describes the given type. */
12938
12939 static dw_die_ref
12940 subrange_type_die (tree type, tree low, tree high, tree bias,
12941 dw_die_ref context_die)
12942 {
12943 dw_die_ref subrange_die;
12944 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12945
12946 if (context_die == NULL)
12947 context_die = comp_unit_die ();
12948
12949 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12950
12951 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12952 {
12953 /* The size of the subrange type and its base type do not match,
12954 so we need to generate a size attribute for the subrange type. */
12955 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12956 }
12957
12958 add_alignment_attribute (subrange_die, type);
12959
12960 if (low)
12961 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12962 if (high)
12963 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12964 if (bias && !dwarf_strict)
12965 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12966 dw_scalar_form_constant
12967 | dw_scalar_form_exprloc
12968 | dw_scalar_form_reference,
12969 NULL);
12970
12971 return subrange_die;
12972 }
12973
12974 /* Returns the (const and/or volatile) cv_qualifiers associated with
12975 the decl node. This will normally be augmented with the
12976 cv_qualifiers of the underlying type in add_type_attribute. */
12977
12978 static int
12979 decl_quals (const_tree decl)
12980 {
12981 return ((TREE_READONLY (decl)
12982 /* The C++ front-end correctly marks reference-typed
12983 variables as readonly, but from a language (and debug
12984 info) standpoint they are not const-qualified. */
12985 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12986 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12987 | (TREE_THIS_VOLATILE (decl)
12988 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12989 }
12990
12991 /* Determine the TYPE whose qualifiers match the largest strict subset
12992 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12993 qualifiers outside QUAL_MASK. */
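/* For example, with TYPE_QUALS equal to const|volatile, a plain const variant
   of TYPE (if one exists) is the largest strict subset, so TYPE_QUAL_CONST
   would be returned.  */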
12994
12995 static int
12996 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12997 {
12998 tree t;
12999 int best_rank = 0, best_qual = 0, max_rank;
13000
13001 type_quals &= qual_mask;
13002 max_rank = popcount_hwi (type_quals) - 1;
13003
13004 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13005 t = TYPE_NEXT_VARIANT (t))
13006 {
13007 int q = TYPE_QUALS (t) & qual_mask;
13008
13009 if ((q & type_quals) == q && q != type_quals
13010 && check_base_type (t, type))
13011 {
13012 int rank = popcount_hwi (q);
13013
13014 if (rank > best_rank)
13015 {
13016 best_rank = rank;
13017 best_qual = q;
13018 }
13019 }
13020 }
13021
13022 return best_qual;
13023 }
13024
13025 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13026 static const dwarf_qual_info_t dwarf_qual_info[] =
13027 {
13028 { TYPE_QUAL_CONST, DW_TAG_const_type },
13029 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13030 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13031 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13032 };
13033 static const unsigned int dwarf_qual_info_size
13034 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13035
13036 /* If DIE is a qualified DIE of some base DIE with the same parent,
13037 return the base DIE, otherwise return NULL. Set MASK to the
13038 qualifiers added compared to the returned DIE. */
13039
13040 static dw_die_ref
13041 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13042 {
13043 unsigned int i;
13044 for (i = 0; i < dwarf_qual_info_size; i++)
13045 if (die->die_tag == dwarf_qual_info[i].t)
13046 break;
13047 if (i == dwarf_qual_info_size)
13048 return NULL;
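  /* A qualifier-only DIE carries a single attribute: the DW_AT_type
     reference to the DIE it qualifies.  */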
13049 if (vec_safe_length (die->die_attr) != 1)
13050 return NULL;
13051 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13052 if (type == NULL || type->die_parent != die->die_parent)
13053 return NULL;
13054 *mask |= dwarf_qual_info[i].q;
13055 if (depth)
13056 {
13057 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13058 if (ret)
13059 return ret;
13060 }
13061 return type;
13062 }
13063
13064 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13065 entry that chains the modifiers specified by CV_QUALS in front of the
13066 given type. REVERSE is true if the type is to be interpreted in the
13067 reverse storage order wrt the target order. */
13068
13069 static dw_die_ref
13070 modified_type_die (tree type, int cv_quals, bool reverse,
13071 dw_die_ref context_die)
13072 {
13073 enum tree_code code = TREE_CODE (type);
13074 dw_die_ref mod_type_die;
13075 dw_die_ref sub_die = NULL;
13076 tree item_type = NULL;
13077 tree qualified_type;
13078 tree name, low, high;
13079 dw_die_ref mod_scope;
13080 /* Only these cv-qualifiers are currently handled. */
13081 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13082 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13083 ENCODE_QUAL_ADDR_SPACE(~0U));
13084 const bool reverse_base_type
13085 = need_endianity_attribute_p (reverse) && is_base_type (type);
13086
13087 if (code == ERROR_MARK)
13088 return NULL;
13089
13090 if (lang_hooks.types.get_debug_type)
13091 {
13092 tree debug_type = lang_hooks.types.get_debug_type (type);
13093
13094 if (debug_type != NULL_TREE && debug_type != type)
13095 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13096 }
13097
13098 cv_quals &= cv_qual_mask;
13099
13100 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13101 tag modifier (and not an attribute) that old consumers won't be
13102 able to handle. */
13103 if (dwarf_version < 3)
13104 cv_quals &= ~TYPE_QUAL_RESTRICT;
13105
13106 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13107 if (dwarf_version < 5)
13108 cv_quals &= ~TYPE_QUAL_ATOMIC;
13109
13110 /* See if we already have the appropriately qualified variant of
13111 this type. */
13112 qualified_type = get_qualified_type (type, cv_quals);
13113
13114 if (qualified_type == sizetype)
13115 {
13116 /* Try not to expose the internal sizetype type's name. */
13117 if (TYPE_NAME (qualified_type)
13118 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13119 {
13120 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13121
13122 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13123 && (TYPE_PRECISION (t)
13124 == TYPE_PRECISION (qualified_type))
13125 && (TYPE_UNSIGNED (t)
13126 == TYPE_UNSIGNED (qualified_type)));
13127 qualified_type = t;
13128 }
13129 else if (qualified_type == sizetype
13130 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13131 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13132 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13133 qualified_type = size_type_node;
13134 }
13135
13136 /* If we do, then we can just use its DIE, if it exists. */
13137 if (qualified_type)
13138 {
13139 mod_type_die = lookup_type_die (qualified_type);
13140
13141 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13142 dealt with specially: the DIE with the attribute, if it exists, is
13143 placed immediately after the regular DIE for the same base type. */
13144 if (mod_type_die
13145 && (!reverse_base_type
13146 || ((mod_type_die = mod_type_die->die_sib) != NULL
13147 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13148 return mod_type_die;
13149 }
13150
13151 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13152
13153 /* Handle C typedef types. */
13154 if (name
13155 && TREE_CODE (name) == TYPE_DECL
13156 && DECL_ORIGINAL_TYPE (name)
13157 && !DECL_ARTIFICIAL (name))
13158 {
13159 tree dtype = TREE_TYPE (name);
13160
13161 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13162 if (qualified_type == dtype && !reverse_base_type)
13163 {
13164 tree origin = decl_ultimate_origin (name);
13165
13166 /* Typedef variants that have an abstract origin don't get their own
13167 type DIE (see gen_typedef_die), so fall back on the ultimate
13168 abstract origin instead. */
13169 if (origin != NULL && origin != name)
13170 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13171 context_die);
13172
13173 /* For a named type, use the typedef. */
13174 gen_type_die (qualified_type, context_die);
13175 return lookup_type_die (qualified_type);
13176 }
13177 else
13178 {
13179 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13180 dquals &= cv_qual_mask;
13181 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13182 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13183 /* cv-unqualified version of named type. Just use
13184 the unnamed type to which it refers. */
13185 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13186 reverse, context_die);
13187 /* Else cv-qualified version of named type; fall through. */
13188 }
13189 }
13190
13191 mod_scope = scope_die_for (type, context_die);
13192
13193 if (cv_quals)
13194 {
13195 int sub_quals = 0, first_quals = 0;
13196 unsigned i;
13197 dw_die_ref first = NULL, last = NULL;
13198
13199 /* Determine a lesser qualified type that most closely matches
13200 this one. Then generate DW_TAG_* entries for the remaining
13201 qualifiers. */
13202 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13203 cv_qual_mask);
13204 if (sub_quals && use_debug_types)
13205 {
13206 bool needed = false;
13207 /* If emitting type units, make sure the order of qualifiers
13208 is canonical. Thus, start from unqualified type if
13209 an earlier qualifier is missing in sub_quals, but some later
13210 one is present there. */
13211 for (i = 0; i < dwarf_qual_info_size; i++)
13212 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13213 needed = true;
13214 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13215 {
13216 sub_quals = 0;
13217 break;
13218 }
13219 }
13220 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13221 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13222 {
13223 /* As not all intermediate qualified DIEs have corresponding
13224 tree types, ensure that qualified DIEs in the same scope
13225 as their DW_AT_type are emitted after their DW_AT_type,
13226 only with other qualified DIEs for the same type possibly
13227 in between them. Determine the range of such qualified
13228 DIEs now (first being the base type, last being corresponding
13229 last qualified DIE for it). */
13230 unsigned int count = 0;
13231 first = qualified_die_p (mod_type_die, &first_quals,
13232 dwarf_qual_info_size);
13233 if (first == NULL)
13234 first = mod_type_die;
13235 gcc_assert ((first_quals & ~sub_quals) == 0);
13236 for (count = 0, last = first;
13237 count < (1U << dwarf_qual_info_size);
13238 count++, last = last->die_sib)
13239 {
13240 int quals = 0;
13241 if (last == mod_scope->die_child)
13242 break;
13243 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13244 != first)
13245 break;
13246 }
13247 }
13248
13249 for (i = 0; i < dwarf_qual_info_size; i++)
13250 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13251 {
13252 dw_die_ref d;
13253 if (first && first != last)
13254 {
13255 for (d = first->die_sib; ; d = d->die_sib)
13256 {
13257 int quals = 0;
13258 qualified_die_p (d, &quals, dwarf_qual_info_size);
13259 if (quals == (first_quals | dwarf_qual_info[i].q))
13260 break;
13261 if (d == last)
13262 {
13263 d = NULL;
13264 break;
13265 }
13266 }
13267 if (d)
13268 {
13269 mod_type_die = d;
13270 continue;
13271 }
13272 }
13273 if (first)
13274 {
13275 d = new_die_raw (dwarf_qual_info[i].t);
13276 add_child_die_after (mod_scope, d, last);
13277 last = d;
13278 }
13279 else
13280 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13281 if (mod_type_die)
13282 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13283 mod_type_die = d;
13284 first_quals |= dwarf_qual_info[i].q;
13285 }
13286 }
13287 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13288 {
13289 dwarf_tag tag = DW_TAG_pointer_type;
13290 if (code == REFERENCE_TYPE)
13291 {
13292 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13293 tag = DW_TAG_rvalue_reference_type;
13294 else
13295 tag = DW_TAG_reference_type;
13296 }
13297 mod_type_die = new_die (tag, mod_scope, type);
13298
13299 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13300 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13301 add_alignment_attribute (mod_type_die, type);
13302 item_type = TREE_TYPE (type);
13303
13304 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13305 if (!ADDR_SPACE_GENERIC_P (as))
13306 {
13307 int action = targetm.addr_space.debug (as);
13308 if (action >= 0)
13309 {
13310 /* Positive values indicate an address_class. */
13311 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13312 }
13313 else
13314 {
13315 /* Negative values indicate an (inverted) segment base reg. */
13316 dw_loc_descr_ref d
13317 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13318 add_AT_loc (mod_type_die, DW_AT_segment, d);
13319 }
13320 }
13321 }
13322 else if (code == INTEGER_TYPE
13323 && TREE_TYPE (type) != NULL_TREE
13324 && subrange_type_for_debug_p (type, &low, &high))
13325 {
13326 tree bias = NULL_TREE;
13327 if (lang_hooks.types.get_type_bias)
13328 bias = lang_hooks.types.get_type_bias (type);
13329 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13330 item_type = TREE_TYPE (type);
13331 }
13332 else if (is_base_type (type))
13333 {
13334 mod_type_die = base_type_die (type, reverse);
13335
13336 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13337 if (reverse_base_type)
13338 {
13339 dw_die_ref after_die
13340 = modified_type_die (type, cv_quals, false, context_die);
13341 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13342 }
13343 else
13344 add_child_die (comp_unit_die (), mod_type_die);
13345
13346 add_pubtype (type, mod_type_die);
13347 }
13348 else
13349 {
13350 gen_type_die (type, context_die);
13351
13352 /* We have to get the type_main_variant here (and pass that to the
13353 `lookup_type_die' routine) because the ..._TYPE node we have
13354 might simply be a *copy* of some original type node (where the
13355 copy was created to help us keep track of typedef names) and
13356 that copy might have a different TYPE_UID from the original
13357 ..._TYPE node. */
13358 if (TREE_CODE (type) == FUNCTION_TYPE
13359 || TREE_CODE (type) == METHOD_TYPE)
13360 {
13361 /* For function/method types, we can't just use type_main_variant here,
13362 because in C++ the variants can have different ref-qualifiers;
13363 try to canonicalize instead. */
13364 tree main = TYPE_MAIN_VARIANT (type);
13365 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13366 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13367 && check_base_type (t, main)
13368 && check_lang_type (t, type))
13369 return lookup_type_die (t);
13370 return lookup_type_die (type);
13371 }
13372 else if (TREE_CODE (type) != VECTOR_TYPE
13373 && TREE_CODE (type) != ARRAY_TYPE)
13374 return lookup_type_die (type_main_variant (type));
13375 else
13376 /* Vectors have the debugging information in the type,
13377 not the main variant. */
13378 return lookup_type_die (type);
13379 }
13380
13381 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13382 don't output a DW_TAG_typedef, since there isn't one in the
13383 user's program; just attach a DW_AT_name to the type.
13384 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13385 if the base type already has the same name. */
13386 if (name
13387 && ((TREE_CODE (name) != TYPE_DECL
13388 && (qualified_type == TYPE_MAIN_VARIANT (type)
13389 || (cv_quals == TYPE_UNQUALIFIED)))
13390 || (TREE_CODE (name) == TYPE_DECL
13391 && TREE_TYPE (name) == qualified_type
13392 && DECL_NAME (name))))
13393 {
13394 if (TREE_CODE (name) == TYPE_DECL)
13395 /* Could just call add_name_and_src_coords_attributes here,
13396 but since this is a builtin type it doesn't have any
13397 useful source coordinates anyway. */
13398 name = DECL_NAME (name);
13399 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13400 }
13401 /* This probably indicates a bug. */
13402 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13403 {
13404 name = TYPE_IDENTIFIER (type);
13405 add_name_attribute (mod_type_die,
13406 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13407 }
13408
13409 if (qualified_type && !reverse_base_type)
13410 equate_type_number_to_die (qualified_type, mod_type_die);
13411
13412 if (item_type)
13413 /* We must do this after the equate_type_number_to_die call, in case
13414 this is a recursive type. This ensures that the modified_type_die
13415 recursion will terminate even if the type is recursive. Recursive
13416 types are possible in Ada. */
13417 sub_die = modified_type_die (item_type,
13418 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13419 reverse,
13420 context_die);
13421
13422 if (sub_die != NULL)
13423 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13424
13425 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13426 if (TYPE_ARTIFICIAL (type))
13427 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13428
13429 return mod_type_die;
13430 }
13431
13432 /* Generate DIEs for the generic parameters of T.
13433 T must be either a generic type or a generic function.
13434 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13435
13436 static void
13437 gen_generic_params_dies (tree t)
13438 {
13439 tree parms, args;
13440 int parms_num, i;
13441 dw_die_ref die = NULL;
13442 int non_default;
13443
13444 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13445 return;
13446
13447 if (TYPE_P (t))
13448 die = lookup_type_die (t);
13449 else if (DECL_P (t))
13450 die = lookup_decl_die (t);
13451
13452 gcc_assert (die);
13453
13454 parms = lang_hooks.get_innermost_generic_parms (t);
13455 if (!parms)
13456 /* T has no generic parameters. It means T is neither a generic type
13457 nor a generic function. End of story. */
13458 return;
13459
13460 parms_num = TREE_VEC_LENGTH (parms);
13461 args = lang_hooks.get_innermost_generic_args (t);
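  /* When the argument vector's TREE_CHAIN is an INTEGER_CST, it records how
     many of the arguments were explicitly given; any argument at or beyond
     that index is marked with DW_AT_default_value below.  */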
13462 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13463 non_default = int_cst_value (TREE_CHAIN (args));
13464 else
13465 non_default = TREE_VEC_LENGTH (args);
13466 for (i = 0; i < parms_num; i++)
13467 {
13468 tree parm, arg, arg_pack_elems;
13469 dw_die_ref parm_die;
13470
13471 parm = TREE_VEC_ELT (parms, i);
13472 arg = TREE_VEC_ELT (args, i);
13473 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13474 gcc_assert (parm && TREE_VALUE (parm) && arg);
13475
13476 if (parm && TREE_VALUE (parm) && arg)
13477 {
13478 /* If PARM represents a template parameter pack,
13479 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13480 by DW_TAG_template_*_parameter DIEs for the argument
13481 pack elements of ARG. Note that ARG would then be
13482 an argument pack. */
13483 if (arg_pack_elems)
13484 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13485 arg_pack_elems,
13486 die);
13487 else
13488 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13489 true /* emit name */, die);
13490 if (i >= non_default)
13491 add_AT_flag (parm_die, DW_AT_default_value, 1);
13492 }
13493 }
13494 }
13495
13496 /* Create and return a DIE for PARM which should be
13497 the representation of a generic type parameter.
13498 For instance, in the C++ front end, PARM would be a template parameter.
13499 ARG is the argument to PARM.
13500 EMIT_NAME_P: if true, the DIE will have a DW_AT_name attribute set to
13501 the name of PARM.
13502 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13503 as a child node. */
13504
13505 static dw_die_ref
13506 generic_parameter_die (tree parm, tree arg,
13507 bool emit_name_p,
13508 dw_die_ref parent_die)
13509 {
13510 dw_die_ref tmpl_die = NULL;
13511 const char *name = NULL;
13512
13513 if (!parm || !DECL_NAME (parm) || !arg)
13514 return NULL;
13515
13516 /* We support non-type generic parameters and arguments,
13517 type generic parameters and arguments, as well as
13518 generic generic parameters (a.k.a. template template parameters in C++)
13519 and arguments. */
13520 if (TREE_CODE (parm) == PARM_DECL)
13521 /* PARM is a nontype generic parameter */
13522 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13523 else if (TREE_CODE (parm) == TYPE_DECL)
13524 /* PARM is a type generic parameter. */
13525 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13526 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13527 /* PARM is a generic generic parameter.
13528 Its DIE is a GNU extension. It shall have a
13529 DW_AT_name attribute to represent the name of the template template
13530 parameter, and a DW_AT_GNU_template_name attribute to represent the
13531 name of the template template argument. */
13532 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13533 parent_die, parm);
13534 else
13535 gcc_unreachable ();
13536
13537 if (tmpl_die)
13538 {
13539 tree tmpl_type;
13540
13541 /* If PARM is a generic parameter pack, it means we are
13542 emitting debug info for a template argument pack element.
13543 In other terms, ARG is a template argument pack element.
13544 In that case, we don't emit any DW_AT_name attribute for
13545 the die. */
13546 if (emit_name_p)
13547 {
13548 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13549 gcc_assert (name);
13550 add_AT_string (tmpl_die, DW_AT_name, name);
13551 }
13552
13553 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13554 {
13555 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13556 TMPL_DIE should have a child DW_AT_type attribute that is set
13557 to the type of the argument to PARM, which is ARG.
13558 If PARM is a type generic parameter, TMPL_DIE should have a
13559 child DW_AT_type that is set to ARG. */
13560 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13561 add_type_attribute (tmpl_die, tmpl_type,
13562 (TREE_THIS_VOLATILE (tmpl_type)
13563 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13564 false, parent_die);
13565 }
13566 else
13567 {
13568 /* So TMPL_DIE is a DIE representing a generic generic parameter,
13569 a.k.a. a template template parameter in C++, and ARG is a
13570 template. */
13571
13572 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13573 to the name of the argument. */
13574 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13575 if (name)
13576 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13577 }
13578
13579 if (TREE_CODE (parm) == PARM_DECL)
13580 /* So PARM is a non-type generic parameter.
13581 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13582 attribute of TMPL_DIE whose value represents the value
13583 of ARG.
13584 We must be careful here:
13585 the value of ARG might reference some function decls.
13586 We might currently be emitting debug info for a generic
13587 type, and since types are emitted before function decls, we don't
13588 know whether the function decls referenced by ARG will actually be
13589 emitted after the cgraph computations.
13590 So we must defer the generation of the DW_AT_const_value until
13591 after cgraph is ready. */
13592 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13593 }
13594
13595 return tmpl_die;
13596 }
13597
13598 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE
13599 representing PARM_PACK, a template parameter pack. The returned DIE
13600 will be a child DIE of PARENT_DIE. */
13601
13602 static dw_die_ref
13603 template_parameter_pack_die (tree parm_pack,
13604 tree parm_pack_args,
13605 dw_die_ref parent_die)
13606 {
13607 dw_die_ref die;
13608 int j;
13609
13610 gcc_assert (parent_die && parm_pack);
13611
13612 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13613 add_name_and_src_coords_attributes (die, parm_pack);
13614 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13615 generic_parameter_die (parm_pack,
13616 TREE_VEC_ELT (parm_pack_args, j),
13617 false /* Don't emit DW_AT_name */,
13618 die);
13619 return die;
13620 }
13621
13622 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13623 an enumerated type. */
13624
13625 static inline int
13626 type_is_enum (const_tree type)
13627 {
13628 return TREE_CODE (type) == ENUMERAL_TYPE;
13629 }
13630
13631 /* Return the DBX register number described by a given RTL node. */
13632
13633 static unsigned int
13634 dbx_reg_number (const_rtx rtl)
13635 {
13636 unsigned regno = REGNO (rtl);
13637
13638 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13639
13640 #ifdef LEAF_REG_REMAP
13641 if (crtl->uses_only_leaf_regs)
13642 {
13643 int leaf_reg = LEAF_REG_REMAP (regno);
13644 if (leaf_reg != -1)
13645 regno = (unsigned) leaf_reg;
13646 }
13647 #endif
13648
13649 regno = DBX_REGISTER_NUMBER (regno);
13650 gcc_assert (regno != INVALID_REGNUM);
13651 return regno;
13652 }
13653
13654 /* Optionally add a DW_OP_piece term to a location description expression.
13655 DW_OP_piece is only added if the location description expression
13656 does not already end with DW_OP_piece. */
13657
13658 static void
13659 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13660 {
13661 dw_loc_descr_ref loc;
13662
13663 if (*list_head != NULL)
13664 {
13665 /* Find the end of the chain. */
13666 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13667 ;
13668
13669 if (loc->dw_loc_opc != DW_OP_piece)
13670 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13671 }
13672 }
13673
13674 /* Return a location descriptor that designates a machine register or
13675 zero if there is none. */
13676
13677 static dw_loc_descr_ref
13678 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13679 {
13680 rtx regs;
13681
13682 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13683 return 0;
13684
13685 /* We only use "frame base" when we're sure we're talking about the
13686 post-prologue local stack frame. We do this by *not* running
13687 register elimination until this point, and recognizing the special
13688 argument pointer and soft frame pointer rtx's.
13689 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13690 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13691 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13692 {
13693 dw_loc_descr_ref result = NULL;
13694
13695 if (dwarf_version >= 4 || !dwarf_strict)
13696 {
13697 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13698 initialized);
13699 if (result)
13700 add_loc_descr (&result,
13701 new_loc_descr (DW_OP_stack_value, 0, 0));
13702 }
13703 return result;
13704 }
13705
13706 regs = targetm.dwarf_register_span (rtl);
13707
13708 if (REG_NREGS (rtl) > 1 || regs)
13709 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13710 else
13711 {
13712 unsigned int dbx_regnum = dbx_reg_number (rtl);
13713 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13714 return 0;
13715 return one_reg_loc_descriptor (dbx_regnum, initialized);
13716 }
13717 }
13718
13719 /* Return a location descriptor that designates a machine register for
13720 a given hard register number. */
13721
13722 static dw_loc_descr_ref
13723 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13724 {
13725 dw_loc_descr_ref reg_loc_descr;
13726
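  /* Registers 0-31 have dedicated one-byte DW_OP_reg0..DW_OP_reg31 opcodes;
     larger register numbers need DW_OP_regx with a uleb128 operand.  */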
13727 if (regno <= 31)
13728 reg_loc_descr
13729 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13730 else
13731 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13732
13733 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13734 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13735
13736 return reg_loc_descr;
13737 }
13738
13739 /* Given an RTL of a register, return a location descriptor that
13740 designates a value that spans more than one register. */
13741
13742 static dw_loc_descr_ref
13743 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13744 enum var_init_status initialized)
13745 {
13746 int size, i;
13747 dw_loc_descr_ref loc_result = NULL;
13748
13749 /* Simple, contiguous registers. */
13750 if (regs == NULL_RTX)
13751 {
13752 unsigned reg = REGNO (rtl);
13753 int nregs;
13754
13755 #ifdef LEAF_REG_REMAP
13756 if (crtl->uses_only_leaf_regs)
13757 {
13758 int leaf_reg = LEAF_REG_REMAP (reg);
13759 if (leaf_reg != -1)
13760 reg = (unsigned) leaf_reg;
13761 }
13762 #endif
13763
13764 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13765 nregs = REG_NREGS (rtl);
13766
13767 /* At present we only track constant-sized pieces. */
13768 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13769 return NULL;
13770 size /= nregs;
13771
13772 loc_result = NULL;
13773 while (nregs--)
13774 {
13775 dw_loc_descr_ref t;
13776
13777 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13778 VAR_INIT_STATUS_INITIALIZED);
13779 add_loc_descr (&loc_result, t);
13780 add_loc_descr_op_piece (&loc_result, size);
13781 ++reg;
13782 }
13783 return loc_result;
13784 }
13785
13786 /* Now onto stupid register sets in non-contiguous locations. */
13787
13788 gcc_assert (GET_CODE (regs) == PARALLEL);
13789
13790 /* At present we only track constant-sized pieces. */
13791 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13792 return NULL;
13793 loc_result = NULL;
13794
13795 for (i = 0; i < XVECLEN (regs, 0); ++i)
13796 {
13797 dw_loc_descr_ref t;
13798
13799 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13800 VAR_INIT_STATUS_INITIALIZED);
13801 add_loc_descr (&loc_result, t);
13802 add_loc_descr_op_piece (&loc_result, size);
13803 }
13804
13805 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13806 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13807 return loc_result;
13808 }
13809
13810 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13811
13812 /* Return a location descriptor that designates a constant i,
13813 as a compound operation from constant (i >> shift), constant shift
13814 and DW_OP_shl. */
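/* For instance, with a 64-bit HOST_WIDE_INT, the constant 0x80000000 can be
   emitted as DW_OP_lit16 DW_OP_lit27 DW_OP_shl (3 bytes) rather than as
   DW_OP_const4u 0x80000000 (5 bytes).  */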
13815
13816 static dw_loc_descr_ref
13817 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13818 {
13819 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13820 add_loc_descr (&ret, int_loc_descriptor (shift));
13821 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13822 return ret;
13823 }
13824
13825 /* Return a location descriptor that designates constant POLY_I. */
13826
13827 static dw_loc_descr_ref
13828 int_loc_descriptor (poly_int64 poly_i)
13829 {
13830 enum dwarf_location_atom op;
13831
13832 HOST_WIDE_INT i;
13833 if (!poly_i.is_constant (&i))
13834 {
13835 /* Create location descriptions for the non-constant part and
13836 add any constant offset at the end. */
13837 dw_loc_descr_ref ret = NULL;
13838 HOST_WIDE_INT constant = poly_i.coeffs[0];
13839 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13840 {
13841 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13842 if (coeff != 0)
13843 {
13844 dw_loc_descr_ref start = ret;
13845 unsigned int factor;
13846 int bias;
13847 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13848 (j, &factor, &bias);
13849
13850 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13851 add COEFF * (REGNO / FACTOR) now and subtract
13852 COEFF * BIAS from the final constant part. */
13853 constant -= coeff * bias;
13854 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13855 if (coeff % factor == 0)
13856 coeff /= factor;
13857 else
13858 {
13859 int amount = exact_log2 (factor);
13860 gcc_assert (amount >= 0);
13861 add_loc_descr (&ret, int_loc_descriptor (amount));
13862 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13863 }
13864 if (coeff != 1)
13865 {
13866 add_loc_descr (&ret, int_loc_descriptor (coeff));
13867 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13868 }
13869 if (start)
13870 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13871 }
13872 }
13873 loc_descr_plus_const (&ret, constant);
13874 return ret;
13875 }
13876
13877 /* Pick the smallest representation of a constant, rather than just
13878 defaulting to the LEB encoding. */
13879 if (i >= 0)
13880 {
13881 int clz = clz_hwi (i);
13882 int ctz = ctz_hwi (i);
13883 if (i <= 31)
13884 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13885 else if (i <= 0xff)
13886 op = DW_OP_const1u;
13887 else if (i <= 0xffff)
13888 op = DW_OP_const2u;
13889 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13890 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13891 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13892 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13893 while DW_OP_const4u is 5 bytes. */
13894 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13895 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13896 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13897 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13898 while DW_OP_const4u is 5 bytes. */
13899 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13900
13901 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13902 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13903 <= 4)
13904 {
13905 /* As i >= 2**31, the double cast above will yield a negative number.
13906 Since wrapping is defined in DWARF expressions we can output big
13907 positive integers as small negative ones, regardless of the size
13908 of host wide ints.
13909
13910 Here, since the evaluator will handle 32-bit values and since i >=
13911 2**31, we know it's going to be interpreted as a negative literal:
13912 store it that way if it takes fewer than the 5 bytes DW_OP_const4u would. */
13913 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13914 }
13915 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13916 op = DW_OP_const4u;
13917
13918 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13919 least 6 bytes: see if we can do better before falling back to it. */
13920 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13921 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13922 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13923 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13924 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13925 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13926 >= HOST_BITS_PER_WIDE_INT)
13927 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13928 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13929 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13930 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13931 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13932 && size_of_uleb128 (i) > 6)
13933 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13934 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13935 else
13936 op = DW_OP_constu;
13937 }
13938 else
13939 {
13940 if (i >= -0x80)
13941 op = DW_OP_const1s;
13942 else if (i >= -0x8000)
13943 op = DW_OP_const2s;
13944 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13945 {
13946 if (size_of_int_loc_descriptor (i) < 5)
13947 {
13948 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13949 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13950 return ret;
13951 }
13952 op = DW_OP_const4s;
13953 }
13954 else
13955 {
13956 if (size_of_int_loc_descriptor (i)
13957 < (unsigned long) 1 + size_of_sleb128 (i))
13958 {
13959 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13960 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13961 return ret;
13962 }
13963 op = DW_OP_consts;
13964 }
13965 }
13966
13967 return new_loc_descr (op, i, 0);
13968 }
13969
13970 /* Likewise, for unsigned constants. */
13971
13972 static dw_loc_descr_ref
13973 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13974 {
13975 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13976 const unsigned HOST_WIDE_INT max_uint
13977 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13978
13979 /* If possible, use the clever signed constants handling. */
13980 if (i <= max_int)
13981 return int_loc_descriptor ((HOST_WIDE_INT) i);
13982
13983 /* Here, we are left with positive numbers that cannot be represented as
13984 HOST_WIDE_INT, i.e.:
13985 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13986
13987 Using a DW_OP_const4u/const8u operation to encode them consumes a lot
13988 of bytes, whereas it may be better to output a negative integer: thanks
13989 to integer wrapping, we know that:
13990 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13991 = x - 2 * (max (HOST_WIDE_INT) + 1)
13992 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13993 small negative integers. Let's try that in cases where it will clearly
13994 improve the encoding: there is no gain turning DW_OP_const4u into
13995 DW_OP_const4s. */
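  /* For instance, assuming DWARF2_ADDR_SIZE == 8 and a 64-bit HOST_WIDE_INT,
     i == 0xffffffffffffffff yields second_shift == -1 below, which
     int_loc_descriptor encodes as DW_OP_const1s -1 (2 bytes) instead of
     DW_OP_const8u (9 bytes).  */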
13996 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13997 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13998 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13999 {
14000 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14001
14002 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14003 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14004 const HOST_WIDE_INT second_shift
14005 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14006
14007 /* So we finally have:
14008 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14009 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14010 return int_loc_descriptor (second_shift);
14011 }
14012
14013 /* Last chance: fallback to a simple constant operation. */
14014 return new_loc_descr
14015 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14016 ? DW_OP_const4u
14017 : DW_OP_const8u,
14018 i, 0);
14019 }
14020
14021 /* Generate and return a location description that computes the unsigned
14022 comparison of the two stack top entries (a OP b where b is the top-most
14023 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14024 LE_EXPR, GT_EXPR or GE_EXPR. */
14025
14026 static dw_loc_descr_ref
14027 uint_comparison_loc_list (enum tree_code kind)
14028 {
14029 enum dwarf_location_atom op, flip_op;
14030 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14031
14032 switch (kind)
14033 {
14034 case LT_EXPR:
14035 op = DW_OP_lt;
14036 break;
14037 case LE_EXPR:
14038 op = DW_OP_le;
14039 break;
14040 case GT_EXPR:
14041 op = DW_OP_gt;
14042 break;
14043 case GE_EXPR:
14044 op = DW_OP_ge;
14045 break;
14046 default:
14047 gcc_unreachable ();
14048 }
14049
14050 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14051 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14052
14053 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14054 possible to perform unsigned comparisons: we just have to distinguish
14055 two cases:
14056
14057 1. when a and b have the same sign (as signed integers); then we should
14058 return: a OP(signed) b;
14059
14060 2. when a is a negative signed integer while b is a positive one, then a
14061 is a greater unsigned integer than b; likewise when a and b's roles
14062 are flipped.
14063
14064 So first, compare the sign of the two operands. */
14065 ret = new_loc_descr (DW_OP_over, 0, 0);
14066 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14067 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14068 /* If they have different signs (i.e. they have different sign bits), then
14069 the stack top value has now the sign bit set and thus it's smaller than
14070 zero. */
14071 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14072 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14073 add_loc_descr (&ret, bra_node);
14074
14075 /* We are in case 1. At this point, we know both operands have the same
14076 sign, so it's safe to use the built-in signed comparison. */
14077 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14078 add_loc_descr (&ret, jmp_node);
14079
14080 /* We are in case 2. Here, we know both operands do not have the same sign,
14081 so we have to flip the signed comparison. */
14082 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14083 tmp = new_loc_descr (flip_op, 0, 0);
14084 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14085 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14086 add_loc_descr (&ret, tmp);
14087
14088 /* This dummy operation is necessary to make the two branches join. */
14089 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14090 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14091 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14092 add_loc_descr (&ret, tmp);
14093
14094 return ret;
14095 }
14096
14097 /* Likewise, but takes the location description lists (might be destructive on
14098 them). Return NULL if either is NULL or if concatenation fails. */
14099
14100 static dw_loc_list_ref
14101 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14102 enum tree_code kind)
14103 {
14104 if (left == NULL || right == NULL)
14105 return NULL;
14106
14107 add_loc_list (&left, right);
14108 if (left == NULL)
14109 return NULL;
14110
14111 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14112 return left;
14113 }
14114
14115 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14116 without actually allocating it. */
14117
14118 static unsigned long
14119 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14120 {
14121 return size_of_int_loc_descriptor (i >> shift)
14122 + size_of_int_loc_descriptor (shift)
14123 + 1;
14124 }
14125
14126 /* Return size_of_locs (int_loc_descriptor (i)) without
14127 actually allocating it. */
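/* For instance, 31 costs 1 byte (DW_OP_lit31), 300 costs 3 bytes
   (DW_OP_const2u <300>) and 70000 costs 5 bytes (DW_OP_const4u <70000>),
   mirroring the encodings int_loc_descriptor picks for those values.  */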
14128
14129 static unsigned long
14130 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14131 {
14132 unsigned long s;
14133
14134 if (i >= 0)
14135 {
14136 int clz, ctz;
14137 if (i <= 31)
14138 return 1;
14139 else if (i <= 0xff)
14140 return 2;
14141 else if (i <= 0xffff)
14142 return 3;
14143 clz = clz_hwi (i);
14144 ctz = ctz_hwi (i);
14145 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14146 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14147 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14148 - clz - 5);
14149 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14150 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14151 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14152 - clz - 8);
14153 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14154 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14155 <= 4)
14156 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14157 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14158 return 5;
14159 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14160 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14161 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14162 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14163 - clz - 8);
14164 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14165 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14166 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14167 - clz - 16);
14168 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14169 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14170 && s > 6)
14171 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14172 - clz - 32);
14173 else
14174 return 1 + s;
14175 }
14176 else
14177 {
14178 if (i >= -0x80)
14179 return 2;
14180 else if (i >= -0x8000)
14181 return 3;
14182 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14183 {
14184 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14185 {
14186 s = size_of_int_loc_descriptor (-i) + 1;
14187 if (s < 5)
14188 return s;
14189 }
14190 return 5;
14191 }
14192 else
14193 {
14194 unsigned long r = 1 + size_of_sleb128 (i);
14195 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14196 {
14197 s = size_of_int_loc_descriptor (-i) + 1;
14198 if (s < r)
14199 return s;
14200 }
14201 return r;
14202 }
14203 }
14204 }
14205
14206 /* Return loc description representing "address" of integer value.
14207 This can appear only as toplevel expression. */
14208
14209 static dw_loc_descr_ref
14210 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14211 {
14212 int litsize;
14213 dw_loc_descr_ref loc_result = NULL;
14214
14215 if (!(dwarf_version >= 4 || !dwarf_strict))
14216 return NULL;
14217
14218 litsize = size_of_int_loc_descriptor (i);
14219 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14220 is more compact. For DW_OP_stack_value we need:
14221 litsize + 1 (DW_OP_stack_value)
14222 and for DW_OP_implicit_value:
14223 1 (DW_OP_implicit_value) + 1 (length) + size. */
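     For instance, with I == 5 and SIZE == 4, DW_OP_lit5 DW_OP_stack_value
     takes 2 bytes while DW_OP_implicit_value would take 6, so the
     DW_OP_stack_value form is picked.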
14224 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14225 {
14226 loc_result = int_loc_descriptor (i);
14227 add_loc_descr (&loc_result,
14228 new_loc_descr (DW_OP_stack_value, 0, 0));
14229 return loc_result;
14230 }
14231
14232 loc_result = new_loc_descr (DW_OP_implicit_value,
14233 size, 0);
14234 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14235 loc_result->dw_loc_oprnd2.v.val_int = i;
14236 return loc_result;
14237 }
14238
14239 /* Return a location descriptor that designates a base+offset location. */
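/* For illustration, a local at frame-base offset 8 typically yields
   DW_OP_fbreg <8>, while a base register other than the frame base yields
   a DW_OP_breg0..DW_OP_breg31 (or DW_OP_bregx) descriptor with the offset
   folded into it.  */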
14240
14241 static dw_loc_descr_ref
14242 based_loc_descr (rtx reg, poly_int64 offset,
14243 enum var_init_status initialized)
14244 {
14245 unsigned int regno;
14246 dw_loc_descr_ref result;
14247 dw_fde_ref fde = cfun->fde;
14248
14249 /* We only use "frame base" when we're sure we're talking about the
14250 post-prologue local stack frame. We do this by *not* running
14251 register elimination until this point, and recognizing the special
14252 argument pointer and soft frame pointer rtx's. */
14253 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14254 {
14255 rtx elim = (ira_use_lra_p
14256 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14257 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14258
14259 if (elim != reg)
14260 {
14261 elim = strip_offset_and_add (elim, &offset);
14262 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14263 && (elim == hard_frame_pointer_rtx
14264 || elim == stack_pointer_rtx))
14265 || elim == (frame_pointer_needed
14266 ? hard_frame_pointer_rtx
14267 : stack_pointer_rtx));
14268
14269 /* If drap register is used to align stack, use frame
14270 pointer + offset to access stack variables. If stack
14271 is aligned without drap, use stack pointer + offset to
14272 access stack variables. */
14273 if (crtl->stack_realign_tried
14274 && reg == frame_pointer_rtx)
14275 {
14276 int base_reg
14277 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14278 ? HARD_FRAME_POINTER_REGNUM
14279 : REGNO (elim));
14280 return new_reg_loc_descr (base_reg, offset);
14281 }
14282
14283 gcc_assert (frame_pointer_fb_offset_valid);
14284 offset += frame_pointer_fb_offset;
14285 HOST_WIDE_INT const_offset;
14286 if (offset.is_constant (&const_offset))
14287 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14288 else
14289 {
14290 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14291 loc_descr_plus_const (&ret, offset);
14292 return ret;
14293 }
14294 }
14295 }
14296
14297 regno = REGNO (reg);
14298 #ifdef LEAF_REG_REMAP
14299 if (crtl->uses_only_leaf_regs)
14300 {
14301 int leaf_reg = LEAF_REG_REMAP (regno);
14302 if (leaf_reg != -1)
14303 regno = (unsigned) leaf_reg;
14304 }
14305 #endif
14306 regno = DWARF_FRAME_REGNUM (regno);
14307
14308 HOST_WIDE_INT const_offset;
14309 if (!optimize && fde
14310 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14311 && offset.is_constant (&const_offset))
14312 {
14313 /* Use cfa+offset to represent the location of arguments passed
14314 on the stack when drap is used to align stack.
14315 	 Only do this when not optimizing; for optimized code, var-tracking
14316 	 is supposed to track where the arguments live, and the register
14317 	 used as vdrap or drap in one spot might be used for something
14318 	 else in another part of the routine.  */
14319 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14320 }
14321
14322 result = new_reg_loc_descr (regno, offset);
14323
14324 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14325 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14326
14327 return result;
14328 }
14329
14330 /* Return true if this RTL expression describes a base+offset calculation. */
14331
14332 static inline int
14333 is_based_loc (const_rtx rtl)
14334 {
14335 return (GET_CODE (rtl) == PLUS
14336 && ((REG_P (XEXP (rtl, 0))
14337 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14338 && CONST_INT_P (XEXP (rtl, 1)))));
14339 }
14340
14341 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14342 failed. */
14343
14344 static dw_loc_descr_ref
14345 tls_mem_loc_descriptor (rtx mem)
14346 {
14347 tree base;
14348 dw_loc_descr_ref loc_result;
14349
14350 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14351 return NULL;
14352
14353 base = get_base_address (MEM_EXPR (mem));
14354 if (base == NULL
14355 || !VAR_P (base)
14356 || !DECL_THREAD_LOCAL_P (base))
14357 return NULL;
14358
14359 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14360 if (loc_result == NULL)
14361 return NULL;
14362
14363 if (maybe_ne (MEM_OFFSET (mem), 0))
14364 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14365
14366 return loc_result;
14367 }
14368
14369 /* Output debug info about the reason why we failed to expand an expression
14370    as a DWARF expression.  */
14371
14372 static void
14373 expansion_failed (tree expr, rtx rtl, char const *reason)
14374 {
14375 if (dump_file && (dump_flags & TDF_DETAILS))
14376 {
14377 fprintf (dump_file, "Failed to expand as dwarf: ");
14378 if (expr)
14379 print_generic_expr (dump_file, expr, dump_flags);
14380 if (rtl)
14381 {
14382 fprintf (dump_file, "\n");
14383 print_rtl (dump_file, rtl);
14384 }
14385 fprintf (dump_file, "\nReason: %s\n", reason);
14386 }
14387 }
14388
14389 /* Helper function for const_ok_for_output. */
14390
14391 static bool
14392 const_ok_for_output_1 (rtx rtl)
14393 {
14394 if (targetm.const_not_ok_for_debug_p (rtl))
14395 {
14396 if (GET_CODE (rtl) != UNSPEC)
14397 {
14398 expansion_failed (NULL_TREE, rtl,
14399 "Expression rejected for debug by the backend.\n");
14400 return false;
14401 }
14402
14403 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14404 the target hook doesn't explicitly allow it in debug info, assume
14405 we can't express it in the debug info. */
14406 /* Don't complain about TLS UNSPECs, those are just too hard to
14407 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14408 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14409 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14410 if (flag_checking
14411 && (XVECLEN (rtl, 0) == 0
14412 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14413 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14414 inform (current_function_decl
14415 ? DECL_SOURCE_LOCATION (current_function_decl)
14416 : UNKNOWN_LOCATION,
14417 #if NUM_UNSPEC_VALUES > 0
14418 "non-delegitimized UNSPEC %s (%d) found in variable location",
14419 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14420 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14421 XINT (rtl, 1));
14422 #else
14423 "non-delegitimized UNSPEC %d found in variable location",
14424 XINT (rtl, 1));
14425 #endif
14426 expansion_failed (NULL_TREE, rtl,
14427 "UNSPEC hasn't been delegitimized.\n");
14428 return false;
14429 }
14430
14431 if (CONST_POLY_INT_P (rtl))
14432 return false;
14433
14434 if (targetm.const_not_ok_for_debug_p (rtl))
14435 {
14436 expansion_failed (NULL_TREE, rtl,
14437 "Expression rejected for debug by the backend.\n");
14438 return false;
14439 }
14440
14441 /* FIXME: Refer to PR60655. It is possible for simplification
14442 of rtl expressions in var tracking to produce such expressions.
14443 We should really identify / validate expressions
14444 enclosed in CONST that can be handled by assemblers on various
14445 targets and only handle legitimate cases here. */
14446 switch (GET_CODE (rtl))
14447 {
14448 case SYMBOL_REF:
14449 break;
14450 case NOT:
14451 case NEG:
14452 return false;
14453 default:
14454 return true;
14455 }
14456
14457 if (CONSTANT_POOL_ADDRESS_P (rtl))
14458 {
14459 bool marked;
14460 get_pool_constant_mark (rtl, &marked);
14461 /* If all references to this pool constant were optimized away,
14462 it was not output and thus we can't represent it. */
14463 if (!marked)
14464 {
14465 expansion_failed (NULL_TREE, rtl,
14466 "Constant was removed from constant pool.\n");
14467 return false;
14468 }
14469 }
14470
14471 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14472 return false;
14473
14474   /* Avoid references to external symbols in debug info: on several targets
14475      the linker might even refuse to link when linking a shared library,
14476      and in many other cases the relocations for .debug_info/.debug_loc are
14477      dropped, so the address becomes zero anyway.  Hidden symbols, guaranteed
14478      to be defined within the same shared library or executable, are fine.  */
14479 if (SYMBOL_REF_EXTERNAL_P (rtl))
14480 {
14481 tree decl = SYMBOL_REF_DECL (rtl);
14482
14483 if (decl == NULL || !targetm.binds_local_p (decl))
14484 {
14485 expansion_failed (NULL_TREE, rtl,
14486 "Symbol not defined in current TU.\n");
14487 return false;
14488 }
14489 }
14490
14491 return true;
14492 }
14493
14494 /* Return true if constant RTL can be emitted in DW_OP_addr or
14495 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14496 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14497
14498 static bool
14499 const_ok_for_output (rtx rtl)
14500 {
14501 if (GET_CODE (rtl) == SYMBOL_REF)
14502 return const_ok_for_output_1 (rtl);
14503
14504 if (GET_CODE (rtl) == CONST)
14505 {
14506 subrtx_var_iterator::array_type array;
14507 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14508 if (!const_ok_for_output_1 (*iter))
14509 return false;
14510 return true;
14511 }
14512
14513 return true;
14514 }
14515
14516 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14517 if possible, NULL otherwise. */
14518
14519 static dw_die_ref
14520 base_type_for_mode (machine_mode mode, bool unsignedp)
14521 {
14522 dw_die_ref type_die;
14523 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14524
14525 if (type == NULL)
14526 return NULL;
14527 switch (TREE_CODE (type))
14528 {
14529 case INTEGER_TYPE:
14530 case REAL_TYPE:
14531 break;
14532 default:
14533 return NULL;
14534 }
14535 type_die = lookup_type_die (type);
14536 if (!type_die)
14537 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14538 comp_unit_die ());
14539 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14540 return NULL;
14541 return type_die;
14542 }
14543
14544 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14545 type matching MODE, or, if MODE is narrower than or as wide as
14546 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14547 possible. */
14548
14549 static dw_loc_descr_ref
14550 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14551 {
14552 machine_mode outer_mode = mode;
14553 dw_die_ref type_die;
14554 dw_loc_descr_ref cvt;
14555
14556 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14557 {
14558 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14559 return op;
14560 }
14561 type_die = base_type_for_mode (outer_mode, 1);
14562 if (type_die == NULL)
14563 return NULL;
14564 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14565 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14566 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14567 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14568 add_loc_descr (&op, cvt);
14569 return op;
14570 }
14571
14572 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14573
14574 static dw_loc_descr_ref
14575 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14576 dw_loc_descr_ref op1)
14577 {
14578 dw_loc_descr_ref ret = op0;
14579 add_loc_descr (&ret, op1);
14580 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14581 if (STORE_FLAG_VALUE != 1)
14582 {
14583 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14584 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14585 }
14586 return ret;
14587 }
14588
14589 /* Subroutine of scompare_loc_descriptor for the case in which we're
14590 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14591 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14592
14593 static dw_loc_descr_ref
14594 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14595 scalar_int_mode op_mode,
14596 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14597 {
14598 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14599 dw_loc_descr_ref cvt;
14600
14601 if (type_die == NULL)
14602 return NULL;
14603 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14604 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14605 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14606 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14607 add_loc_descr (&op0, cvt);
14608 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14609 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14610 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14611 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14612 add_loc_descr (&op1, cvt);
14613 return compare_loc_descriptor (op, op0, op1);
14614 }
14615
14616 /* Subroutine of scompare_loc_descriptor for the case in which we're
14617 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14618 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14619
14620 static dw_loc_descr_ref
14621 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14622 scalar_int_mode op_mode,
14623 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14624 {
14625 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14626 /* For eq/ne, if the operands are known to be zero-extended,
14627 there is no need to do the fancy shifting up. */
14628 if (op == DW_OP_eq || op == DW_OP_ne)
14629 {
14630 dw_loc_descr_ref last0, last1;
14631 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14632 ;
14633 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14634 ;
14635 /* deref_size zero extends, and for constants we can check
14636 whether they are zero extended or not. */
14637 if (((last0->dw_loc_opc == DW_OP_deref_size
14638 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14639 || (CONST_INT_P (XEXP (rtl, 0))
14640 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14641 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14642 && ((last1->dw_loc_opc == DW_OP_deref_size
14643 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14644 || (CONST_INT_P (XEXP (rtl, 1))
14645 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14646 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14647 return compare_loc_descriptor (op, op0, op1);
14648
14649 /* EQ/NE comparison against constant in narrower type than
14650 DWARF2_ADDR_SIZE can be performed either as
14651 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14652 DW_OP_{eq,ne}
14653 or
14654 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14655 DW_OP_{eq,ne}. Pick whatever is shorter. */
14656 if (CONST_INT_P (XEXP (rtl, 1))
14657 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14658 && (size_of_int_loc_descriptor (shift) + 1
14659 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14660 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14661 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14662 & GET_MODE_MASK (op_mode))))
14663 {
14664 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14665 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14666 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14667 & GET_MODE_MASK (op_mode));
14668 return compare_loc_descriptor (op, op0, op1);
14669 }
14670 }
14671 add_loc_descr (&op0, int_loc_descriptor (shift));
14672 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14673 if (CONST_INT_P (XEXP (rtl, 1)))
14674 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14675 else
14676 {
14677 add_loc_descr (&op1, int_loc_descriptor (shift));
14678 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14679 }
14680 return compare_loc_descriptor (op, op0, op1);
14681 }
14682
14683 /* Return location descriptor for signed comparison OP RTL.  */
14684
14685 static dw_loc_descr_ref
14686 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14687 machine_mode mem_mode)
14688 {
14689 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14690 dw_loc_descr_ref op0, op1;
14691
14692 if (op_mode == VOIDmode)
14693 op_mode = GET_MODE (XEXP (rtl, 1));
14694 if (op_mode == VOIDmode)
14695 return NULL;
14696
14697 scalar_int_mode int_op_mode;
14698 if (dwarf_strict
14699 && dwarf_version < 5
14700 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14701 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14702 return NULL;
14703
14704 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14705 VAR_INIT_STATUS_INITIALIZED);
14706 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14707 VAR_INIT_STATUS_INITIALIZED);
14708
14709 if (op0 == NULL || op1 == NULL)
14710 return NULL;
14711
14712 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14713 {
14714 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14715 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14716
14717 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14718 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14719 }
14720 return compare_loc_descriptor (op, op0, op1);
14721 }
14722
14723 /* Return location descriptor for unsigned comparison OP RTL. */
14724
14725 static dw_loc_descr_ref
14726 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14727 machine_mode mem_mode)
14728 {
14729 dw_loc_descr_ref op0, op1;
14730
14731 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14732 if (test_op_mode == VOIDmode)
14733 test_op_mode = GET_MODE (XEXP (rtl, 1));
14734
14735 scalar_int_mode op_mode;
14736 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14737 return NULL;
14738
14739 if (dwarf_strict
14740 && dwarf_version < 5
14741 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14742 return NULL;
14743
14744 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14745 VAR_INIT_STATUS_INITIALIZED);
14746 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14747 VAR_INIT_STATUS_INITIALIZED);
14748
14749 if (op0 == NULL || op1 == NULL)
14750 return NULL;
14751
14752 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14753 {
14754 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14755 dw_loc_descr_ref last0, last1;
14756 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14757 ;
14758 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14759 ;
14760 if (CONST_INT_P (XEXP (rtl, 0)))
14761 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14762 /* deref_size zero extends, so no need to mask it again. */
14763 else if (last0->dw_loc_opc != DW_OP_deref_size
14764 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14765 {
14766 add_loc_descr (&op0, int_loc_descriptor (mask));
14767 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14768 }
14769 if (CONST_INT_P (XEXP (rtl, 1)))
14770 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14771 /* deref_size zero extends, so no need to mask it again. */
14772 else if (last1->dw_loc_opc != DW_OP_deref_size
14773 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14774 {
14775 add_loc_descr (&op1, int_loc_descriptor (mask));
14776 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14777 }
14778 }
14779 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14780 {
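      /* When the operands are exactly address-sized, bias both by
	 2**(bits-1), i.e. flip their sign bits; signed comparison of the
	 biased values then matches unsigned comparison of the originals.
	 E.g. (using 8-bit values just for brevity) 0xf0 vs 0x10 unsigned
	 becomes 0x70 vs 0x90 (= -112) signed, and 0x70 > -112 as required.  */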
14781 HOST_WIDE_INT bias = 1;
14782 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14783 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14784 if (CONST_INT_P (XEXP (rtl, 1)))
14785 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14786 + INTVAL (XEXP (rtl, 1)));
14787 else
14788 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14789 bias, 0));
14790 }
14791 return compare_loc_descriptor (op, op0, op1);
14792 }
14793
14794 /* Return location descriptor for {U,S}{MIN,MAX}. */
14795
14796 static dw_loc_descr_ref
14797 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14798 machine_mode mem_mode)
14799 {
14800 enum dwarf_location_atom op;
14801 dw_loc_descr_ref op0, op1, ret;
14802 dw_loc_descr_ref bra_node, drop_node;
14803
14804 scalar_int_mode int_mode;
14805 if (dwarf_strict
14806 && dwarf_version < 5
14807 && (!is_a <scalar_int_mode> (mode, &int_mode)
14808 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14809 return NULL;
14810
14811 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14812 VAR_INIT_STATUS_INITIALIZED);
14813 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14814 VAR_INIT_STATUS_INITIALIZED);
14815
14816 if (op0 == NULL || op1 == NULL)
14817 return NULL;
14818
14819 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14820 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14821 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14822 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14823 {
14824 /* Checked by the caller. */
14825 int_mode = as_a <scalar_int_mode> (mode);
14826 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14827 {
14828 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14829 add_loc_descr (&op0, int_loc_descriptor (mask));
14830 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14831 add_loc_descr (&op1, int_loc_descriptor (mask));
14832 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14833 }
14834 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14835 {
14836 HOST_WIDE_INT bias = 1;
14837 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14838 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14839 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14840 }
14841 }
14842 else if (is_a <scalar_int_mode> (mode, &int_mode)
14843 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14844 {
14845 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14846 add_loc_descr (&op0, int_loc_descriptor (shift));
14847 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14848 add_loc_descr (&op1, int_loc_descriptor (shift));
14849 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14850 }
14851 else if (is_a <scalar_int_mode> (mode, &int_mode)
14852 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14853 {
14854 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14855 dw_loc_descr_ref cvt;
14856 if (type_die == NULL)
14857 return NULL;
14858 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14859 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14860 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14861 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14862 add_loc_descr (&op0, cvt);
14863 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14864 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14865 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14866 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14867 add_loc_descr (&op1, cvt);
14868 }
14869
14870 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14871 op = DW_OP_lt;
14872 else
14873 op = DW_OP_gt;
14874 ret = op0;
14875 add_loc_descr (&ret, op1);
14876 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14877 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14878 add_loc_descr (&ret, bra_node);
14879 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14880 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14881 add_loc_descr (&ret, drop_node);
14882 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14883 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14884 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14885 && is_a <scalar_int_mode> (mode, &int_mode)
14886 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14887 ret = convert_descriptor_to_mode (int_mode, ret);
14888 return ret;
14889 }
14890
14891 /* Helper function for mem_loc_descriptor.  Perform the binary operation OP
14892    after converting both arguments to TYPE_DIE, then convert the result back
14893    to an unsigned value of MODE (or untyped if MODE fits the address size).  */
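/* The emitted sequence is roughly:
     <op0> DW_OP_convert <TYPE_DIE> <op1> DW_OP_convert <TYPE_DIE> <OP>
     DW_OP_convert <unsigned type for MODE, or operand-less if MODE is narrow>
   which is used, for instance, by the DIV handling in mem_loc_descriptor
   for modes wider than DWARF2_ADDR_SIZE.  */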
14894
14895 static dw_loc_descr_ref
14896 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14897 scalar_int_mode mode, machine_mode mem_mode)
14898 {
14899 dw_loc_descr_ref cvt, op0, op1;
14900
14901 if (type_die == NULL)
14902 return NULL;
14903 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14904 VAR_INIT_STATUS_INITIALIZED);
14905 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14906 VAR_INIT_STATUS_INITIALIZED);
14907 if (op0 == NULL || op1 == NULL)
14908 return NULL;
14909 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14910 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14911 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14912 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14913 add_loc_descr (&op0, cvt);
14914 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14915 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14916 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14917 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14918 add_loc_descr (&op1, cvt);
14919 add_loc_descr (&op0, op1);
14920 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14921 return convert_descriptor_to_mode (mode, op0);
14922 }
14923
14924 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14925 const0 is DW_OP_lit0 or corresponding typed constant,
14926 const1 is DW_OP_lit1 or corresponding typed constant
14927 and constMSB is constant with just the MSB bit set
14928 for the mode):
14929 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14930 L1: const0 DW_OP_swap
14931 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14932 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14933 L3: DW_OP_drop
14934 L4: DW_OP_nop
14935
14936 CTZ is similar:
14937 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14938 L1: const0 DW_OP_swap
14939 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14940 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14941 L3: DW_OP_drop
14942 L4: DW_OP_nop
14943
14944 FFS is similar:
14945 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14946 L1: const1 DW_OP_swap
14947 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14948 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14949 L3: DW_OP_drop
14950 L4: DW_OP_nop */
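/* For illustration, evaluating the CLZ sequence on the 8-bit value 0x13
   (binary 00010011): the L2 loop shifts the value left three times before
   the constMSB test succeeds, incrementing the counter each time, so the
   value left on the stack is 3 == CLZ (0x13) in an 8-bit mode.  */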
14951
14952 static dw_loc_descr_ref
14953 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14954 machine_mode mem_mode)
14955 {
14956 dw_loc_descr_ref op0, ret, tmp;
14957 HOST_WIDE_INT valv;
14958 dw_loc_descr_ref l1jump, l1label;
14959 dw_loc_descr_ref l2jump, l2label;
14960 dw_loc_descr_ref l3jump, l3label;
14961 dw_loc_descr_ref l4jump, l4label;
14962 rtx msb;
14963
14964 if (GET_MODE (XEXP (rtl, 0)) != mode)
14965 return NULL;
14966
14967 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14968 VAR_INIT_STATUS_INITIALIZED);
14969 if (op0 == NULL)
14970 return NULL;
14971 ret = op0;
14972 if (GET_CODE (rtl) == CLZ)
14973 {
14974 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14975 valv = GET_MODE_BITSIZE (mode);
14976 }
14977 else if (GET_CODE (rtl) == FFS)
14978 valv = 0;
14979 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14980 valv = GET_MODE_BITSIZE (mode);
14981 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14982 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14983 add_loc_descr (&ret, l1jump);
14984 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14985 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14986 VAR_INIT_STATUS_INITIALIZED);
14987 if (tmp == NULL)
14988 return NULL;
14989 add_loc_descr (&ret, tmp);
14990 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14991 add_loc_descr (&ret, l4jump);
14992 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14993 ? const1_rtx : const0_rtx,
14994 mode, mem_mode,
14995 VAR_INIT_STATUS_INITIALIZED);
14996 if (l1label == NULL)
14997 return NULL;
14998 add_loc_descr (&ret, l1label);
14999 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15000 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15001 add_loc_descr (&ret, l2label);
15002 if (GET_CODE (rtl) != CLZ)
15003 msb = const1_rtx;
15004 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15005 msb = GEN_INT (HOST_WIDE_INT_1U
15006 << (GET_MODE_BITSIZE (mode) - 1));
15007 else
15008 msb = immed_wide_int_const
15009 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15010 GET_MODE_PRECISION (mode)), mode);
15011 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15012 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15013 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15014 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15015 else
15016 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15017 VAR_INIT_STATUS_INITIALIZED);
15018 if (tmp == NULL)
15019 return NULL;
15020 add_loc_descr (&ret, tmp);
15021 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15022 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15023 add_loc_descr (&ret, l3jump);
15024 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15025 VAR_INIT_STATUS_INITIALIZED);
15026 if (tmp == NULL)
15027 return NULL;
15028 add_loc_descr (&ret, tmp);
15029 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15030 ? DW_OP_shl : DW_OP_shr, 0, 0));
15031 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15032 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15033 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15034 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15035 add_loc_descr (&ret, l2jump);
15036 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15037 add_loc_descr (&ret, l3label);
15038 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15039 add_loc_descr (&ret, l4label);
15040 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15041 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15042 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15043 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15044 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15045 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15046 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15047 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15048 return ret;
15049 }
15050
15051 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15052 const1 is DW_OP_lit1 or corresponding typed constant):
15053 const0 DW_OP_swap
15054 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15055 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15056 L2: DW_OP_drop
15057
15058 PARITY is similar:
15059 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15060 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15061 L2: DW_OP_drop */
15062
15063 static dw_loc_descr_ref
15064 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15065 machine_mode mem_mode)
15066 {
15067 dw_loc_descr_ref op0, ret, tmp;
15068 dw_loc_descr_ref l1jump, l1label;
15069 dw_loc_descr_ref l2jump, l2label;
15070
15071 if (GET_MODE (XEXP (rtl, 0)) != mode)
15072 return NULL;
15073
15074 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15075 VAR_INIT_STATUS_INITIALIZED);
15076 if (op0 == NULL)
15077 return NULL;
15078 ret = op0;
15079 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15080 VAR_INIT_STATUS_INITIALIZED);
15081 if (tmp == NULL)
15082 return NULL;
15083 add_loc_descr (&ret, tmp);
15084 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15085 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15086 add_loc_descr (&ret, l1label);
15087 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15088 add_loc_descr (&ret, l2jump);
15089 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15090 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15091 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15092 VAR_INIT_STATUS_INITIALIZED);
15093 if (tmp == NULL)
15094 return NULL;
15095 add_loc_descr (&ret, tmp);
15096 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15097 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15098 ? DW_OP_plus : DW_OP_xor, 0, 0));
15099 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15100 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15101 VAR_INIT_STATUS_INITIALIZED);
15102 add_loc_descr (&ret, tmp);
15103 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15104 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15105 add_loc_descr (&ret, l1jump);
15106 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15107 add_loc_descr (&ret, l2label);
15108 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15109 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15110 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15111 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15112 return ret;
15113 }
15114
15115 /* BSWAP (constS is initial shift count, either 56 or 24):
15116 constS const0
15117 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15118 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15119 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15120 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15121 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
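/* For illustration, with a 32-bit mode (constS == 24) and input 0x12345678,
   successive iterations use shift counts 24, 16, 8 and 0, OR-ing in the
   bytes 0x78, 0x56, 0x34 and 0x12 respectively, leaving 0x78563412 on the
   stack.  */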
15122
15123 static dw_loc_descr_ref
15124 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15125 machine_mode mem_mode)
15126 {
15127 dw_loc_descr_ref op0, ret, tmp;
15128 dw_loc_descr_ref l1jump, l1label;
15129 dw_loc_descr_ref l2jump, l2label;
15130
15131 if (BITS_PER_UNIT != 8
15132 || (GET_MODE_BITSIZE (mode) != 32
15133 && GET_MODE_BITSIZE (mode) != 64))
15134 return NULL;
15135
15136 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15137 VAR_INIT_STATUS_INITIALIZED);
15138 if (op0 == NULL)
15139 return NULL;
15140
15141 ret = op0;
15142 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15143 mode, mem_mode,
15144 VAR_INIT_STATUS_INITIALIZED);
15145 if (tmp == NULL)
15146 return NULL;
15147 add_loc_descr (&ret, tmp);
15148 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15149 VAR_INIT_STATUS_INITIALIZED);
15150 if (tmp == NULL)
15151 return NULL;
15152 add_loc_descr (&ret, tmp);
15153 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15154 add_loc_descr (&ret, l1label);
15155 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15156 mode, mem_mode,
15157 VAR_INIT_STATUS_INITIALIZED);
15158 add_loc_descr (&ret, tmp);
15159 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15160 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15161 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15162 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15163 VAR_INIT_STATUS_INITIALIZED);
15164 if (tmp == NULL)
15165 return NULL;
15166 add_loc_descr (&ret, tmp);
15167 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15168 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15169 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15170 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15171 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15172 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15173 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15174 VAR_INIT_STATUS_INITIALIZED);
15175 add_loc_descr (&ret, tmp);
15176 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15177 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15178 add_loc_descr (&ret, l2jump);
15179 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15180 VAR_INIT_STATUS_INITIALIZED);
15181 add_loc_descr (&ret, tmp);
15182 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15183 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15184 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15185 add_loc_descr (&ret, l1jump);
15186 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15187 add_loc_descr (&ret, l2label);
15188 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15189 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15190 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15191 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15192 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15193 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15194 return ret;
15195 }
15196
15197 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15198 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15199 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15200 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15201
15202 ROTATERT is similar:
15203 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15204 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15205 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
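/* For illustration, in an 8-bit mode rotating 0xa5 left by 4 computes
   ((0xa5 << 4) & 0xff) | (0xa5 >> (8 - 4)) = 0x50 | 0x0a = 0x5a.  */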
15206
15207 static dw_loc_descr_ref
15208 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15209 machine_mode mem_mode)
15210 {
15211 rtx rtlop1 = XEXP (rtl, 1);
15212 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15213 int i;
15214
15215 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15216 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15217 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15218 VAR_INIT_STATUS_INITIALIZED);
15219 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15220 VAR_INIT_STATUS_INITIALIZED);
15221 if (op0 == NULL || op1 == NULL)
15222 return NULL;
15223 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15224 for (i = 0; i < 2; i++)
15225 {
15226 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15227 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15228 mode, mem_mode,
15229 VAR_INIT_STATUS_INITIALIZED);
15230 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15231 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15232 ? DW_OP_const4u
15233 : HOST_BITS_PER_WIDE_INT == 64
15234 ? DW_OP_const8u : DW_OP_constu,
15235 GET_MODE_MASK (mode), 0);
15236 else
15237 mask[i] = NULL;
15238 if (mask[i] == NULL)
15239 return NULL;
15240 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15241 }
15242 ret = op0;
15243 add_loc_descr (&ret, op1);
15244 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15245 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15246 if (GET_CODE (rtl) == ROTATERT)
15247 {
15248 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15249 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15250 GET_MODE_BITSIZE (mode), 0));
15251 }
15252 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15253 if (mask[0] != NULL)
15254 add_loc_descr (&ret, mask[0]);
15255 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15256 if (mask[1] != NULL)
15257 {
15258 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15259 add_loc_descr (&ret, mask[1]);
15260 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15261 }
15262 if (GET_CODE (rtl) == ROTATE)
15263 {
15264 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15265 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15266 GET_MODE_BITSIZE (mode), 0));
15267 }
15268 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15269 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15270 return ret;
15271 }
15272
15273 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15274 for DEBUG_PARAMETER_REF RTL. */
15275
15276 static dw_loc_descr_ref
15277 parameter_ref_descriptor (rtx rtl)
15278 {
15279 dw_loc_descr_ref ret;
15280 dw_die_ref ref;
15281
15282 if (dwarf_strict)
15283 return NULL;
15284 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15285 /* With LTO during LTRANS we get the late DIE that refers to the early
15286 DIE, thus we add another indirection here. This seems to confuse
15287 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15288 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15289 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15290 if (ref)
15291 {
15292 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15293 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15294 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15295 }
15296 else
15297 {
15298 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15299 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15300 }
15301 return ret;
15302 }
15303
15304 /* The following routine converts the RTL for a variable or parameter
15305 (resident in memory) into an equivalent Dwarf representation of a
15306 mechanism for getting the address of that same variable onto the top of a
15307 hypothetical "address evaluation" stack.
15308
15309 When creating memory location descriptors, we are effectively transforming
15310 the RTL for a memory-resident object into its Dwarf postfix expression
15311 equivalent. This routine recursively descends an RTL tree, turning
15312 it into Dwarf postfix code as it goes.
15313
15314 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15315
15316 MEM_MODE is the mode of the memory reference, needed to handle some
15317 autoincrement addressing modes.
15318
15319 Return 0 if we can't represent the location. */
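/* For example, the RTL (plus (reg <frame>) (const_int 8)) for a stack slot
   typically becomes DW_OP_fbreg <offset> via based_loc_descr, while
   (mem (reg <ptr>)) becomes the register's DW_OP_breg* descriptor followed
   by DW_OP_deref (or DW_OP_deref_size for narrower modes).  */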
15320
15321 dw_loc_descr_ref
15322 mem_loc_descriptor (rtx rtl, machine_mode mode,
15323 machine_mode mem_mode,
15324 enum var_init_status initialized)
15325 {
15326 dw_loc_descr_ref mem_loc_result = NULL;
15327 enum dwarf_location_atom op;
15328 dw_loc_descr_ref op0, op1;
15329 rtx inner = NULL_RTX;
15330 poly_int64 offset;
15331
15332 if (mode == VOIDmode)
15333 mode = GET_MODE (rtl);
15334
15335 /* Note that for a dynamically sized array, the location we will generate a
15336 description of here will be the lowest numbered location which is
15337 actually within the array. That's *not* necessarily the same as the
15338 zeroth element of the array. */
15339
15340 rtl = targetm.delegitimize_address (rtl);
15341
15342 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15343 return NULL;
15344
15345 scalar_int_mode int_mode, inner_mode, op1_mode;
15346 switch (GET_CODE (rtl))
15347 {
15348 case POST_INC:
15349 case POST_DEC:
15350 case POST_MODIFY:
15351 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15352
15353 case SUBREG:
15354 /* The case of a subreg may arise when we have a local (register)
15355 variable or a formal (register) parameter which doesn't quite fill
15356 up an entire register. For now, just assume that it is
15357 legitimate to make the Dwarf info refer to the whole register which
15358 contains the given subreg. */
15359 if (!subreg_lowpart_p (rtl))
15360 break;
15361 inner = SUBREG_REG (rtl);
15362 /* FALLTHRU */
15363 case TRUNCATE:
15364 if (inner == NULL_RTX)
15365 inner = XEXP (rtl, 0);
15366 if (is_a <scalar_int_mode> (mode, &int_mode)
15367 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15368 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15369 #ifdef POINTERS_EXTEND_UNSIGNED
15370 || (int_mode == Pmode && mem_mode != VOIDmode)
15371 #endif
15372 )
15373 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15374 {
15375 mem_loc_result = mem_loc_descriptor (inner,
15376 inner_mode,
15377 mem_mode, initialized);
15378 break;
15379 }
15380 if (dwarf_strict && dwarf_version < 5)
15381 break;
15382 if (is_a <scalar_int_mode> (mode, &int_mode)
15383 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15384 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15385 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15386 {
15387 dw_die_ref type_die;
15388 dw_loc_descr_ref cvt;
15389
15390 mem_loc_result = mem_loc_descriptor (inner,
15391 GET_MODE (inner),
15392 mem_mode, initialized);
15393 if (mem_loc_result == NULL)
15394 break;
15395 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15396 if (type_die == NULL)
15397 {
15398 mem_loc_result = NULL;
15399 break;
15400 }
15401 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15402 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15403 else
15404 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15405 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15406 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15407 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15408 add_loc_descr (&mem_loc_result, cvt);
15409 if (is_a <scalar_int_mode> (mode, &int_mode)
15410 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15411 {
15412 /* Convert it to untyped afterwards. */
15413 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15414 add_loc_descr (&mem_loc_result, cvt);
15415 }
15416 }
15417 break;
15418
15419 case REG:
15420 if (!is_a <scalar_int_mode> (mode, &int_mode)
15421 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15422 && rtl != arg_pointer_rtx
15423 && rtl != frame_pointer_rtx
15424 #ifdef POINTERS_EXTEND_UNSIGNED
15425 && (int_mode != Pmode || mem_mode == VOIDmode)
15426 #endif
15427 ))
15428 {
15429 dw_die_ref type_die;
15430 unsigned int dbx_regnum;
15431
15432 if (dwarf_strict && dwarf_version < 5)
15433 break;
15434 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15435 break;
15436 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15437 if (type_die == NULL)
15438 break;
15439
15440 dbx_regnum = dbx_reg_number (rtl);
15441 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15442 break;
15443 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15444 dbx_regnum, 0);
15445 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15446 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15447 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15448 break;
15449 }
15450 /* Whenever a register number forms a part of the description of the
15451 method for calculating the (dynamic) address of a memory resident
15452 object, DWARF rules require the register number be referred to as
15453 a "base register". This distinction is not based in any way upon
15454 what category of register the hardware believes the given register
15455 belongs to. This is strictly DWARF terminology we're dealing with
15456 here. Note that in cases where the location of a memory-resident
15457 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15458 OP_CONST (0)) the actual DWARF location descriptor that we generate
15459 may just be OP_BASEREG (basereg). This may look deceptively like
15460 the object in question was allocated to a register (rather than in
15461 memory) so DWARF consumers need to be aware of the subtle
15462 distinction between OP_REG and OP_BASEREG. */
15463 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15464 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15465 else if (stack_realign_drap
15466 && crtl->drap_reg
15467 && crtl->args.internal_arg_pointer == rtl
15468 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15469 {
15470 /* If RTL is internal_arg_pointer, which has been optimized
15471 out, use DRAP instead. */
15472 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15473 VAR_INIT_STATUS_INITIALIZED);
15474 }
15475 break;
15476
15477 case SIGN_EXTEND:
15478 case ZERO_EXTEND:
15479 if (!is_a <scalar_int_mode> (mode, &int_mode)
15480 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15481 break;
15482 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15483 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15484 if (op0 == 0)
15485 break;
15486 else if (GET_CODE (rtl) == ZERO_EXTEND
15487 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15488 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15489 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15490 to expand zero extend as two shifts instead of
15491 masking. */
15492 && GET_MODE_SIZE (inner_mode) <= 4)
15493 {
15494 mem_loc_result = op0;
15495 add_loc_descr (&mem_loc_result,
15496 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15497 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15498 }
15499 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15500 {
15501 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15502 shift *= BITS_PER_UNIT;
15503 if (GET_CODE (rtl) == SIGN_EXTEND)
15504 op = DW_OP_shra;
15505 else
15506 op = DW_OP_shr;
15507 mem_loc_result = op0;
15508 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15509 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15510 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15511 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15512 }
15513 else if (!dwarf_strict || dwarf_version >= 5)
15514 {
15515 dw_die_ref type_die1, type_die2;
15516 dw_loc_descr_ref cvt;
15517
15518 type_die1 = base_type_for_mode (inner_mode,
15519 GET_CODE (rtl) == ZERO_EXTEND);
15520 if (type_die1 == NULL)
15521 break;
15522 type_die2 = base_type_for_mode (int_mode, 1);
15523 if (type_die2 == NULL)
15524 break;
15525 mem_loc_result = op0;
15526 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15527 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15528 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15529 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15530 add_loc_descr (&mem_loc_result, cvt);
15531 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15532 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15533 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15534 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15535 add_loc_descr (&mem_loc_result, cvt);
15536 }
15537 break;
15538
15539 case MEM:
15540 {
15541 rtx new_rtl = avoid_constant_pool_reference (rtl);
15542 if (new_rtl != rtl)
15543 {
15544 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15545 initialized);
15546 if (mem_loc_result != NULL)
15547 return mem_loc_result;
15548 }
15549 }
15550 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15551 get_address_mode (rtl), mode,
15552 VAR_INIT_STATUS_INITIALIZED);
15553 if (mem_loc_result == NULL)
15554 mem_loc_result = tls_mem_loc_descriptor (rtl);
15555 if (mem_loc_result != NULL)
15556 {
15557 if (!is_a <scalar_int_mode> (mode, &int_mode)
15558 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15559 {
15560 dw_die_ref type_die;
15561 dw_loc_descr_ref deref;
15562 HOST_WIDE_INT size;
15563
15564 if (dwarf_strict && dwarf_version < 5)
15565 return NULL;
15566 if (!GET_MODE_SIZE (mode).is_constant (&size))
15567 return NULL;
15568 type_die
15569 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15570 if (type_die == NULL)
15571 return NULL;
15572 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15573 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15574 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15575 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15576 add_loc_descr (&mem_loc_result, deref);
15577 }
15578 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15579 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15580 else
15581 add_loc_descr (&mem_loc_result,
15582 new_loc_descr (DW_OP_deref_size,
15583 GET_MODE_SIZE (int_mode), 0));
15584 }
15585 break;
15586
15587 case LO_SUM:
15588 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15589
15590 case LABEL_REF:
15591 /* Some ports can transform a symbol ref into a label ref, because
15592 the symbol ref is too far away and has to be dumped into a constant
15593 pool. */
15594 case CONST:
15595 case SYMBOL_REF:
15596 if (!is_a <scalar_int_mode> (mode, &int_mode)
15597 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15598 #ifdef POINTERS_EXTEND_UNSIGNED
15599 && (int_mode != Pmode || mem_mode == VOIDmode)
15600 #endif
15601 ))
15602 break;
15603 if (GET_CODE (rtl) == SYMBOL_REF
15604 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15605 {
15606 dw_loc_descr_ref temp;
15607
15608 /* If this is not defined, we have no way to emit the data. */
15609 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15610 break;
15611
15612 temp = new_addr_loc_descr (rtl, dtprel_true);
15613
15614 /* We check for DWARF 5 here because gdb did not implement
15615 DW_OP_form_tls_address until after 7.12. */
15616 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15617 ? DW_OP_form_tls_address
15618 : DW_OP_GNU_push_tls_address),
15619 0, 0);
15620 add_loc_descr (&mem_loc_result, temp);
15621
15622 break;
15623 }
15624
15625 if (!const_ok_for_output (rtl))
15626 {
15627 if (GET_CODE (rtl) == CONST)
15628 switch (GET_CODE (XEXP (rtl, 0)))
15629 {
15630 case NOT:
15631 op = DW_OP_not;
15632 goto try_const_unop;
15633 case NEG:
15634 op = DW_OP_neg;
15635 goto try_const_unop;
15636 try_const_unop:
15637 rtx arg;
15638 arg = XEXP (XEXP (rtl, 0), 0);
15639 if (!CONSTANT_P (arg))
15640 arg = gen_rtx_CONST (int_mode, arg);
15641 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15642 initialized);
15643 if (op0)
15644 {
15645 mem_loc_result = op0;
15646 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15647 }
15648 break;
15649 default:
15650 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15651 mem_mode, initialized);
15652 break;
15653 }
15654 break;
15655 }
15656
15657 symref:
15658 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15659 vec_safe_push (used_rtx_array, rtl);
15660 break;
15661
15662 case CONCAT:
15663 case CONCATN:
15664 case VAR_LOCATION:
15665 case DEBUG_IMPLICIT_PTR:
15666 expansion_failed (NULL_TREE, rtl,
15667 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15668 return 0;
15669
15670 case ENTRY_VALUE:
15671 if (dwarf_strict && dwarf_version < 5)
15672 return NULL;
15673 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15674 {
15675 if (!is_a <scalar_int_mode> (mode, &int_mode)
15676 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15677 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15678 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15679 else
15680 {
15681 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15682 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15683 return NULL;
15684 op0 = one_reg_loc_descriptor (dbx_regnum,
15685 VAR_INIT_STATUS_INITIALIZED);
15686 }
15687 }
15688 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15689 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15690 {
15691 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15692 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15693 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15694 return NULL;
15695 }
15696 else
15697 gcc_unreachable ();
15698 if (op0 == NULL)
15699 return NULL;
15700 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15701 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15702 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15703 break;
15704
15705 case DEBUG_PARAMETER_REF:
15706 mem_loc_result = parameter_ref_descriptor (rtl);
15707 break;
15708
15709 case PRE_MODIFY:
15710 /* Extract the PLUS expression nested inside and fall into
15711 PLUS code below. */
15712 rtl = XEXP (rtl, 1);
15713 goto plus;
15714
15715 case PRE_INC:
15716 case PRE_DEC:
15717 /* Turn these into a PLUS expression and fall into the PLUS code
15718 below. */
15719 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15720 gen_int_mode (GET_CODE (rtl) == PRE_INC
15721 ? GET_MODE_UNIT_SIZE (mem_mode)
15722 : -GET_MODE_UNIT_SIZE (mem_mode),
15723 mode));
15724
15725 /* fall through */
15726
15727 case PLUS:
15728 plus:
15729 if (is_based_loc (rtl)
15730 && is_a <scalar_int_mode> (mode, &int_mode)
15731 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15732 || XEXP (rtl, 0) == arg_pointer_rtx
15733 || XEXP (rtl, 0) == frame_pointer_rtx))
15734 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15735 INTVAL (XEXP (rtl, 1)),
15736 VAR_INIT_STATUS_INITIALIZED);
15737 else
15738 {
15739 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15740 VAR_INIT_STATUS_INITIALIZED);
15741 if (mem_loc_result == 0)
15742 break;
15743
15744 if (CONST_INT_P (XEXP (rtl, 1))
15745 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15746 <= DWARF2_ADDR_SIZE))
15747 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15748 else
15749 {
15750 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15751 VAR_INIT_STATUS_INITIALIZED);
15752 if (op1 == 0)
15753 return NULL;
15754 add_loc_descr (&mem_loc_result, op1);
15755 add_loc_descr (&mem_loc_result,
15756 new_loc_descr (DW_OP_plus, 0, 0));
15757 }
15758 }
15759 break;
15760
15761 /* If a pseudo-reg is optimized away, it is possible for it to
15762 be replaced with a MEM containing a multiply or shift. */
15763 case MINUS:
15764 op = DW_OP_minus;
15765 goto do_binop;
15766
15767 case MULT:
15768 op = DW_OP_mul;
15769 goto do_binop;
15770
15771 case DIV:
15772 if ((!dwarf_strict || dwarf_version >= 5)
15773 && is_a <scalar_int_mode> (mode, &int_mode)
15774 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15775 {
15776 mem_loc_result = typed_binop (DW_OP_div, rtl,
15777 base_type_for_mode (mode, 0),
15778 int_mode, mem_mode);
15779 break;
15780 }
15781 op = DW_OP_div;
15782 goto do_binop;
15783
15784 case UMOD:
15785 op = DW_OP_mod;
15786 goto do_binop;
15787
15788 case ASHIFT:
15789 op = DW_OP_shl;
15790 goto do_shift;
15791
15792 case ASHIFTRT:
15793 op = DW_OP_shra;
15794 goto do_shift;
15795
15796 case LSHIFTRT:
15797 op = DW_OP_shr;
15798 goto do_shift;
15799
15800 do_shift:
15801 if (!is_a <scalar_int_mode> (mode, &int_mode))
15802 break;
15803 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15804 VAR_INIT_STATUS_INITIALIZED);
15805 {
15806 rtx rtlop1 = XEXP (rtl, 1);
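/* The shift count may be described in a narrower mode than the shifted
   value; zero-extend it so both operands end up in INT_MODE.  */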
15807 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15808 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15809 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15810 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15811 VAR_INIT_STATUS_INITIALIZED);
15812 }
15813
15814 if (op0 == 0 || op1 == 0)
15815 break;
15816
15817 mem_loc_result = op0;
15818 add_loc_descr (&mem_loc_result, op1);
15819 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15820 break;
15821
15822 case AND:
15823 op = DW_OP_and;
15824 goto do_binop;
15825
15826 case IOR:
15827 op = DW_OP_or;
15828 goto do_binop;
15829
15830 case XOR:
15831 op = DW_OP_xor;
15832 goto do_binop;
15833
15834 do_binop:
15835 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15836 VAR_INIT_STATUS_INITIALIZED);
15837 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15838 VAR_INIT_STATUS_INITIALIZED);
15839
15840 if (op0 == 0 || op1 == 0)
15841 break;
15842
15843 mem_loc_result = op0;
15844 add_loc_descr (&mem_loc_result, op1);
15845 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15846 break;
15847
15848 case MOD:
15849 if ((!dwarf_strict || dwarf_version >= 5)
15850 && is_a <scalar_int_mode> (mode, &int_mode)
15851 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15852 {
15853 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15854 base_type_for_mode (mode, 0),
15855 int_mode, mem_mode);
15856 break;
15857 }
15858
15859 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15860 VAR_INIT_STATUS_INITIALIZED);
15861 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15862 VAR_INIT_STATUS_INITIALIZED);
15863
15864 if (op0 == 0 || op1 == 0)
15865 break;
15866
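/* With op0 and op1 on the stack, the two DW_OP_over operations copy both
   operands so that div, mul and minus leave op0 - (op0 / op1) * op1,
   i.e. the remainder, as the result.  */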
15867 mem_loc_result = op0;
15868 add_loc_descr (&mem_loc_result, op1);
15869 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15870 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15871 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15872 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15873 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15874 break;
15875
15876 case UDIV:
15877 if ((!dwarf_strict || dwarf_version >= 5)
15878 && is_a <scalar_int_mode> (mode, &int_mode))
15879 {
15880 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15881 {
15882 op = DW_OP_div;
15883 goto do_binop;
15884 }
15885 mem_loc_result = typed_binop (DW_OP_div, rtl,
15886 base_type_for_mode (int_mode, 1),
15887 int_mode, mem_mode);
15888 }
15889 break;
15890
15891 case NOT:
15892 op = DW_OP_not;
15893 goto do_unop;
15894
15895 case ABS:
15896 op = DW_OP_abs;
15897 goto do_unop;
15898
15899 case NEG:
15900 op = DW_OP_neg;
15901 goto do_unop;
15902
15903 do_unop:
15904 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15905 VAR_INIT_STATUS_INITIALIZED);
15906
15907 if (op0 == 0)
15908 break;
15909
15910 mem_loc_result = op0;
15911 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15912 break;
15913
15914 case CONST_INT:
15915 if (!is_a <scalar_int_mode> (mode, &int_mode)
15916 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15917 #ifdef POINTERS_EXTEND_UNSIGNED
15918 || (int_mode == Pmode
15919 && mem_mode != VOIDmode
15920 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15921 #endif
15922 )
15923 {
15924 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15925 break;
15926 }
15927 if ((!dwarf_strict || dwarf_version >= 5)
15928 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15929 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15930 {
15931 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15932 scalar_int_mode amode;
15933 if (type_die == NULL)
15934 return NULL;
15935 if (INTVAL (rtl) >= 0
15936 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15937 .exists (&amode))
15938 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15939 /* const DW_OP_convert <XXX> vs.
15940 DW_OP_const_type <XXX, 1, const>. */
15941 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15942 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15943 {
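/* Emitting the value as an untyped constant followed by DW_OP_convert
   to the typed form is smaller here than a single DW_OP_const_type
   carrying the full-width constant.  */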
15944 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15945 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15946 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15947 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15948 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15949 add_loc_descr (&mem_loc_result, op0);
15950 return mem_loc_result;
15951 }
15952 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15953 INTVAL (rtl));
15954 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15955 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15956 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15957 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15958 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15959 else
15960 {
15961 mem_loc_result->dw_loc_oprnd2.val_class
15962 = dw_val_class_const_double;
15963 mem_loc_result->dw_loc_oprnd2.v.val_double
15964 = double_int::from_shwi (INTVAL (rtl));
15965 }
15966 }
15967 break;
15968
15969 case CONST_DOUBLE:
15970 if (!dwarf_strict || dwarf_version >= 5)
15971 {
15972 dw_die_ref type_die;
15973
15974 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15975 CONST_DOUBLE rtx could represent either a large integer
15976 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15977 the value is always a floating point constant.
15978
15979 When it is an integer, a CONST_DOUBLE is used whenever
15980 the constant requires 2 HWIs to be adequately represented.
15981 We output CONST_DOUBLEs as blocks. */
15982 if (mode == VOIDmode
15983 || (GET_MODE (rtl) == VOIDmode
15984 && maybe_ne (GET_MODE_BITSIZE (mode),
15985 HOST_BITS_PER_DOUBLE_INT)))
15986 break;
15987 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15988 if (type_die == NULL)
15989 return NULL;
15990 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15991 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15992 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15993 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15994 #if TARGET_SUPPORTS_WIDE_INT == 0
15995 if (!SCALAR_FLOAT_MODE_P (mode))
15996 {
15997 mem_loc_result->dw_loc_oprnd2.val_class
15998 = dw_val_class_const_double;
15999 mem_loc_result->dw_loc_oprnd2.v.val_double
16000 = rtx_to_double_int (rtl);
16001 }
16002 else
16003 #endif
16004 {
16005 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16006 unsigned int length = GET_MODE_SIZE (float_mode);
16007 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16008
16009 insert_float (rtl, array);
16010 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16011 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16012 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16013 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16014 }
16015 }
16016 break;
16017
16018 case CONST_WIDE_INT:
16019 if (!dwarf_strict || dwarf_version >= 5)
16020 {
16021 dw_die_ref type_die;
16022
16023 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16024 if (type_die == NULL)
16025 return NULL;
16026 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16027 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16028 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16029 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16030 mem_loc_result->dw_loc_oprnd2.val_class
16031 = dw_val_class_wide_int;
16032 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16033 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16034 }
16035 break;
16036
16037 case CONST_POLY_INT:
16038 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16039 break;
16040
16041 case EQ:
16042 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16043 break;
16044
16045 case GE:
16046 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16047 break;
16048
16049 case GT:
16050 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16051 break;
16052
16053 case LE:
16054 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16055 break;
16056
16057 case LT:
16058 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16059 break;
16060
16061 case NE:
16062 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16063 break;
16064
16065 case GEU:
16066 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16067 break;
16068
16069 case GTU:
16070 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16071 break;
16072
16073 case LEU:
16074 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16075 break;
16076
16077 case LTU:
16078 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16079 break;
16080
16081 case UMIN:
16082 case UMAX:
16083 if (!SCALAR_INT_MODE_P (mode))
16084 break;
16085 /* FALLTHRU */
16086 case SMIN:
16087 case SMAX:
16088 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16089 break;
16090
16091 case ZERO_EXTRACT:
16092 case SIGN_EXTRACT:
16093 if (CONST_INT_P (XEXP (rtl, 1))
16094 && CONST_INT_P (XEXP (rtl, 2))
16095 && is_a <scalar_int_mode> (mode, &int_mode)
16096 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16097 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16098 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16099 && ((unsigned) INTVAL (XEXP (rtl, 1))
16100 + (unsigned) INTVAL (XEXP (rtl, 2))
16101 <= GET_MODE_BITSIZE (int_mode)))
16102 {
16103 int shift, size;
16104 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16105 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16106 if (op0 == 0)
16107 break;
16108 if (GET_CODE (rtl) == SIGN_EXTRACT)
16109 op = DW_OP_shra;
16110 else
16111 op = DW_OP_shr;
16112 mem_loc_result = op0;
16113 size = INTVAL (XEXP (rtl, 1));
16114 shift = INTVAL (XEXP (rtl, 2));
16115 if (BITS_BIG_ENDIAN)
16116 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
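/* Extract the field by shifting it up to the most significant end of
   the piece with DW_OP_shl and then back down with DW_OP_shr (zero
   extension) or DW_OP_shra (sign extension).  */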
16117 if (shift + size != (int) DWARF2_ADDR_SIZE)
16118 {
16119 add_loc_descr (&mem_loc_result,
16120 int_loc_descriptor (DWARF2_ADDR_SIZE
16121 - shift - size));
16122 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16123 }
16124 if (size != (int) DWARF2_ADDR_SIZE)
16125 {
16126 add_loc_descr (&mem_loc_result,
16127 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16128 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16129 }
16130 }
16131 break;
16132
16133 case IF_THEN_ELSE:
16134 {
16135 dw_loc_descr_ref op2, bra_node, drop_node;
16136 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16137 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16138 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16139 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16140 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16141 VAR_INIT_STATUS_INITIALIZED);
16142 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16143 VAR_INIT_STATUS_INITIALIZED);
16144 if (op0 == NULL || op1 == NULL || op2 == NULL)
16145 break;
16146
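/* Push op1 (the "then" value), op2 (the "else" value) and then the
   condition.  DW_OP_bra pops the condition and, if it is nonzero,
   branches to the final DW_OP_drop, which discards op2 and leaves op1;
   otherwise DW_OP_swap followed by DW_OP_drop discards op1, leaving op2.  */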
16147 mem_loc_result = op1;
16148 add_loc_descr (&mem_loc_result, op2);
16149 add_loc_descr (&mem_loc_result, op0);
16150 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16151 add_loc_descr (&mem_loc_result, bra_node);
16152 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16153 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16154 add_loc_descr (&mem_loc_result, drop_node);
16155 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16156 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16157 }
16158 break;
16159
16160 case FLOAT_EXTEND:
16161 case FLOAT_TRUNCATE:
16162 case FLOAT:
16163 case UNSIGNED_FLOAT:
16164 case FIX:
16165 case UNSIGNED_FIX:
16166 if (!dwarf_strict || dwarf_version >= 5)
16167 {
16168 dw_die_ref type_die;
16169 dw_loc_descr_ref cvt;
16170
16171 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16172 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16173 if (op0 == NULL)
16174 break;
16175 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16176 && (GET_CODE (rtl) == FLOAT
16177 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16178 {
16179 type_die = base_type_for_mode (int_mode,
16180 GET_CODE (rtl) == UNSIGNED_FLOAT);
16181 if (type_die == NULL)
16182 break;
16183 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16184 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16185 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16186 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16187 add_loc_descr (&op0, cvt);
16188 }
16189 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16190 if (type_die == NULL)
16191 break;
16192 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16193 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16194 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16195 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16196 add_loc_descr (&op0, cvt);
16197 if (is_a <scalar_int_mode> (mode, &int_mode)
16198 && (GET_CODE (rtl) == FIX
16199 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16200 {
16201 op0 = convert_descriptor_to_mode (int_mode, op0);
16202 if (op0 == NULL)
16203 break;
16204 }
16205 mem_loc_result = op0;
16206 }
16207 break;
16208
16209 case CLZ:
16210 case CTZ:
16211 case FFS:
16212 if (is_a <scalar_int_mode> (mode, &int_mode))
16213 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16214 break;
16215
16216 case POPCOUNT:
16217 case PARITY:
16218 if (is_a <scalar_int_mode> (mode, &int_mode))
16219 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16220 break;
16221
16222 case BSWAP:
16223 if (is_a <scalar_int_mode> (mode, &int_mode))
16224 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16225 break;
16226
16227 case ROTATE:
16228 case ROTATERT:
16229 if (is_a <scalar_int_mode> (mode, &int_mode))
16230 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16231 break;
16232
16233 case COMPARE:
16234 /* In theory, we could implement the above. */
16235 /* DWARF cannot represent the unsigned compare operations
16236 natively. */
16237 case SS_MULT:
16238 case US_MULT:
16239 case SS_DIV:
16240 case US_DIV:
16241 case SS_PLUS:
16242 case US_PLUS:
16243 case SS_MINUS:
16244 case US_MINUS:
16245 case SS_NEG:
16246 case US_NEG:
16247 case SS_ABS:
16248 case SS_ASHIFT:
16249 case US_ASHIFT:
16250 case SS_TRUNCATE:
16251 case US_TRUNCATE:
16252 case UNORDERED:
16253 case ORDERED:
16254 case UNEQ:
16255 case UNGE:
16256 case UNGT:
16257 case UNLE:
16258 case UNLT:
16259 case LTGT:
16260 case FRACT_CONVERT:
16261 case UNSIGNED_FRACT_CONVERT:
16262 case SAT_FRACT:
16263 case UNSIGNED_SAT_FRACT:
16264 case SQRT:
16265 case ASM_OPERANDS:
16266 case VEC_MERGE:
16267 case VEC_SELECT:
16268 case VEC_CONCAT:
16269 case VEC_DUPLICATE:
16270 case VEC_SERIES:
16271 case UNSPEC:
16272 case HIGH:
16273 case FMA:
16274 case STRICT_LOW_PART:
16275 case CONST_VECTOR:
16276 case CONST_FIXED:
16277 case CLRSB:
16278 case CLOBBER:
16279 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16280 can't express it in the debug info. This can happen e.g. with some
16281 TLS UNSPECs. */
16282 break;
16283
16284 case CONST_STRING:
16285 resolve_one_addr (&rtl);
16286 goto symref;
16287
16288 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16289 the expression.  An UNSPEC rtx represents a raw DWARF operation;
16290 new_loc_descr is called for it to build the operation directly.
16291 Otherwise mem_loc_descriptor is called recursively. */
16292 case PARALLEL:
16293 {
16294 int index = 0;
16295 dw_loc_descr_ref exp_result = NULL;
16296
16297 for (; index < XVECLEN (rtl, 0); index++)
16298 {
16299 rtx elem = XVECEXP (rtl, 0, index);
16300 if (GET_CODE (elem) == UNSPEC)
16301 {
16302 /* Each DWARF operation UNSPEC contains two operands; if
16303 one operand is not used for the operation, const0_rtx is
16304 passed. */
16305 gcc_assert (XVECLEN (elem, 0) == 2);
16306
16307 HOST_WIDE_INT dw_op = XINT (elem, 1);
16308 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16309 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16310 exp_result
16311 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16312 oprnd2);
16313 }
16314 else
16315 exp_result
16316 = mem_loc_descriptor (elem, mode, mem_mode,
16317 VAR_INIT_STATUS_INITIALIZED);
16318
16319 if (!mem_loc_result)
16320 mem_loc_result = exp_result;
16321 else
16322 add_loc_descr (&mem_loc_result, exp_result);
16323 }
16324
16325 break;
16326 }
16327
16328 default:
16329 if (flag_checking)
16330 {
16331 print_rtl (stderr, rtl);
16332 gcc_unreachable ();
16333 }
16334 break;
16335 }
16336
16337 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16338 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16339
16340 return mem_loc_result;
16341 }
16342
16343 /* Return a descriptor that describes the concatenation of two locations.
16344 This is typically a complex variable. */
16345
16346 static dw_loc_descr_ref
16347 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16348 {
16349 /* At present we only track constant-sized pieces. */
16350 unsigned int size0, size1;
16351 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16352 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16353 return 0;
16354
16355 dw_loc_descr_ref cc_loc_result = NULL;
16356 dw_loc_descr_ref x0_ref
16357 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16358 dw_loc_descr_ref x1_ref
16359 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16360
16361 if (x0_ref == 0 || x1_ref == 0)
16362 return 0;
16363
16364 cc_loc_result = x0_ref;
16365 add_loc_descr_op_piece (&cc_loc_result, size0);
16366
16367 add_loc_descr (&cc_loc_result, x1_ref);
16368 add_loc_descr_op_piece (&cc_loc_result, size1);
16369
16370 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16371 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16372
16373 return cc_loc_result;
16374 }
16375
16376 /* Return a descriptor that describes the concatenation of N
16377 locations. */
16378
16379 static dw_loc_descr_ref
16380 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16381 {
16382 unsigned int i;
16383 dw_loc_descr_ref cc_loc_result = NULL;
16384 unsigned int n = XVECLEN (concatn, 0);
16385 unsigned int size;
16386
16387 for (i = 0; i < n; ++i)
16388 {
16389 dw_loc_descr_ref ref;
16390 rtx x = XVECEXP (concatn, 0, i);
16391
16392 /* At present we only track constant-sized pieces. */
16393 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16394 return NULL;
16395
16396 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16397 if (ref == NULL)
16398 return NULL;
16399
16400 add_loc_descr (&cc_loc_result, ref);
16401 add_loc_descr_op_piece (&cc_loc_result, size);
16402 }
16403
16404 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16405 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16406
16407 return cc_loc_result;
16408 }
16409
16410 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16411 for DEBUG_IMPLICIT_PTR RTL. */
16412
16413 static dw_loc_descr_ref
16414 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16415 {
16416 dw_loc_descr_ref ret;
16417 dw_die_ref ref;
16418
16419 if (dwarf_strict && dwarf_version < 5)
16420 return NULL;
16421 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16422 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16423 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16424 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16425 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16426 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16427 if (ref)
16428 {
16429 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16430 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16431 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16432 }
16433 else
16434 {
16435 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16436 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16437 }
16438 return ret;
16439 }
16440
16441 /* Output a proper Dwarf location descriptor for a variable or parameter
16442 which is either allocated in a register or in a memory location. For a
16443 register, we just generate an OP_REG and the register number. For a
16444 memory location we provide a Dwarf postfix expression describing how to
16445 generate the (dynamic) address of the object onto the address stack.
16446
16447 MODE is mode of the decl if this loc_descriptor is going to be used in
16448 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16449 allowed, VOIDmode otherwise.
16450
16451 If we don't know how to describe it, return 0. */
16452
16453 static dw_loc_descr_ref
16454 loc_descriptor (rtx rtl, machine_mode mode,
16455 enum var_init_status initialized)
16456 {
16457 dw_loc_descr_ref loc_result = NULL;
16458 scalar_int_mode int_mode;
16459
16460 switch (GET_CODE (rtl))
16461 {
16462 case SUBREG:
16463 /* The case of a subreg may arise when we have a local (register)
16464 variable or a formal (register) parameter which doesn't quite fill
16465 up an entire register. For now, just assume that it is
16466 legitimate to make the Dwarf info refer to the whole register which
16467 contains the given subreg. */
16468 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16469 loc_result = loc_descriptor (SUBREG_REG (rtl),
16470 GET_MODE (SUBREG_REG (rtl)), initialized);
16471 else
16472 goto do_default;
16473 break;
16474
16475 case REG:
16476 loc_result = reg_loc_descriptor (rtl, initialized);
16477 break;
16478
16479 case MEM:
16480 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16481 GET_MODE (rtl), initialized);
16482 if (loc_result == NULL)
16483 loc_result = tls_mem_loc_descriptor (rtl);
16484 if (loc_result == NULL)
16485 {
16486 rtx new_rtl = avoid_constant_pool_reference (rtl);
16487 if (new_rtl != rtl)
16488 loc_result = loc_descriptor (new_rtl, mode, initialized);
16489 }
16490 break;
16491
16492 case CONCAT:
16493 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16494 initialized);
16495 break;
16496
16497 case CONCATN:
16498 loc_result = concatn_loc_descriptor (rtl, initialized);
16499 break;
16500
16501 case VAR_LOCATION:
16502 /* Single part. */
16503 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16504 {
16505 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16506 if (GET_CODE (loc) == EXPR_LIST)
16507 loc = XEXP (loc, 0);
16508 loc_result = loc_descriptor (loc, mode, initialized);
16509 break;
16510 }
16511
16512 rtl = XEXP (rtl, 1);
16513 /* FALLTHRU */
16514
16515 case PARALLEL:
16516 {
16517 rtvec par_elems = XVEC (rtl, 0);
16518 int num_elem = GET_NUM_ELEM (par_elems);
16519 machine_mode mode;
16520 int i, size;
16521
16522 /* Create the first one, so we have something to add to. */
16523 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16524 VOIDmode, initialized);
16525 if (loc_result == NULL)
16526 return NULL;
16527 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16528 /* At present we only track constant-sized pieces. */
16529 if (!GET_MODE_SIZE (mode).is_constant (&size))
16530 return NULL;
16531 add_loc_descr_op_piece (&loc_result, size);
16532 for (i = 1; i < num_elem; i++)
16533 {
16534 dw_loc_descr_ref temp;
16535
16536 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16537 VOIDmode, initialized);
16538 if (temp == NULL)
16539 return NULL;
16540 add_loc_descr (&loc_result, temp);
16541 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16542 /* At present we only track constant-sized pieces. */
16543 if (!GET_MODE_SIZE (mode).is_constant (&size))
16544 return NULL;
16545 add_loc_descr_op_piece (&loc_result, size);
16546 }
16547 }
16548 break;
16549
16550 case CONST_INT:
16551 if (mode != VOIDmode && mode != BLKmode)
16552 {
16553 int_mode = as_a <scalar_int_mode> (mode);
16554 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16555 INTVAL (rtl));
16556 }
16557 break;
16558
16559 case CONST_DOUBLE:
16560 if (mode == VOIDmode)
16561 mode = GET_MODE (rtl);
16562
16563 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16564 {
16565 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16566
16567 /* Note that a CONST_DOUBLE rtx could represent either an integer
16568 or a floating-point constant. A CONST_DOUBLE is used whenever
16569 the constant requires more than one word in order to be
16570 adequately represented. We output CONST_DOUBLEs as blocks. */
16571 scalar_mode smode = as_a <scalar_mode> (mode);
16572 loc_result = new_loc_descr (DW_OP_implicit_value,
16573 GET_MODE_SIZE (smode), 0);
16574 #if TARGET_SUPPORTS_WIDE_INT == 0
16575 if (!SCALAR_FLOAT_MODE_P (smode))
16576 {
16577 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16578 loc_result->dw_loc_oprnd2.v.val_double
16579 = rtx_to_double_int (rtl);
16580 }
16581 else
16582 #endif
16583 {
16584 unsigned int length = GET_MODE_SIZE (smode);
16585 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16586
16587 insert_float (rtl, array);
16588 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16589 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16590 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16591 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16592 }
16593 }
16594 break;
16595
16596 case CONST_WIDE_INT:
16597 if (mode == VOIDmode)
16598 mode = GET_MODE (rtl);
16599
16600 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16601 {
16602 int_mode = as_a <scalar_int_mode> (mode);
16603 loc_result = new_loc_descr (DW_OP_implicit_value,
16604 GET_MODE_SIZE (int_mode), 0);
16605 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16606 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16607 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16608 }
16609 break;
16610
16611 case CONST_VECTOR:
16612 if (mode == VOIDmode)
16613 mode = GET_MODE (rtl);
16614
16615 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16616 {
16617 unsigned int length;
16618 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16619 return NULL;
16620
16621 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16622 unsigned char *array
16623 = ggc_vec_alloc<unsigned char> (length * elt_size);
16624 unsigned int i;
16625 unsigned char *p;
16626 machine_mode imode = GET_MODE_INNER (mode);
16627
16628 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16629 switch (GET_MODE_CLASS (mode))
16630 {
16631 case MODE_VECTOR_INT:
16632 for (i = 0, p = array; i < length; i++, p += elt_size)
16633 {
16634 rtx elt = CONST_VECTOR_ELT (rtl, i);
16635 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16636 }
16637 break;
16638
16639 case MODE_VECTOR_FLOAT:
16640 for (i = 0, p = array; i < length; i++, p += elt_size)
16641 {
16642 rtx elt = CONST_VECTOR_ELT (rtl, i);
16643 insert_float (elt, p);
16644 }
16645 break;
16646
16647 default:
16648 gcc_unreachable ();
16649 }
16650
16651 loc_result = new_loc_descr (DW_OP_implicit_value,
16652 length * elt_size, 0);
16653 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16654 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16655 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16656 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16657 }
16658 break;
16659
16660 case CONST:
16661 if (mode == VOIDmode
16662 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16663 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16664 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16665 {
16666 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16667 break;
16668 }
16669 /* FALLTHROUGH */
16670 case SYMBOL_REF:
16671 if (!const_ok_for_output (rtl))
16672 break;
16673 /* FALLTHROUGH */
16674 case LABEL_REF:
16675 if (is_a <scalar_int_mode> (mode, &int_mode)
16676 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16677 && (dwarf_version >= 4 || !dwarf_strict))
16678 {
16679 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16680 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16681 vec_safe_push (used_rtx_array, rtl);
16682 }
16683 break;
16684
16685 case DEBUG_IMPLICIT_PTR:
16686 loc_result = implicit_ptr_descriptor (rtl, 0);
16687 break;
16688
16689 case PLUS:
16690 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16691 && CONST_INT_P (XEXP (rtl, 1)))
16692 {
16693 loc_result
16694 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16695 break;
16696 }
16697 /* FALLTHRU */
16698 do_default:
16699 default:
16700 if ((is_a <scalar_int_mode> (mode, &int_mode)
16701 && GET_MODE (rtl) == int_mode
16702 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16703 && dwarf_version >= 4)
16704 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16705 {
16706 /* Value expression. */
16707 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16708 if (loc_result)
16709 add_loc_descr (&loc_result,
16710 new_loc_descr (DW_OP_stack_value, 0, 0));
16711 }
16712 break;
16713 }
16714
16715 return loc_result;
16716 }
16717
16718 /* We need to figure out what section we should use as the base for the
16719 address ranges where a given location is valid.
16720 1. If this particular DECL has a section associated with it, use that.
16721 2. If this function has a section associated with it, use that.
16722 3. Otherwise, use the text section.
16723 XXX: If you split a variable across multiple sections, we won't notice. */
16724
16725 static const char *
16726 secname_for_decl (const_tree decl)
16727 {
16728 const char *secname;
16729
16730 if (VAR_OR_FUNCTION_DECL_P (decl)
16731 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16732 && DECL_SECTION_NAME (decl))
16733 secname = DECL_SECTION_NAME (decl);
16734 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16735 secname = DECL_SECTION_NAME (current_function_decl);
16736 else if (cfun && in_cold_section_p)
16737 secname = crtl->subsections.cold_section_label;
16738 else
16739 secname = text_section_label;
16740
16741 return secname;
16742 }
16743
16744 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16745
16746 static bool
16747 decl_by_reference_p (tree decl)
16748 {
16749 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16750 || VAR_P (decl))
16751 && DECL_BY_REFERENCE (decl));
16752 }
16753
16754 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16755 for VARLOC. */
16756
16757 static dw_loc_descr_ref
16758 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16759 enum var_init_status initialized)
16760 {
16761 int have_address = 0;
16762 dw_loc_descr_ref descr;
16763 machine_mode mode;
16764
16765 if (want_address != 2)
16766 {
16767 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16768 /* Single part. */
16769 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16770 {
16771 varloc = PAT_VAR_LOCATION_LOC (varloc);
16772 if (GET_CODE (varloc) == EXPR_LIST)
16773 varloc = XEXP (varloc, 0);
16774 mode = GET_MODE (varloc);
16775 if (MEM_P (varloc))
16776 {
16777 rtx addr = XEXP (varloc, 0);
16778 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16779 mode, initialized);
16780 if (descr)
16781 have_address = 1;
16782 else
16783 {
16784 rtx x = avoid_constant_pool_reference (varloc);
16785 if (x != varloc)
16786 descr = mem_loc_descriptor (x, mode, VOIDmode,
16787 initialized);
16788 }
16789 }
16790 else
16791 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16792 }
16793 else
16794 return 0;
16795 }
16796 else
16797 {
16798 if (GET_CODE (varloc) == VAR_LOCATION)
16799 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16800 else
16801 mode = DECL_MODE (loc);
16802 descr = loc_descriptor (varloc, mode, initialized);
16803 have_address = 1;
16804 }
16805
16806 if (!descr)
16807 return 0;
16808
16809 if (want_address == 2 && !have_address
16810 && (dwarf_version >= 4 || !dwarf_strict))
16811 {
16812 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16813 {
16814 expansion_failed (loc, NULL_RTX,
16815 "DWARF address size mismatch");
16816 return 0;
16817 }
16818 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16819 have_address = 1;
16820 }
16821 /* Show if we can't fill the request for an address. */
16822 if (want_address && !have_address)
16823 {
16824 expansion_failed (loc, NULL_RTX,
16825 "Want address and only have value");
16826 return 0;
16827 }
16828
16829 /* If we've got an address and don't want one, dereference. */
16830 if (!want_address && have_address)
16831 {
16832 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16833 enum dwarf_location_atom op;
16834
16835 if (size > DWARF2_ADDR_SIZE || size == -1)
16836 {
16837 expansion_failed (loc, NULL_RTX,
16838 "DWARF address size mismatch");
16839 return 0;
16840 }
16841 else if (size == DWARF2_ADDR_SIZE)
16842 op = DW_OP_deref;
16843 else
16844 op = DW_OP_deref_size;
16845
16846 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16847 }
16848
16849 return descr;
16850 }
16851
16852 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16853 if it is not possible. */
16854
16855 static dw_loc_descr_ref
16856 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16857 {
16858 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16859 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16860 else if (dwarf_version >= 3 || !dwarf_strict)
16861 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16862 else
16863 return NULL;
16864 }
16865
16866 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16867 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16868
16869 static dw_loc_descr_ref
16870 dw_sra_loc_expr (tree decl, rtx loc)
16871 {
16872 rtx p;
16873 unsigned HOST_WIDE_INT padsize = 0;
16874 dw_loc_descr_ref descr, *descr_tail;
16875 unsigned HOST_WIDE_INT decl_size;
16876 rtx varloc;
16877 enum var_init_status initialized;
16878
16879 if (DECL_SIZE (decl) == NULL
16880 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16881 return NULL;
16882
16883 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16884 descr = NULL;
16885 descr_tail = &descr;
16886
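/* Each element of the piece list records a bit size and, possibly, a
   location note; elements without a usable location accumulate into
   PADSIZE and are later emitted as empty pieces.  */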
16887 for (p = loc; p; p = XEXP (p, 1))
16888 {
16889 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16890 rtx loc_note = *decl_piece_varloc_ptr (p);
16891 dw_loc_descr_ref cur_descr;
16892 dw_loc_descr_ref *tail, last = NULL;
16893 unsigned HOST_WIDE_INT opsize = 0;
16894
16895 if (loc_note == NULL_RTX
16896 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16897 {
16898 padsize += bitsize;
16899 continue;
16900 }
16901 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16902 varloc = NOTE_VAR_LOCATION (loc_note);
16903 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16904 if (cur_descr == NULL)
16905 {
16906 padsize += bitsize;
16907 continue;
16908 }
16909
16910 /* Check that cur_descr either doesn't use
16911 DW_OP_*piece operations, or their sum is equal
16912 to bitsize. Otherwise we can't embed it. */
16913 for (tail = &cur_descr; *tail != NULL;
16914 tail = &(*tail)->dw_loc_next)
16915 if ((*tail)->dw_loc_opc == DW_OP_piece)
16916 {
16917 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16918 * BITS_PER_UNIT;
16919 last = *tail;
16920 }
16921 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16922 {
16923 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16924 last = *tail;
16925 }
16926
16927 if (last != NULL && opsize != bitsize)
16928 {
16929 padsize += bitsize;
16930 /* Discard the current piece of the descriptor and release any
16931 addr_table entries it uses. */
16932 remove_loc_list_addr_table_entries (cur_descr);
16933 continue;
16934 }
16935
16936 /* If there is a hole, add DW_OP_*piece after empty DWARF
16937 expression, which means that those bits are optimized out. */
16938 if (padsize)
16939 {
16940 if (padsize > decl_size)
16941 {
16942 remove_loc_list_addr_table_entries (cur_descr);
16943 goto discard_descr;
16944 }
16945 decl_size -= padsize;
16946 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16947 if (*descr_tail == NULL)
16948 {
16949 remove_loc_list_addr_table_entries (cur_descr);
16950 goto discard_descr;
16951 }
16952 descr_tail = &(*descr_tail)->dw_loc_next;
16953 padsize = 0;
16954 }
16955 *descr_tail = cur_descr;
16956 descr_tail = tail;
16957 if (bitsize > decl_size)
16958 goto discard_descr;
16959 decl_size -= bitsize;
16960 if (last == NULL)
16961 {
16962 HOST_WIDE_INT offset = 0;
16963 if (GET_CODE (varloc) == VAR_LOCATION
16964 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16965 {
16966 varloc = PAT_VAR_LOCATION_LOC (varloc);
16967 if (GET_CODE (varloc) == EXPR_LIST)
16968 varloc = XEXP (varloc, 0);
16969 }
16970 do
16971 {
16972 if (GET_CODE (varloc) == CONST
16973 || GET_CODE (varloc) == SIGN_EXTEND
16974 || GET_CODE (varloc) == ZERO_EXTEND)
16975 varloc = XEXP (varloc, 0);
16976 else if (GET_CODE (varloc) == SUBREG)
16977 varloc = SUBREG_REG (varloc);
16978 else
16979 break;
16980 }
16981 while (1);
16982 /* The DW_OP_bit_piece offset should be zero for register
16983 or implicit location descriptions and empty location
16984 descriptions, but for memory addresses it needs a big-endian
16985 adjustment. */
16986 if (MEM_P (varloc))
16987 {
16988 unsigned HOST_WIDE_INT memsize;
16989 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16990 goto discard_descr;
16991 memsize *= BITS_PER_UNIT;
16992 if (memsize != bitsize)
16993 {
16994 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16995 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16996 goto discard_descr;
16997 if (memsize < bitsize)
16998 goto discard_descr;
16999 if (BITS_BIG_ENDIAN)
17000 offset = memsize - bitsize;
17001 }
17002 }
17003
17004 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17005 if (*descr_tail == NULL)
17006 goto discard_descr;
17007 descr_tail = &(*descr_tail)->dw_loc_next;
17008 }
17009 }
17010
17011 /* If there were any non-empty expressions, add padding till the end of
17012 the decl. */
17013 if (descr != NULL && decl_size != 0)
17014 {
17015 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17016 if (*descr_tail == NULL)
17017 goto discard_descr;
17018 }
17019 return descr;
17020
17021 discard_descr:
17022 /* Discard the descriptor and release any addr_table entries it uses. */
17023 remove_loc_list_addr_table_entries (descr);
17024 return NULL;
17025 }
17026
17027 /* Return the dwarf representation of the location list LOC_LIST of
17028 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17029 function. */
17030
17031 static dw_loc_list_ref
17032 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17033 {
17034 const char *endname, *secname;
17035 var_loc_view endview;
17036 rtx varloc;
17037 enum var_init_status initialized;
17038 struct var_loc_node *node;
17039 dw_loc_descr_ref descr;
17040 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17041 dw_loc_list_ref list = NULL;
17042 dw_loc_list_ref *listp = &list;
17043
17044 /* Now that we know what section we are using for a base,
17045 actually construct the list of locations.
17046 The first location information is what is passed to the
17047 function that creates the location list, and the remaining
17048 locations just get added on to that list.
17049 Note that we only know the start address for a location
17050 (i.e. location changes), so to build the range, we use
17051 the range [current location start, next location start].
17052 This means we have to special case the last node, and generate
17053 a range of [last location start, end of function label]. */
17054
17055 if (cfun && crtl->has_bb_partition)
17056 {
17057 bool save_in_cold_section_p = in_cold_section_p;
17058 in_cold_section_p = first_function_block_is_cold;
17059 if (loc_list->last_before_switch == NULL)
17060 in_cold_section_p = !in_cold_section_p;
17061 secname = secname_for_decl (decl);
17062 in_cold_section_p = save_in_cold_section_p;
17063 }
17064 else
17065 secname = secname_for_decl (decl);
17066
17067 for (node = loc_list->first; node; node = node->next)
17068 {
17069 bool range_across_switch = false;
17070 if (GET_CODE (node->loc) == EXPR_LIST
17071 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17072 {
17073 if (GET_CODE (node->loc) == EXPR_LIST)
17074 {
17075 descr = NULL;
17076 /* This requires DW_OP_{,bit_}piece, which is not usable
17077 inside DWARF expressions. */
17078 if (want_address == 2)
17079 descr = dw_sra_loc_expr (decl, node->loc);
17080 }
17081 else
17082 {
17083 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17084 varloc = NOTE_VAR_LOCATION (node->loc);
17085 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17086 }
17087 if (descr)
17088 {
17089 /* If section switch happens in between node->label
17090 and node->next->label (or end of function) and
17091 we can't emit it as a single entry list,
17092 emit two ranges, first one ending at the end
17093 of first partition and second one starting at the
17094 beginning of second partition. */
17095 if (node == loc_list->last_before_switch
17096 && (node != loc_list->first || loc_list->first->next)
17097 && current_function_decl)
17098 {
17099 endname = cfun->fde->dw_fde_end;
17100 endview = 0;
17101 range_across_switch = true;
17102 }
17103 /* The variable has a location between NODE->LABEL and
17104 NODE->NEXT->LABEL. */
17105 else if (node->next)
17106 endname = node->next->label, endview = node->next->view;
17107 /* If the variable has a location at the last label
17108 it keeps its location until the end of function. */
17109 else if (!current_function_decl)
17110 endname = text_end_label, endview = 0;
17111 else
17112 {
17113 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17114 current_function_funcdef_no);
17115 endname = ggc_strdup (label_id);
17116 endview = 0;
17117 }
17118
17119 *listp = new_loc_list (descr, node->label, node->view,
17120 endname, endview, secname);
17121 if (TREE_CODE (decl) == PARM_DECL
17122 && node == loc_list->first
17123 && NOTE_P (node->loc)
17124 && strcmp (node->label, endname) == 0)
17125 (*listp)->force = true;
17126 listp = &(*listp)->dw_loc_next;
17127 }
17128 }
17129
17130 if (cfun
17131 && crtl->has_bb_partition
17132 && node == loc_list->last_before_switch)
17133 {
17134 bool save_in_cold_section_p = in_cold_section_p;
17135 in_cold_section_p = !first_function_block_is_cold;
17136 secname = secname_for_decl (decl);
17137 in_cold_section_p = save_in_cold_section_p;
17138 }
17139
17140 if (range_across_switch)
17141 {
17142 if (GET_CODE (node->loc) == EXPR_LIST)
17143 descr = dw_sra_loc_expr (decl, node->loc);
17144 else
17145 {
17146 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17147 varloc = NOTE_VAR_LOCATION (node->loc);
17148 descr = dw_loc_list_1 (decl, varloc, want_address,
17149 initialized);
17150 }
17151 gcc_assert (descr);
17152 /* The variable has a location between NODE->LABEL and
17153 NODE->NEXT->LABEL. */
17154 if (node->next)
17155 endname = node->next->label, endview = node->next->view;
17156 else
17157 endname = cfun->fde->dw_fde_second_end, endview = 0;
17158 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17159 endname, endview, secname);
17160 listp = &(*listp)->dw_loc_next;
17161 }
17162 }
17163
17164 /* Try to avoid the overhead of a location list by emitting a location
17165 expression instead, but only if we didn't have more than one
17166 location entry in the first place.  If some entries were not
17167 representable, we don't want to pretend that the single entry that
17168 was representable applies to the entire scope in which the variable
17169 is available. */
17170 if (list && loc_list->first->next)
17171 gen_llsym (list);
17172 else
17173 maybe_gen_llsym (list);
17174
17175 return list;
17176 }
17177
17178 /* Return true if the loc_list has only a single element and thus can be
17179 represented as a location description. */
17180
17181 static bool
17182 single_element_loc_list_p (dw_loc_list_ref list)
17183 {
17184 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17185 return !list->ll_symbol;
17186 }
17187
17188 /* Duplicate a single element of location list. */
17189
17190 static inline dw_loc_descr_ref
17191 copy_loc_descr (dw_loc_descr_ref ref)
17192 {
17193 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17194 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17195 return copy;
17196 }
17197
17198 /* To each location in list LIST append loc descr REF. */
17199
17200 static void
17201 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17202 {
17203 dw_loc_descr_ref copy;
17204 add_loc_descr (&list->expr, ref);
17205 list = list->dw_loc_next;
17206 while (list)
17207 {
17208 copy = copy_loc_descr (ref);
17209 add_loc_descr (&list->expr, copy);
17210 while (copy->dw_loc_next)
17211 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17212 list = list->dw_loc_next;
17213 }
17214 }
17215
17216 /* To each location in list LIST prepend loc descr REF. */
17217
17218 static void
17219 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17220 {
17221 dw_loc_descr_ref copy;
17222 dw_loc_descr_ref ref_end = list->expr;
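/* REF_END marks where the original expression of the first location begins,
   so the copy loop below knows where the prepended prefix ends.  */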
17223 add_loc_descr (&ref, list->expr);
17224 list->expr = ref;
17225 list = list->dw_loc_next;
17226 while (list)
17227 {
17228 dw_loc_descr_ref end = list->expr;
17229 list->expr = copy = copy_loc_descr (ref);
17230 while (copy->dw_loc_next != ref_end)
17231 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17232 copy->dw_loc_next = end;
17233 list = list->dw_loc_next;
17234 }
17235 }
17236
17237 /* Given two lists RET and LIST,
17238 produce a location list that is the result of adding the expression in LIST
17239 to the expression in RET at each position in the program.
17240 Might be destructive on both RET and LIST.
17241
17242 TODO: We handle only the simple cases of RET or LIST having at most one
17243 element.  The general case would involve sorting the lists in program order
17244 and merging them, which will need some additional work.
17245 Adding that will improve the quality of debug info, especially for SRA-ed
17246 structures. */
17247
17248 static void
17249 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17250 {
17251 if (!list)
17252 return;
17253 if (!*ret)
17254 {
17255 *ret = list;
17256 return;
17257 }
17258 if (!list->dw_loc_next)
17259 {
17260 add_loc_descr_to_each (*ret, list->expr);
17261 return;
17262 }
17263 if (!(*ret)->dw_loc_next)
17264 {
17265 prepend_loc_descr_to_each (list, (*ret)->expr);
17266 *ret = list;
17267 return;
17268 }
17269 expansion_failed (NULL_TREE, NULL_RTX,
17270 "Don't know how to merge two non-trivial"
17271 " location lists.\n");
17272 *ret = NULL;
17273 return;
17274 }
17275
17276 /* LOC is a constant expression.  Try our luck: look it up in the constant
17277 pool and return the loc_descr of its address. */
17278
17279 static dw_loc_descr_ref
17280 cst_pool_loc_descr (tree loc)
17281 {
17282 /* Get an RTL for this, if something has been emitted. */
17283 rtx rtl = lookup_constant_def (loc);
17284
17285 if (!rtl || !MEM_P (rtl))
17286 {
17287 gcc_assert (!rtl);
17288 return 0;
17289 }
17290 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17291
17292 /* TODO: We might get more coverage if we were actually delaying expansion
17293 of all expressions until the end of compilation, when constant pools are
17294 fully populated. */
17295 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17296 {
17297 expansion_failed (loc, NULL_RTX,
17298 "CST value in contant pool but not marked.");
17299 return 0;
17300 }
17301 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17302 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17303 }
17304
17305 /* Return dw_loc_list representing address of addr_expr LOC
17306 by looking for inner INDIRECT_REF expression and turning
17307 it into simple arithmetic.
17308
17309 See loc_list_from_tree for the meaning of CONTEXT. */
17310
17311 static dw_loc_list_ref
17312 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17313 loc_descr_context *context)
17314 {
17315 tree obj, offset;
17316 poly_int64 bitsize, bitpos, bytepos;
17317 machine_mode mode;
17318 int unsignedp, reversep, volatilep = 0;
17319 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17320
17321 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17322 &bitsize, &bitpos, &offset, &mode,
17323 &unsignedp, &reversep, &volatilep);
17324 STRIP_NOPS (obj);
17325 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17326 {
17327 expansion_failed (loc, NULL_RTX, "bitfield access");
17328 return 0;
17329 }
17330 if (!INDIRECT_REF_P (obj))
17331 {
17332 expansion_failed (obj,
17333 NULL_RTX, "no indirect ref in inner reference");
17334 return 0;
17335 }
17336 if (!offset && known_eq (bitpos, 0))
17337 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17338 context);
17339 else if (toplev
17340 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17341 && (dwarf_version >= 4 || !dwarf_strict))
17342 {
17343 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17344 if (!list_ret)
17345 return 0;
17346 if (offset)
17347 {
17348 /* Variable offset. */
17349 list_ret1 = loc_list_from_tree (offset, 0, context);
17350 if (list_ret1 == 0)
17351 return 0;
17352 add_loc_list (&list_ret, list_ret1);
17353 if (!list_ret)
17354 return 0;
17355 add_loc_descr_to_each (list_ret,
17356 new_loc_descr (DW_OP_plus, 0, 0));
17357 }
17358 HOST_WIDE_INT value;
17359 if (bytepos.is_constant (&value) && value > 0)
17360 add_loc_descr_to_each (list_ret,
17361 new_loc_descr (DW_OP_plus_uconst, value, 0));
17362 else if (maybe_ne (bytepos, 0))
17363 loc_list_plus_const (list_ret, bytepos);
17364 add_loc_descr_to_each (list_ret,
17365 new_loc_descr (DW_OP_stack_value, 0, 0));
17366 }
17367 return list_ret;
17368 }
17369
17370 /* Set LOC to the next operation that is not a DW_OP_nop operation.  If
17371 all operations from LOC are nops, move to the last one.  Insert into NOPS all
17372 operations that are skipped. */
17373
17374 static void
17375 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17376 hash_set<dw_loc_descr_ref> &nops)
17377 {
17378 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17379 {
17380 nops.add (loc);
17381 loc = loc->dw_loc_next;
17382 }
17383 }
17384
17385 /* Helper for loc_descr_without_nops: free the location description operation
17386 P. */
17387
17388 bool
17389 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17390 {
17391 ggc_free (loc);
17392 return true;
17393 }
17394
17395 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17396 finishes LOC. */
17397
17398 static void
17399 loc_descr_without_nops (dw_loc_descr_ref &loc)
17400 {
17401 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17402 return;
17403
17404 /* Set of all DW_OP_nop operations we remove. */
17405 hash_set<dw_loc_descr_ref> nops;
17406
17407 /* First, strip all prefix NOP operations in order to keep the head of the
17408 operations list. */
17409 loc_descr_to_next_no_nop (loc, nops);
17410
17411 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17412 {
17413 /* For control flow operations: strip "prefix" nops in destination
17414 labels. */
17415 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17416 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17417 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17418 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17419
17420 /* Do the same for the operations that follow, then move to the next
17421 iteration. */
17422 if (cur->dw_loc_next != NULL)
17423 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17424 cur = cur->dw_loc_next;
17425 }
17426
17427 nops.traverse<void *, free_loc_descr> (NULL);
17428 }
17429
17430
17431 struct dwarf_procedure_info;
17432
17433 /* Helper structure for location descriptions generation. */
17434 struct loc_descr_context
17435 {
17436 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17437 NULL_TREE if DW_OP_push_object_address is invalid for this location
17438 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17439 tree context_type;
17440 /* The ..._DECL node that should be translated as a
17441 DW_OP_push_object_address operation. */
17442 tree base_decl;
17443 /* Information about the DWARF procedure we are currently generating. NULL if
17444 we are not generating a DWARF procedure. */
17445 struct dwarf_procedure_info *dpi;
17446 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17447 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17448 bool placeholder_arg;
17449 /* True if PLACEHOLDER_EXPR has been seen. */
17450 bool placeholder_seen;
17451 };
17452
17453 /* DWARF procedures generation
17454
17455 DWARF expressions (aka location descriptions) are used to encode values that
17456 vary, such as sizes or offsets. Such computations can have redundant parts
17457 that can be factorized in order to reduce the size of the output debug
17458 information. This is the whole point of DWARF procedures.
17459
17460 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17461 already factorized into functions ("size functions") in order to handle very
17462 big and complex types. Such functions are quite simple: they have integral
17463 arguments, they return an integral result and their body contains only a
17464 return statement with arithmetic expressions. This is the only kind of
17465 function we are interested in translating into DWARF procedures here.
17466
17467 DWARF expressions and DWARF procedures are executed using a stack, so we have
17468 to define some calling convention for them to interact. Let's say that:
17469
17470 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17471 all arguments in reverse order (right-to-left) so that when the DWARF
17472 procedure execution starts, the first argument is the top of the stack.
17473
17474 - Then, when returning, the DWARF procedure must have consumed all arguments
17475 on the stack, must have pushed the result and touched nothing else.
17476
17477 - Each integral argument and the result have an integral type that can be
17478 held in a single stack slot.
17479
17480 - We call "frame offset" the number of stack slots that are "under DWARF
17481 procedure control": it includes the argument slots, the temporaries and
17482 the result slot. Thus, it is equal to the number of arguments when the
17483 procedure execution starts and must be equal to one (the result) when it
17484 returns. */
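
/* As an illustrative sketch (the exact output depends on the translated
   function), consider a hypothetical size function SZ that returns A * B.
   Following the convention above, a caller pushes the arguments
   right-to-left and then calls the procedure:

     <push B> <push A> DW_OP_call4 <DIE of SZ>

   The body generated for SZ, after resolve_args_picking and the epilogue
   added in function_to_dwarf_procedure, looks roughly like:

     DW_OP_dup                push A (frame-relative pick relocated to dup)
     DW_OP_pick 2             push B
     DW_OP_mul                compute A * B
     DW_OP_swap DW_OP_drop    epilogue: discard one argument slot
     DW_OP_swap DW_OP_drop    epilogue: discard the other argument slot

   so that only the result remains on the stack when the call returns. The
   stack usage recorded for such a procedure is 1 - 2 = -1: one result
   pushed, two arguments consumed.  */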
17485
17486 /* Helper structure used when generating operations for a DWARF procedure. */
17487 struct dwarf_procedure_info
17488 {
17489 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17490 currently translated. */
17491 tree fndecl;
17492 /* The number of arguments FNDECL takes. */
17493 unsigned args_count;
17494 };
17495
17496 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17497 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17498 equate it to this DIE. */
17499
17500 static dw_die_ref
17501 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17502 dw_die_ref parent_die)
17503 {
17504 dw_die_ref dwarf_proc_die;
17505
17506 if ((dwarf_version < 3 && dwarf_strict)
17507 || location == NULL)
17508 return NULL;
17509
17510 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17511 if (fndecl)
17512 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17513 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17514 return dwarf_proc_die;
17515 }
17516
17517 /* Return whether TYPE is a supported type as a DWARF procedure argument
17518 type or return type (we handle only scalar types and pointer types that
17519 aren't wider than the DWARF expression evaluation stack). */
17520
17521 static bool
17522 is_handled_procedure_type (tree type)
17523 {
17524 return ((INTEGRAL_TYPE_P (type)
17525 || TREE_CODE (type) == OFFSET_TYPE
17526 || TREE_CODE (type) == POINTER_TYPE)
17527 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17528 }
17529
17530 /* Helper for resolve_args_picking: do the same but stop when coming across
17531 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17532 offset *before* evaluating the corresponding operation. */
17533
17534 static bool
17535 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17536 struct dwarf_procedure_info *dpi,
17537 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17538 {
17539 /* The "frame_offset" identifier is already used to name a macro... */
17540 unsigned frame_offset_ = initial_frame_offset;
17541 dw_loc_descr_ref l;
17542
17543 for (l = loc; l != NULL;)
17544 {
17545 bool existed;
17546 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17547
17548 /* If we have already visited this node, there is nothing left to compute. */
17549 if (existed)
17550 {
17551 /* Make sure that the stack size is consistent wherever the execution
17552 flow comes from. */
17553 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17554 break;
17555 }
17556 l_frame_offset = frame_offset_;
17557
17558 /* If needed, relocate the picking offset with respect to the frame
17559 offset. */
17560 if (l->frame_offset_rel)
17561 {
17562 unsigned HOST_WIDE_INT off;
17563 switch (l->dw_loc_opc)
17564 {
17565 case DW_OP_pick:
17566 off = l->dw_loc_oprnd1.v.val_unsigned;
17567 break;
17568 case DW_OP_dup:
17569 off = 0;
17570 break;
17571 case DW_OP_over:
17572 off = 1;
17573 break;
17574 default:
17575 gcc_unreachable ();
17576 }
17577 /* frame_offset_ is the size of the current stack frame, including
17578 incoming arguments. Besides, the arguments are pushed
17579 right-to-left. Thus, in order to access the Nth argument from
17580 this operation node, the picking has to skip temporaries *plus*
17581 one stack slot per argument (0 for the first one, 1 for the second
17582 one, etc.).
17583
17584 The targeted argument number (N) is already set as the operand,
17585 and the number of temporaries can be computed with:
17586 frame_offset_ - dpi->args_count */
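/* For instance, with dpi->args_count == 2 and one temporary already
   pushed (frame_offset_ == 3), accessing the second argument (N == 1)
   gives off = 1 + 3 - 2 == 2, i.e. DW_OP_pick 2.  */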
17587 off += frame_offset_ - dpi->args_count;
17588
17589 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17590 if (off > 255)
17591 return false;
17592
17593 if (off == 0)
17594 {
17595 l->dw_loc_opc = DW_OP_dup;
17596 l->dw_loc_oprnd1.v.val_unsigned = 0;
17597 }
17598 else if (off == 1)
17599 {
17600 l->dw_loc_opc = DW_OP_over;
17601 l->dw_loc_oprnd1.v.val_unsigned = 0;
17602 }
17603 else
17604 {
17605 l->dw_loc_opc = DW_OP_pick;
17606 l->dw_loc_oprnd1.v.val_unsigned = off;
17607 }
17608 }
17609
17610 /* Update frame_offset according to the effect the current operation has
17611 on the stack. */
17612 switch (l->dw_loc_opc)
17613 {
17614 case DW_OP_deref:
17615 case DW_OP_swap:
17616 case DW_OP_rot:
17617 case DW_OP_abs:
17618 case DW_OP_neg:
17619 case DW_OP_not:
17620 case DW_OP_plus_uconst:
17621 case DW_OP_skip:
17622 case DW_OP_reg0:
17623 case DW_OP_reg1:
17624 case DW_OP_reg2:
17625 case DW_OP_reg3:
17626 case DW_OP_reg4:
17627 case DW_OP_reg5:
17628 case DW_OP_reg6:
17629 case DW_OP_reg7:
17630 case DW_OP_reg8:
17631 case DW_OP_reg9:
17632 case DW_OP_reg10:
17633 case DW_OP_reg11:
17634 case DW_OP_reg12:
17635 case DW_OP_reg13:
17636 case DW_OP_reg14:
17637 case DW_OP_reg15:
17638 case DW_OP_reg16:
17639 case DW_OP_reg17:
17640 case DW_OP_reg18:
17641 case DW_OP_reg19:
17642 case DW_OP_reg20:
17643 case DW_OP_reg21:
17644 case DW_OP_reg22:
17645 case DW_OP_reg23:
17646 case DW_OP_reg24:
17647 case DW_OP_reg25:
17648 case DW_OP_reg26:
17649 case DW_OP_reg27:
17650 case DW_OP_reg28:
17651 case DW_OP_reg29:
17652 case DW_OP_reg30:
17653 case DW_OP_reg31:
17654 case DW_OP_bregx:
17655 case DW_OP_piece:
17656 case DW_OP_deref_size:
17657 case DW_OP_nop:
17658 case DW_OP_bit_piece:
17659 case DW_OP_implicit_value:
17660 case DW_OP_stack_value:
17661 break;
17662
17663 case DW_OP_addr:
17664 case DW_OP_const1u:
17665 case DW_OP_const1s:
17666 case DW_OP_const2u:
17667 case DW_OP_const2s:
17668 case DW_OP_const4u:
17669 case DW_OP_const4s:
17670 case DW_OP_const8u:
17671 case DW_OP_const8s:
17672 case DW_OP_constu:
17673 case DW_OP_consts:
17674 case DW_OP_dup:
17675 case DW_OP_over:
17676 case DW_OP_pick:
17677 case DW_OP_lit0:
17678 case DW_OP_lit1:
17679 case DW_OP_lit2:
17680 case DW_OP_lit3:
17681 case DW_OP_lit4:
17682 case DW_OP_lit5:
17683 case DW_OP_lit6:
17684 case DW_OP_lit7:
17685 case DW_OP_lit8:
17686 case DW_OP_lit9:
17687 case DW_OP_lit10:
17688 case DW_OP_lit11:
17689 case DW_OP_lit12:
17690 case DW_OP_lit13:
17691 case DW_OP_lit14:
17692 case DW_OP_lit15:
17693 case DW_OP_lit16:
17694 case DW_OP_lit17:
17695 case DW_OP_lit18:
17696 case DW_OP_lit19:
17697 case DW_OP_lit20:
17698 case DW_OP_lit21:
17699 case DW_OP_lit22:
17700 case DW_OP_lit23:
17701 case DW_OP_lit24:
17702 case DW_OP_lit25:
17703 case DW_OP_lit26:
17704 case DW_OP_lit27:
17705 case DW_OP_lit28:
17706 case DW_OP_lit29:
17707 case DW_OP_lit30:
17708 case DW_OP_lit31:
17709 case DW_OP_breg0:
17710 case DW_OP_breg1:
17711 case DW_OP_breg2:
17712 case DW_OP_breg3:
17713 case DW_OP_breg4:
17714 case DW_OP_breg5:
17715 case DW_OP_breg6:
17716 case DW_OP_breg7:
17717 case DW_OP_breg8:
17718 case DW_OP_breg9:
17719 case DW_OP_breg10:
17720 case DW_OP_breg11:
17721 case DW_OP_breg12:
17722 case DW_OP_breg13:
17723 case DW_OP_breg14:
17724 case DW_OP_breg15:
17725 case DW_OP_breg16:
17726 case DW_OP_breg17:
17727 case DW_OP_breg18:
17728 case DW_OP_breg19:
17729 case DW_OP_breg20:
17730 case DW_OP_breg21:
17731 case DW_OP_breg22:
17732 case DW_OP_breg23:
17733 case DW_OP_breg24:
17734 case DW_OP_breg25:
17735 case DW_OP_breg26:
17736 case DW_OP_breg27:
17737 case DW_OP_breg28:
17738 case DW_OP_breg29:
17739 case DW_OP_breg30:
17740 case DW_OP_breg31:
17741 case DW_OP_fbreg:
17742 case DW_OP_push_object_address:
17743 case DW_OP_call_frame_cfa:
17744 case DW_OP_GNU_variable_value:
17745 ++frame_offset_;
17746 break;
17747
17748 case DW_OP_drop:
17749 case DW_OP_xderef:
17750 case DW_OP_and:
17751 case DW_OP_div:
17752 case DW_OP_minus:
17753 case DW_OP_mod:
17754 case DW_OP_mul:
17755 case DW_OP_or:
17756 case DW_OP_plus:
17757 case DW_OP_shl:
17758 case DW_OP_shr:
17759 case DW_OP_shra:
17760 case DW_OP_xor:
17761 case DW_OP_bra:
17762 case DW_OP_eq:
17763 case DW_OP_ge:
17764 case DW_OP_gt:
17765 case DW_OP_le:
17766 case DW_OP_lt:
17767 case DW_OP_ne:
17768 case DW_OP_regx:
17769 case DW_OP_xderef_size:
17770 --frame_offset_;
17771 break;
17772
17773 case DW_OP_call2:
17774 case DW_OP_call4:
17775 case DW_OP_call_ref:
17776 {
17777 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17778 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17779
17780 if (stack_usage == NULL)
17781 return false;
17782 frame_offset_ += *stack_usage;
17783 break;
17784 }
17785
17786 case DW_OP_implicit_pointer:
17787 case DW_OP_entry_value:
17788 case DW_OP_const_type:
17789 case DW_OP_regval_type:
17790 case DW_OP_deref_type:
17791 case DW_OP_convert:
17792 case DW_OP_reinterpret:
17793 case DW_OP_form_tls_address:
17794 case DW_OP_GNU_push_tls_address:
17795 case DW_OP_GNU_uninit:
17796 case DW_OP_GNU_encoded_addr:
17797 case DW_OP_GNU_implicit_pointer:
17798 case DW_OP_GNU_entry_value:
17799 case DW_OP_GNU_const_type:
17800 case DW_OP_GNU_regval_type:
17801 case DW_OP_GNU_deref_type:
17802 case DW_OP_GNU_convert:
17803 case DW_OP_GNU_reinterpret:
17804 case DW_OP_GNU_parameter_ref:
17805 /* loc_list_from_tree will probably not output these operations for
17806 size functions, so assume they will not appear here. */
17807 /* Fall through... */
17808
17809 default:
17810 gcc_unreachable ();
17811 }
17812
17813 /* Now, follow the control flow (except subroutine calls). */
17814 switch (l->dw_loc_opc)
17815 {
17816 case DW_OP_bra:
17817 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17818 frame_offsets))
17819 return false;
17820 /* Fall through. */
17821
17822 case DW_OP_skip:
17823 l = l->dw_loc_oprnd1.v.val_loc;
17824 break;
17825
17826 case DW_OP_stack_value:
17827 return true;
17828
17829 default:
17830 l = l->dw_loc_next;
17831 break;
17832 }
17833 }
17834
17835 return true;
17836 }
17837
17838 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17839 operations) in order to resolve the operand of DW_OP_pick operations that
17840 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17841 offset *before* LOC is executed. Return whether all relocations were
17842 successful. */
17843
17844 static bool
17845 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17846 struct dwarf_procedure_info *dpi)
17847 {
17848 /* Associate to all visited operations the frame offset *before* evaluating
17849 this operation. */
17850 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17851
17852 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17853 frame_offsets);
17854 }
17855
17856 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17857 Return NULL if it is not possible. */
17858
17859 static dw_die_ref
17860 function_to_dwarf_procedure (tree fndecl)
17861 {
17862 struct loc_descr_context ctx;
17863 struct dwarf_procedure_info dpi;
17864 dw_die_ref dwarf_proc_die;
17865 tree tree_body = DECL_SAVED_TREE (fndecl);
17866 dw_loc_descr_ref loc_body, epilogue;
17867
17868 tree cursor;
17869 unsigned i;
17870
17871 /* Do not generate multiple DWARF procedures for the same function
17872 declaration. */
17873 dwarf_proc_die = lookup_decl_die (fndecl);
17874 if (dwarf_proc_die != NULL)
17875 return dwarf_proc_die;
17876
17877 /* DWARF procedures are available starting with the DWARFv3 standard. */
17878 if (dwarf_version < 3 && dwarf_strict)
17879 return NULL;
17880
17881 /* We handle only functions for which we still have a body, that return a
17882 supported type and that take arguments with supported types. Note that
17883 there is no point translating functions that return nothing. */
17884 if (tree_body == NULL_TREE
17885 || DECL_RESULT (fndecl) == NULL_TREE
17886 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17887 return NULL;
17888
17889 for (cursor = DECL_ARGUMENTS (fndecl);
17890 cursor != NULL_TREE;
17891 cursor = TREE_CHAIN (cursor))
17892 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17893 return NULL;
17894
17895 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17896 if (TREE_CODE (tree_body) != RETURN_EXPR)
17897 return NULL;
17898 tree_body = TREE_OPERAND (tree_body, 0);
17899 if (TREE_CODE (tree_body) != MODIFY_EXPR
17900 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17901 return NULL;
17902 tree_body = TREE_OPERAND (tree_body, 1);
17903
17904 /* Try to translate the body expression itself. Note that this will probably
17905 cause an infinite recursion if its call graph has a cycle. This is very
17906 unlikely for size functions, however, so don't bother with such things at
17907 the moment. */
17908 ctx.context_type = NULL_TREE;
17909 ctx.base_decl = NULL_TREE;
17910 ctx.dpi = &dpi;
17911 ctx.placeholder_arg = false;
17912 ctx.placeholder_seen = false;
17913 dpi.fndecl = fndecl;
17914 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17915 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17916 if (!loc_body)
17917 return NULL;
17918
17919 /* After evaluating all operands in "loc_body", we should still have on the
17920 stack all arguments plus the desired function result (top of the stack).
17921 Generate code in order to keep only the result in our stack frame. */
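/* For instance, with two arguments the epilogue is DW_OP_swap, DW_OP_drop,
   DW_OP_swap, DW_OP_drop: each swap/drop pair removes the argument slot
   that sits just below the result.  */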
17922 epilogue = NULL;
17923 for (i = 0; i < dpi.args_count; ++i)
17924 {
17925 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17926 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17927 op_couple->dw_loc_next->dw_loc_next = epilogue;
17928 epilogue = op_couple;
17929 }
17930 add_loc_descr (&loc_body, epilogue);
17931 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17932 return NULL;
17933
17934 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17935 because they are considered useful. Now that there is an epilogue, they
17936 are no longer needed, so give it another try. */
17937 loc_descr_without_nops (loc_body);
17938
17939 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17940 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17941 though, given that size functions do not come from source, so they should
17942 not have a dedicated DW_TAG_subprogram DIE. */
17943 dwarf_proc_die
17944 = new_dwarf_proc_die (loc_body, fndecl,
17945 get_context_die (DECL_CONTEXT (fndecl)));
17946
17947 /* The called DWARF procedure consumes one stack slot per argument and
17948 returns one stack slot. */
17949 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17950
17951 return dwarf_proc_die;
17952 }
17953
17954
17955 /* Generate Dwarf location list representing LOC.
17956 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
17957 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
17958 If WANT_ADDRESS is 2, an expression computing an address usable in a
17959 location description is returned (i.e. DW_OP_reg can be used
17960 to refer to register values).
17961
17962 CONTEXT provides information to customize the location descriptions
17963 generation. Its context_type field specifies what type is implicitly
17964 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17965 will not be generated.
17966
17967 Its DPI field determines whether we are generating a DWARF expression for a
17968 DWARF procedure, in which case PARM_DECL references are handled specially.
17969
17970 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17971 and dpi fields were null. */
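
/* As a rough sketch of the WANT_ADDRESS semantics: for a variable V whose
   location is the memory cell at address ADDR, WANT_ADDRESS == 1 typically
   yields DW_OP_addr ADDR, whereas WANT_ADDRESS == 0 typically yields
   DW_OP_addr ADDR; DW_OP_deref (see the dereference step at the end of
   this function).  */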
17972
17973 static dw_loc_list_ref
17974 loc_list_from_tree_1 (tree loc, int want_address,
17975 struct loc_descr_context *context)
17976 {
17977 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17978 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17979 int have_address = 0;
17980 enum dwarf_location_atom op;
17981
17982 /* ??? Most of the time we do not take proper care of sign/zero
17983 extending the values. Hopefully this won't be a real
17984 problem... */
17985
17986 if (context != NULL
17987 && context->base_decl == loc
17988 && want_address == 0)
17989 {
17990 if (dwarf_version >= 3 || !dwarf_strict)
17991 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17992 NULL, 0, NULL, 0, NULL);
17993 else
17994 return NULL;
17995 }
17996
17997 switch (TREE_CODE (loc))
17998 {
17999 case ERROR_MARK:
18000 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18001 return 0;
18002
18003 case PLACEHOLDER_EXPR:
18004 /* This case involves extracting fields from an object to determine the
18005 position of other fields. It is supposed to appear only as the first
18006 operand of COMPONENT_REF nodes and to reference precisely the type
18007 that the context allows. */
18008 if (context != NULL
18009 && TREE_TYPE (loc) == context->context_type
18010 && want_address >= 1)
18011 {
18012 if (dwarf_version >= 3 || !dwarf_strict)
18013 {
18014 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18015 have_address = 1;
18016 break;
18017 }
18018 else
18019 return NULL;
18020 }
18021 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18022 the single argument passed by the consumer. */
18023 else if (context != NULL
18024 && context->placeholder_arg
18025 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18026 && want_address == 0)
18027 {
18028 ret = new_loc_descr (DW_OP_pick, 0, 0);
18029 ret->frame_offset_rel = 1;
18030 context->placeholder_seen = true;
18031 break;
18032 }
18033 else
18034 expansion_failed (loc, NULL_RTX,
18035 "PLACEHOLDER_EXPR for an unexpected type");
18036 break;
18037
18038 case CALL_EXPR:
18039 {
18040 const int nargs = call_expr_nargs (loc);
18041 tree callee = get_callee_fndecl (loc);
18042 int i;
18043 dw_die_ref dwarf_proc;
18044
18045 if (callee == NULL_TREE)
18046 goto call_expansion_failed;
18047
18048 /* We handle only functions that return a supported scalar or pointer type. */
18049 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18050 goto call_expansion_failed;
18051
18052 dwarf_proc = function_to_dwarf_procedure (callee);
18053 if (dwarf_proc == NULL)
18054 goto call_expansion_failed;
18055
18056 /* Evaluate arguments right-to-left so that the first argument will
18057 be the top-most one on the stack. */
18058 for (i = nargs - 1; i >= 0; --i)
18059 {
18060 dw_loc_descr_ref loc_descr
18061 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18062 context);
18063
18064 if (loc_descr == NULL)
18065 goto call_expansion_failed;
18066
18067 add_loc_descr (&ret, loc_descr);
18068 }
18069
18070 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18071 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18072 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18073 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18074 add_loc_descr (&ret, ret1);
18075 break;
18076
18077 call_expansion_failed:
18078 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18079 /* There are no opcodes for these operations. */
18080 return 0;
18081 }
18082
18083 case PREINCREMENT_EXPR:
18084 case PREDECREMENT_EXPR:
18085 case POSTINCREMENT_EXPR:
18086 case POSTDECREMENT_EXPR:
18087 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18088 /* There are no opcodes for these operations. */
18089 return 0;
18090
18091 case ADDR_EXPR:
18092 /* If we already want an address, see if there is an INDIRECT_REF inside,
18093 e.g. for &this->field. */
18094 if (want_address)
18095 {
18096 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18097 (loc, want_address == 2, context);
18098 if (list_ret)
18099 have_address = 1;
18100 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18101 && (ret = cst_pool_loc_descr (loc)))
18102 have_address = 1;
18103 }
18104 /* Otherwise, process the argument and look for the address. */
18105 if (!list_ret && !ret)
18106 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18107 else
18108 {
18109 if (want_address)
18110 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18111 return NULL;
18112 }
18113 break;
18114
18115 case VAR_DECL:
18116 if (DECL_THREAD_LOCAL_P (loc))
18117 {
18118 rtx rtl;
18119 enum dwarf_location_atom tls_op;
18120 enum dtprel_bool dtprel = dtprel_false;
18121
18122 if (targetm.have_tls)
18123 {
18124 /* If this is not defined, we have no way to emit the
18125 data. */
18126 if (!targetm.asm_out.output_dwarf_dtprel)
18127 return 0;
18128
18129 /* The way DW_OP_GNU_push_tls_address is specified, we
18130 can only look up addresses of objects in the current
18131 module. We used DW_OP_addr as first op, but that's
18132 wrong, because DW_OP_addr is relocated by the debug
18133 info consumer, while DW_OP_GNU_push_tls_address
18134 operand shouldn't be. */
18135 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18136 return 0;
18137 dtprel = dtprel_true;
18138 /* We check for DWARF 5 here because gdb did not implement
18139 DW_OP_form_tls_address until after 7.12. */
18140 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18141 : DW_OP_GNU_push_tls_address);
18142 }
18143 else
18144 {
18145 if (!targetm.emutls.debug_form_tls_address
18146 || !(dwarf_version >= 3 || !dwarf_strict))
18147 return 0;
18148 /* We stuffed the control variable into the DECL_VALUE_EXPR
18149 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18150 no longer appear in gimple code. We used the control
18151 variable specifically so that we could pick it up here. */
18152 loc = DECL_VALUE_EXPR (loc);
18153 tls_op = DW_OP_form_tls_address;
18154 }
18155
18156 rtl = rtl_for_decl_location (loc);
18157 if (rtl == NULL_RTX)
18158 return 0;
18159
18160 if (!MEM_P (rtl))
18161 return 0;
18162 rtl = XEXP (rtl, 0);
18163 if (! CONSTANT_P (rtl))
18164 return 0;
18165
18166 ret = new_addr_loc_descr (rtl, dtprel);
18167 ret1 = new_loc_descr (tls_op, 0, 0);
18168 add_loc_descr (&ret, ret1);
18169
18170 have_address = 1;
18171 break;
18172 }
18173 /* FALLTHRU */
18174
18175 case PARM_DECL:
18176 if (context != NULL && context->dpi != NULL
18177 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18178 {
18179 /* We are generating code for a DWARF procedure and we want to access
18180 one of its arguments: find the appropriate argument offset and let
18181 the resolve_args_picking pass compute the offset that complies
18182 with the stack frame size. */
18183 unsigned i = 0;
18184 tree cursor;
18185
18186 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18187 cursor != NULL_TREE && cursor != loc;
18188 cursor = TREE_CHAIN (cursor), ++i)
18189 ;
18190 /* If we are translating a DWARF procedure, all referenced parameters
18191 must belong to the current function. */
18192 gcc_assert (cursor != NULL_TREE);
18193
18194 ret = new_loc_descr (DW_OP_pick, i, 0);
18195 ret->frame_offset_rel = 1;
18196 break;
18197 }
18198 /* FALLTHRU */
18199
18200 case RESULT_DECL:
18201 if (DECL_HAS_VALUE_EXPR_P (loc))
18202 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18203 want_address, context);
18204 /* FALLTHRU */
18205
18206 case FUNCTION_DECL:
18207 {
18208 rtx rtl;
18209 var_loc_list *loc_list = lookup_decl_loc (loc);
18210
18211 if (loc_list && loc_list->first)
18212 {
18213 list_ret = dw_loc_list (loc_list, loc, want_address);
18214 have_address = want_address != 0;
18215 break;
18216 }
18217 rtl = rtl_for_decl_location (loc);
18218 if (rtl == NULL_RTX)
18219 {
18220 if (TREE_CODE (loc) != FUNCTION_DECL
18221 && early_dwarf
18222 && current_function_decl
18223 && want_address != 1
18224 && ! DECL_IGNORED_P (loc)
18225 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18226 || POINTER_TYPE_P (TREE_TYPE (loc)))
18227 && DECL_CONTEXT (loc) == current_function_decl
18228 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18229 <= DWARF2_ADDR_SIZE))
18230 {
18231 dw_die_ref ref = lookup_decl_die (loc);
18232 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18233 if (ref)
18234 {
18235 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18236 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18237 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18238 }
18239 else
18240 {
18241 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18242 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18243 }
18244 break;
18245 }
18246 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18247 return 0;
18248 }
18249 else if (CONST_INT_P (rtl))
18250 {
18251 HOST_WIDE_INT val = INTVAL (rtl);
18252 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18253 val &= GET_MODE_MASK (DECL_MODE (loc));
18254 ret = int_loc_descriptor (val);
18255 }
18256 else if (GET_CODE (rtl) == CONST_STRING)
18257 {
18258 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18259 return 0;
18260 }
18261 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18262 ret = new_addr_loc_descr (rtl, dtprel_false);
18263 else
18264 {
18265 machine_mode mode, mem_mode;
18266
18267 /* Certain constructs can only be represented at top-level. */
18268 if (want_address == 2)
18269 {
18270 ret = loc_descriptor (rtl, VOIDmode,
18271 VAR_INIT_STATUS_INITIALIZED);
18272 have_address = 1;
18273 }
18274 else
18275 {
18276 mode = GET_MODE (rtl);
18277 mem_mode = VOIDmode;
18278 if (MEM_P (rtl))
18279 {
18280 mem_mode = mode;
18281 mode = get_address_mode (rtl);
18282 rtl = XEXP (rtl, 0);
18283 have_address = 1;
18284 }
18285 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18286 VAR_INIT_STATUS_INITIALIZED);
18287 }
18288 if (!ret)
18289 expansion_failed (loc, rtl,
18290 "failed to produce loc descriptor for rtl");
18291 }
18292 }
18293 break;
18294
18295 case MEM_REF:
18296 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18297 {
18298 have_address = 1;
18299 goto do_plus;
18300 }
18301 /* Fallthru. */
18302 case INDIRECT_REF:
18303 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18304 have_address = 1;
18305 break;
18306
18307 case TARGET_MEM_REF:
18308 case SSA_NAME:
18309 case DEBUG_EXPR_DECL:
18310 return NULL;
18311
18312 case COMPOUND_EXPR:
18313 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18314 context);
18315
18316 CASE_CONVERT:
18317 case VIEW_CONVERT_EXPR:
18318 case SAVE_EXPR:
18319 case MODIFY_EXPR:
18320 case NON_LVALUE_EXPR:
18321 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18322 context);
18323
18324 case COMPONENT_REF:
18325 case BIT_FIELD_REF:
18326 case ARRAY_REF:
18327 case ARRAY_RANGE_REF:
18328 case REALPART_EXPR:
18329 case IMAGPART_EXPR:
18330 {
18331 tree obj, offset;
18332 poly_int64 bitsize, bitpos, bytepos;
18333 machine_mode mode;
18334 int unsignedp, reversep, volatilep = 0;
18335
18336 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18337 &unsignedp, &reversep, &volatilep);
18338
18339 gcc_assert (obj != loc);
18340
18341 list_ret = loc_list_from_tree_1 (obj,
18342 want_address == 2
18343 && known_eq (bitpos, 0)
18344 && !offset ? 2 : 1,
18345 context);
18346 /* TODO: We can extract the value of the small expression via shifting even
18347 for nonzero bitpos. */
18348 if (list_ret == 0)
18349 return 0;
18350 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18351 || !multiple_p (bitsize, BITS_PER_UNIT))
18352 {
18353 expansion_failed (loc, NULL_RTX,
18354 "bitfield access");
18355 return 0;
18356 }
18357
18358 if (offset != NULL_TREE)
18359 {
18360 /* Variable offset. */
18361 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18362 if (list_ret1 == 0)
18363 return 0;
18364 add_loc_list (&list_ret, list_ret1);
18365 if (!list_ret)
18366 return 0;
18367 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18368 }
18369
18370 HOST_WIDE_INT value;
18371 if (bytepos.is_constant (&value) && value > 0)
18372 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18373 value, 0));
18374 else if (maybe_ne (bytepos, 0))
18375 loc_list_plus_const (list_ret, bytepos);
18376
18377 have_address = 1;
18378 break;
18379 }
18380
18381 case INTEGER_CST:
18382 if ((want_address || !tree_fits_shwi_p (loc))
18383 && (ret = cst_pool_loc_descr (loc)))
18384 have_address = 1;
18385 else if (want_address == 2
18386 && tree_fits_shwi_p (loc)
18387 && (ret = address_of_int_loc_descriptor
18388 (int_size_in_bytes (TREE_TYPE (loc)),
18389 tree_to_shwi (loc))))
18390 have_address = 1;
18391 else if (tree_fits_shwi_p (loc))
18392 ret = int_loc_descriptor (tree_to_shwi (loc));
18393 else if (tree_fits_uhwi_p (loc))
18394 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18395 else
18396 {
18397 expansion_failed (loc, NULL_RTX,
18398 "Integer operand is not host integer");
18399 return 0;
18400 }
18401 break;
18402
18403 case CONSTRUCTOR:
18404 case REAL_CST:
18405 case STRING_CST:
18406 case COMPLEX_CST:
18407 if ((ret = cst_pool_loc_descr (loc)))
18408 have_address = 1;
18409 else if (TREE_CODE (loc) == CONSTRUCTOR)
18410 {
18411 tree type = TREE_TYPE (loc);
18412 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18413 unsigned HOST_WIDE_INT offset = 0;
18414 unsigned HOST_WIDE_INT cnt;
18415 constructor_elt *ce;
18416
18417 if (TREE_CODE (type) == RECORD_TYPE)
18418 {
18419 /* This is very limited, but it's enough to output
18420 pointers to member functions, as long as the
18421 referenced function is defined in the current
18422 translation unit. */
18423 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18424 {
18425 tree val = ce->value;
18426
18427 tree field = ce->index;
18428
18429 if (val)
18430 STRIP_NOPS (val);
18431
18432 if (!field || DECL_BIT_FIELD (field))
18433 {
18434 expansion_failed (loc, NULL_RTX,
18435 "bitfield in record type constructor");
18436 size = offset = (unsigned HOST_WIDE_INT)-1;
18437 ret = NULL;
18438 break;
18439 }
18440
18441 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18442 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18443 gcc_assert (pos + fieldsize <= size);
18444 if (pos < offset)
18445 {
18446 expansion_failed (loc, NULL_RTX,
18447 "out-of-order fields in record constructor");
18448 size = offset = (unsigned HOST_WIDE_INT)-1;
18449 ret = NULL;
18450 break;
18451 }
18452 if (pos > offset)
18453 {
18454 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18455 add_loc_descr (&ret, ret1);
18456 offset = pos;
18457 }
18458 if (val && fieldsize != 0)
18459 {
18460 ret1 = loc_descriptor_from_tree (val, want_address, context);
18461 if (!ret1)
18462 {
18463 expansion_failed (loc, NULL_RTX,
18464 "unsupported expression in field");
18465 size = offset = (unsigned HOST_WIDE_INT)-1;
18466 ret = NULL;
18467 break;
18468 }
18469 add_loc_descr (&ret, ret1);
18470 }
18471 if (fieldsize)
18472 {
18473 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18474 add_loc_descr (&ret, ret1);
18475 offset = pos + fieldsize;
18476 }
18477 }
18478
18479 if (offset != size)
18480 {
18481 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18482 add_loc_descr (&ret, ret1);
18483 offset = size;
18484 }
18485
18486 have_address = !!want_address;
18487 }
18488 else
18489 expansion_failed (loc, NULL_RTX,
18490 "constructor of non-record type");
18491 }
18492 else
18493 /* We can construct small constants here using int_loc_descriptor. */
18494 expansion_failed (loc, NULL_RTX,
18495 "constructor or constant not in constant pool");
18496 break;
18497
18498 case TRUTH_AND_EXPR:
18499 case TRUTH_ANDIF_EXPR:
18500 case BIT_AND_EXPR:
18501 op = DW_OP_and;
18502 goto do_binop;
18503
18504 case TRUTH_XOR_EXPR:
18505 case BIT_XOR_EXPR:
18506 op = DW_OP_xor;
18507 goto do_binop;
18508
18509 case TRUTH_OR_EXPR:
18510 case TRUTH_ORIF_EXPR:
18511 case BIT_IOR_EXPR:
18512 op = DW_OP_or;
18513 goto do_binop;
18514
18515 case FLOOR_DIV_EXPR:
18516 case CEIL_DIV_EXPR:
18517 case ROUND_DIV_EXPR:
18518 case TRUNC_DIV_EXPR:
18519 case EXACT_DIV_EXPR:
18520 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18521 return 0;
18522 op = DW_OP_div;
18523 goto do_binop;
18524
18525 case MINUS_EXPR:
18526 op = DW_OP_minus;
18527 goto do_binop;
18528
18529 case FLOOR_MOD_EXPR:
18530 case CEIL_MOD_EXPR:
18531 case ROUND_MOD_EXPR:
18532 case TRUNC_MOD_EXPR:
18533 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18534 {
18535 op = DW_OP_mod;
18536 goto do_binop;
18537 }
18538 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18539 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18540 if (list_ret == 0 || list_ret1 == 0)
18541 return 0;
18542
18543 add_loc_list (&list_ret, list_ret1);
18544 if (list_ret == 0)
18545 return 0;
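/* DW_OP_mod is used only for unsigned operands above; for signed operands,
   compute a - (a / b) * b instead, relying on DW_OP_div being a signed
   division. The two DW_OP_over operations below keep copies of both
   operands around for the final subtraction.  */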
18546 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18547 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18548 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18549 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18550 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18551 break;
18552
18553 case MULT_EXPR:
18554 op = DW_OP_mul;
18555 goto do_binop;
18556
18557 case LSHIFT_EXPR:
18558 op = DW_OP_shl;
18559 goto do_binop;
18560
18561 case RSHIFT_EXPR:
18562 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18563 goto do_binop;
18564
18565 case POINTER_PLUS_EXPR:
18566 case PLUS_EXPR:
18567 do_plus:
18568 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18569 {
18570 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18571 smarter to encode their opposite. The DW_OP_plus_uconst operation
18572 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18573 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18574 bytes, Y being the size of the operation that pushes the opposite
18575 of the addend. So let's choose the smallest representation. */
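/* For example, assuming DWARF2_ADDR_SIZE == 8, an addend of
   0xffffffffffffffff sign-extends to -1: DW_OP_plus_uconst would need a
   10-byte ULEB128 operand (11 bytes in total), whereas the
   DW_OP_lit1; DW_OP_minus pattern takes only 2 bytes.  */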
18576 const tree tree_addend = TREE_OPERAND (loc, 1);
18577 offset_int wi_addend;
18578 HOST_WIDE_INT shwi_addend;
18579 dw_loc_descr_ref loc_naddend;
18580
18581 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18582 if (list_ret == 0)
18583 return 0;
18584
18585 /* Try to get the literal to push. It is the opposite of the addend,
18586 and since we rely on wrapping during DWARF evaluation, first decode
18587 the literal as a "DWARF-sized" signed number. */
18588 wi_addend = wi::to_offset (tree_addend);
18589 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18590 shwi_addend = wi_addend.to_shwi ();
18591 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18592 ? int_loc_descriptor (-shwi_addend)
18593 : NULL;
18594
18595 if (loc_naddend != NULL
18596 && ((unsigned) size_of_uleb128 (shwi_addend)
18597 > size_of_loc_descr (loc_naddend)))
18598 {
18599 add_loc_descr_to_each (list_ret, loc_naddend);
18600 add_loc_descr_to_each (list_ret,
18601 new_loc_descr (DW_OP_minus, 0, 0));
18602 }
18603 else
18604 {
18605 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18606 {
18607 loc_naddend = loc_cur;
18608 loc_cur = loc_cur->dw_loc_next;
18609 ggc_free (loc_naddend);
18610 }
18611 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18612 }
18613 break;
18614 }
18615
18616 op = DW_OP_plus;
18617 goto do_binop;
18618
18619 case LE_EXPR:
18620 op = DW_OP_le;
18621 goto do_comp_binop;
18622
18623 case GE_EXPR:
18624 op = DW_OP_ge;
18625 goto do_comp_binop;
18626
18627 case LT_EXPR:
18628 op = DW_OP_lt;
18629 goto do_comp_binop;
18630
18631 case GT_EXPR:
18632 op = DW_OP_gt;
18633 goto do_comp_binop;
18634
18635 do_comp_binop:
18636 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18637 {
18638 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18639 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18640 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18641 TREE_CODE (loc));
18642 break;
18643 }
18644 else
18645 goto do_binop;
18646
18647 case EQ_EXPR:
18648 op = DW_OP_eq;
18649 goto do_binop;
18650
18651 case NE_EXPR:
18652 op = DW_OP_ne;
18653 goto do_binop;
18654
18655 do_binop:
18656 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18657 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18658 if (list_ret == 0 || list_ret1 == 0)
18659 return 0;
18660
18661 add_loc_list (&list_ret, list_ret1);
18662 if (list_ret == 0)
18663 return 0;
18664 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18665 break;
18666
18667 case TRUTH_NOT_EXPR:
18668 case BIT_NOT_EXPR:
18669 op = DW_OP_not;
18670 goto do_unop;
18671
18672 case ABS_EXPR:
18673 op = DW_OP_abs;
18674 goto do_unop;
18675
18676 case NEGATE_EXPR:
18677 op = DW_OP_neg;
18678 goto do_unop;
18679
18680 do_unop:
18681 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18682 if (list_ret == 0)
18683 return 0;
18684
18685 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18686 break;
18687
18688 case MIN_EXPR:
18689 case MAX_EXPR:
18690 {
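/* MIN (a, b) is rewritten as the equivalent a > b ? b : a, and MAX (a, b)
   as a < b ? b : a, then handled by the COND_EXPR case below.  */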
18691 const enum tree_code code =
18692 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18693
18694 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18695 build2 (code, integer_type_node,
18696 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18697 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18698 }
18699
18700 /* fall through */
18701
18702 case COND_EXPR:
18703 {
18704 dw_loc_descr_ref lhs
18705 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18706 dw_loc_list_ref rhs
18707 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18708 dw_loc_descr_ref bra_node, jump_node, tmp;
18709
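/* The generated expression has roughly this shape:

     <condition> DW_OP_bra -> L1  <else-value>  DW_OP_skip -> L2
     L1: <then-value>
     L2: DW_OP_nop

   DW_OP_bra branches to the "then" value when the condition is non-zero;
   otherwise the "else" value is computed and the "then" part is skipped.  */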
18710 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18711 if (list_ret == 0 || lhs == 0 || rhs == 0)
18712 return 0;
18713
18714 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18715 add_loc_descr_to_each (list_ret, bra_node);
18716
18717 add_loc_list (&list_ret, rhs);
18718 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18719 add_loc_descr_to_each (list_ret, jump_node);
18720
18721 add_loc_descr_to_each (list_ret, lhs);
18722 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18723 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18724
18725 /* ??? Need a node to point the skip at. Use a nop. */
18726 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18727 add_loc_descr_to_each (list_ret, tmp);
18728 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18729 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18730 }
18731 break;
18732
18733 case FIX_TRUNC_EXPR:
18734 return 0;
18735
18736 default:
18737 /* Leave front-end specific codes as simply unknown. This comes
18738 up, for instance, with the C STMT_EXPR. */
18739 if ((unsigned int) TREE_CODE (loc)
18740 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18741 {
18742 expansion_failed (loc, NULL_RTX,
18743 "language specific tree node");
18744 return 0;
18745 }
18746
18747 /* Otherwise this is a generic code; we should just list all of
18748 these explicitly. We forgot one. */
18749 if (flag_checking)
18750 gcc_unreachable ();
18751
18752 /* In a release build, we want to degrade gracefully: better to
18753 generate incomplete debugging information than to crash. */
18754 return NULL;
18755 }
18756
18757 if (!ret && !list_ret)
18758 return 0;
18759
18760 if (want_address == 2 && !have_address
18761 && (dwarf_version >= 4 || !dwarf_strict))
18762 {
18763 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18764 {
18765 expansion_failed (loc, NULL_RTX,
18766 "DWARF address size mismatch");
18767 return 0;
18768 }
18769 if (ret)
18770 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18771 else
18772 add_loc_descr_to_each (list_ret,
18773 new_loc_descr (DW_OP_stack_value, 0, 0));
18774 have_address = 1;
18775 }
18776 /* Report if we can't fulfill the request for an address. */
18777 if (want_address && !have_address)
18778 {
18779 expansion_failed (loc, NULL_RTX,
18780 "Want address and only have value");
18781 return 0;
18782 }
18783
18784 gcc_assert (!ret || !list_ret);
18785
18786 /* If we've got an address and don't want one, dereference. */
18787 if (!want_address && have_address)
18788 {
18789 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18790
18791 if (size > DWARF2_ADDR_SIZE || size == -1)
18792 {
18793 expansion_failed (loc, NULL_RTX,
18794 "DWARF address size mismatch");
18795 return 0;
18796 }
18797 else if (size == DWARF2_ADDR_SIZE)
18798 op = DW_OP_deref;
18799 else
18800 op = DW_OP_deref_size;
18801
18802 if (ret)
18803 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18804 else
18805 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18806 }
18807 if (ret)
18808 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18809
18810 return list_ret;
18811 }
18812
18813 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18814 expressions. */
18815
18816 static dw_loc_list_ref
18817 loc_list_from_tree (tree loc, int want_address,
18818 struct loc_descr_context *context)
18819 {
18820 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18821
18822 for (dw_loc_list_ref loc_cur = result;
18823 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18824 loc_descr_without_nops (loc_cur->expr);
18825 return result;
18826 }
18827
18828 /* Same as above, but return only a single location expression. */
18829 static dw_loc_descr_ref
18830 loc_descriptor_from_tree (tree loc, int want_address,
18831 struct loc_descr_context *context)
18832 {
18833 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18834 if (!ret)
18835 return NULL;
18836 if (ret->dw_loc_next)
18837 {
18838 expansion_failed (loc, NULL_RTX,
18839 "Location list where only loc descriptor needed");
18840 return NULL;
18841 }
18842 return ret->expr;
18843 }
18844
18845 /* Given a value, round it up to the lowest multiple of `boundary'
18846 which is not less than the value itself. */
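/* For instance, ceiling (10, 8) == 16 and ceiling (16, 8) == 16.  */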
18847
18848 static inline HOST_WIDE_INT
18849 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18850 {
18851 return (((value + boundary - 1) / boundary) * boundary);
18852 }
18853
18854 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18855 pointer to the declared type for the relevant field variable, or return
18856 `integer_type_node' if the given node turns out to be an
18857 ERROR_MARK node. */
18858
18859 static inline tree
18860 field_type (const_tree decl)
18861 {
18862 tree type;
18863
18864 if (TREE_CODE (decl) == ERROR_MARK)
18865 return integer_type_node;
18866
18867 type = DECL_BIT_FIELD_TYPE (decl);
18868 if (type == NULL_TREE)
18869 type = TREE_TYPE (decl);
18870
18871 return type;
18872 }
18873
18874 /* Given a pointer to a tree node, return the alignment in bits for
18875 it, or else return BITS_PER_WORD if the node actually turns out to
18876 be an ERROR_MARK node. */
18877
18878 static inline unsigned
18879 simple_type_align_in_bits (const_tree type)
18880 {
18881 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18882 }
18883
18884 static inline unsigned
18885 simple_decl_align_in_bits (const_tree decl)
18886 {
18887 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18888 }
18889
18890 /* Return the result of rounding T up to ALIGN. */
18891
18892 static inline offset_int
18893 round_up_to_align (const offset_int &t, unsigned int align)
18894 {
18895 return wi::udiv_trunc (t + align - 1, align) * align;
18896 }
18897
18898 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18899 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18900 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18901 if we fail to return the size in one of these two forms. */
18902
18903 static dw_loc_descr_ref
18904 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18905 {
18906 tree tree_size;
18907 struct loc_descr_context ctx;
18908
18909 /* Prefer returning a constant integer, if possible. */
18910 *cst_size = int_size_in_bytes (type);
18911 if (*cst_size != -1)
18912 return NULL;
18913
18914 ctx.context_type = const_cast<tree> (type);
18915 ctx.base_decl = NULL_TREE;
18916 ctx.dpi = NULL;
18917 ctx.placeholder_arg = false;
18918 ctx.placeholder_seen = false;
18919
18920 type = TYPE_MAIN_VARIANT (type);
18921 tree_size = TYPE_SIZE_UNIT (type);
18922 return ((tree_size != NULL_TREE)
18923 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18924 : NULL);
18925 }
18926
18927 /* Helper structure for RECORD_TYPE processing. */
18928 struct vlr_context
18929 {
18930 /* Root RECORD_TYPE. It is needed to generate data member location
18931 descriptions in variable-length records (VLR), but also to cope with
18932 variants, which are composed of nested structures multiplexed with
18933 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18934 function processing a FIELD_DECL, it is required to be non null. */
18935 tree struct_type;
18936 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18937 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18938 this variant part as part of the root record (in storage units). For
18939 regular records, it must be NULL_TREE. */
18940 tree variant_part_offset;
18941 };
18942
18943 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18944 addressed byte of the "containing object" for the given FIELD_DECL. If
18945 possible, return a native constant through CST_OFFSET (in which case NULL is
18946 returned); otherwise return a DWARF expression that computes the offset.
18947
18948 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18949 that offset is, either because the argument turns out to be a pointer to an
18950 ERROR_MARK node, or because the offset expression is too complex for us.
18951
18952 CTX is required: see the comment for VLR_CONTEXT. */
18953
18954 static dw_loc_descr_ref
18955 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18956 HOST_WIDE_INT *cst_offset)
18957 {
18958 tree tree_result;
18959 dw_loc_list_ref loc_result;
18960
18961 *cst_offset = 0;
18962
18963 if (TREE_CODE (decl) == ERROR_MARK)
18964 return NULL;
18965 else
18966 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18967
18968 /* We cannot handle variable bit offsets at the moment, so abort if it's the
18969 case. */
18970 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18971 return NULL;
18972
18973 #ifdef PCC_BITFIELD_TYPE_MATTERS
18974 /* We used to handle only constant offsets in all cases. Now, we properly
18975 handle dynamic byte offsets only when PCC bitfield type layout doesn't
18976 matter. */
18977 if (PCC_BITFIELD_TYPE_MATTERS
18978 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18979 {
18980 offset_int object_offset_in_bits;
18981 offset_int object_offset_in_bytes;
18982 offset_int bitpos_int;
18983 tree type;
18984 tree field_size_tree;
18985 offset_int deepest_bitpos;
18986 offset_int field_size_in_bits;
18987 unsigned int type_align_in_bits;
18988 unsigned int decl_align_in_bits;
18989 offset_int type_size_in_bits;
18990
18991 bitpos_int = wi::to_offset (bit_position (decl));
18992 type = field_type (decl);
18993 type_size_in_bits = offset_int_type_size_in_bits (type);
18994 type_align_in_bits = simple_type_align_in_bits (type);
18995
18996 field_size_tree = DECL_SIZE (decl);
18997
18998 /* The size could be unspecified if there was an error, or for
18999 a flexible array member. */
19000 if (!field_size_tree)
19001 field_size_tree = bitsize_zero_node;
19002
19003 /* If the size of the field is not constant, use the type size. */
19004 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19005 field_size_in_bits = wi::to_offset (field_size_tree);
19006 else
19007 field_size_in_bits = type_size_in_bits;
19008
19009 decl_align_in_bits = simple_decl_align_in_bits (decl);
19010
19011 /* The GCC front-end doesn't make any attempt to keep track of the
19012 starting bit offset (relative to the start of the containing
19013 structure type) of the hypothetical "containing object" for a
19014 bit-field. Thus, when computing the byte offset value for the
19015 start of the "containing object" of a bit-field, we must deduce
19016 this information on our own. This can be rather tricky to do in
19017 some cases. For example, handling the following structure type
19018 definition when compiling for an i386/i486 target (which only
19019 aligns long long's to 32-bit boundaries) can be very tricky:
19020
19021 struct S { int field1; long long field2:31; };
19022
19023 Fortunately, there is a simple rule-of-thumb which can be used
19024 in such cases. When compiling for an i386/i486, GCC will
19025 allocate 8 bytes for the structure shown above. It decides to
19026 do this based upon one simple rule for bit-field allocation.
19027 GCC allocates each "containing object" for each bit-field at
19028 the first (i.e. lowest addressed) legitimate alignment boundary
19029 (based upon the required minimum alignment for the declared
19030 type of the field) which it can possibly use, subject to the
19031 condition that there is still enough available space remaining
19032 in the containing object (when allocated at the selected point)
19033 to fully accommodate all of the bits of the bit-field itself.
19034
19035 This simple rule makes it obvious why GCC allocates 8 bytes for
19036 each object of the structure type shown above. When looking
19037 for a place to allocate the "containing object" for `field2',
19038 the compiler simply tries to allocate a 64-bit "containing
19039 object" at each successive 32-bit boundary (starting at zero)
19040 until it finds a place to allocate that 64-bit field such that
19041 at least 31 contiguous (and previously unallocated) bits remain
19042 within that selected 64 bit field. (As it turns out, for the
19043 example above, the compiler finds it is OK to allocate the
19044 "containing object" 64-bit field at bit-offset zero within the
19045 structure type.)
19046
19047 Here we attempt to work backwards from the limited set of facts
19048 we're given, and we try to deduce from those facts, where GCC
19049 must have believed that the containing object started (within
19050 the structure type). The value we deduce is then used (by the
19051 callers of this routine) to generate DW_AT_location and
19052 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19053 the case of DW_AT_location, regular fields as well). */
19054
19055 /* Figure out the bit-distance from the start of the structure to
19056 the "deepest" bit of the bit-field. */
19057 deepest_bitpos = bitpos_int + field_size_in_bits;
19058
19059 /* This is the tricky part. Use some fancy footwork to deduce
19060 where the lowest addressed bit of the containing object must
19061 be. */
19062 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19063
19064 /* Round up to type_align by default. This works best for
19065 bitfields. */
19066 object_offset_in_bits
19067 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19068
19069 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19070 {
19071 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19072
19073 /* Round up to decl_align instead. */
19074 object_offset_in_bits
19075 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19076 }
19077
19078 object_offset_in_bytes
19079 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19080 if (ctx->variant_part_offset == NULL_TREE)
19081 {
19082 *cst_offset = object_offset_in_bytes.to_shwi ();
19083 return NULL;
19084 }
19085 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19086 }
19087 else
19088 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19089 tree_result = byte_position (decl);
19090
19091 if (ctx->variant_part_offset != NULL_TREE)
19092 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19093 ctx->variant_part_offset, tree_result);
19094
19095 /* If the byte offset is a constant, it's simpler to handle a native
19096 constant rather than a DWARF expression. */
19097 if (TREE_CODE (tree_result) == INTEGER_CST)
19098 {
19099 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19100 return NULL;
19101 }
19102 struct loc_descr_context loc_ctx = {
19103 ctx->struct_type, /* context_type */
19104 NULL_TREE, /* base_decl */
19105 NULL, /* dpi */
19106 false, /* placeholder_arg */
19107 false /* placeholder_seen */
19108 };
19109 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19110
19111 /* We want a single DWARF expression: bail out if we only have a location
19112 list with multiple elements. */
19113 if (!loc_result || !single_element_loc_list_p (loc_result))
19114 return NULL;
19115 else
19116 return loc_result->expr;
19117 }
19118 \f
19119 /* The following routines define various Dwarf attributes and any data
19120 associated with them. */
19121
19122 /* Add a location description attribute value to a DIE.
19123
19124 This emits location attributes suitable for whole variables and
19125 whole parameters. Note that the location attributes for struct fields are
19126 generated by the routine `data_member_location_attribute' below. */
19127
19128 static inline void
19129 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19130 dw_loc_list_ref descr)
19131 {
19132 bool check_no_locviews = true;
19133 if (descr == 0)
19134 return;
19135 if (single_element_loc_list_p (descr))
19136 add_AT_loc (die, attr_kind, descr->expr);
19137 else
19138 {
19139 add_AT_loc_list (die, attr_kind, descr);
19140 gcc_assert (descr->ll_symbol);
19141 if (attr_kind == DW_AT_location && descr->vl_symbol
19142 && dwarf2out_locviews_in_attribute ())
19143 {
19144 add_AT_view_list (die, DW_AT_GNU_locviews);
19145 check_no_locviews = false;
19146 }
19147 }
19148
19149 if (check_no_locviews)
19150 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19151 }
19152
19153 /* Add DW_AT_accessibility attribute to DIE if needed. */
19154
19155 static void
19156 add_accessibility_attribute (dw_die_ref die, tree decl)
19157 {
19158 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19159 children, otherwise the default is DW_ACCESS_public. In DWARF2
19160 the default has always been DW_ACCESS_public. */
19161 if (TREE_PROTECTED (decl))
19162 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19163 else if (TREE_PRIVATE (decl))
19164 {
19165 if (dwarf_version == 2
19166 || die->die_parent == NULL
19167 || die->die_parent->die_tag != DW_TAG_class_type)
19168 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19169 }
19170 else if (dwarf_version > 2
19171 && die->die_parent
19172 && die->die_parent->die_tag == DW_TAG_class_type)
19173 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19174 }
19175
19176 /* Attach the specialized form of location attribute used for data members of
19177 struct and union types. In the special case of a FIELD_DECL node which
19178 represents a bit-field, the "offset" part of this special location
19179 descriptor must indicate the distance in bytes from the lowest-addressed
19180 byte of the containing struct or union type to the lowest-addressed byte of
19181 the "containing object" for the bit-field. (See the `field_byte_offset'
19182 function above).
19183
19184 For any given bit-field, the "containing object" is a hypothetical object
19185 (of some integral or enum type) within which the given bit-field lives. The
19186 type of this hypothetical "containing object" is always the same as the
19187 declared type of the individual bit-field itself (for GCC anyway... the
19188 DWARF spec doesn't actually mandate this). Note that it is the size (in
19189 bytes) of the hypothetical "containing object" which will be given in the
19190 DW_AT_byte_size attribute for this bit-field. (See the
19191    `add_byte_size_attribute' function below.) It is also used when calculating the
19192    value of the DW_AT_bit_offset attribute. (See the `add_bit_offset_attribute'
19193 function below.)
19194
19195 CTX is required: see the comment for VLR_CONTEXT. */
19196
19197 static void
19198 add_data_member_location_attribute (dw_die_ref die,
19199 tree decl,
19200 struct vlr_context *ctx)
19201 {
19202 HOST_WIDE_INT offset;
19203 dw_loc_descr_ref loc_descr = 0;
19204
19205 if (TREE_CODE (decl) == TREE_BINFO)
19206 {
19207 /* We're working on the TAG_inheritance for a base class. */
19208 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19209 {
19210 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19211 aren't at a fixed offset from all (sub)objects of the same
19212 type. We need to extract the appropriate offset from our
19213 vtable. The following dwarf expression means
19214
19215 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19216
19217 This is specific to the V3 ABI, of course. */
19218
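	  /* As an illustration (with a hypothetical vtable offset slot of -24),
	     the code below builds the operator sequence
	       DW_OP_dup; DW_OP_deref; DW_OP_lit24; DW_OP_minus;
	       DW_OP_deref; DW_OP_plus
	     which leaves the base-class address on the DWARF stack.  */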
19219 dw_loc_descr_ref tmp;
19220
19221 /* Make a copy of the object address. */
19222 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19223 add_loc_descr (&loc_descr, tmp);
19224
19225 /* Extract the vtable address. */
19226 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19227 add_loc_descr (&loc_descr, tmp);
19228
19229 /* Calculate the address of the offset. */
19230 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19231 gcc_assert (offset < 0);
19232
19233 tmp = int_loc_descriptor (-offset);
19234 add_loc_descr (&loc_descr, tmp);
19235 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19236 add_loc_descr (&loc_descr, tmp);
19237
19238 /* Extract the offset. */
19239 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19240 add_loc_descr (&loc_descr, tmp);
19241
19242 /* Add it to the object address. */
19243 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19244 add_loc_descr (&loc_descr, tmp);
19245 }
19246 else
19247 offset = tree_to_shwi (BINFO_OFFSET (decl));
19248 }
19249 else
19250 {
19251 loc_descr = field_byte_offset (decl, ctx, &offset);
19252
19253 /* If loc_descr is available then we know the field offset is dynamic.
19254 However, GDB does not handle dynamic field offsets very well at the
19255 moment. */
19256 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19257 {
19258 loc_descr = NULL;
19259 offset = 0;
19260 }
19261
19262       /* Data member location evaluation starts with the base address on the
19263 stack. Compute the field offset and add it to this base address. */
19264 else if (loc_descr != NULL)
19265 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19266 }
19267
19268 if (! loc_descr)
19269 {
19270       /* Although DW_AT_data_bit_offset was already added in DWARF4,
19271 	 GDB, for example, only gained support for it in November 2016. For DWARF5
19272 	 we need newer debug info consumers anyway. We might change this
19273 	 to dwarf_version >= 4 once most consumers have caught up. */
19274 if (dwarf_version >= 5
19275 && TREE_CODE (decl) == FIELD_DECL
19276 && DECL_BIT_FIELD_TYPE (decl))
19277 {
19278 tree off = bit_position (decl);
19279 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19280 {
19281 remove_AT (die, DW_AT_byte_size);
19282 remove_AT (die, DW_AT_bit_offset);
19283 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19284 return;
19285 }
19286 }
19287 if (dwarf_version > 2)
19288 {
19289 /* Don't need to output a location expression, just the constant. */
19290 if (offset < 0)
19291 add_AT_int (die, DW_AT_data_member_location, offset);
19292 else
19293 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19294 return;
19295 }
19296 else
19297 {
19298 enum dwarf_location_atom op;
19299
19300 /* The DWARF2 standard says that we should assume that the structure
19301 address is already on the stack, so we can specify a structure
19302 field address by using DW_OP_plus_uconst. */
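	  /* A member at byte offset 8, for example, thus gets the one-operation
	     location expression "DW_OP_plus_uconst 8".  */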
19303 op = DW_OP_plus_uconst;
19304 loc_descr = new_loc_descr (op, offset, 0);
19305 }
19306 }
19307
19308 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19309 }
19310
19311 /* Writes integer values to dw_vec_const array. */
19312
19313 static void
19314 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19315 {
19316 while (size != 0)
19317 {
19318 *dest++ = val & 0xff;
19319 val >>= 8;
19320 --size;
19321 }
19322 }
19323
19324 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19325
19326 static HOST_WIDE_INT
19327 extract_int (const unsigned char *src, unsigned int size)
19328 {
19329 HOST_WIDE_INT val = 0;
19330
19331 src += size;
19332 while (size != 0)
19333 {
19334 val <<= 8;
19335 val |= *--src & 0xff;
19336 --size;
19337 }
19338 return val;
19339 }
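
/* A quick illustration of the two routines above: insert_int (0x1234, 2, buf)
   stores buf[0] = 0x34 and buf[1] = 0x12 (least significant byte first,
   independently of host endianness), and extract_int (buf, 2) recovers
   0x1234 from that encoding.  */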
19340
19341 /* Writes wide_int values to dw_vec_const array. */
19342
19343 static void
19344 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19345 {
19346 int i;
19347
19348 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19349 {
19350 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19351 return;
19352 }
19353
19354 /* We'd have to extend this code to support odd sizes. */
19355 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19356
19357 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19358
19359 if (WORDS_BIG_ENDIAN)
19360 for (i = n - 1; i >= 0; i--)
19361 {
19362 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19363 dest += sizeof (HOST_WIDE_INT);
19364 }
19365 else
19366 for (i = 0; i < n; i++)
19367 {
19368 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19369 dest += sizeof (HOST_WIDE_INT);
19370 }
19371 }
19372
19373 /* Writes floating point values to dw_vec_const array. */
19374
19375 static void
19376 insert_float (const_rtx rtl, unsigned char *array)
19377 {
19378 long val[4];
19379 int i;
19380 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19381
19382 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19383
19384 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19385 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19386 {
19387 insert_int (val[i], 4, array);
19388 array += 4;
19389 }
19390 }
19391
19392 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19393 does not have a "location" either in memory or in a register. These
19394 things can arise in GNU C when a constant is passed as an actual parameter
19395 to an inlined function. They can also arise in C++ where declared
19396 constants do not necessarily get memory "homes". */
19397
19398 static bool
19399 add_const_value_attribute (dw_die_ref die, rtx rtl)
19400 {
19401 switch (GET_CODE (rtl))
19402 {
19403 case CONST_INT:
19404 {
19405 HOST_WIDE_INT val = INTVAL (rtl);
19406
19407 if (val < 0)
19408 add_AT_int (die, DW_AT_const_value, val);
19409 else
19410 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19411 }
19412 return true;
19413
19414 case CONST_WIDE_INT:
19415 {
19416 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19417 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19418 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19419 wide_int w = wi::zext (w1, prec);
19420 add_AT_wide (die, DW_AT_const_value, w);
19421 }
19422 return true;
19423
19424 case CONST_DOUBLE:
19425 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19426 floating-point constant. A CONST_DOUBLE is used whenever the
19427 constant requires more than one word in order to be adequately
19428 represented. */
19429 if (TARGET_SUPPORTS_WIDE_INT == 0
19430 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19431 add_AT_double (die, DW_AT_const_value,
19432 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19433 else
19434 {
19435 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19436 unsigned int length = GET_MODE_SIZE (mode);
19437 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19438
19439 insert_float (rtl, array);
19440 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19441 }
19442 return true;
19443
19444 case CONST_VECTOR:
19445 {
19446 unsigned int length;
19447 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19448 return false;
19449
19450 machine_mode mode = GET_MODE (rtl);
19451 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19452 unsigned char *array
19453 = ggc_vec_alloc<unsigned char> (length * elt_size);
19454 unsigned int i;
19455 unsigned char *p;
19456 machine_mode imode = GET_MODE_INNER (mode);
19457
19458 switch (GET_MODE_CLASS (mode))
19459 {
19460 case MODE_VECTOR_INT:
19461 for (i = 0, p = array; i < length; i++, p += elt_size)
19462 {
19463 rtx elt = CONST_VECTOR_ELT (rtl, i);
19464 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19465 }
19466 break;
19467
19468 case MODE_VECTOR_FLOAT:
19469 for (i = 0, p = array; i < length; i++, p += elt_size)
19470 {
19471 rtx elt = CONST_VECTOR_ELT (rtl, i);
19472 insert_float (elt, p);
19473 }
19474 break;
19475
19476 default:
19477 gcc_unreachable ();
19478 }
19479
19480 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19481 }
19482 return true;
19483
19484 case CONST_STRING:
19485 if (dwarf_version >= 4 || !dwarf_strict)
19486 {
19487 dw_loc_descr_ref loc_result;
19488 resolve_one_addr (&rtl);
19489 rtl_addr:
19490 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19491 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19492 add_AT_loc (die, DW_AT_location, loc_result);
19493 vec_safe_push (used_rtx_array, rtl);
19494 return true;
19495 }
19496 return false;
19497
19498 case CONST:
19499 if (CONSTANT_P (XEXP (rtl, 0)))
19500 return add_const_value_attribute (die, XEXP (rtl, 0));
19501 /* FALLTHROUGH */
19502 case SYMBOL_REF:
19503 if (!const_ok_for_output (rtl))
19504 return false;
19505 /* FALLTHROUGH */
19506 case LABEL_REF:
19507 if (dwarf_version >= 4 || !dwarf_strict)
19508 goto rtl_addr;
19509 return false;
19510
19511 case PLUS:
19512 /* In cases where an inlined instance of an inline function is passed
19513 the address of an `auto' variable (which is local to the caller) we
19514 can get a situation where the DECL_RTL of the artificial local
19515 variable (for the inlining) which acts as a stand-in for the
19516 corresponding formal parameter (of the inline function) will look
19517 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19518 exactly a compile-time constant expression, but it isn't the address
19519 of the (artificial) local variable either. Rather, it represents the
19520 *value* which the artificial local variable always has during its
19521 lifetime. We currently have no way to represent such quasi-constant
19522 values in Dwarf, so for now we just punt and generate nothing. */
19523 return false;
19524
19525 case HIGH:
19526 case CONST_FIXED:
19527 return false;
19528
19529 case MEM:
19530 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19531 && MEM_READONLY_P (rtl)
19532 && GET_MODE (rtl) == BLKmode)
19533 {
19534 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19535 return true;
19536 }
19537 return false;
19538
19539 default:
19540 /* No other kinds of rtx should be possible here. */
19541 gcc_unreachable ();
19542 }
19543 return false;
19544 }
19545
19546 /* Determine whether the evaluation of EXPR references any variables
19547 or functions which aren't otherwise used (and therefore may not be
19548 output). */
19549 static tree
19550 reference_to_unused (tree * tp, int * walk_subtrees,
19551 void * data ATTRIBUTE_UNUSED)
19552 {
19553 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19554 *walk_subtrees = 0;
19555
19556 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19557 && ! TREE_ASM_WRITTEN (*tp))
19558 return *tp;
19559 /* ??? The C++ FE emits debug information for using decls, so
19560 putting gcc_unreachable here falls over. See PR31899. For now
19561 be conservative. */
19562 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19563 return *tp;
19564 else if (VAR_P (*tp))
19565 {
19566 varpool_node *node = varpool_node::get (*tp);
19567 if (!node || !node->definition)
19568 return *tp;
19569 }
19570 else if (TREE_CODE (*tp) == FUNCTION_DECL
19571 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19572 {
19573 /* The call graph machinery must have finished analyzing,
19574 optimizing and gimplifying the CU by now.
19575 So if *TP has no call graph node associated
19576 to it, it means *TP will not be emitted. */
19577 if (!cgraph_node::get (*tp))
19578 return *tp;
19579 }
19580 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19581 return *tp;
19582
19583 return NULL_TREE;
19584 }
19585
19586 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19587 for use in a later add_const_value_attribute call. */
19588
19589 static rtx
19590 rtl_for_decl_init (tree init, tree type)
19591 {
19592 rtx rtl = NULL_RTX;
19593
19594 STRIP_NOPS (init);
19595
19596 /* If a variable is initialized with a string constant without embedded
19597 zeros, build CONST_STRING. */
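  /* For example (illustration only), a definition such as
     static const char msg[] = "hi";  satisfies the checks below and yields
     a read-only BLKmode MEM wrapping the CONST_STRING "hi".  */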
19598 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19599 {
19600 tree enttype = TREE_TYPE (type);
19601 tree domain = TYPE_DOMAIN (type);
19602 scalar_int_mode mode;
19603
19604 if (is_int_mode (TYPE_MODE (enttype), &mode)
19605 && GET_MODE_SIZE (mode) == 1
19606 && domain
19607 && TYPE_MAX_VALUE (domain)
19608 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19609 && integer_zerop (TYPE_MIN_VALUE (domain))
19610 && compare_tree_int (TYPE_MAX_VALUE (domain),
19611 TREE_STRING_LENGTH (init) - 1) == 0
19612 && ((size_t) TREE_STRING_LENGTH (init)
19613 == strlen (TREE_STRING_POINTER (init)) + 1))
19614 {
19615 rtl = gen_rtx_CONST_STRING (VOIDmode,
19616 ggc_strdup (TREE_STRING_POINTER (init)));
19617 rtl = gen_rtx_MEM (BLKmode, rtl);
19618 MEM_READONLY_P (rtl) = 1;
19619 }
19620 }
19621 /* Other aggregates, and complex values, could be represented using
19622 CONCAT: FIXME! */
19623 else if (AGGREGATE_TYPE_P (type)
19624 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19625 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19626 || TREE_CODE (type) == COMPLEX_TYPE)
19627 ;
19628 /* Vectors only work if their mode is supported by the target.
19629 FIXME: generic vectors ought to work too. */
19630 else if (TREE_CODE (type) == VECTOR_TYPE
19631 && !VECTOR_MODE_P (TYPE_MODE (type)))
19632 ;
19633 /* If the initializer is something that we know will expand into an
19634 immediate RTL constant, expand it now. We must be careful not to
19635 reference variables which won't be output. */
19636 else if (initializer_constant_valid_p (init, type)
19637 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19638 {
19639 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19640 possible. */
19641 if (TREE_CODE (type) == VECTOR_TYPE)
19642 switch (TREE_CODE (init))
19643 {
19644 case VECTOR_CST:
19645 break;
19646 case CONSTRUCTOR:
19647 if (TREE_CONSTANT (init))
19648 {
19649 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19650 bool constant_p = true;
19651 tree value;
19652 unsigned HOST_WIDE_INT ix;
19653
19654 /* Even when ctor is constant, it might contain non-*_CST
19655 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19656 belong into VECTOR_CST nodes. */
19657 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19658 if (!CONSTANT_CLASS_P (value))
19659 {
19660 constant_p = false;
19661 break;
19662 }
19663
19664 if (constant_p)
19665 {
19666 init = build_vector_from_ctor (type, elts);
19667 break;
19668 }
19669 }
19670 /* FALLTHRU */
19671
19672 default:
19673 return NULL;
19674 }
19675
19676 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19677
19678 /* If expand_expr returns a MEM, it wasn't immediate. */
19679 gcc_assert (!rtl || !MEM_P (rtl));
19680 }
19681
19682 return rtl;
19683 }
19684
19685 /* Generate RTL for the variable DECL to represent its location. */
19686
19687 static rtx
19688 rtl_for_decl_location (tree decl)
19689 {
19690 rtx rtl;
19691
19692 /* Here we have to decide where we are going to say the parameter "lives"
19693 (as far as the debugger is concerned). We only have a couple of
19694 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19695
19696 DECL_RTL normally indicates where the parameter lives during most of the
19697 activation of the function. If optimization is enabled however, this
19698 could be either NULL or else a pseudo-reg. Both of those cases indicate
19699 that the parameter doesn't really live anywhere (as far as the code
19700 generation parts of GCC are concerned) during most of the function's
19701 activation. That will happen (for example) if the parameter is never
19702 referenced within the function.
19703
19704 We could just generate a location descriptor here for all non-NULL
19705 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19706 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19707 where DECL_RTL is NULL or is a pseudo-reg.
19708
19709 Note however that we can only get away with using DECL_INCOMING_RTL as
19710 a backup substitute for DECL_RTL in certain limited cases. In cases
19711 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19712 we can be sure that the parameter was passed using the same type as it is
19713 declared to have within the function, and that its DECL_INCOMING_RTL
19714 points us to a place where a value of that type is passed.
19715
19716 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19717 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19718 because in these cases DECL_INCOMING_RTL points us to a value of some
19719 type which is *different* from the type of the parameter itself. Thus,
19720 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19721 such cases, the debugger would end up (for example) trying to fetch a
19722 `float' from a place which actually contains the first part of a
19723 `double'. That would lead to really incorrect and confusing
19724 output at debug-time.
19725
19726 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19727 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19728 are a couple of exceptions however. On little-endian machines we can
19729 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19730 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19731 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19732 when (on a little-endian machine) a non-prototyped function has a
19733 parameter declared to be of type `short' or `char'. In such cases,
19734 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19735 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19736 passed `int' value. If the debugger then uses that address to fetch
19737 a `short' or a `char' (on a little-endian machine) the result will be
19738 the correct data, so we allow for such exceptional cases below.
19739
19740 Note that our goal here is to describe the place where the given formal
19741 parameter lives during most of the function's activation (i.e. between the
19742 end of the prologue and the start of the epilogue). We'll do that as best
19743 as we can. Note however that if the given formal parameter is modified
19744 sometime during the execution of the function, then a stack backtrace (at
19745 debug-time) will show the function as having been called with the *new*
19746 value rather than the value which was originally passed in. This happens
19747 rarely enough that it is not a major problem, but it *is* a problem, and
19748 I'd like to fix it.
19749
19750 A future version of dwarf2out.c may generate two additional attributes for
19751 any given DW_TAG_formal_parameter DIE which will describe the "passed
19752 type" and the "passed location" for the given formal parameter in addition
19753 to the attributes we now generate to indicate the "declared type" and the
19754 "active location" for each parameter. This additional set of attributes
19755 could be used by debuggers for stack backtraces. Separately, note that
19756 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19757 This happens (for example) for inlined-instances of inline function formal
19758 parameters which are never referenced. This really shouldn't be
19759 happening. All PARM_DECL nodes should get valid non-NULL
19760 DECL_INCOMING_RTL values. FIXME. */
19761
19762 /* Use DECL_RTL as the "location" unless we find something better. */
19763 rtl = DECL_RTL_IF_SET (decl);
19764
19765 /* When generating abstract instances, ignore everything except
19766 constants, symbols living in memory, and symbols living in
19767 fixed registers. */
19768 if (! reload_completed)
19769 {
19770 if (rtl
19771 && (CONSTANT_P (rtl)
19772 || (MEM_P (rtl)
19773 && CONSTANT_P (XEXP (rtl, 0)))
19774 || (REG_P (rtl)
19775 && VAR_P (decl)
19776 && TREE_STATIC (decl))))
19777 {
19778 rtl = targetm.delegitimize_address (rtl);
19779 return rtl;
19780 }
19781 rtl = NULL_RTX;
19782 }
19783 else if (TREE_CODE (decl) == PARM_DECL)
19784 {
19785 if (rtl == NULL_RTX
19786 || is_pseudo_reg (rtl)
19787 || (MEM_P (rtl)
19788 && is_pseudo_reg (XEXP (rtl, 0))
19789 && DECL_INCOMING_RTL (decl)
19790 && MEM_P (DECL_INCOMING_RTL (decl))
19791 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19792 {
19793 tree declared_type = TREE_TYPE (decl);
19794 tree passed_type = DECL_ARG_TYPE (decl);
19795 machine_mode dmode = TYPE_MODE (declared_type);
19796 machine_mode pmode = TYPE_MODE (passed_type);
19797
19798 /* This decl represents a formal parameter which was optimized out.
19799 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19800 all cases where (rtl == NULL_RTX) just below. */
19801 if (dmode == pmode)
19802 rtl = DECL_INCOMING_RTL (decl);
19803 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19804 && SCALAR_INT_MODE_P (dmode)
19805 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19806 && DECL_INCOMING_RTL (decl))
19807 {
19808 rtx inc = DECL_INCOMING_RTL (decl);
19809 if (REG_P (inc))
19810 rtl = inc;
19811 else if (MEM_P (inc))
19812 {
19813 if (BYTES_BIG_ENDIAN)
19814 rtl = adjust_address_nv (inc, dmode,
19815 GET_MODE_SIZE (pmode)
19816 - GET_MODE_SIZE (dmode));
19817 else
19818 rtl = inc;
19819 }
19820 }
19821 }
19822
19823 /* If the parm was passed in registers, but lives on the stack, then
19824 make a big endian correction if the mode of the type of the
19825 parameter is not the same as the mode of the rtl. */
19826 /* ??? This is the same series of checks that are made in dbxout.c before
19827 we reach the big endian correction code there. It isn't clear if all
19828 of these checks are necessary here, but keeping them all is the safe
19829 thing to do. */
19830 else if (MEM_P (rtl)
19831 && XEXP (rtl, 0) != const0_rtx
19832 && ! CONSTANT_P (XEXP (rtl, 0))
19833 /* Not passed in memory. */
19834 && !MEM_P (DECL_INCOMING_RTL (decl))
19835 /* Not passed by invisible reference. */
19836 && (!REG_P (XEXP (rtl, 0))
19837 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19838 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19839 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19840 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19841 #endif
19842 )
19843 /* Big endian correction check. */
19844 && BYTES_BIG_ENDIAN
19845 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19846 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19847 UNITS_PER_WORD))
19848 {
19849 machine_mode addr_mode = get_address_mode (rtl);
19850 poly_int64 offset = (UNITS_PER_WORD
19851 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19852
19853 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19854 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19855 }
19856 }
19857 else if (VAR_P (decl)
19858 && rtl
19859 && MEM_P (rtl)
19860 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19861 {
19862 machine_mode addr_mode = get_address_mode (rtl);
19863 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19864 GET_MODE (rtl));
19865
19866 /* If a variable is declared "register" yet is smaller than
19867 a register, then if we store the variable to memory, it
19868 looks like we're storing a register-sized value, when in
19869 fact we are not. We need to adjust the offset of the
19870 storage location to reflect the actual value's bytes,
19871 else gdb will not be able to display it. */
19872 if (maybe_ne (offset, 0))
19873 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19874 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19875 }
19876
19877 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19878 and will have been substituted directly into all expressions that use it.
19879 C does not have such a concept, but C++ and other languages do. */
19880 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19881 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19882
19883 if (rtl)
19884 rtl = targetm.delegitimize_address (rtl);
19885
19886 /* If we don't look past the constant pool, we risk emitting a
19887 reference to a constant pool entry that isn't referenced from
19888 code, and thus is not emitted. */
19889 if (rtl)
19890 rtl = avoid_constant_pool_reference (rtl);
19891
19892 /* Try harder to get a rtl. If this symbol ends up not being emitted
19893 in the current CU, resolve_addr will remove the expression referencing
19894 it. */
19895 if (rtl == NULL_RTX
19896 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19897 && VAR_P (decl)
19898 && !DECL_EXTERNAL (decl)
19899 && TREE_STATIC (decl)
19900 && DECL_NAME (decl)
19901 && !DECL_HARD_REGISTER (decl)
19902 && DECL_MODE (decl) != VOIDmode)
19903 {
19904 rtl = make_decl_rtl_for_debug (decl);
19905 if (!MEM_P (rtl)
19906 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19907 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19908 rtl = NULL_RTX;
19909 }
19910
19911 return rtl;
19912 }
19913
19914 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19915 returned. If so, the decl for the COMMON block is returned, and the
19916 value is the offset into the common block for the symbol. */
19917
19918 static tree
19919 fortran_common (tree decl, HOST_WIDE_INT *value)
19920 {
19921 tree val_expr, cvar;
19922 machine_mode mode;
19923 poly_int64 bitsize, bitpos;
19924 tree offset;
19925 HOST_WIDE_INT cbitpos;
19926 int unsignedp, reversep, volatilep = 0;
19927
19928 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19929 it does not have a value (the offset into the common area), or if it
19930 is thread local (as opposed to global) then it isn't common, and shouldn't
19931 be handled as such. */
19932 if (!VAR_P (decl)
19933 || !TREE_STATIC (decl)
19934 || !DECL_HAS_VALUE_EXPR_P (decl)
19935 || !is_fortran ())
19936 return NULL_TREE;
19937
19938 val_expr = DECL_VALUE_EXPR (decl);
19939 if (TREE_CODE (val_expr) != COMPONENT_REF)
19940 return NULL_TREE;
19941
19942 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19943 &unsignedp, &reversep, &volatilep);
19944
19945 if (cvar == NULL_TREE
19946 || !VAR_P (cvar)
19947 || DECL_ARTIFICIAL (cvar)
19948 || !TREE_PUBLIC (cvar)
19949 /* We don't expect to have to cope with variable offsets,
19950 since at present all static data must have a constant size. */
19951 || !bitpos.is_constant (&cbitpos))
19952 return NULL_TREE;
19953
19954 *value = 0;
19955 if (offset != NULL)
19956 {
19957 if (!tree_fits_shwi_p (offset))
19958 return NULL_TREE;
19959 *value = tree_to_shwi (offset);
19960 }
19961 if (cbitpos != 0)
19962 *value += cbitpos / BITS_PER_UNIT;
19963
19964 return cvar;
19965 }
19966
19967 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19968 data attribute for a variable or a parameter. We generate the
19969 DW_AT_const_value attribute only in those cases where the given variable
19970 or parameter does not have a true "location" either in memory or in a
19971 register. This can happen (for example) when a constant is passed as an
19972 actual argument in a call to an inline function. (It's possible that
19973 these things can crop up in other ways also.) Note that one type of
19974 constant value which can be passed into an inlined function is a constant
19975 pointer. This can happen for example if an actual argument in an inlined
19976 function call evaluates to a compile-time constant address.
19977
19978 CACHE_P is true if it is worth caching the location list for DECL,
19979 so that future calls can reuse it rather than regenerate it from scratch.
19980 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19981 since we will need to refer to them each time the function is inlined. */
19982
19983 static bool
19984 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19985 {
19986 rtx rtl;
19987 dw_loc_list_ref list;
19988 var_loc_list *loc_list;
19989 cached_dw_loc_list *cache;
19990
19991 if (early_dwarf)
19992 return false;
19993
19994 if (TREE_CODE (decl) == ERROR_MARK)
19995 return false;
19996
19997 if (get_AT (die, DW_AT_location)
19998 || get_AT (die, DW_AT_const_value))
19999 return true;
20000
20001 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20002 || TREE_CODE (decl) == RESULT_DECL);
20003
20004 /* Try to get some constant RTL for this decl, and use that as the value of
20005 the location. */
20006
20007 rtl = rtl_for_decl_location (decl);
20008 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20009 && add_const_value_attribute (die, rtl))
20010 return true;
20011
20012   /* See if we have a single-element location list that is equivalent to
20013      a constant value. In that case it is better to use add_const_value_attribute
20014      rather than expanding the constant value equivalent. */
20015 loc_list = lookup_decl_loc (decl);
20016 if (loc_list
20017 && loc_list->first
20018 && loc_list->first->next == NULL
20019 && NOTE_P (loc_list->first->loc)
20020 && NOTE_VAR_LOCATION (loc_list->first->loc)
20021 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20022 {
20023 struct var_loc_node *node;
20024
20025 node = loc_list->first;
20026 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20027 if (GET_CODE (rtl) == EXPR_LIST)
20028 rtl = XEXP (rtl, 0);
20029 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20030 && add_const_value_attribute (die, rtl))
20031 return true;
20032 }
20033 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20034 list several times. See if we've already cached the contents. */
20035 list = NULL;
20036 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20037 cache_p = false;
20038 if (cache_p)
20039 {
20040 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20041 if (cache)
20042 list = cache->loc_list;
20043 }
20044 if (list == NULL)
20045 {
20046 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20047 NULL);
20048 /* It is usually worth caching this result if the decl is from
20049 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20050 if (cache_p && list && list->dw_loc_next)
20051 {
20052 cached_dw_loc_list **slot
20053 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20054 DECL_UID (decl),
20055 INSERT);
20056 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20057 cache->decl_id = DECL_UID (decl);
20058 cache->loc_list = list;
20059 *slot = cache;
20060 }
20061 }
20062 if (list)
20063 {
20064 add_AT_location_description (die, DW_AT_location, list);
20065 return true;
20066 }
20067 /* None of that worked, so it must not really have a location;
20068 try adding a constant value attribute from the DECL_INITIAL. */
20069 return tree_add_const_value_attribute_for_decl (die, decl);
20070 }
20071
20072 /* Helper function for tree_add_const_value_attribute. Natively encode
20073 initializer INIT into an array. Return true if successful. */
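/* As an illustration (hypothetical input): for a "char[4]" whose CONSTRUCTOR
   holds the single RANGE_EXPR element [1 ... 2] = 7, the routine below
   produces the byte array { 0, 7, 7, 0 }.  */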
20074
20075 static bool
20076 native_encode_initializer (tree init, unsigned char *array, int size)
20077 {
20078 tree type;
20079
20080 if (init == NULL_TREE)
20081 return false;
20082
20083 STRIP_NOPS (init);
20084 switch (TREE_CODE (init))
20085 {
20086 case STRING_CST:
20087 type = TREE_TYPE (init);
20088 if (TREE_CODE (type) == ARRAY_TYPE)
20089 {
20090 tree enttype = TREE_TYPE (type);
20091 scalar_int_mode mode;
20092
20093 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20094 || GET_MODE_SIZE (mode) != 1)
20095 return false;
20096 if (int_size_in_bytes (type) != size)
20097 return false;
20098 if (size > TREE_STRING_LENGTH (init))
20099 {
20100 memcpy (array, TREE_STRING_POINTER (init),
20101 TREE_STRING_LENGTH (init));
20102 memset (array + TREE_STRING_LENGTH (init),
20103 '\0', size - TREE_STRING_LENGTH (init));
20104 }
20105 else
20106 memcpy (array, TREE_STRING_POINTER (init), size);
20107 return true;
20108 }
20109 return false;
20110 case CONSTRUCTOR:
20111 type = TREE_TYPE (init);
20112 if (int_size_in_bytes (type) != size)
20113 return false;
20114 if (TREE_CODE (type) == ARRAY_TYPE)
20115 {
20116 HOST_WIDE_INT min_index;
20117 unsigned HOST_WIDE_INT cnt;
20118 int curpos = 0, fieldsize;
20119 constructor_elt *ce;
20120
20121 if (TYPE_DOMAIN (type) == NULL_TREE
20122 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20123 return false;
20124
20125 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20126 if (fieldsize <= 0)
20127 return false;
20128
20129 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20130 memset (array, '\0', size);
20131 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20132 {
20133 tree val = ce->value;
20134 tree index = ce->index;
20135 int pos = curpos;
20136 if (index && TREE_CODE (index) == RANGE_EXPR)
20137 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20138 * fieldsize;
20139 else if (index)
20140 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20141
20142 if (val)
20143 {
20144 STRIP_NOPS (val);
20145 if (!native_encode_initializer (val, array + pos, fieldsize))
20146 return false;
20147 }
20148 curpos = pos + fieldsize;
20149 if (index && TREE_CODE (index) == RANGE_EXPR)
20150 {
20151 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20152 - tree_to_shwi (TREE_OPERAND (index, 0));
20153 while (count-- > 0)
20154 {
20155 if (val)
20156 memcpy (array + curpos, array + pos, fieldsize);
20157 curpos += fieldsize;
20158 }
20159 }
20160 gcc_assert (curpos <= size);
20161 }
20162 return true;
20163 }
20164 else if (TREE_CODE (type) == RECORD_TYPE
20165 || TREE_CODE (type) == UNION_TYPE)
20166 {
20167 tree field = NULL_TREE;
20168 unsigned HOST_WIDE_INT cnt;
20169 constructor_elt *ce;
20170
20171 if (int_size_in_bytes (type) != size)
20172 return false;
20173
20174 if (TREE_CODE (type) == RECORD_TYPE)
20175 field = TYPE_FIELDS (type);
20176
20177 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20178 {
20179 tree val = ce->value;
20180 int pos, fieldsize;
20181
20182 if (ce->index != 0)
20183 field = ce->index;
20184
20185 if (val)
20186 STRIP_NOPS (val);
20187
20188 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20189 return false;
20190
20191 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20192 && TYPE_DOMAIN (TREE_TYPE (field))
20193 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20194 return false;
20195 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20196 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20197 return false;
20198 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20199 pos = int_byte_position (field);
20200 gcc_assert (pos + fieldsize <= size);
20201 if (val && fieldsize != 0
20202 && !native_encode_initializer (val, array + pos, fieldsize))
20203 return false;
20204 }
20205 return true;
20206 }
20207 return false;
20208 case VIEW_CONVERT_EXPR:
20209 case NON_LVALUE_EXPR:
20210 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20211 default:
20212 return native_encode_expr (init, array, size) == size;
20213 }
20214 }
20215
20216 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20217 attribute is the const value T. */
20218
20219 static bool
20220 tree_add_const_value_attribute (dw_die_ref die, tree t)
20221 {
20222 tree init;
20223 tree type = TREE_TYPE (t);
20224 rtx rtl;
20225
20226 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20227 return false;
20228
20229 init = t;
20230 gcc_assert (!DECL_P (init));
20231
20232 if (TREE_CODE (init) == INTEGER_CST)
20233 {
20234 if (tree_fits_uhwi_p (init))
20235 {
20236 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20237 return true;
20238 }
20239 if (tree_fits_shwi_p (init))
20240 {
20241 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20242 return true;
20243 }
20244 }
20245 if (! early_dwarf)
20246 {
20247 rtl = rtl_for_decl_init (init, type);
20248 if (rtl)
20249 return add_const_value_attribute (die, rtl);
20250 }
20251 /* If the host and target are sane, try harder. */
20252 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20253 && initializer_constant_valid_p (init, type))
20254 {
20255 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20256 if (size > 0 && (int) size == size)
20257 {
20258 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20259
20260 if (native_encode_initializer (init, array, size))
20261 {
20262 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20263 return true;
20264 }
20265 ggc_free (array);
20266 }
20267 }
20268 return false;
20269 }
20270
20271 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20272 attribute is the const value of T, where T is an integral constant
20273 variable with static storage duration
20274 (so it can't be a PARM_DECL or a RESULT_DECL). */
20275
20276 static bool
20277 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20278 {
20279
20280 if (!decl
20281 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20282 || (VAR_P (decl) && !TREE_STATIC (decl)))
20283 return false;
20284
20285 if (TREE_READONLY (decl)
20286 && ! TREE_THIS_VOLATILE (decl)
20287 && DECL_INITIAL (decl))
20288 /* OK */;
20289 else
20290 return false;
20291
20292 /* Don't add DW_AT_const_value if abstract origin already has one. */
20293 if (get_AT (var_die, DW_AT_const_value))
20294 return false;
20295
20296 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20297 }
20298
20299 /* Convert the CFI instructions for the current function into a
20300    location list. This is used for DW_AT_frame_base when we are targeting
20301 a dwarf2 consumer that does not support the dwarf3
20302 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20303 expressions. */
20304
20305 static dw_loc_list_ref
20306 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20307 {
20308 int ix;
20309 dw_fde_ref fde;
20310 dw_loc_list_ref list, *list_tail;
20311 dw_cfi_ref cfi;
20312 dw_cfa_location last_cfa, next_cfa;
20313 const char *start_label, *last_label, *section;
20314 dw_cfa_location remember;
20315
20316 fde = cfun->fde;
20317 gcc_assert (fde != NULL);
20318
20319 section = secname_for_decl (current_function_decl);
20320 list_tail = &list;
20321 list = NULL;
20322
20323 memset (&next_cfa, 0, sizeof (next_cfa));
20324 next_cfa.reg = INVALID_REGNUM;
20325 remember = next_cfa;
20326
20327 start_label = fde->dw_fde_begin;
20328
20329 /* ??? Bald assumption that the CIE opcode list does not contain
20330 advance opcodes. */
20331 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20332 lookup_cfa_1 (cfi, &next_cfa, &remember);
20333
20334 last_cfa = next_cfa;
20335 last_label = start_label;
20336
20337 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20338 {
20339 /* If the first partition contained no CFI adjustments, the
20340 CIE opcodes apply to the whole first partition. */
20341 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20342 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20343       list_tail = &(*list_tail)->dw_loc_next;
20344 start_label = last_label = fde->dw_fde_second_begin;
20345 }
20346
20347 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20348 {
20349 switch (cfi->dw_cfi_opc)
20350 {
20351 case DW_CFA_set_loc:
20352 case DW_CFA_advance_loc1:
20353 case DW_CFA_advance_loc2:
20354 case DW_CFA_advance_loc4:
20355 if (!cfa_equal_p (&last_cfa, &next_cfa))
20356 {
20357 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20358 start_label, 0, last_label, 0, section);
20359
20360 list_tail = &(*list_tail)->dw_loc_next;
20361 last_cfa = next_cfa;
20362 start_label = last_label;
20363 }
20364 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20365 break;
20366
20367 case DW_CFA_advance_loc:
20368 /* The encoding is complex enough that we should never emit this. */
20369 gcc_unreachable ();
20370
20371 default:
20372 lookup_cfa_1 (cfi, &next_cfa, &remember);
20373 break;
20374 }
20375 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20376 {
20377 if (!cfa_equal_p (&last_cfa, &next_cfa))
20378 {
20379 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20380 start_label, 0, last_label, 0, section);
20381
20382 list_tail = &(*list_tail)->dw_loc_next;
20383 last_cfa = next_cfa;
20384 start_label = last_label;
20385 }
20386 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20387 start_label, 0, fde->dw_fde_end, 0, section);
20388 list_tail = &(*list_tail)->dw_loc_next;
20389 start_label = last_label = fde->dw_fde_second_begin;
20390 }
20391 }
20392
20393 if (!cfa_equal_p (&last_cfa, &next_cfa))
20394 {
20395 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20396 start_label, 0, last_label, 0, section);
20397 list_tail = &(*list_tail)->dw_loc_next;
20398 start_label = last_label;
20399 }
20400
20401 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20402 start_label, 0,
20403 fde->dw_fde_second_begin
20404 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20405 section);
20406
20407 maybe_gen_llsym (list);
20408
20409 return list;
20410 }
20411
20412 /* Compute a displacement from the "steady-state frame pointer" to the
20413 frame base (often the same as the CFA), and store it in
20414 frame_pointer_fb_offset. OFFSET is added to the displacement
20415 before the latter is negated. */
20416
20417 static void
20418 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20419 {
20420 rtx reg, elim;
20421
20422 #ifdef FRAME_POINTER_CFA_OFFSET
20423 reg = frame_pointer_rtx;
20424 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20425 #else
20426 reg = arg_pointer_rtx;
20427 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20428 #endif
20429
20430 elim = (ira_use_lra_p
20431 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20432 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20433 elim = strip_offset_and_add (elim, &offset);
20434
20435 frame_pointer_fb_offset = -offset;
20436
20437 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20438      in which to eliminate. This is because its stack pointer isn't
20439 directly accessible as a register within the ISA. To work around
20440 this, assume that while we cannot provide a proper value for
20441 frame_pointer_fb_offset, we won't need one either. */
20442 frame_pointer_fb_offset_valid
20443 = ((SUPPORTS_STACK_ALIGNMENT
20444 && (elim == hard_frame_pointer_rtx
20445 || elim == stack_pointer_rtx))
20446 || elim == (frame_pointer_needed
20447 ? hard_frame_pointer_rtx
20448 : stack_pointer_rtx));
20449 }
20450
20451 /* Generate a DW_AT_name attribute given some string value to be included as
20452 the value of the attribute. */
20453
20454 static void
20455 add_name_attribute (dw_die_ref die, const char *name_string)
20456 {
20457 if (name_string != NULL && *name_string != 0)
20458 {
20459 if (demangle_name_func)
20460 name_string = (*demangle_name_func) (name_string);
20461
20462 add_AT_string (die, DW_AT_name, name_string);
20463 }
20464 }
20465
20466 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20467 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20468 of TYPE accordingly.
20469
20470 ??? This is a temporary measure until after we're able to generate
20471 regular DWARF for the complex Ada type system. */
20472
20473 static void
20474 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20475 dw_die_ref context_die)
20476 {
20477 tree dtype;
20478 dw_die_ref dtype_die;
20479
20480 if (!lang_hooks.types.descriptive_type)
20481 return;
20482
20483 dtype = lang_hooks.types.descriptive_type (type);
20484 if (!dtype)
20485 return;
20486
20487 dtype_die = lookup_type_die (dtype);
20488 if (!dtype_die)
20489 {
20490 gen_type_die (dtype, context_die);
20491 dtype_die = lookup_type_die (dtype);
20492 gcc_assert (dtype_die);
20493 }
20494
20495 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20496 }
20497
20498 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20499
20500 static const char *
20501 comp_dir_string (void)
20502 {
20503 const char *wd;
20504 char *wd1;
20505 static const char *cached_wd = NULL;
20506
20507 if (cached_wd != NULL)
20508 return cached_wd;
20509
20510 wd = get_src_pwd ();
20511 if (wd == NULL)
20512 return NULL;
20513
20514 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20515 {
20516 int wdlen;
20517
20518 wdlen = strlen (wd);
20519 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20520 strcpy (wd1, wd);
20521 wd1 [wdlen] = DIR_SEPARATOR;
20522 wd1 [wdlen + 1] = 0;
20523 wd = wd1;
20524 }
20525
20526 cached_wd = remap_debug_filename (wd);
20527 return cached_wd;
20528 }
20529
20530 /* Generate a DW_AT_comp_dir attribute for DIE. */
20531
20532 static void
20533 add_comp_dir_attribute (dw_die_ref die)
20534 {
20535 const char * wd = comp_dir_string ();
20536 if (wd != NULL)
20537 add_AT_string (die, DW_AT_comp_dir, wd);
20538 }
20539
20540 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20541    pointer computation, ...), output a representation for that value according
20542 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20543 loc_list_from_tree for the meaning of CONTEXT. */
20544
20545 static void
20546 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20547 int forms, struct loc_descr_context *context)
20548 {
20549 dw_die_ref context_die, decl_die;
20550 dw_loc_list_ref list;
20551 bool strip_conversions = true;
20552 bool placeholder_seen = false;
20553
20554 while (strip_conversions)
20555 switch (TREE_CODE (value))
20556 {
20557 case ERROR_MARK:
20558 case SAVE_EXPR:
20559 return;
20560
20561 CASE_CONVERT:
20562 case VIEW_CONVERT_EXPR:
20563 value = TREE_OPERAND (value, 0);
20564 break;
20565
20566 default:
20567 strip_conversions = false;
20568 break;
20569 }
20570
20571 /* If possible and permitted, output the attribute as a constant. */
20572 if ((forms & dw_scalar_form_constant) != 0
20573 && TREE_CODE (value) == INTEGER_CST)
20574 {
20575 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20576
20577 /* If HOST_WIDE_INT is big enough then represent the bound as
20578 a constant value. We need to choose a form based on
20579 whether the type is signed or unsigned. We cannot just
20580 call add_AT_unsigned if the value itself is positive
20581 (add_AT_unsigned might add the unsigned value encoded as
20582 	 DW_FORM_data[1248]). Some DWARF consumers will look up the
20583 bounds type and then sign extend any unsigned values found
20584 for signed types. This is needed only for
20585 DW_AT_{lower,upper}_bound, since for most other attributes,
20586 consumers will treat DW_FORM_data[1248] as unsigned values,
20587 regardless of the underlying type. */
20588 if (prec <= HOST_BITS_PER_WIDE_INT
20589 || tree_fits_uhwi_p (value))
20590 {
20591 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20592 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20593 else
20594 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20595 }
20596 else
20597 /* Otherwise represent the bound as an unsigned value with
20598 the precision of its type. The precision and signedness
20599 of the type will be necessary to re-interpret it
20600 unambiguously. */
20601 add_AT_wide (die, attr, wi::to_wide (value));
20602 return;
20603 }
20604
20605 /* Otherwise, if it's possible and permitted too, output a reference to
20606 another DIE. */
20607 if ((forms & dw_scalar_form_reference) != 0)
20608 {
20609 tree decl = NULL_TREE;
20610
20611 /* Some type attributes reference an outer type. For instance, the upper
20612 bound of an array may reference an embedding record (this happens in
20613 Ada). */
20614 if (TREE_CODE (value) == COMPONENT_REF
20615 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20616 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20617 decl = TREE_OPERAND (value, 1);
20618
20619 else if (VAR_P (value)
20620 || TREE_CODE (value) == PARM_DECL
20621 || TREE_CODE (value) == RESULT_DECL)
20622 decl = value;
20623
20624 if (decl != NULL_TREE)
20625 {
20626 dw_die_ref decl_die = lookup_decl_die (decl);
20627
20628 /* ??? Can this happen, or should the variable have been bound
20629 first? Probably it can, since I imagine that we try to create
20630 the types of parameters in the order in which they exist in
20631 the list, and won't have created a forward reference to a
20632 later parameter. */
20633 if (decl_die != NULL)
20634 {
20635 add_AT_die_ref (die, attr, decl_die);
20636 return;
20637 }
20638 }
20639 }
20640
20641 /* Last chance: try to create a stack operation procedure to evaluate the
20642 value. Do nothing if even that is not possible or permitted. */
20643 if ((forms & dw_scalar_form_exprloc) == 0)
20644 return;
20645
20646 list = loc_list_from_tree (value, 2, context);
20647 if (context && context->placeholder_arg)
20648 {
20649 placeholder_seen = context->placeholder_seen;
20650 context->placeholder_seen = false;
20651 }
20652 if (list == NULL || single_element_loc_list_p (list))
20653 {
20654 /* If this attribute is not a reference nor constant, it is
20655 a DWARF expression rather than location description. For that
20656 loc_list_from_tree (value, 0, &context) is needed. */
20657 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20658 if (list2 && single_element_loc_list_p (list2))
20659 {
20660 if (placeholder_seen)
20661 {
20662 struct dwarf_procedure_info dpi;
20663 dpi.fndecl = NULL_TREE;
20664 dpi.args_count = 1;
20665 if (!resolve_args_picking (list2->expr, 1, &dpi))
20666 return;
20667 }
20668 add_AT_loc (die, attr, list2->expr);
20669 return;
20670 }
20671 }
20672
20673 /* If that failed to give a single element location list, fall back to
20674 outputting this as a reference... still if permitted. */
20675 if (list == NULL
20676 || (forms & dw_scalar_form_reference) == 0
20677 || placeholder_seen)
20678 return;
20679
20680 if (current_function_decl == 0)
20681 context_die = comp_unit_die ();
20682 else
20683 context_die = lookup_decl_die (current_function_decl);
20684
20685 decl_die = new_die (DW_TAG_variable, context_die, value);
20686 add_AT_flag (decl_die, DW_AT_artificial, 1);
20687 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20688 context_die);
20689 add_AT_location_description (decl_die, DW_AT_location, list);
20690 add_AT_die_ref (die, attr, decl_die);
20691 }
20692
20693 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20694 default. */
20695
20696 static int
20697 lower_bound_default (void)
20698 {
20699 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20700 {
20701 case DW_LANG_C:
20702 case DW_LANG_C89:
20703 case DW_LANG_C99:
20704 case DW_LANG_C11:
20705 case DW_LANG_C_plus_plus:
20706 case DW_LANG_C_plus_plus_11:
20707 case DW_LANG_C_plus_plus_14:
20708 case DW_LANG_ObjC:
20709 case DW_LANG_ObjC_plus_plus:
20710 return 0;
20711 case DW_LANG_Fortran77:
20712 case DW_LANG_Fortran90:
20713 case DW_LANG_Fortran95:
20714 case DW_LANG_Fortran03:
20715 case DW_LANG_Fortran08:
20716 return 1;
20717 case DW_LANG_UPC:
20718 case DW_LANG_D:
20719 case DW_LANG_Python:
20720 return dwarf_version >= 4 ? 0 : -1;
20721 case DW_LANG_Ada95:
20722 case DW_LANG_Ada83:
20723 case DW_LANG_Cobol74:
20724 case DW_LANG_Cobol85:
20725 case DW_LANG_Modula2:
20726 case DW_LANG_PLI:
20727 return dwarf_version >= 4 ? 1 : -1;
20728 default:
20729 return -1;
20730 }
20731 }
20732
20733 /* Given a tree node describing an array bound (either lower or upper), output
20734 a representation for that bound. */
20735
20736 static void
20737 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20738 tree bound, struct loc_descr_context *context)
20739 {
20740 int dflt;
20741
20742 while (1)
20743 switch (TREE_CODE (bound))
20744 {
20745 /* Strip all conversions. */
20746 CASE_CONVERT:
20747 case VIEW_CONVERT_EXPR:
20748 bound = TREE_OPERAND (bound, 0);
20749 break;
20750
20751 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20752 are even omitted when they are the default. */
20753 case INTEGER_CST:
20754 /* If the value for this bound is the default one, we can even omit the
20755 attribute. */
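      /* For a C array such as "int a[4]", for instance, the default lower
	 bound is 0, so no DW_AT_lower_bound is emitted for its subrange.  */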
20756 if (bound_attr == DW_AT_lower_bound
20757 && tree_fits_shwi_p (bound)
20758 && (dflt = lower_bound_default ()) != -1
20759 && tree_to_shwi (bound) == dflt)
20760 return;
20761
20762 /* FALLTHRU */
20763
20764 default:
20765       /* Because of the complex interactions there can be with other GNAT
20766 	 encodings, GDB isn't ready yet to handle a proper DWARF description
20767 	 for self-referential subrange bounds: let GNAT encodings do the
20768 magic in such a case. */
20769 if (is_ada ()
20770 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20771 && contains_placeholder_p (bound))
20772 return;
20773
20774 add_scalar_info (subrange_die, bound_attr, bound,
20775 dw_scalar_form_constant
20776 | dw_scalar_form_exprloc
20777 | dw_scalar_form_reference,
20778 context);
20779 return;
20780 }
20781 }
20782
20783 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20784 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20785 Note that the block of subscript information for an array type also
20786 includes information about the element type of the given array type.
20787
20788 This function reuses previously set type and bound information if
20789 available. */
20790
20791 static void
20792 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20793 {
20794 unsigned dimension_number;
20795 tree lower, upper;
20796 dw_die_ref child = type_die->die_child;
20797
20798 for (dimension_number = 0;
20799 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20800 type = TREE_TYPE (type), dimension_number++)
20801 {
20802 tree domain = TYPE_DOMAIN (type);
20803
20804 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20805 break;
20806
20807 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20808 and (in GNU C only) variable bounds. Handle all three forms
20809 here. */
20810
20811 /* Find and reuse a previously generated DW_TAG_subrange_type if
20812 available.
20813
20814 For multi-dimensional arrays, as we iterate through the
20815 various dimensions in the enclosing for loop above, we also
20816 iterate through the DIE children and pick at each
20817 DW_TAG_subrange_type previously generated (if available).
20818 Each child DW_TAG_subrange_type DIE describes the range of
20819 the current dimension. At this point we should have as many
20820 DW_TAG_subrange_type's as we have dimensions in the
20821 array. */
20822 dw_die_ref subrange_die = NULL;
20823 if (child)
20824 while (1)
20825 {
20826 child = child->die_sib;
20827 if (child->die_tag == DW_TAG_subrange_type)
20828 subrange_die = child;
20829 if (child == type_die->die_child)
20830 {
20831 /* If we wrapped around, stop looking next time. */
20832 child = NULL;
20833 break;
20834 }
20835 if (child->die_tag == DW_TAG_subrange_type)
20836 break;
20837 }
20838 if (!subrange_die)
20839 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20840
20841 if (domain)
20842 {
20843 /* We have an array type with specified bounds. */
20844 lower = TYPE_MIN_VALUE (domain);
20845 upper = TYPE_MAX_VALUE (domain);
20846
20847 /* Define the index type. */
20848 if (TREE_TYPE (domain)
20849 && !get_AT (subrange_die, DW_AT_type))
20850 {
20851 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20852 TREE_TYPE field. We can't emit debug info for this
20853 because it is an unnamed integral type. */
20854 if (TREE_CODE (domain) == INTEGER_TYPE
20855 && TYPE_NAME (domain) == NULL_TREE
20856 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20857 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20858 ;
20859 else
20860 add_type_attribute (subrange_die, TREE_TYPE (domain),
20861 TYPE_UNQUALIFIED, false, type_die);
20862 }
20863
20864 /* ??? If upper is NULL, the array has unspecified length,
20865 but it does have a lower bound. This happens with Fortran
20866 dimension arr(N:*)
20867 Since the debugger is definitely going to need to know N
20868 to produce useful results, go ahead and output the lower
20869 bound solo, and hope the debugger can cope. */
20870
20871 if (!get_AT (subrange_die, DW_AT_lower_bound))
20872 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20873 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20874 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20875 }
20876
20877 /* Otherwise we have an array type with an unspecified length. The
20878 DWARF-2 spec does not say how to handle this; let's just leave out the
20879 bounds. */
20880 }
20881 }
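/* A hedged sketch of the collapsing above, not in the original source: for a
   hypothetical C declaration "int m[2][3];" the tree is an array of arrays,
   but with COLLAPSE_P true the loop walks both ARRAY_TYPE levels and attaches
   two DW_TAG_subrange_type children (upper bounds 1 and 2) to a single
   DW_TAG_array_type.  For Ada, COLLAPSE_P is false and each dimension keeps
   its own array DIE.  */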
20882
20883 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20884
20885 static void
20886 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20887 {
20888 dw_die_ref decl_die;
20889 HOST_WIDE_INT size;
20890 dw_loc_descr_ref size_expr = NULL;
20891
20892 switch (TREE_CODE (tree_node))
20893 {
20894 case ERROR_MARK:
20895 size = 0;
20896 break;
20897 case ENUMERAL_TYPE:
20898 case RECORD_TYPE:
20899 case UNION_TYPE:
20900 case QUAL_UNION_TYPE:
20901 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20902 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20903 {
20904 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20905 return;
20906 }
20907 size_expr = type_byte_size (tree_node, &size);
20908 break;
20909 case FIELD_DECL:
20910 /* For a data member of a struct or union, the DW_AT_byte_size is
20911 generally given as the number of bytes normally allocated for an
20912 object of the *declared* type of the member itself. This is true
20913 even for bit-fields. */
20914 size = int_size_in_bytes (field_type (tree_node));
20915 break;
20916 default:
20917 gcc_unreachable ();
20918 }
20919
20920 /* Support for dynamically-sized objects was introduced by DWARFv3.
20921 At the moment, GDB does not handle variable byte sizes very well,
20922 though. */
20923 if ((dwarf_version >= 3 || !dwarf_strict)
20924 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20925 && size_expr != NULL)
20926 add_AT_loc (die, DW_AT_byte_size, size_expr);
20927
20928 /* Note that `size' might be -1 when we get to this point. If it is, that
20929 indicates that the byte size of the entity in question is variable and
20930 that we could not generate a DWARF expression that computes it. */
20931 if (size >= 0)
20932 add_AT_unsigned (die, DW_AT_byte_size, size);
20933 }
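/* A hedged example, not in the original source: for the hypothetical
   "struct s { unsigned x : 3; };" the FIELD_DECL x gets DW_AT_byte_size 4 on a
   typical 32-bit-int target, i.e. the size of its declared type rather than of
   the 3-bit field itself, as described in the FIELD_DECL case above.  */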
20934
20935 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20936 alignment. */
20937
20938 static void
20939 add_alignment_attribute (dw_die_ref die, tree tree_node)
20940 {
20941 if (dwarf_version < 5 && dwarf_strict)
20942 return;
20943
20944 unsigned align;
20945
20946 if (DECL_P (tree_node))
20947 {
20948 if (!DECL_USER_ALIGN (tree_node))
20949 return;
20950
20951 align = DECL_ALIGN_UNIT (tree_node);
20952 }
20953 else if (TYPE_P (tree_node))
20954 {
20955 if (!TYPE_USER_ALIGN (tree_node))
20956 return;
20957
20958 align = TYPE_ALIGN_UNIT (tree_node);
20959 }
20960 else
20961 gcc_unreachable ();
20962
20963 add_AT_unsigned (die, DW_AT_alignment, align);
20964 }
20965
20966 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20967 which specifies the distance in bits from the highest order bit of the
20968 "containing object" for the bit-field to the highest order bit of the
20969 bit-field itself.
20970
20971 For any given bit-field, the "containing object" is a hypothetical object
20972 (of some integral or enum type) within which the given bit-field lives. The
20973 type of this hypothetical "containing object" is always the same as the
20974 declared type of the individual bit-field itself. The determination of the
20975 exact location of the "containing object" for a bit-field is rather
20976 complicated. It's handled by the `field_byte_offset' function (above).
20977
20978 CTX is required: see the comment for VLR_CONTEXT.
20979
20980 Note that it is the size (in bytes) of the hypothetical "containing object"
20981 which will be given in the DW_AT_byte_size attribute for this bit-field.
20982 (See `byte_size_attribute' above). */
20983
20984 static inline void
20985 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20986 {
20987 HOST_WIDE_INT object_offset_in_bytes;
20988 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20989 HOST_WIDE_INT bitpos_int;
20990 HOST_WIDE_INT highest_order_object_bit_offset;
20991 HOST_WIDE_INT highest_order_field_bit_offset;
20992 HOST_WIDE_INT bit_offset;
20993
20994 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20995
20996 /* Must be a field and a bit field. */
20997 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20998
20999 /* We can't yet handle bit-fields whose offsets are variable, so if we
21000 encounter such things, just return without generating any attribute
21001 whatsoever. Likewise for variable or too large size. */
21002 if (! tree_fits_shwi_p (bit_position (decl))
21003 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21004 return;
21005
21006 bitpos_int = int_bit_position (decl);
21007
21008 /* Note that the bit offset is always the distance (in bits) from the
21009 highest-order bit of the "containing object" to the highest-order bit of
21010 the bit-field itself. Since the "high-order end" of any object or field
21011 is different on big-endian and little-endian machines, the computation
21012 below must take account of these differences. */
21013 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21014 highest_order_field_bit_offset = bitpos_int;
21015
21016 if (! BYTES_BIG_ENDIAN)
21017 {
21018 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21019 highest_order_object_bit_offset +=
21020 simple_type_size_in_bits (original_type);
21021 }
21022
21023 bit_offset
21024 = (! BYTES_BIG_ENDIAN
21025 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21026 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21027
21028 if (bit_offset < 0)
21029 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21030 else
21031 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21032 }
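/* A hedged worked example of the computation above, not in the original
   source: assume the hypothetical "struct s { unsigned a : 3; unsigned b : 5; };"
   with a 32-bit int container and field_byte_offset yielding 0 for b.  On a
   little-endian target, highest_order_object_bit_offset = 0 + 32 = 32 and
   highest_order_field_bit_offset = 3 + 5 = 8, so b gets DW_AT_bit_offset
   32 - 8 = 24; on a big-endian target it would instead be 3 - 0 = 3.  */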
21033
21034 /* For a FIELD_DECL node which represents a bit field, output an attribute
21035 which specifies the length in bits of the given field. */
21036
21037 static inline void
21038 add_bit_size_attribute (dw_die_ref die, tree decl)
21039 {
21040 /* Must be a field and a bit field. */
21041 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21042 && DECL_BIT_FIELD_TYPE (decl));
21043
21044 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21045 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21046 }
21047
21048 /* If the compiled language is ANSI C, then add a 'prototyped'
21049 attribute if arg types are given for the parameters of a function. */
21050
21051 static inline void
21052 add_prototyped_attribute (dw_die_ref die, tree func_type)
21053 {
21054 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21055 {
21056 case DW_LANG_C:
21057 case DW_LANG_C89:
21058 case DW_LANG_C99:
21059 case DW_LANG_C11:
21060 case DW_LANG_ObjC:
21061 if (prototype_p (func_type))
21062 add_AT_flag (die, DW_AT_prototyped, 1);
21063 break;
21064 default:
21065 break;
21066 }
21067 }
21068
21069 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21070 by looking in the type declaration, the object declaration equate table or
21071 the block mapping. */
21072
21073 static inline dw_die_ref
21074 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21075 {
21076 dw_die_ref origin_die = NULL;
21077
21078 if (DECL_P (origin))
21079 {
21080 dw_die_ref c;
21081 origin_die = lookup_decl_die (origin);
21082 /* "Unwrap" the decl's DIE which we put in the imported unit context.
21083 We are looking for the abstract copy here. */
21084 if (in_lto_p
21085 && origin_die
21086 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21087 /* ??? Identify this better. */
21088 && c->with_offset)
21089 origin_die = c;
21090 }
21091 else if (TYPE_P (origin))
21092 origin_die = lookup_type_die (origin);
21093 else if (TREE_CODE (origin) == BLOCK)
21094 origin_die = BLOCK_DIE (origin);
21095
21096 /* XXX: Functions that are never lowered don't always have correct block
21097 trees (in the case of Java, and in some other languages, they simply have
21098 no block tree). For these functions, there is nothing we can really do to
21099 output correct debug info for inlined functions in all cases. Rather
21100 than die, we'll just produce deficient debug info now, in that we will
21101 have variables without a proper abstract origin. In the future, when all
21102 functions are lowered, we should re-add a gcc_assert (origin_die)
21103 here. */
21104
21105 if (origin_die)
21106 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21107 return origin_die;
21108 }
21109
21110 /* We do not currently support the pure_virtual attribute. */
21111
21112 static inline void
21113 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21114 {
21115 if (DECL_VINDEX (func_decl))
21116 {
21117 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21118
21119 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21120 add_AT_loc (die, DW_AT_vtable_elem_location,
21121 new_loc_descr (DW_OP_constu,
21122 tree_to_shwi (DECL_VINDEX (func_decl)),
21123 0));
21124
21125 /* GNU extension: Record what type this method came from originally. */
21126 if (debug_info_level > DINFO_LEVEL_TERSE
21127 && DECL_CONTEXT (func_decl))
21128 add_AT_die_ref (die, DW_AT_containing_type,
21129 lookup_type_die (DECL_CONTEXT (func_decl)));
21130 }
21131 }
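/* A hedged illustration, not in the original source: for a hypothetical C++
   "struct B { virtual void f (); };", the DIE for f gets DW_AT_virtuality
   DW_VIRTUALITY_virtual, a DW_AT_vtable_elem_location expression of the form
   DW_OP_constu <vtable slot>, and (beyond -g1) DW_AT_containing_type
   referring back to B's DIE.  */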
21132 \f
21133 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21134 given decl. This used to be a vendor extension until after DWARF 4
21135 standardized it. */
21136
21137 static void
21138 add_linkage_attr (dw_die_ref die, tree decl)
21139 {
21140 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21141
21142 /* Mimic what assemble_name_raw does with a leading '*'. */
21143 if (name[0] == '*')
21144 name = &name[1];
21145
21146 if (dwarf_version >= 4)
21147 add_AT_string (die, DW_AT_linkage_name, name);
21148 else
21149 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21150 }
21151
21152 /* Add source coordinate attributes for the given decl. */
21153
21154 static void
21155 add_src_coords_attributes (dw_die_ref die, tree decl)
21156 {
21157 expanded_location s;
21158
21159 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21160 return;
21161 s = expand_location (DECL_SOURCE_LOCATION (decl));
21162 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21163 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21164 if (debug_column_info && s.column)
21165 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21166 }
21167
21168 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21169
21170 static void
21171 add_linkage_name_raw (dw_die_ref die, tree decl)
21172 {
21173 /* Defer until we have an assembler name set. */
21174 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21175 {
21176 limbo_die_node *asm_name;
21177
21178 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21179 asm_name->die = die;
21180 asm_name->created_for = decl;
21181 asm_name->next = deferred_asm_name;
21182 deferred_asm_name = asm_name;
21183 }
21184 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21185 add_linkage_attr (die, decl);
21186 }
21187
21188 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21189
21190 static void
21191 add_linkage_name (dw_die_ref die, tree decl)
21192 {
21193 if (debug_info_level > DINFO_LEVEL_NONE
21194 && VAR_OR_FUNCTION_DECL_P (decl)
21195 && TREE_PUBLIC (decl)
21196 && !(VAR_P (decl) && DECL_REGISTER (decl))
21197 && die->die_tag != DW_TAG_member)
21198 add_linkage_name_raw (die, decl);
21199 }
21200
21201 /* Add a DW_AT_name attribute and source coordinate attribute for the
21202 given decl, but only if it actually has a name. */
21203
21204 static void
21205 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21206 bool no_linkage_name)
21207 {
21208 tree decl_name;
21209
21210 decl_name = DECL_NAME (decl);
21211 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21212 {
21213 const char *name = dwarf2_name (decl, 0);
21214 if (name)
21215 add_name_attribute (die, name);
21216 if (! DECL_ARTIFICIAL (decl))
21217 add_src_coords_attributes (die, decl);
21218
21219 if (!no_linkage_name)
21220 add_linkage_name (die, decl);
21221 }
21222
21223 #ifdef VMS_DEBUGGING_INFO
21224 /* Get the function's name, as described by its RTL. This may be different
21225 from the DECL_NAME name used in the source file. */
21226 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21227 {
21228 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21229 XEXP (DECL_RTL (decl), 0), false);
21230 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21231 }
21232 #endif /* VMS_DEBUGGING_INFO */
21233 }
21234
21235 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21236
21237 static void
21238 add_discr_value (dw_die_ref die, dw_discr_value *value)
21239 {
21240 dw_attr_node attr;
21241
21242 attr.dw_attr = DW_AT_discr_value;
21243 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21244 attr.dw_attr_val.val_entry = NULL;
21245 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21246 if (value->pos)
21247 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21248 else
21249 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21250 add_dwarf_attr (die, &attr);
21251 }
21252
21253 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21254
21255 static void
21256 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21257 {
21258 dw_attr_node attr;
21259
21260 attr.dw_attr = DW_AT_discr_list;
21261 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21262 attr.dw_attr_val.val_entry = NULL;
21263 attr.dw_attr_val.v.val_discr_list = discr_list;
21264 add_dwarf_attr (die, &attr);
21265 }
21266
21267 static inline dw_discr_list_ref
21268 AT_discr_list (dw_attr_node *attr)
21269 {
21270 return attr->dw_attr_val.v.val_discr_list;
21271 }
21272
21273 #ifdef VMS_DEBUGGING_INFO
21274 /* Output the debug main pointer die for VMS */
21275
21276 void
21277 dwarf2out_vms_debug_main_pointer (void)
21278 {
21279 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21280 dw_die_ref die;
21281
21282 /* Allocate the VMS debug main subprogram die. */
21283 die = new_die_raw (DW_TAG_subprogram);
21284 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21285 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21286 current_function_funcdef_no);
21287 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21288
21289 /* Make it the first child of comp_unit_die (). */
21290 die->die_parent = comp_unit_die ();
21291 if (comp_unit_die ()->die_child)
21292 {
21293 die->die_sib = comp_unit_die ()->die_child->die_sib;
21294 comp_unit_die ()->die_child->die_sib = die;
21295 }
21296 else
21297 {
21298 die->die_sib = die;
21299 comp_unit_die ()->die_child = die;
21300 }
21301 }
21302 #endif /* VMS_DEBUGGING_INFO */
21303
21304 /* Push a new declaration scope. */
21305
21306 static void
21307 push_decl_scope (tree scope)
21308 {
21309 vec_safe_push (decl_scope_table, scope);
21310 }
21311
21312 /* Pop a declaration scope. */
21313
21314 static inline void
21315 pop_decl_scope (void)
21316 {
21317 decl_scope_table->pop ();
21318 }
21319
21320 /* walk_tree helper function for uses_local_type, below. */
21321
21322 static tree
21323 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21324 {
21325 if (!TYPE_P (*tp))
21326 *walk_subtrees = 0;
21327 else
21328 {
21329 tree name = TYPE_NAME (*tp);
21330 if (name && DECL_P (name) && decl_function_context (name))
21331 return *tp;
21332 }
21333 return NULL_TREE;
21334 }
21335
21336 /* If TYPE involves a function-local type (including a local typedef to a
21337 non-local type), returns that type; otherwise returns NULL_TREE. */
21338
21339 static tree
21340 uses_local_type (tree type)
21341 {
21342 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21343 return used;
21344 }
21345
21346 /* Return the DIE for the scope that immediately contains this type.
21347 Non-named types that do not involve a function-local type get global
21348 scope. Named types nested in namespaces or other types get their
21349 containing scope. All other types (i.e. function-local named types) get
21350 the current active scope. */
21351
21352 static dw_die_ref
21353 scope_die_for (tree t, dw_die_ref context_die)
21354 {
21355 dw_die_ref scope_die = NULL;
21356 tree containing_scope;
21357
21358 /* Non-types always go in the current scope. */
21359 gcc_assert (TYPE_P (t));
21360
21361 /* Use the scope of the typedef, rather than the scope of the type
21362 it refers to. */
21363 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21364 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21365 else
21366 containing_scope = TYPE_CONTEXT (t);
21367
21368 /* Use the containing namespace if there is one. */
21369 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21370 {
21371 if (context_die == lookup_decl_die (containing_scope))
21372 /* OK */;
21373 else if (debug_info_level > DINFO_LEVEL_TERSE)
21374 context_die = get_context_die (containing_scope);
21375 else
21376 containing_scope = NULL_TREE;
21377 }
21378
21379 /* Ignore function type "scopes" from the C frontend. They mean that
21380 a tagged type is local to a parmlist of a function declarator, but
21381 that isn't useful to DWARF. */
21382 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21383 containing_scope = NULL_TREE;
21384
21385 if (SCOPE_FILE_SCOPE_P (containing_scope))
21386 {
21387 /* If T uses a local type keep it local as well, to avoid references
21388 to function-local DIEs from outside the function. */
21389 if (current_function_decl && uses_local_type (t))
21390 scope_die = context_die;
21391 else
21392 scope_die = comp_unit_die ();
21393 }
21394 else if (TYPE_P (containing_scope))
21395 {
21396 /* For types, we can just look up the appropriate DIE. */
21397 if (debug_info_level > DINFO_LEVEL_TERSE)
21398 scope_die = get_context_die (containing_scope);
21399 else
21400 {
21401 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21402 if (scope_die == NULL)
21403 scope_die = comp_unit_die ();
21404 }
21405 }
21406 else
21407 scope_die = context_die;
21408
21409 return scope_die;
21410 }
21411
21412 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21413
21414 static inline int
21415 local_scope_p (dw_die_ref context_die)
21416 {
21417 for (; context_die; context_die = context_die->die_parent)
21418 if (context_die->die_tag == DW_TAG_inlined_subroutine
21419 || context_die->die_tag == DW_TAG_subprogram)
21420 return 1;
21421
21422 return 0;
21423 }
21424
21425 /* Returns nonzero if CONTEXT_DIE is a class. */
21426
21427 static inline int
21428 class_scope_p (dw_die_ref context_die)
21429 {
21430 return (context_die
21431 && (context_die->die_tag == DW_TAG_structure_type
21432 || context_die->die_tag == DW_TAG_class_type
21433 || context_die->die_tag == DW_TAG_interface_type
21434 || context_die->die_tag == DW_TAG_union_type));
21435 }
21436
21437 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21438 whether or not to treat a DIE in this context as a declaration. */
21439
21440 static inline int
21441 class_or_namespace_scope_p (dw_die_ref context_die)
21442 {
21443 return (class_scope_p (context_die)
21444 || (context_die && context_die->die_tag == DW_TAG_namespace));
21445 }
21446
21447 /* Many forms of DIEs require a "type description" attribute. This
21448 routine locates the proper "type descriptor" die for the type given
21449 by 'type' plus any additional qualifiers given by 'cv_quals', and
21450 adds a DW_AT_type attribute below the given die. */
21451
21452 static void
21453 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21454 bool reverse, dw_die_ref context_die)
21455 {
21456 enum tree_code code = TREE_CODE (type);
21457 dw_die_ref type_die = NULL;
21458
21459 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21460 or fixed-point type, use the inner type. This is because we have no
21461 support for unnamed types in base_type_die. This can happen if this is
21462 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21463 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21464 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21465 type = TREE_TYPE (type), code = TREE_CODE (type);
21466
21467 if (code == ERROR_MARK
21468 /* Handle a special case. For functions whose return type is void, we
21469 generate *no* type attribute. (Note that no object may have type
21470 `void', so this only applies to function return types). */
21471 || code == VOID_TYPE)
21472 return;
21473
21474 type_die = modified_type_die (type,
21475 cv_quals | TYPE_QUALS (type),
21476 reverse,
21477 context_die);
21478
21479 if (type_die != NULL)
21480 add_AT_die_ref (object_die, DW_AT_type, type_die);
21481 }
21482
21483 /* Given an object die, add the calling convention attribute for the
21484 function call type. */
21485 static void
21486 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21487 {
21488 enum dwarf_calling_convention value = DW_CC_normal;
21489
21490 value = ((enum dwarf_calling_convention)
21491 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21492
21493 if (is_fortran ()
21494 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21495 {
21496 /* DWARF 2 doesn't provide a way to identify a program's source-level
21497 entry point. DW_AT_calling_convention attributes are only meant
21498 to describe functions' calling conventions. However, lacking a
21499 better way to signal the Fortran main program, we used this for
21500 a long time, following existing custom. Now, DWARF 4 has
21501 DW_AT_main_subprogram, which we add below, but some tools still
21502 rely on the old way, which we thus keep. */
21503 value = DW_CC_program;
21504
21505 if (dwarf_version >= 4 || !dwarf_strict)
21506 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21507 }
21508
21509 /* Only add the attribute if the backend requests it, and
21510 the value is not DW_CC_normal. */
21511 if (value && (value != DW_CC_normal))
21512 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21513 }
21514
21515 /* Given a tree pointer to a struct, class, union, or enum type node, return
21516 a pointer to the (string) tag name for the given type, or zero if the type
21517 was declared without a tag. */
21518
21519 static const char *
21520 type_tag (const_tree type)
21521 {
21522 const char *name = 0;
21523
21524 if (TYPE_NAME (type) != 0)
21525 {
21526 tree t = 0;
21527
21528 /* Find the IDENTIFIER_NODE for the type name. */
21529 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21530 && !TYPE_NAMELESS (type))
21531 t = TYPE_NAME (type);
21532
21533 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21534 a TYPE_DECL node, regardless of whether or not a `typedef' was
21535 involved. */
21536 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21537 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21538 {
21539 /* We want to be extra verbose. Don't call dwarf_name if
21540 DECL_NAME isn't set. The default hook for decl_printable_name
21541 doesn't like that, and in this context it's correct to return
21542 0, instead of "<anonymous>" or the like. */
21543 if (DECL_NAME (TYPE_NAME (type))
21544 && !DECL_NAMELESS (TYPE_NAME (type)))
21545 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21546 }
21547
21548 /* Now get the name as a string, or invent one. */
21549 if (!name && t != 0)
21550 name = IDENTIFIER_POINTER (t);
21551 }
21552
21553 return (name == 0 || *name == '\0') ? 0 : name;
21554 }
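/* A hedged example, not in the original source: for "struct foo { int i; };"
   type_tag returns "foo", whereas for an unnamed "struct { int i; } v;" (or a
   TYPE_NAMELESS type) it returns 0 and callers such as add_name_attribute then
   leave the DIE without a DW_AT_name.  */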
21555
21556 /* Return the type associated with a data member, making a special check
21557 for bit-field types. */
21558
21559 static inline tree
21560 member_declared_type (const_tree member)
21561 {
21562 return (DECL_BIT_FIELD_TYPE (member)
21563 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21564 }
21565
21566 /* Get the decl's label, as described by its RTL. This may be different
21567 from the DECL_NAME name used in the source file. */
21568
21569 #if 0
21570 static const char *
21571 decl_start_label (tree decl)
21572 {
21573 rtx x;
21574 const char *fnname;
21575
21576 x = DECL_RTL (decl);
21577 gcc_assert (MEM_P (x));
21578
21579 x = XEXP (x, 0);
21580 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21581
21582 fnname = XSTR (x, 0);
21583 return fnname;
21584 }
21585 #endif
21586 \f
21587 /* For variable-length arrays that have been previously generated, but
21588 may be incomplete due to missing subscript info, fill the subscript
21589 info. Return TRUE if this is one of those cases. */
21590 static bool
21591 fill_variable_array_bounds (tree type)
21592 {
21593 if (TREE_ASM_WRITTEN (type)
21594 && TREE_CODE (type) == ARRAY_TYPE
21595 && variably_modified_type_p (type, NULL))
21596 {
21597 dw_die_ref array_die = lookup_type_die (type);
21598 if (!array_die)
21599 return false;
21600 add_subscript_info (array_die, type, !is_ada ());
21601 return true;
21602 }
21603 return false;
21604 }
21605
21606 /* These routines generate the internal representation of the DIE's for
21607 the compilation unit. Debugging information is collected by walking
21608 the declaration trees passed in from dwarf2out_decl(). */
21609
21610 static void
21611 gen_array_type_die (tree type, dw_die_ref context_die)
21612 {
21613 dw_die_ref array_die;
21614
21615 /* GNU compilers represent multidimensional array types as sequences of one
21616 dimensional array types whose element types are themselves array types.
21617 We sometimes squish that down to a single array_type DIE with multiple
21618 subscripts in the Dwarf debugging info. The draft Dwarf specification
21619 says that we are allowed to do this kind of compression in C, because
21620 there is no difference between an array of arrays and a multidimensional
21621 array. We don't do this for Ada, to remain as close as possible to the
21622 actual representation, which is especially important given the language's
21623 flexibility wrt arrays of variable size. */
21624
21625 bool collapse_nested_arrays = !is_ada ();
21626
21627 if (fill_variable_array_bounds (type))
21628 return;
21629
21630 dw_die_ref scope_die = scope_die_for (type, context_die);
21631 tree element_type;
21632
21633 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21634 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21635 if (TYPE_STRING_FLAG (type)
21636 && TREE_CODE (type) == ARRAY_TYPE
21637 && is_fortran ()
21638 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21639 {
21640 HOST_WIDE_INT size;
21641
21642 array_die = new_die (DW_TAG_string_type, scope_die, type);
21643 add_name_attribute (array_die, type_tag (type));
21644 equate_type_number_to_die (type, array_die);
21645 size = int_size_in_bytes (type);
21646 if (size >= 0)
21647 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21648 /* ??? We can't annotate types late, but for LTO we may not
21649 generate a location early either (gfortran.dg/save_6.f90). */
21650 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21651 && TYPE_DOMAIN (type) != NULL_TREE
21652 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21653 {
21654 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21655 tree rszdecl = szdecl;
21656
21657 size = int_size_in_bytes (TREE_TYPE (szdecl));
21658 if (!DECL_P (szdecl))
21659 {
21660 if (TREE_CODE (szdecl) == INDIRECT_REF
21661 && DECL_P (TREE_OPERAND (szdecl, 0)))
21662 {
21663 rszdecl = TREE_OPERAND (szdecl, 0);
21664 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21665 != DWARF2_ADDR_SIZE)
21666 size = 0;
21667 }
21668 else
21669 size = 0;
21670 }
21671 if (size > 0)
21672 {
21673 dw_loc_list_ref loc
21674 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21675 NULL);
21676 if (loc)
21677 {
21678 add_AT_location_description (array_die, DW_AT_string_length,
21679 loc);
21680 if (size != DWARF2_ADDR_SIZE)
21681 add_AT_unsigned (array_die, dwarf_version >= 5
21682 ? DW_AT_string_length_byte_size
21683 : DW_AT_byte_size, size);
21684 }
21685 }
21686 }
21687 return;
21688 }
21689
21690 array_die = new_die (DW_TAG_array_type, scope_die, type);
21691 add_name_attribute (array_die, type_tag (type));
21692 equate_type_number_to_die (type, array_die);
21693
21694 if (TREE_CODE (type) == VECTOR_TYPE)
21695 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21696
21697 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21698 if (is_fortran ()
21699 && TREE_CODE (type) == ARRAY_TYPE
21700 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21701 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21702 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21703
21704 #if 0
21705 /* We default the array ordering. Debuggers will probably do the right
21706 things even if DW_AT_ordering is not present. It's not even an issue
21707 until we start to get into multidimensional arrays anyway. If a debugger
21708 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21709 then we'll have to put the DW_AT_ordering attribute back in. (But if
21710 and when we find out that we need to put these in, we will only do so
21711 for multidimensional arrays.) */
21712 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21713 #endif
21714
21715 if (TREE_CODE (type) == VECTOR_TYPE)
21716 {
21717 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21718 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21719 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21720 add_bound_info (subrange_die, DW_AT_upper_bound,
21721 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21722 }
21723 else
21724 add_subscript_info (array_die, type, collapse_nested_arrays);
21725
21726 /* Add representation of the type of the elements of this array type and
21727 emit the corresponding DIE if we haven't done it already. */
21728 element_type = TREE_TYPE (type);
21729 if (collapse_nested_arrays)
21730 while (TREE_CODE (element_type) == ARRAY_TYPE)
21731 {
21732 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21733 break;
21734 element_type = TREE_TYPE (element_type);
21735 }
21736
21737 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21738 TREE_CODE (type) == ARRAY_TYPE
21739 && TYPE_REVERSE_STORAGE_ORDER (type),
21740 context_die);
21741
21742 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21743 if (TYPE_ARTIFICIAL (type))
21744 add_AT_flag (array_die, DW_AT_artificial, 1);
21745
21746 if (get_AT (array_die, DW_AT_name))
21747 add_pubtype (type, array_die);
21748
21749 add_alignment_attribute (array_die, type);
21750 }
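/* A hedged sketch of the vector handling above, not in the original source:
   for the hypothetical GNU C "typedef int v4si __attribute__ ((vector_size (16)));"
   gen_array_type_die emits a DW_TAG_array_type flagged with DW_AT_GNU_vector,
   one DW_TAG_subrange_type with bounds 0 .. 3, and a DW_AT_type reference to
   the int element type.  */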
21751
21752 /* This routine generates a DIE for an array with a hidden descriptor; details
21753 are filled into *info by a langhook. */
21754
21755 static void
21756 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21757 dw_die_ref context_die)
21758 {
21759 const dw_die_ref scope_die = scope_die_for (type, context_die);
21760 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21761 struct loc_descr_context context = { type, info->base_decl, NULL,
21762 false, false };
21763 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21764 int dim;
21765
21766 add_name_attribute (array_die, type_tag (type));
21767 equate_type_number_to_die (type, array_die);
21768
21769 if (info->ndimensions > 1)
21770 switch (info->ordering)
21771 {
21772 case array_descr_ordering_row_major:
21773 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21774 break;
21775 case array_descr_ordering_column_major:
21776 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21777 break;
21778 default:
21779 break;
21780 }
21781
21782 if (dwarf_version >= 3 || !dwarf_strict)
21783 {
21784 if (info->data_location)
21785 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21786 dw_scalar_form_exprloc, &context);
21787 if (info->associated)
21788 add_scalar_info (array_die, DW_AT_associated, info->associated,
21789 dw_scalar_form_constant
21790 | dw_scalar_form_exprloc
21791 | dw_scalar_form_reference, &context);
21792 if (info->allocated)
21793 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21794 dw_scalar_form_constant
21795 | dw_scalar_form_exprloc
21796 | dw_scalar_form_reference, &context);
21797 if (info->stride)
21798 {
21799 const enum dwarf_attribute attr
21800 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21801 const int forms
21802 = (info->stride_in_bits)
21803 ? dw_scalar_form_constant
21804 : (dw_scalar_form_constant
21805 | dw_scalar_form_exprloc
21806 | dw_scalar_form_reference);
21807
21808 add_scalar_info (array_die, attr, info->stride, forms, &context);
21809 }
21810 }
21811 if (dwarf_version >= 5)
21812 {
21813 if (info->rank)
21814 {
21815 add_scalar_info (array_die, DW_AT_rank, info->rank,
21816 dw_scalar_form_constant
21817 | dw_scalar_form_exprloc, &context);
21818 subrange_tag = DW_TAG_generic_subrange;
21819 context.placeholder_arg = true;
21820 }
21821 }
21822
21823 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21824
21825 for (dim = 0; dim < info->ndimensions; dim++)
21826 {
21827 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21828
21829 if (info->dimen[dim].bounds_type)
21830 add_type_attribute (subrange_die,
21831 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21832 false, context_die);
21833 if (info->dimen[dim].lower_bound)
21834 add_bound_info (subrange_die, DW_AT_lower_bound,
21835 info->dimen[dim].lower_bound, &context);
21836 if (info->dimen[dim].upper_bound)
21837 add_bound_info (subrange_die, DW_AT_upper_bound,
21838 info->dimen[dim].upper_bound, &context);
21839 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21840 add_scalar_info (subrange_die, DW_AT_byte_stride,
21841 info->dimen[dim].stride,
21842 dw_scalar_form_constant
21843 | dw_scalar_form_exprloc
21844 | dw_scalar_form_reference,
21845 &context);
21846 }
21847
21848 gen_type_die (info->element_type, context_die);
21849 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21850 TREE_CODE (type) == ARRAY_TYPE
21851 && TYPE_REVERSE_STORAGE_ORDER (type),
21852 context_die);
21853
21854 if (get_AT (array_die, DW_AT_name))
21855 add_pubtype (type, array_die);
21856
21857 add_alignment_attribute (array_die, type);
21858 }
21859
21860 #if 0
21861 static void
21862 gen_entry_point_die (tree decl, dw_die_ref context_die)
21863 {
21864 tree origin = decl_ultimate_origin (decl);
21865 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21866
21867 if (origin != NULL)
21868 add_abstract_origin_attribute (decl_die, origin);
21869 else
21870 {
21871 add_name_and_src_coords_attributes (decl_die, decl);
21872 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21873 TYPE_UNQUALIFIED, false, context_die);
21874 }
21875
21876 if (DECL_ABSTRACT_P (decl))
21877 equate_decl_number_to_die (decl, decl_die);
21878 else
21879 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21880 }
21881 #endif
21882
21883 /* Walk through the list of incomplete types again, trying once more to
21884 emit full debugging info for them. */
21885
21886 static void
21887 retry_incomplete_types (void)
21888 {
21889 set_early_dwarf s;
21890 int i;
21891
21892 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21893 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21894 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21895 vec_safe_truncate (incomplete_types, 0);
21896 }
21897
21898 /* Determine what tag to use for a record type. */
21899
21900 static enum dwarf_tag
21901 record_type_tag (tree type)
21902 {
21903 if (! lang_hooks.types.classify_record)
21904 return DW_TAG_structure_type;
21905
21906 switch (lang_hooks.types.classify_record (type))
21907 {
21908 case RECORD_IS_STRUCT:
21909 return DW_TAG_structure_type;
21910
21911 case RECORD_IS_CLASS:
21912 return DW_TAG_class_type;
21913
21914 case RECORD_IS_INTERFACE:
21915 if (dwarf_version >= 3 || !dwarf_strict)
21916 return DW_TAG_interface_type;
21917 return DW_TAG_structure_type;
21918
21919 default:
21920 gcc_unreachable ();
21921 }
21922 }
21923
21924 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21925 include all of the information about the enumeration values also. Each
21926 enumerated type name/value is listed as a child of the enumerated type
21927 DIE. */
21928
21929 static dw_die_ref
21930 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21931 {
21932 dw_die_ref type_die = lookup_type_die (type);
21933 dw_die_ref orig_type_die = type_die;
21934
21935 if (type_die == NULL)
21936 {
21937 type_die = new_die (DW_TAG_enumeration_type,
21938 scope_die_for (type, context_die), type);
21939 equate_type_number_to_die (type, type_die);
21940 add_name_attribute (type_die, type_tag (type));
21941 if ((dwarf_version >= 4 || !dwarf_strict)
21942 && ENUM_IS_SCOPED (type))
21943 add_AT_flag (type_die, DW_AT_enum_class, 1);
21944 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21945 add_AT_flag (type_die, DW_AT_declaration, 1);
21946 if (!dwarf_strict)
21947 add_AT_unsigned (type_die, DW_AT_encoding,
21948 TYPE_UNSIGNED (type)
21949 ? DW_ATE_unsigned
21950 : DW_ATE_signed);
21951 }
21952 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21953 return type_die;
21954 else
21955 remove_AT (type_die, DW_AT_declaration);
21956
21957 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21958 given enum type is incomplete, do not generate the DW_AT_byte_size
21959 attribute or the DW_AT_element_list attribute. */
21960 if (TYPE_SIZE (type))
21961 {
21962 tree link;
21963
21964 if (!ENUM_IS_OPAQUE (type))
21965 TREE_ASM_WRITTEN (type) = 1;
21966 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
21967 add_byte_size_attribute (type_die, type);
21968 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
21969 add_alignment_attribute (type_die, type);
21970 if ((dwarf_version >= 3 || !dwarf_strict)
21971 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
21972 {
21973 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21974 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21975 context_die);
21976 }
21977 if (TYPE_STUB_DECL (type) != NULL_TREE)
21978 {
21979 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
21980 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21981 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
21982 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21983 }
21984
21985 /* If the first reference to this type was as the return type of an
21986 inline function, then it may not have a parent. Fix this now. */
21987 if (type_die->die_parent == NULL)
21988 add_child_die (scope_die_for (type, context_die), type_die);
21989
21990 for (link = TYPE_VALUES (type);
21991 link != NULL; link = TREE_CHAIN (link))
21992 {
21993 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21994 tree value = TREE_VALUE (link);
21995
21996 gcc_assert (!ENUM_IS_OPAQUE (type));
21997 add_name_attribute (enum_die,
21998 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21999
22000 if (TREE_CODE (value) == CONST_DECL)
22001 value = DECL_INITIAL (value);
22002
22003 if (simple_type_size_in_bits (TREE_TYPE (value))
22004 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22005 {
22006 /* For constant forms created by add_AT_unsigned, DWARF
22007 consumers (GDB, elfutils, etc.) always zero-extend
22008 the value. Only when the actual value is negative
22009 do we need to use add_AT_int to generate a constant
22010 form that can represent negative values. */
22011 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22012 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22013 add_AT_unsigned (enum_die, DW_AT_const_value,
22014 (unsigned HOST_WIDE_INT) val);
22015 else
22016 add_AT_int (enum_die, DW_AT_const_value, val);
22017 }
22018 else
22019 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22020 that here. TODO: This should be re-worked to use correct
22021 signed/unsigned double tags for all cases. */
22022 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22023 }
22024
22025 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22026 if (TYPE_ARTIFICIAL (type)
22027 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22028 add_AT_flag (type_die, DW_AT_artificial, 1);
22029 }
22030 else
22031 add_AT_flag (type_die, DW_AT_declaration, 1);
22032
22033 add_pubtype (type, type_die);
22034
22035 return type_die;
22036 }
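/* A hedged illustration of the constant-form choice above, not in the
   original source: for a hypothetical "enum e { A = -1, B = 2 };" both values
   fit in a HOST_WIDE_INT; A is negative and its type is signed, so its
   DW_AT_const_value uses the signed form via add_AT_int, while B is emitted
   with add_AT_unsigned.  */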
22037
22038 /* Generate a DIE to represent either a real live formal parameter decl or to
22039 represent just the type of some formal parameter position in some function
22040 type.
22041
22042 Note that this routine is a bit unusual because its argument may be a
22043 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22044 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22045 node. If it's the former then this function is being called to output a
22046 DIE to represent a formal parameter object (or some inlining thereof). If
22047 it's the latter, then this function is only being called to output a
22048 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22049 argument type of some subprogram type.
22050 If EMIT_NAME_P is true, name and source coordinate attributes
22051 are emitted. */
22052
22053 static dw_die_ref
22054 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22055 dw_die_ref context_die)
22056 {
22057 tree node_or_origin = node ? node : origin;
22058 tree ultimate_origin;
22059 dw_die_ref parm_die = NULL;
22060
22061 if (DECL_P (node_or_origin))
22062 {
22063 parm_die = lookup_decl_die (node);
22064
22065 /* If the contexts differ, we may not be talking about the same
22066 thing.
22067 ??? When in LTO the DIE parent is the "abstract" copy and the
22068 context_die is the specification "copy". But this whole block
22069 should eventually be no longer needed. */
22070 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22071 {
22072 if (!DECL_ABSTRACT_P (node))
22073 {
22074 /* This can happen when creating an inlined instance, in
22075 which case we need to create a new DIE that will get
22076 annotated with DW_AT_abstract_origin. */
22077 parm_die = NULL;
22078 }
22079 else
22080 gcc_unreachable ();
22081 }
22082
22083 if (parm_die && parm_die->die_parent == NULL)
22084 {
22085 /* Check that parm_die already has the right attributes that
22086 we would have added below. If any attributes are
22087 missing, fall through to add them. */
22088 if (! DECL_ABSTRACT_P (node_or_origin)
22089 && !get_AT (parm_die, DW_AT_location)
22090 && !get_AT (parm_die, DW_AT_const_value))
22091 /* We are missing location info, and are about to add it. */
22092 ;
22093 else
22094 {
22095 add_child_die (context_die, parm_die);
22096 return parm_die;
22097 }
22098 }
22099 }
22100
22101 /* If we have a previously generated DIE, use it, unless this is a
22102 concrete instance (origin != NULL), in which case we need a new
22103 DIE with a corresponding DW_AT_abstract_origin. */
22104 bool reusing_die;
22105 if (parm_die && origin == NULL)
22106 reusing_die = true;
22107 else
22108 {
22109 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22110 reusing_die = false;
22111 }
22112
22113 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22114 {
22115 case tcc_declaration:
22116 ultimate_origin = decl_ultimate_origin (node_or_origin);
22117 if (node || ultimate_origin)
22118 origin = ultimate_origin;
22119
22120 if (reusing_die)
22121 goto add_location;
22122
22123 if (origin != NULL)
22124 add_abstract_origin_attribute (parm_die, origin);
22125 else if (emit_name_p)
22126 add_name_and_src_coords_attributes (parm_die, node);
22127 if (origin == NULL
22128 || (! DECL_ABSTRACT_P (node_or_origin)
22129 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22130 decl_function_context
22131 (node_or_origin))))
22132 {
22133 tree type = TREE_TYPE (node_or_origin);
22134 if (decl_by_reference_p (node_or_origin))
22135 add_type_attribute (parm_die, TREE_TYPE (type),
22136 TYPE_UNQUALIFIED,
22137 false, context_die);
22138 else
22139 add_type_attribute (parm_die, type,
22140 decl_quals (node_or_origin),
22141 false, context_die);
22142 }
22143 if (origin == NULL && DECL_ARTIFICIAL (node))
22144 add_AT_flag (parm_die, DW_AT_artificial, 1);
22145 add_location:
22146 if (node && node != origin)
22147 equate_decl_number_to_die (node, parm_die);
22148 if (! DECL_ABSTRACT_P (node_or_origin))
22149 add_location_or_const_value_attribute (parm_die, node_or_origin,
22150 node == NULL);
22151
22152 break;
22153
22154 case tcc_type:
22155 /* We were called with some kind of a ..._TYPE node. */
22156 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22157 context_die);
22158 break;
22159
22160 default:
22161 gcc_unreachable ();
22162 }
22163
22164 return parm_die;
22165 }
22166
22167 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22168 children DW_TAG_formal_parameter DIEs representing the arguments of the
22169 parameter pack.
22170
22171 PARM_PACK must be a function parameter pack.
22172 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22173 must point to the subsequent arguments of the function PACK_ARG belongs to.
22174 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22175 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22176 following the last one for which a DIE was generated. */
22177
22178 static dw_die_ref
22179 gen_formal_parameter_pack_die (tree parm_pack,
22180 tree pack_arg,
22181 dw_die_ref subr_die,
22182 tree *next_arg)
22183 {
22184 tree arg;
22185 dw_die_ref parm_pack_die;
22186
22187 gcc_assert (parm_pack
22188 && lang_hooks.function_parameter_pack_p (parm_pack)
22189 && subr_die);
22190
22191 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22192 add_src_coords_attributes (parm_pack_die, parm_pack);
22193
22194 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22195 {
22196 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22197 parm_pack))
22198 break;
22199 gen_formal_parameter_die (arg, NULL,
22200 false /* Don't emit name attribute. */,
22201 parm_pack_die);
22202 }
22203 if (next_arg)
22204 *next_arg = arg;
22205 return parm_pack_die;
22206 }
22207
22208 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22209 at the end of an (ANSI prototyped) formal parameters list. */
22210
22211 static void
22212 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22213 {
22214 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22215 }
22216
22217 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22218 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22219 parameters as specified in some function type specification (except for
22220 those which appear as part of a function *definition*). */
22221
22222 static void
22223 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22224 {
22225 tree link;
22226 tree formal_type = NULL;
22227 tree first_parm_type;
22228 tree arg;
22229
22230 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22231 {
22232 arg = DECL_ARGUMENTS (function_or_method_type);
22233 function_or_method_type = TREE_TYPE (function_or_method_type);
22234 }
22235 else
22236 arg = NULL_TREE;
22237
22238 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22239
22240 /* Make our first pass over the list of formal parameter types and output a
22241 DW_TAG_formal_parameter DIE for each one. */
22242 for (link = first_parm_type; link; )
22243 {
22244 dw_die_ref parm_die;
22245
22246 formal_type = TREE_VALUE (link);
22247 if (formal_type == void_type_node)
22248 break;
22249
22250 /* Output a (nameless) DIE to represent the formal parameter itself. */
22251 if (!POINTER_BOUNDS_TYPE_P (formal_type))
22252 {
22253 parm_die = gen_formal_parameter_die (formal_type, NULL,
22254 true /* Emit name attribute. */,
22255 context_die);
22256 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22257 && link == first_parm_type)
22258 {
22259 add_AT_flag (parm_die, DW_AT_artificial, 1);
22260 if (dwarf_version >= 3 || !dwarf_strict)
22261 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22262 }
22263 else if (arg && DECL_ARTIFICIAL (arg))
22264 add_AT_flag (parm_die, DW_AT_artificial, 1);
22265 }
22266
22267 link = TREE_CHAIN (link);
22268 if (arg)
22269 arg = DECL_CHAIN (arg);
22270 }
22271
22272 /* If this function type has an ellipsis, add a
22273 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22274 if (formal_type != void_type_node)
22275 gen_unspecified_parameters_die (function_or_method_type, context_die);
22276
22277 /* Make our second (and final) pass over the list of formal parameter types
22278 and output DIEs to represent those types (as necessary). */
22279 for (link = TYPE_ARG_TYPES (function_or_method_type);
22280 link && TREE_VALUE (link);
22281 link = TREE_CHAIN (link))
22282 gen_type_die (TREE_VALUE (link), context_die);
22283 }
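/* A hedged example, not in the original source: for a hypothetical prototype
   "int f (int, ...);" the first pass above emits one nameless
   DW_TAG_formal_parameter for the int, and because the TYPE_ARG_TYPES list
   does not end in void_type_node the trailing ellipsis is represented by a
   DW_TAG_unspecified_parameters child.  */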
22284
22285 /* We want to generate the DIE for TYPE so that we can generate the
22286 die for MEMBER, which has been defined; we will need to refer back
22287 to the member declaration nested within TYPE. If we're trying to
22288 generate minimal debug info for TYPE, processing TYPE won't do the
22289 trick; we need to attach the member declaration by hand. */
22290
22291 static void
22292 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22293 {
22294 gen_type_die (type, context_die);
22295
22296 /* If we're trying to avoid duplicate debug info, we may not have
22297 emitted the member decl for this function. Emit it now. */
22298 if (TYPE_STUB_DECL (type)
22299 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22300 && ! lookup_decl_die (member))
22301 {
22302 dw_die_ref type_die;
22303 gcc_assert (!decl_ultimate_origin (member));
22304
22305 push_decl_scope (type);
22306 type_die = lookup_type_die_strip_naming_typedef (type);
22307 if (TREE_CODE (member) == FUNCTION_DECL)
22308 gen_subprogram_die (member, type_die);
22309 else if (TREE_CODE (member) == FIELD_DECL)
22310 {
22311 /* Ignore the nameless fields that are used to skip bits but handle
22312 C++ anonymous unions and structs. */
22313 if (DECL_NAME (member) != NULL_TREE
22314 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22315 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22316 {
22317 struct vlr_context vlr_ctx = {
22318 DECL_CONTEXT (member), /* struct_type */
22319 NULL_TREE /* variant_part_offset */
22320 };
22321 gen_type_die (member_declared_type (member), type_die);
22322 gen_field_die (member, &vlr_ctx, type_die);
22323 }
22324 }
22325 else
22326 gen_variable_die (member, NULL_TREE, type_die);
22327
22328 pop_decl_scope ();
22329 }
22330 }
22331 \f
22332 /* Forward declare these functions, because they are mutually recursive
22333 with their set_block_* pairing functions. */
22334 static void set_decl_origin_self (tree);
22335
22336 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22337 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22338 that it points to the node itself, thus indicating that the node is its
22339 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22340 the given node is NULL, recursively descend the decl/block tree which
22341 it is the root of, and for each other ..._DECL or BLOCK node contained
22342 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22343 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22344 values to point to themselves. */
22345
22346 static void
22347 set_block_origin_self (tree stmt)
22348 {
22349 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22350 {
22351 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22352
22353 {
22354 tree local_decl;
22355
22356 for (local_decl = BLOCK_VARS (stmt);
22357 local_decl != NULL_TREE;
22358 local_decl = DECL_CHAIN (local_decl))
22359 /* Do not recurse on nested functions since the inlining status
22360 of parent and child can be different as per the DWARF spec. */
22361 if (TREE_CODE (local_decl) != FUNCTION_DECL
22362 && !DECL_EXTERNAL (local_decl))
22363 set_decl_origin_self (local_decl);
22364 }
22365
22366 {
22367 tree subblock;
22368
22369 for (subblock = BLOCK_SUBBLOCKS (stmt);
22370 subblock != NULL_TREE;
22371 subblock = BLOCK_CHAIN (subblock))
22372 set_block_origin_self (subblock); /* Recurse. */
22373 }
22374 }
22375 }
22376
22377 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22378 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22379 node so that it points to the node itself, thus indicating that the
22380 node represents its own (abstract) origin. Additionally, if the
22381 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22382 the decl/block tree of which the given node is the root, and for
22383 each other ..._DECL or BLOCK node contained therein whose
22384 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22385 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22386 point to themselves. */
22387
22388 static void
22389 set_decl_origin_self (tree decl)
22390 {
22391 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22392 {
22393 DECL_ABSTRACT_ORIGIN (decl) = decl;
22394 if (TREE_CODE (decl) == FUNCTION_DECL)
22395 {
22396 tree arg;
22397
22398 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22399 DECL_ABSTRACT_ORIGIN (arg) = arg;
22400 if (DECL_INITIAL (decl) != NULL_TREE
22401 && DECL_INITIAL (decl) != error_mark_node)
22402 set_block_origin_self (DECL_INITIAL (decl));
22403 }
22404 }
22405 }
22406 \f
22407 /* Mark the early DIE for DECL as the abstract instance. */
22408
22409 static void
22410 dwarf2out_abstract_function (tree decl)
22411 {
22412 dw_die_ref old_die;
22413
22414 /* Make sure we have the actual abstract inline, not a clone. */
22415 decl = DECL_ORIGIN (decl);
22416
22417 if (DECL_IGNORED_P (decl))
22418 return;
22419
22420 old_die = lookup_decl_die (decl);
22421 /* With early debug we always have an old DIE unless we are in LTO
22422 and the user did not compile but only link with debug. */
22423 if (in_lto_p && ! old_die)
22424 return;
22425 gcc_assert (old_die != NULL);
22426 if (get_AT (old_die, DW_AT_inline)
22427 || get_AT (old_die, DW_AT_abstract_origin))
22428 /* We've already generated the abstract instance. */
22429 return;
22430
22431 /* Go ahead and put DW_AT_inline on the DIE. */
22432 if (DECL_DECLARED_INLINE_P (decl))
22433 {
22434 if (cgraph_function_possibly_inlined_p (decl))
22435 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22436 else
22437 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22438 }
22439 else
22440 {
22441 if (cgraph_function_possibly_inlined_p (decl))
22442 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22443 else
22444 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22445 }
22446
22447 if (DECL_DECLARED_INLINE_P (decl)
22448 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22449 add_AT_flag (old_die, DW_AT_artificial, 1);
22450
22451 set_decl_origin_self (decl);
22452 }
22453
22454 /* Helper function of premark_used_types() which gets called through
22455 the used_types_hash traversal.
22456
22457 Marks the DIE of the given TYPE as perennial, so it never gets
22458 marked as unused by prune_unused_types. */
22459
22460 bool
22461 premark_used_types_helper (tree const &type, void *)
22462 {
22463 dw_die_ref die;
22464
22465 die = lookup_type_die (type);
22466 if (die != NULL)
22467 die->die_perennial_p = 1;
22468 return true;
22469 }
22470
22471 /* Helper function of premark_types_used_by_global_vars which gets called
22472 through htab_traverse.
22473
22474 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22475 marked as unused by prune_unused_types. The DIE of the type is marked
22476 only if the global variable using the type will actually be emitted. */
22477
22478 int
22479 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22480 void *)
22481 {
22482 struct types_used_by_vars_entry *entry;
22483 dw_die_ref die;
22484
22485 entry = (struct types_used_by_vars_entry *) *slot;
22486 gcc_assert (entry->type != NULL
22487 && entry->var_decl != NULL);
22488 die = lookup_type_die (entry->type);
22489 if (die)
22490 {
22491 /* Ask cgraph if the global variable really is to be emitted.
22492 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22493 varpool_node *node = varpool_node::get (entry->var_decl);
22494 if (node && node->definition)
22495 {
22496 die->die_perennial_p = 1;
22497 /* Keep the parent DIEs as well. */
22498 while ((die = die->die_parent) && die->die_perennial_p == 0)
22499 die->die_perennial_p = 1;
22500 }
22501 }
22502 return 1;
22503 }
22504
22505 /* Mark all members of used_types_hash as perennial. */
22506
22507 static void
22508 premark_used_types (struct function *fun)
22509 {
22510 if (fun && fun->used_types_hash)
22511 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22512 }
22513
22514 /* Mark all members of types_used_by_vars_entry as perennial. */
22515
22516 static void
22517 premark_types_used_by_global_vars (void)
22518 {
22519 if (types_used_by_vars_hash)
22520 types_used_by_vars_hash
22521 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22522 }
22523
22524 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22525 for CA_LOC call arg loc node. */
22526
22527 static dw_die_ref
22528 gen_call_site_die (tree decl, dw_die_ref subr_die,
22529 struct call_arg_loc_node *ca_loc)
22530 {
22531 dw_die_ref stmt_die = NULL, die;
22532 tree block = ca_loc->block;
22533
22534 while (block
22535 && block != DECL_INITIAL (decl)
22536 && TREE_CODE (block) == BLOCK)
22537 {
22538 stmt_die = BLOCK_DIE (block);
22539 if (stmt_die)
22540 break;
22541 block = BLOCK_SUPERCONTEXT (block);
22542 }
22543 if (stmt_die == NULL)
22544 stmt_die = subr_die;
22545 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22546 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22547 if (ca_loc->tail_call_p)
22548 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22549 if (ca_loc->symbol_ref)
22550 {
22551 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22552 if (tdie)
22553 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22554 else
22555 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22556 false);
22557 }
22558 return die;
22559 }
22560
22561 /* Generate a DIE to represent a declared function (either file-scope or
22562 block-local). */
22563
22564 static void
22565 gen_subprogram_die (tree decl, dw_die_ref context_die)
22566 {
22567 tree origin = decl_ultimate_origin (decl);
22568 dw_die_ref subr_die;
22569 dw_die_ref old_die = lookup_decl_die (decl);
22570
22571 /* This function gets called multiple times for different stages of
22572 the debug process. For example, for func() in this code:
22573
22574 namespace S
22575 {
22576 void func() { ... }
22577 }
22578
22579 ...we get called 4 times. Twice in early debug and twice in
22580 late debug:
22581
22582 Early debug
22583 -----------
22584
22585 1. Once while generating func() within the namespace. This is
22586 the declaration. The declaration bit below is set, as the
22587 context is the namespace.
22588
22589 A new DIE will be generated with DW_AT_declaration set.
22590
22591 2. Once for func() itself. This is the specification. The
22592 declaration bit below is clear as the context is the CU.
22593
22594 We will use the cached DIE from (1) to create a new DIE with
22595 DW_AT_specification pointing to the declaration in (1).
22596
22597 Late debug via rest_of_handle_final()
22598 -------------------------------------
22599
22600 3. Once while generating func() within the namespace. This is also the
22601 declaration, as in (1), but this time we will early exit below
22602 as we have a cached DIE and a declaration needs no additional
22603 annotations (no locations), as the source declaration line
22604 info is enough.
22605
22606 4. Once for func() itself. As in (2), this is the specification,
22607 but this time we will re-use the cached DIE, and just annotate
22608 it with the location information that should now be available.
22609
22610 For something without namespaces, but with abstract instances, we
22611 are also called multiple times:
22612
22613 class Base
22614 {
22615 public:
22616 Base (); // constructor declaration (1)
22617 };
22618
22619 Base::Base () { } // constructor specification (2)
22620
22621 Early debug
22622 -----------
22623
22624 1. Once for the Base() constructor by virtue of it being a
22625 member of the Base class. This is done via
22626 rest_of_type_compilation.
22627
22628 This is a declaration, so a new DIE will be created with
22629 DW_AT_declaration.
22630
22631 2. Once for the Base() constructor definition, but this time
22632 while generating the abstract instance of the base
22633 constructor (__base_ctor) which is being generated via early
22634 debug of reachable functions.
22635
22636 Even though we have a cached version of the declaration (1),
22637 we will create a DW_AT_specification of the declaration DIE
22638 in (1).
22639
22640 3. Once for the __base_ctor itself, but this time, we generate
22641 a DW_AT_abstract_origin version of the DW_AT_specification in
22642 (2).
22643
22644 Late debug via rest_of_handle_final
22645 -----------------------------------
22646
22647 4. One final time for the __base_ctor (which will have a cached
22648 DIE with DW_AT_abstract_origin created in (3)). This time,
22649 we will just annotate the location information now
22650 available.
22651 */
22652 int declaration = (current_function_decl != decl
22653 || class_or_namespace_scope_p (context_die));
22654
22655 /* A declaration that has been previously dumped needs no
22656 additional information. */
22657 if (old_die && declaration)
22658 return;
22659
22660 /* Now that the C++ front end lazily declares artificial member fns, we
22661 might need to retrofit the declaration into its class. */
22662 if (!declaration && !origin && !old_die
22663 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22664 && !class_or_namespace_scope_p (context_die)
22665 && debug_info_level > DINFO_LEVEL_TERSE)
22666 old_die = force_decl_die (decl);
22667
22668 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22669 if (origin != NULL)
22670 {
22671 gcc_assert (!declaration || local_scope_p (context_die));
22672
22673 /* Fixup die_parent for the abstract instance of a nested
22674 inline function. */
22675 if (old_die && old_die->die_parent == NULL)
22676 add_child_die (context_die, old_die);
22677
22678 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22679 {
22680 /* If we have a DW_AT_abstract_origin we have a working
22681 cached version. */
22682 subr_die = old_die;
22683 }
22684 else
22685 {
22686 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22687 add_abstract_origin_attribute (subr_die, origin);
22688 /* This is where the actual code for a cloned function is.
22689 Let's emit the linkage name attribute for it. This helps
22690 debuggers to, e.g., set breakpoints in
22691 constructors/destructors when the user asks "break
22692 K::K". */
22693 add_linkage_name (subr_die, decl);
22694 }
22695 }
22696 /* A cached copy, possibly from early dwarf generation. Reuse as
22697 much as possible. */
22698 else if (old_die)
22699 {
22700 if (!get_AT_flag (old_die, DW_AT_declaration)
22701 /* We can have a normal definition following an inline one in the
22702 case of redefinition of GNU C extern inlines.
22703 It seems reasonable to use DW_AT_specification in this case. */
22704 && !get_AT (old_die, DW_AT_inline))
22705 {
22706 /* Detect and ignore this case, where we are trying to output
22707 something we have already output. */
22708 if (get_AT (old_die, DW_AT_low_pc)
22709 || get_AT (old_die, DW_AT_ranges))
22710 return;
22711
22712 /* If we have no location information, this must be a
22713 partially generated DIE from early dwarf generation.
22714 Fall through and generate it. */
22715 }
22716
22717 /* If the definition comes from the same place as the declaration,
22718 maybe use the old DIE. We always want the DIE for this function
22719 that has the *_pc attributes to be under comp_unit_die so the
22720 debugger can find it. We also need to do this for abstract
22721 instances of inlines, since the spec requires the out-of-line copy
22722 to have the same parent. For local class methods, this doesn't
22723 apply; we just use the old DIE. */
22724 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22725 struct dwarf_file_data * file_index = lookup_filename (s.file);
22726 if ((is_cu_die (old_die->die_parent)
22727 /* This condition fixes the inconsistency/ICE with the
22728 following Fortran test (or some derivative thereof) while
22729 building libgfortran:
22730
22731 module some_m
22732 contains
22733 logical function funky (FLAG)
22734 funky = .true.
22735 end function
22736 end module
22737 */
22738 || (old_die->die_parent
22739 && old_die->die_parent->die_tag == DW_TAG_module)
22740 || context_die == NULL)
22741 && (DECL_ARTIFICIAL (decl)
22742 /* The location attributes may be in the abstract origin
22743 which in the case of LTO might not be available to
22744 look at. */
22745 || get_AT (old_die, DW_AT_abstract_origin)
22746 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22747 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22748 == (unsigned) s.line)
22749 && (!debug_column_info
22750 || s.column == 0
22751 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22752 == (unsigned) s.column)))))
22753 {
22754 subr_die = old_die;
22755
22756 /* Clear out the declaration attribute, but leave the
22757 parameters so they can be augmented with location
22758 information later. Unless this was a declaration, in
22759 which case, wipe out the nameless parameters and recreate
22760 them further down. */
22761 if (remove_AT (subr_die, DW_AT_declaration))
22762 {
22763
22764 remove_AT (subr_die, DW_AT_object_pointer);
22765 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22766 }
22767 }
22768 /* Make a specification pointing to the previously built
22769 declaration. */
22770 else
22771 {
22772 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22773 add_AT_specification (subr_die, old_die);
22774 add_pubname (decl, subr_die);
22775 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22776 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22777 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22778 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22779 if (debug_column_info
22780 && s.column
22781 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22782 != (unsigned) s.column))
22783 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22784
22785 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22786 emit the real type on the definition DIE. */
22787 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22788 {
22789 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22790 if (die == auto_die || die == decltype_auto_die)
22791 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22792 TYPE_UNQUALIFIED, false, context_die);
22793 }
22794
22795 /* When we process the method declaration, we haven't seen
22796 the out-of-class defaulted definition yet, so we have to
22797 recheck now. */
22798 if ((dwarf_version >= 5 || ! dwarf_strict)
22799 && !get_AT (subr_die, DW_AT_defaulted))
22800 {
22801 int defaulted
22802 = lang_hooks.decls.decl_dwarf_attribute (decl,
22803 DW_AT_defaulted);
22804 if (defaulted != -1)
22805 {
22806 /* Other values must have been handled before. */
22807 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22808 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22809 }
22810 }
22811 }
22812 }
22813 /* Create a fresh DIE for anything else. */
22814 else
22815 {
22816 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22817
22818 if (TREE_PUBLIC (decl))
22819 add_AT_flag (subr_die, DW_AT_external, 1);
22820
22821 add_name_and_src_coords_attributes (subr_die, decl);
22822 add_pubname (decl, subr_die);
22823 if (debug_info_level > DINFO_LEVEL_TERSE)
22824 {
22825 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22826 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22827 TYPE_UNQUALIFIED, false, context_die);
22828 }
22829
22830 add_pure_or_virtual_attribute (subr_die, decl);
22831 if (DECL_ARTIFICIAL (decl))
22832 add_AT_flag (subr_die, DW_AT_artificial, 1);
22833
22834 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22835 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22836
22837 add_alignment_attribute (subr_die, decl);
22838
22839 add_accessibility_attribute (subr_die, decl);
22840 }
22841
22842 /* Unless we have an existing non-declaration DIE, equate the new
22843 DIE. */
22844 if (!old_die || is_declaration_die (old_die))
22845 equate_decl_number_to_die (decl, subr_die);
22846
22847 if (declaration)
22848 {
22849 if (!old_die || !get_AT (old_die, DW_AT_inline))
22850 {
22851 add_AT_flag (subr_die, DW_AT_declaration, 1);
22852
22853 /* If this is an explicit function declaration then generate
22854 a DW_AT_explicit attribute. */
22855 if ((dwarf_version >= 3 || !dwarf_strict)
22856 && lang_hooks.decls.decl_dwarf_attribute (decl,
22857 DW_AT_explicit) == 1)
22858 add_AT_flag (subr_die, DW_AT_explicit, 1);
22859
22860 /* If this is a C++11 deleted special function member then generate
22861 a DW_AT_deleted attribute. */
22862 if ((dwarf_version >= 5 || !dwarf_strict)
22863 && lang_hooks.decls.decl_dwarf_attribute (decl,
22864 DW_AT_deleted) == 1)
22865 add_AT_flag (subr_die, DW_AT_deleted, 1);
22866
22867 /* If this is a C++11 defaulted special function member then
22868 generate a DW_AT_defaulted attribute. */
22869 if (dwarf_version >= 5 || !dwarf_strict)
22870 {
22871 int defaulted
22872 = lang_hooks.decls.decl_dwarf_attribute (decl,
22873 DW_AT_defaulted);
22874 if (defaulted != -1)
22875 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22876 }
22877
22878 /* If this is a C++11 non-static member function with & ref-qualifier
22879 then generate a DW_AT_reference attribute. */
22880 if ((dwarf_version >= 5 || !dwarf_strict)
22881 && lang_hooks.decls.decl_dwarf_attribute (decl,
22882 DW_AT_reference) == 1)
22883 add_AT_flag (subr_die, DW_AT_reference, 1);
22884
22885 /* If this is a C++11 non-static member function with &&
22886 ref-qualifier then generate a DW_AT_reference attribute. */
22887 if ((dwarf_version >= 5 || !dwarf_strict)
22888 && lang_hooks.decls.decl_dwarf_attribute (decl,
22889 DW_AT_rvalue_reference)
22890 == 1)
22891 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22892 }
22893 }
22894 /* For non-DECL_EXTERNAL decls, if range information is available, fill
22895 the DIE with it. */
22896 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22897 {
22898 HOST_WIDE_INT cfa_fb_offset;
22899
22900 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22901
22902 if (!crtl->has_bb_partition)
22903 {
22904 dw_fde_ref fde = fun->fde;
22905 if (fde->dw_fde_begin)
22906 {
22907 /* We have already generated the labels. */
22908 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22909 fde->dw_fde_end, false);
22910 }
22911 else
22912 {
22913 /* Create start/end labels and add the range. */
22914 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22915 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22916 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22917 current_function_funcdef_no);
22918 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22919 current_function_funcdef_no);
22920 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22921 false);
22922 }
22923
22924 #if VMS_DEBUGGING_INFO
22925 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22926 Section 2.3 Prologue and Epilogue Attributes:
22927 When a breakpoint is set on entry to a function, it is generally
22928 desirable for execution to be suspended, not on the very first
22929 instruction of the function, but rather at a point after the
22930 function's frame has been set up, after any language defined local
22931 declaration processing has been completed, and before execution of
22932 the first statement of the function begins. Debuggers generally
22933 cannot properly determine where this point is. Similarly for a
22934 breakpoint set on exit from a function. The prologue and epilogue
22935 attributes allow a compiler to communicate the location(s) to use. */
22936
22937 {
22938 if (fde->dw_fde_vms_end_prologue)
22939 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22940 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22941
22942 if (fde->dw_fde_vms_begin_epilogue)
22943 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22944 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22945 }
22946 #endif
22947
22948 }
22949 else
22950 {
22951 /* Generate pubnames entries for the split function code ranges. */
22952 dw_fde_ref fde = fun->fde;
22953
22954 if (fde->dw_fde_second_begin)
22955 {
22956 if (dwarf_version >= 3 || !dwarf_strict)
22957 {
22958 /* We should use ranges for non-contiguous code section
22959 addresses. Use the actual code range for the initial
22960 section, since the HOT/COLD labels might precede an
22961 alignment offset. */
22962 bool range_list_added = false;
22963 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22964 fde->dw_fde_end, &range_list_added,
22965 false);
22966 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22967 fde->dw_fde_second_end,
22968 &range_list_added, false);
22969 if (range_list_added)
22970 add_ranges (NULL);
22971 }
22972 else
22973 {
22974 /* There is no real support in DW2 for this, so we use a
22975 workaround. First, emit the pub name for the segment
22976 containing the function label. Then make and emit a
22977 simplified subprogram DIE for the second segment with the
22978 name prefixed by __second_sect_of_. We use the same
22979 linkage name for the second DIE so that gdb will find both
22980 sections when given "b foo". */
22981 const char *name = NULL;
22982 tree decl_name = DECL_NAME (decl);
22983 dw_die_ref seg_die;
22984
22985 /* Do the 'primary' section. */
22986 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22987 fde->dw_fde_end, false);
22988
22989 /* Build a minimal DIE for the secondary section. */
22990 seg_die = new_die (DW_TAG_subprogram,
22991 subr_die->die_parent, decl);
22992
22993 if (TREE_PUBLIC (decl))
22994 add_AT_flag (seg_die, DW_AT_external, 1);
22995
22996 if (decl_name != NULL
22997 && IDENTIFIER_POINTER (decl_name) != NULL)
22998 {
22999 name = dwarf2_name (decl, 1);
23000 if (! DECL_ARTIFICIAL (decl))
23001 add_src_coords_attributes (seg_die, decl);
23002
23003 add_linkage_name (seg_die, decl);
23004 }
23005 gcc_assert (name != NULL);
23006 add_pure_or_virtual_attribute (seg_die, decl);
23007 if (DECL_ARTIFICIAL (decl))
23008 add_AT_flag (seg_die, DW_AT_artificial, 1);
23009
23010 name = concat ("__second_sect_of_", name, NULL);
23011 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23012 fde->dw_fde_second_end, false);
23013 add_name_attribute (seg_die, name);
23014 if (want_pubnames ())
23015 add_pubname_string (name, seg_die);
23016 }
23017 }
23018 else
23019 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23020 false);
23021 }
23022
23023 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23024
23025 /* We define the "frame base" as the function's CFA. This is more
23026 convenient for several reasons: (1) It's stable across the prologue
23027 and epilogue, which makes it better than just a frame pointer,
23028 (2) With dwarf3, there exists a one-byte encoding that allows us
23029 to reference the .debug_frame data by proxy, but failing that,
23030 (3) We can at least reuse the code inspection and interpretation
23031 code that determines the CFA position at various points in the
23032 function. */
23033 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23034 {
23035 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23036 add_AT_loc (subr_die, DW_AT_frame_base, op);
23037 }
23038 else
23039 {
23040 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23041 if (list->dw_loc_next)
23042 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23043 else
23044 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23045 }
23046
23047 /* Compute a displacement from the "steady-state frame pointer" to
23048 the CFA. The former is what all stack slots and argument slots
23049 will reference in the rtl; the latter is what we've told the
23050 debugger about. We'll need to adjust all frame_base references
23051 by this displacement. */
23052 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23053
23054 if (fun->static_chain_decl)
23055 {
23056 /* DWARF requires here a location expression that computes the
23057 address of the enclosing subprogram's frame base. The machinery
23058 in tree-nested.c is supposed to store this specific address in the
23059 last field of the FRAME record. */
23060 const tree frame_type
23061 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23062 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23063
23064 tree fb_expr
23065 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23066 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23067 fb_expr, fb_decl, NULL_TREE);
23068
23069 add_AT_location_description (subr_die, DW_AT_static_link,
23070 loc_list_from_tree (fb_expr, 0, NULL));
23071 }
23072
23073 resolve_variable_values ();
23074 }
23075
23076 /* Generate child DIEs for template parameters. */
23077 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23078 gen_generic_params_dies (decl);
23079
23080 /* Now output descriptions of the arguments for this function. This gets
23081 (unnecessarily?) complex because the DECL_ARGUMENTS list
23082 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23083 `...' at the end of the formal parameter list. In order to find out if
23084 there was a trailing ellipsis or not, we must instead look at the type
23085 associated with the FUNCTION_DECL. This will be a node of type
23086 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23087 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23088 an ellipsis at the end. */
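
/* For instance (a sketch; the exact trees depend on the front end), given

       int f (int a, ...);
       int g (int a);

   the TYPE_ARG_TYPES chain of f's FUNCTION_TYPE ends after the entry for
   `int' with no trailing void_type_node, so f is taken to have a trailing
   ellipsis, while g's chain is terminated by void_type_node, so it is not.  */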
23089
23090 /* In the case where we are describing a mere function declaration, all we
23091 need to do here (and all we *can* do here) is to describe the *types* of
23092 its formal parameters. */
23093 if (debug_info_level <= DINFO_LEVEL_TERSE)
23094 ;
23095 else if (declaration)
23096 gen_formal_types_die (decl, subr_die);
23097 else
23098 {
23099 /* Generate DIEs to represent all known formal parameters. */
23100 tree parm = DECL_ARGUMENTS (decl);
23101 tree generic_decl = early_dwarf
23102 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23103 tree generic_decl_parm = generic_decl
23104 ? DECL_ARGUMENTS (generic_decl)
23105 : NULL;
23106
23107 /* Now we want to walk the list of parameters of the function and
23108 emit their relevant DIEs.
23109
23110 We consider both the case of DECL being an instance of a generic
23111 function and the case of it being a normal function.
23112
23113 If DECL is an instance of a generic function we walk the
23114 parameters of the generic function declaration _and_ the parameters of
23115 DECL itself. This is useful because we want to emit specific DIEs for
23116 function parameter packs and those are declared as part of the
23117 generic function declaration. In that particular case,
23118 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23119 That DIE has child DIEs representing the set of arguments
23120 of the pack. Note that the set of pack arguments can be empty.
23121 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23122 child DIEs.
23123
23124 Otherwise, we just consider the parameters of DECL. */
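
/* For illustration (a sketch; names are only for exposition), with

       template <typename... Args> void f (int n, Args... rest);
       template void f<int, long> (int, int, long);

   the generic declaration's DECL_ARGUMENTS contains `n' and the pack
   `rest', while the instantiation's DECL_ARGUMENTS contains `n' plus one
   PARM_DECL per pack element; the pack is what yields the
   DW_TAG_GNU_formal_parameter_pack DIE described above, with one child
   per element.  */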
23125 while (generic_decl_parm || parm)
23126 {
23127 if (generic_decl_parm
23128 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23129 gen_formal_parameter_pack_die (generic_decl_parm,
23130 parm, subr_die,
23131 &parm);
23132 else if (parm && !POINTER_BOUNDS_P (parm))
23133 {
23134 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23135
23136 if (early_dwarf
23137 && parm == DECL_ARGUMENTS (decl)
23138 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23139 && parm_die
23140 && (dwarf_version >= 3 || !dwarf_strict))
23141 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23142
23143 parm = DECL_CHAIN (parm);
23144 }
23145 else if (parm)
23146 parm = DECL_CHAIN (parm);
23147
23148 if (generic_decl_parm)
23149 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23150 }
23151
23152 /* Decide whether we need an unspecified_parameters DIE at the end.
23153 There are two cases to do this for: 1) the ANSI `...' declaration -
23154 this is detectable when the end of the arg list is not a
23155 void_type_node 2) an unprototyped function declaration (not a
23156 definition). This just means that we have no info about the
23157 parameters at all. */
23158 if (early_dwarf)
23159 {
23160 if (prototype_p (TREE_TYPE (decl)))
23161 {
23162 /* This is the prototyped case; check for a trailing `...'. */
23163 if (stdarg_p (TREE_TYPE (decl)))
23164 gen_unspecified_parameters_die (decl, subr_die);
23165 }
23166 else if (DECL_INITIAL (decl) == NULL_TREE)
23167 gen_unspecified_parameters_die (decl, subr_die);
23168 }
23169 }
23170
23171 if (subr_die != old_die)
23172 /* Add the calling convention attribute if requested. */
23173 add_calling_convention_attribute (subr_die, decl);
23174
23175 /* Output Dwarf info for all of the stuff within the body of the function
23176 (if it has one - it may be just a declaration).
23177
23178 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23179 a function. This BLOCK actually represents the outermost binding contour
23180 for the function, i.e. the contour in which the function's formal
23181 parameters and labels get declared. Curiously, it appears that the front
23182 end doesn't actually put the PARM_DECL nodes for the current function onto
23183 the BLOCK_VARS list for this outer scope; they are strung off of the
23184 DECL_ARGUMENTS list for the function instead.
23185
23186 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23187 the LABEL_DECL nodes for the function however, and we output DWARF info
23188 for those in decls_for_scope. Just within the `outer_scope' there will be
23189 a BLOCK node representing the function's outermost pair of curly braces,
23190 and any blocks used for the base and member initializers of a C++
23191 constructor function. */
23192 tree outer_scope = DECL_INITIAL (decl);
23193 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23194 {
23195 int call_site_note_count = 0;
23196 int tail_call_site_note_count = 0;
23197
23198 /* Emit a DW_TAG_variable DIE for a named return value. */
23199 if (DECL_NAME (DECL_RESULT (decl)))
23200 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23201
23202 /* The first time through decls_for_scope we will generate the
23203 DIEs for the locals. The second time, we fill in the
23204 location info. */
23205 decls_for_scope (outer_scope, subr_die);
23206
23207 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23208 {
23209 struct call_arg_loc_node *ca_loc;
23210 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23211 {
23212 dw_die_ref die = NULL;
23213 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23214 rtx arg, next_arg;
23215
23216 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23217 ? XEXP (ca_loc->call_arg_loc_note, 0)
23218 : NULL_RTX);
23219 arg; arg = next_arg)
23220 {
23221 dw_loc_descr_ref reg, val;
23222 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23223 dw_die_ref cdie, tdie = NULL;
23224
23225 next_arg = XEXP (arg, 1);
23226 if (REG_P (XEXP (XEXP (arg, 0), 0))
23227 && next_arg
23228 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23229 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23230 && REGNO (XEXP (XEXP (arg, 0), 0))
23231 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23232 next_arg = XEXP (next_arg, 1);
23233 if (mode == VOIDmode)
23234 {
23235 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23236 if (mode == VOIDmode)
23237 mode = GET_MODE (XEXP (arg, 0));
23238 }
23239 if (mode == VOIDmode || mode == BLKmode)
23240 continue;
23241 /* Get dynamic information about the call target only if we
23242 have no static information: we cannot generate both
23243 DW_AT_call_origin and DW_AT_call_target
23244 attributes. */
23245 if (ca_loc->symbol_ref == NULL_RTX)
23246 {
23247 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23248 {
23249 tloc = XEXP (XEXP (arg, 0), 1);
23250 continue;
23251 }
23252 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23253 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23254 {
23255 tlocc = XEXP (XEXP (arg, 0), 1);
23256 continue;
23257 }
23258 }
23259 reg = NULL;
23260 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23261 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23262 VAR_INIT_STATUS_INITIALIZED);
23263 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23264 {
23265 rtx mem = XEXP (XEXP (arg, 0), 0);
23266 reg = mem_loc_descriptor (XEXP (mem, 0),
23267 get_address_mode (mem),
23268 GET_MODE (mem),
23269 VAR_INIT_STATUS_INITIALIZED);
23270 }
23271 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23272 == DEBUG_PARAMETER_REF)
23273 {
23274 tree tdecl
23275 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23276 tdie = lookup_decl_die (tdecl);
23277 if (tdie == NULL)
23278 continue;
23279 }
23280 else
23281 continue;
23282 if (reg == NULL
23283 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23284 != DEBUG_PARAMETER_REF)
23285 continue;
23286 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23287 VOIDmode,
23288 VAR_INIT_STATUS_INITIALIZED);
23289 if (val == NULL)
23290 continue;
23291 if (die == NULL)
23292 die = gen_call_site_die (decl, subr_die, ca_loc);
23293 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23294 NULL_TREE);
23295 if (reg != NULL)
23296 add_AT_loc (cdie, DW_AT_location, reg);
23297 else if (tdie != NULL)
23298 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23299 tdie);
23300 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23301 if (next_arg != XEXP (arg, 1))
23302 {
23303 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23304 if (mode == VOIDmode)
23305 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23306 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23307 0), 1),
23308 mode, VOIDmode,
23309 VAR_INIT_STATUS_INITIALIZED);
23310 if (val != NULL)
23311 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23312 val);
23313 }
23314 }
23315 if (die == NULL
23316 && (ca_loc->symbol_ref || tloc))
23317 die = gen_call_site_die (decl, subr_die, ca_loc);
23318 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23319 {
23320 dw_loc_descr_ref tval = NULL;
23321
23322 if (tloc != NULL_RTX)
23323 tval = mem_loc_descriptor (tloc,
23324 GET_MODE (tloc) == VOIDmode
23325 ? Pmode : GET_MODE (tloc),
23326 VOIDmode,
23327 VAR_INIT_STATUS_INITIALIZED);
23328 if (tval)
23329 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23330 else if (tlocc != NULL_RTX)
23331 {
23332 tval = mem_loc_descriptor (tlocc,
23333 GET_MODE (tlocc) == VOIDmode
23334 ? Pmode : GET_MODE (tlocc),
23335 VOIDmode,
23336 VAR_INIT_STATUS_INITIALIZED);
23337 if (tval)
23338 add_AT_loc (die,
23339 dwarf_AT (DW_AT_call_target_clobbered),
23340 tval);
23341 }
23342 }
23343 if (die != NULL)
23344 {
23345 call_site_note_count++;
23346 if (ca_loc->tail_call_p)
23347 tail_call_site_note_count++;
23348 }
23349 }
23350 }
23351 call_arg_locations = NULL;
23352 call_arg_loc_last = NULL;
23353 if (tail_call_site_count >= 0
23354 && tail_call_site_count == tail_call_site_note_count
23355 && (!dwarf_strict || dwarf_version >= 5))
23356 {
23357 if (call_site_count >= 0
23358 && call_site_count == call_site_note_count)
23359 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23360 else
23361 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23362 }
23363 call_site_count = -1;
23364 tail_call_site_count = -1;
23365 }
23366
23367 /* Mark used types after we have created DIEs for the function's scopes. */
23368 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23369 }
23370
23371 /* Returns a hash value for X (which really is a die_struct). */
23372
23373 hashval_t
23374 block_die_hasher::hash (die_struct *d)
23375 {
23376 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23377 }
23378
23379 /* Return nonzero if decl_id and die_parent of die_struct X are the same
23380 as decl_id and die_parent of die_struct Y. */
23381
23382 bool
23383 block_die_hasher::equal (die_struct *x, die_struct *y)
23384 {
23385 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23386 }
23387
23388 /* Hold information about markers for inlined entry points. */
23389 struct GTY ((for_user)) inline_entry_data
23390 {
23391 /* The block that's the inlined_function_outer_scope for an inlined
23392 function. */
23393 tree block;
23394
23395 /* The label at the inlined entry point. */
23396 const char *label_pfx;
23397 unsigned int label_num;
23398
23399 /* The view number to be used as the inlined entry point. */
23400 var_loc_view view;
23401 };
23402
23403 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23404 {
23405 typedef tree compare_type;
23406 static inline hashval_t hash (const inline_entry_data *);
23407 static inline bool equal (const inline_entry_data *, const_tree);
23408 };
23409
23410 /* Hash table routines for inline_entry_data. */
23411
23412 inline hashval_t
23413 inline_entry_data_hasher::hash (const inline_entry_data *data)
23414 {
23415 return htab_hash_pointer (data->block);
23416 }
23417
23418 inline bool
23419 inline_entry_data_hasher::equal (const inline_entry_data *data,
23420 const_tree block)
23421 {
23422 return data->block == block;
23423 }
23424
23425 /* Inlined entry points pending DIE creation in this compilation unit. */
23426
23427 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23428
23429
23430 /* Return TRUE if DECL, which may have been previously generated as
23431 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23432 true if decl (or its origin) is either an extern declaration or a
23433 class/namespace scoped declaration.
23434
23435 The declare_in_namespace support causes us to get two DIEs for one
23436 variable, both of which are declarations. We want to avoid
23437 considering one to be a specification, so we must test for
23438 DECLARATION and DW_AT_declaration. */
23439 static inline bool
23440 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23441 {
23442 return (old_die && TREE_STATIC (decl) && !declaration
23443 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23444 }
23445
23446 /* Return true if DECL is a local static. */
23447
23448 static inline bool
23449 local_function_static (tree decl)
23450 {
23451 gcc_assert (VAR_P (decl));
23452 return TREE_STATIC (decl)
23453 && DECL_CONTEXT (decl)
23454 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23455 }
23456
23457 /* Generate a DIE to represent a declared data object.
23458 Either DECL or ORIGIN must be non-null. */
23459
23460 static void
23461 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23462 {
23463 HOST_WIDE_INT off = 0;
23464 tree com_decl;
23465 tree decl_or_origin = decl ? decl : origin;
23466 tree ultimate_origin;
23467 dw_die_ref var_die;
23468 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23469 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23470 || class_or_namespace_scope_p (context_die));
23471 bool specialization_p = false;
23472 bool no_linkage_name = false;
23473
23474 /* While C++ inline static data members have definitions inside the
23475 class, force the first DIE to be a declaration, then let gen_member_die
23476 reparent it to the class context and call gen_variable_die again
23477 to create the outside of the class DIE for the definition. */
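
/* A sketch of the case in question (C++17):

       struct S { static inline int x = 42; };

   The first DIE created for S::x is forced to be a declaration inside
   S's DIE; the later call for the definition then gets the location
   information and (in the usual case) a DW_AT_specification back to it.  */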
23478 if (!declaration
23479 && old_die == NULL
23480 && decl
23481 && DECL_CONTEXT (decl)
23482 && TYPE_P (DECL_CONTEXT (decl))
23483 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23484 {
23485 declaration = true;
23486 if (dwarf_version < 5)
23487 no_linkage_name = true;
23488 }
23489
23490 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23491 if (decl || ultimate_origin)
23492 origin = ultimate_origin;
23493 com_decl = fortran_common (decl_or_origin, &off);
23494
23495 /* A symbol in a Fortran COMMON block gets emitted as a child of the common
23496 block DIE, in the form of a data member. */
23497 if (com_decl)
23498 {
23499 dw_die_ref com_die;
23500 dw_loc_list_ref loc = NULL;
23501 die_node com_die_arg;
23502
23503 var_die = lookup_decl_die (decl_or_origin);
23504 if (var_die)
23505 {
23506 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23507 {
23508 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23509 if (loc)
23510 {
23511 if (off)
23512 {
23513 /* Optimize the common case. */
23514 if (single_element_loc_list_p (loc)
23515 && loc->expr->dw_loc_opc == DW_OP_addr
23516 && loc->expr->dw_loc_next == NULL
23517 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23518 == SYMBOL_REF)
23519 {
23520 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23521 loc->expr->dw_loc_oprnd1.v.val_addr
23522 = plus_constant (GET_MODE (x), x , off);
23523 }
23524 else
23525 loc_list_plus_const (loc, off);
23526 }
23527 add_AT_location_description (var_die, DW_AT_location, loc);
23528 remove_AT (var_die, DW_AT_declaration);
23529 }
23530 }
23531 return;
23532 }
23533
23534 if (common_block_die_table == NULL)
23535 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23536
23537 com_die_arg.decl_id = DECL_UID (com_decl);
23538 com_die_arg.die_parent = context_die;
23539 com_die = common_block_die_table->find (&com_die_arg);
23540 if (! early_dwarf)
23541 loc = loc_list_from_tree (com_decl, 2, NULL);
23542 if (com_die == NULL)
23543 {
23544 const char *cnam
23545 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23546 die_node **slot;
23547
23548 com_die = new_die (DW_TAG_common_block, context_die, decl);
23549 add_name_and_src_coords_attributes (com_die, com_decl);
23550 if (loc)
23551 {
23552 add_AT_location_description (com_die, DW_AT_location, loc);
23553 /* Avoid sharing the same loc descriptor between
23554 DW_TAG_common_block and DW_TAG_variable. */
23555 loc = loc_list_from_tree (com_decl, 2, NULL);
23556 }
23557 else if (DECL_EXTERNAL (decl_or_origin))
23558 add_AT_flag (com_die, DW_AT_declaration, 1);
23559 if (want_pubnames ())
23560 add_pubname_string (cnam, com_die); /* ??? needed? */
23561 com_die->decl_id = DECL_UID (com_decl);
23562 slot = common_block_die_table->find_slot (com_die, INSERT);
23563 *slot = com_die;
23564 }
23565 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23566 {
23567 add_AT_location_description (com_die, DW_AT_location, loc);
23568 loc = loc_list_from_tree (com_decl, 2, NULL);
23569 remove_AT (com_die, DW_AT_declaration);
23570 }
23571 var_die = new_die (DW_TAG_variable, com_die, decl);
23572 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23573 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23574 decl_quals (decl_or_origin), false,
23575 context_die);
23576 add_alignment_attribute (var_die, decl);
23577 add_AT_flag (var_die, DW_AT_external, 1);
23578 if (loc)
23579 {
23580 if (off)
23581 {
23582 /* Optimize the common case. */
23583 if (single_element_loc_list_p (loc)
23584 && loc->expr->dw_loc_opc == DW_OP_addr
23585 && loc->expr->dw_loc_next == NULL
23586 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23587 {
23588 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23589 loc->expr->dw_loc_oprnd1.v.val_addr
23590 = plus_constant (GET_MODE (x), x, off);
23591 }
23592 else
23593 loc_list_plus_const (loc, off);
23594 }
23595 add_AT_location_description (var_die, DW_AT_location, loc);
23596 }
23597 else if (DECL_EXTERNAL (decl_or_origin))
23598 add_AT_flag (var_die, DW_AT_declaration, 1);
23599 if (decl)
23600 equate_decl_number_to_die (decl, var_die);
23601 return;
23602 }
23603
23604 if (old_die)
23605 {
23606 if (declaration)
23607 {
23608 /* A declaration that has been previously dumped needs no
23609 further annotations, since it doesn't need location info on
23610 the second pass. */
23611 return;
23612 }
23613 else if (decl_will_get_specification_p (old_die, decl, declaration)
23614 && !get_AT (old_die, DW_AT_specification))
23615 {
23616 /* Fall-thru so we can make a new variable die along with a
23617 DW_AT_specification. */
23618 }
23619 else if (origin && old_die->die_parent != context_die)
23620 {
23621 /* If we will be creating an inlined instance, we need a
23622 new DIE that will get annotated with
23623 DW_AT_abstract_origin. */
23624 gcc_assert (!DECL_ABSTRACT_P (decl));
23625 }
23626 else
23627 {
23628 /* If a DIE was dumped early, it still needs location info.
23629 Skip to where we fill the location bits. */
23630 var_die = old_die;
23631
23632 /* ??? In LTRANS we cannot annotate early created variably
23633 modified type DIEs without copying them and adjusting all
23634 references to them. Thus we dumped them again. Also add a
23635 reference to them, but beware of a -g0 compile and -g link,
23636 in which case the reference will already be present. */
23637 tree type = TREE_TYPE (decl_or_origin);
23638 if (in_lto_p
23639 && ! get_AT (var_die, DW_AT_type)
23640 && variably_modified_type_p
23641 (type, decl_function_context (decl_or_origin)))
23642 {
23643 if (decl_by_reference_p (decl_or_origin))
23644 add_type_attribute (var_die, TREE_TYPE (type),
23645 TYPE_UNQUALIFIED, false, context_die);
23646 else
23647 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23648 false, context_die);
23649 }
23650
23651 goto gen_variable_die_location;
23652 }
23653 }
23654
23655 /* For static data members, the declaration in the class is supposed
23656 to have the DW_TAG_member tag in DWARF{3,4}, and we emit it for compatibility
23657 also in DWARF2; the specification should still be DW_TAG_variable
23658 referencing the DW_TAG_member DIE. */
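
/* For example (a sketch), with

       struct S { static int x; };   // in-class declaration
       int S::x;                     // out-of-class definition

   the in-class declaration becomes a DW_TAG_member child of S's DIE
   (for DWARF < 5), and the definition becomes a DW_TAG_variable whose
   DW_AT_specification refers back to that DW_TAG_member.  */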
23659 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23660 var_die = new_die (DW_TAG_member, context_die, decl);
23661 else
23662 var_die = new_die (DW_TAG_variable, context_die, decl);
23663
23664 if (origin != NULL)
23665 add_abstract_origin_attribute (var_die, origin);
23666
23667 /* Loop unrolling can create multiple blocks that refer to the same
23668 static variable, so we must test for the DW_AT_declaration flag.
23669
23670 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23671 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23672 sharing them.
23673
23674 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23675 else if (decl_will_get_specification_p (old_die, decl, declaration))
23676 {
23677 /* This is a definition of a C++ class level static. */
23678 add_AT_specification (var_die, old_die);
23679 specialization_p = true;
23680 if (DECL_NAME (decl))
23681 {
23682 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23683 struct dwarf_file_data * file_index = lookup_filename (s.file);
23684
23685 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23686 add_AT_file (var_die, DW_AT_decl_file, file_index);
23687
23688 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23689 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23690
23691 if (debug_column_info
23692 && s.column
23693 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23694 != (unsigned) s.column))
23695 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23696
23697 if (old_die->die_tag == DW_TAG_member)
23698 add_linkage_name (var_die, decl);
23699 }
23700 }
23701 else
23702 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23703
23704 if ((origin == NULL && !specialization_p)
23705 || (origin != NULL
23706 && !DECL_ABSTRACT_P (decl_or_origin)
23707 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23708 decl_function_context
23709 (decl_or_origin))))
23710 {
23711 tree type = TREE_TYPE (decl_or_origin);
23712
23713 if (decl_by_reference_p (decl_or_origin))
23714 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23715 context_die);
23716 else
23717 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23718 context_die);
23719 }
23720
23721 if (origin == NULL && !specialization_p)
23722 {
23723 if (TREE_PUBLIC (decl))
23724 add_AT_flag (var_die, DW_AT_external, 1);
23725
23726 if (DECL_ARTIFICIAL (decl))
23727 add_AT_flag (var_die, DW_AT_artificial, 1);
23728
23729 add_alignment_attribute (var_die, decl);
23730
23731 add_accessibility_attribute (var_die, decl);
23732 }
23733
23734 if (declaration)
23735 add_AT_flag (var_die, DW_AT_declaration, 1);
23736
23737 if (decl && (DECL_ABSTRACT_P (decl)
23738 || !old_die || is_declaration_die (old_die)))
23739 equate_decl_number_to_die (decl, var_die);
23740
23741 gen_variable_die_location:
23742 if (! declaration
23743 && (! DECL_ABSTRACT_P (decl_or_origin)
23744 /* Local static vars are shared between all clones/inlines,
23745 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23746 already set. */
23747 || (VAR_P (decl_or_origin)
23748 && TREE_STATIC (decl_or_origin)
23749 && DECL_RTL_SET_P (decl_or_origin))))
23750 {
23751 if (early_dwarf)
23752 add_pubname (decl_or_origin, var_die);
23753 else
23754 add_location_or_const_value_attribute (var_die, decl_or_origin,
23755 decl == NULL);
23756 }
23757 else
23758 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23759
23760 if ((dwarf_version >= 4 || !dwarf_strict)
23761 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23762 DW_AT_const_expr) == 1
23763 && !get_AT (var_die, DW_AT_const_expr)
23764 && !specialization_p)
23765 add_AT_flag (var_die, DW_AT_const_expr, 1);
23766
23767 if (!dwarf_strict)
23768 {
23769 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23770 DW_AT_inline);
23771 if (inl != -1
23772 && !get_AT (var_die, DW_AT_inline)
23773 && !specialization_p)
23774 add_AT_unsigned (var_die, DW_AT_inline, inl);
23775 }
23776 }
23777
23778 /* Generate a DIE to represent a named constant. */
23779
23780 static void
23781 gen_const_die (tree decl, dw_die_ref context_die)
23782 {
23783 dw_die_ref const_die;
23784 tree type = TREE_TYPE (decl);
23785
23786 const_die = lookup_decl_die (decl);
23787 if (const_die)
23788 return;
23789
23790 const_die = new_die (DW_TAG_constant, context_die, decl);
23791 equate_decl_number_to_die (decl, const_die);
23792 add_name_and_src_coords_attributes (const_die, decl);
23793 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23794 if (TREE_PUBLIC (decl))
23795 add_AT_flag (const_die, DW_AT_external, 1);
23796 if (DECL_ARTIFICIAL (decl))
23797 add_AT_flag (const_die, DW_AT_artificial, 1);
23798 tree_add_const_value_attribute_for_decl (const_die, decl);
23799 }
23800
23801 /* Generate a DIE to represent a label identifier. */
23802
23803 static void
23804 gen_label_die (tree decl, dw_die_ref context_die)
23805 {
23806 tree origin = decl_ultimate_origin (decl);
23807 dw_die_ref lbl_die = lookup_decl_die (decl);
23808 rtx insn;
23809 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23810
23811 if (!lbl_die)
23812 {
23813 lbl_die = new_die (DW_TAG_label, context_die, decl);
23814 equate_decl_number_to_die (decl, lbl_die);
23815
23816 if (origin != NULL)
23817 add_abstract_origin_attribute (lbl_die, origin);
23818 else
23819 add_name_and_src_coords_attributes (lbl_die, decl);
23820 }
23821
23822 if (DECL_ABSTRACT_P (decl))
23823 equate_decl_number_to_die (decl, lbl_die);
23824 else if (! early_dwarf)
23825 {
23826 insn = DECL_RTL_IF_SET (decl);
23827
23828 /* Deleted labels are programmer-specified labels which have been
23829 eliminated because of various optimizations. We still emit them
23830 here so that it is possible to put breakpoints on them. */
23831 if (insn
23832 && (LABEL_P (insn)
23833 || ((NOTE_P (insn)
23834 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23835 {
23836 /* When optimization is enabled (via -O) some parts of the compiler
23837 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23838 represent source-level labels which were explicitly declared by
23839 the user. This really shouldn't be happening though, so catch
23840 it if it ever does happen. */
23841 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23842
23843 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23844 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23845 }
23846 else if (insn
23847 && NOTE_P (insn)
23848 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23849 && CODE_LABEL_NUMBER (insn) != -1)
23850 {
23851 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23852 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23853 }
23854 }
23855 }
23856
23857 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23858 attributes to the DIE for a block STMT, to describe where the inlined
23859 function was called from. This is similar to add_src_coords_attributes. */
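
/* For example (illustrative only), with

       inline int f () { return 1; }
       int g () { return f (); }   // call site in g

   the DW_TAG_inlined_subroutine DIE for f inside g's DIE gets
   DW_AT_call_file and DW_AT_call_line (plus DW_AT_call_column when
   column info is enabled) describing this call site, taken from the
   BLOCK's source location.  */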
23860
23861 static inline void
23862 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23863 {
23864 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23865
23866 if (dwarf_version >= 3 || !dwarf_strict)
23867 {
23868 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23869 add_AT_unsigned (die, DW_AT_call_line, s.line);
23870 if (debug_column_info && s.column)
23871 add_AT_unsigned (die, DW_AT_call_column, s.column);
23872 }
23873 }
23874
23875
23876 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23877 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23878
23879 static inline void
23880 add_high_low_attributes (tree stmt, dw_die_ref die)
23881 {
23882 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23883
23884 if (inline_entry_data **iedp
23885 = !inline_entry_data_table ? NULL
23886 : inline_entry_data_table->find_slot_with_hash (stmt,
23887 htab_hash_pointer (stmt),
23888 NO_INSERT))
23889 {
23890 inline_entry_data *ied = *iedp;
23891 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23892 gcc_assert (debug_inline_points);
23893 gcc_assert (inlined_function_outer_scope_p (stmt));
23894
23895 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23896 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23897
23898 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23899 && !dwarf_strict)
23900 {
23901 if (!output_asm_line_debug_info ())
23902 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23903 else
23904 {
23905 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23906 /* FIXME: this will resolve to a small number. Could we
23907 possibly emit smaller data? Ideally we'd emit a
23908 uleb128, but that would make the size of DIEs
23909 impossible for the compiler to compute, since it's
23910 the assembler that computes the value of the view
23911 label in this case. Ideally, we'd have a single form
23912 encompassing both the address and the view, and
23913 indirecting them through a table might make things
23914 easier, but even that would be more wasteful,
23915 space-wise, than what we have now. */
23916 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23917 }
23918 }
23919
23920 inline_entry_data_table->clear_slot (iedp);
23921 }
23922
23923 if (BLOCK_FRAGMENT_CHAIN (stmt)
23924 && (dwarf_version >= 3 || !dwarf_strict))
23925 {
23926 tree chain, superblock = NULL_TREE;
23927 dw_die_ref pdie;
23928 dw_attr_node *attr = NULL;
23929
23930 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23931 {
23932 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23933 BLOCK_NUMBER (stmt));
23934 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23935 }
23936
23937 /* Optimize duplicate .debug_ranges lists or even tails of
23938 lists. If this BLOCK has the same ranges as its supercontext,
23939 look up the DW_AT_ranges attribute in the supercontext (and
23940 recursively so), verify that the ranges_table contains the
23941 right values, and use it instead of adding a new range list. */
23942 for (chain = stmt, pdie = die;
23943 BLOCK_SAME_RANGE (chain);
23944 chain = BLOCK_SUPERCONTEXT (chain))
23945 {
23946 dw_attr_node *new_attr;
23947
23948 pdie = pdie->die_parent;
23949 if (pdie == NULL)
23950 break;
23951 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23952 break;
23953 new_attr = get_AT (pdie, DW_AT_ranges);
23954 if (new_attr == NULL
23955 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23956 break;
23957 attr = new_attr;
23958 superblock = BLOCK_SUPERCONTEXT (chain);
23959 }
23960 if (attr != NULL
23961 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23962 == BLOCK_NUMBER (superblock))
23963 && BLOCK_FRAGMENT_CHAIN (superblock))
23964 {
23965 unsigned long off = attr->dw_attr_val.v.val_offset;
23966 unsigned long supercnt = 0, thiscnt = 0;
23967 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23968 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23969 {
23970 ++supercnt;
23971 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23972 == BLOCK_NUMBER (chain));
23973 }
23974 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23975 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23976 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23977 ++thiscnt;
23978 gcc_assert (supercnt >= thiscnt);
23979 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23980 false);
23981 note_rnglist_head (off + supercnt - thiscnt);
23982 return;
23983 }
23984
23985 unsigned int offset = add_ranges (stmt, true);
23986 add_AT_range_list (die, DW_AT_ranges, offset, false);
23987 note_rnglist_head (offset);
23988
23989 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23990 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23991 do
23992 {
23993 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23994 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23995 chain = BLOCK_FRAGMENT_CHAIN (chain);
23996 }
23997 while (chain);
23998 add_ranges (NULL);
23999 }
24000 else
24001 {
24002 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24003 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24004 BLOCK_NUMBER (stmt));
24005 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24006 BLOCK_NUMBER (stmt));
24007 add_AT_low_high_pc (die, label, label_high, false);
24008 }
24009 }
24010
24011 /* Generate a DIE for a lexical block. */
24012
24013 static void
24014 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24015 {
24016 dw_die_ref old_die = BLOCK_DIE (stmt);
24017 dw_die_ref stmt_die = NULL;
24018 if (!old_die)
24019 {
24020 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24021 BLOCK_DIE (stmt) = stmt_die;
24022 }
24023
24024 if (BLOCK_ABSTRACT (stmt))
24025 {
24026 if (old_die)
24027 {
24028 /* This must have been generated early and it won't even
24029 need location information since it's a DW_AT_inline
24030 function. */
24031 if (flag_checking)
24032 for (dw_die_ref c = context_die; c; c = c->die_parent)
24033 if (c->die_tag == DW_TAG_inlined_subroutine
24034 || c->die_tag == DW_TAG_subprogram)
24035 {
24036 gcc_assert (get_AT (c, DW_AT_inline));
24037 break;
24038 }
24039 return;
24040 }
24041 }
24042 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24043 {
24044 /* If this is an inlined instance, create a new lexical die for
24045 anything below to attach DW_AT_abstract_origin to. */
24046 if (old_die)
24047 {
24048 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24049 BLOCK_DIE (stmt) = stmt_die;
24050 old_die = NULL;
24051 }
24052
24053 tree origin = block_ultimate_origin (stmt);
24054 if (origin != NULL_TREE && origin != stmt)
24055 add_abstract_origin_attribute (stmt_die, origin);
24056 }
24057
24058 if (old_die)
24059 stmt_die = old_die;
24060
24061 /* A non-abstract block whose blocks have already been reordered
24062 should have the instruction range for this block. If so, set the
24063 high/low attributes. */
24064 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24065 {
24066 gcc_assert (stmt_die);
24067 add_high_low_attributes (stmt, stmt_die);
24068 }
24069
24070 decls_for_scope (stmt, stmt_die);
24071 }
24072
24073 /* Generate a DIE for an inlined subprogram. */
24074
24075 static void
24076 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24077 {
24078 tree decl;
24079
24080 /* The instance of the function that is effectively being inlined must not
24081 be abstract. */
24082 gcc_assert (! BLOCK_ABSTRACT (stmt));
24083
24084 decl = block_ultimate_origin (stmt);
24085
24086 /* Make sure any inlined functions are known to be inlineable. */
24087 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24088 || cgraph_function_possibly_inlined_p (decl));
24089
24090 if (! BLOCK_ABSTRACT (stmt))
24091 {
24092 dw_die_ref subr_die
24093 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24094
24095 if (call_arg_locations || debug_inline_points)
24096 BLOCK_DIE (stmt) = subr_die;
24097 add_abstract_origin_attribute (subr_die, decl);
24098 if (TREE_ASM_WRITTEN (stmt))
24099 add_high_low_attributes (stmt, subr_die);
24100 add_call_src_coords_attributes (stmt, subr_die);
24101
24102 decls_for_scope (stmt, subr_die);
24103 }
24104 }
24105
24106 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24107 the comment for VLR_CONTEXT. */
24108
24109 static void
24110 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24111 {
24112 dw_die_ref decl_die;
24113
24114 if (TREE_TYPE (decl) == error_mark_node)
24115 return;
24116
24117 decl_die = new_die (DW_TAG_member, context_die, decl);
24118 add_name_and_src_coords_attributes (decl_die, decl);
24119 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24120 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24121 context_die);
24122
24123 if (DECL_BIT_FIELD_TYPE (decl))
24124 {
24125 add_byte_size_attribute (decl_die, decl);
24126 add_bit_size_attribute (decl_die, decl);
24127 add_bit_offset_attribute (decl_die, decl, ctx);
24128 }
24129
24130 add_alignment_attribute (decl_die, decl);
24131
24132 /* If we have a variant part offset, then we are supposed to process a member
24133 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24134 trees. */
24135 gcc_assert (ctx->variant_part_offset == NULL_TREE
24136 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24137 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24138 add_data_member_location_attribute (decl_die, decl, ctx);
24139
24140 if (DECL_ARTIFICIAL (decl))
24141 add_AT_flag (decl_die, DW_AT_artificial, 1);
24142
24143 add_accessibility_attribute (decl_die, decl);
24144
24145 /* Equate decl number to die, so that we can look up this decl later on. */
24146 equate_decl_number_to_die (decl, decl_die);
24147 }
24148
24149 /* Generate a DIE for a pointer to a member type. TYPE can be an
24150 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24151 pointer to member function. */
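/* For C++ (illustrative only), "int S::*" comes in as an OFFSET_TYPE, while a
   pointer to member function such as "void (S::*)()" is represented with a
   RECORD_TYPE.  */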
24152
24153 static void
24154 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24155 {
24156 if (lookup_type_die (type))
24157 return;
24158
24159 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24160 scope_die_for (type, context_die), type);
24161
24162 equate_type_number_to_die (type, ptr_die);
24163 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24164 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24165 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24166 context_die);
24167 add_alignment_attribute (ptr_die, type);
24168
24169 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24170 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24171 {
24172 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24173 add_AT_loc (ptr_die, DW_AT_use_location, op);
24174 }
24175 }
24176
24177 static char *producer_string;
24178
24179 /* Return a heap-allocated producer string that also includes the command
24180 line options if -grecord-gcc-switches is in effect. */
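/* For illustration only (the exact contents depend on the front end, the
   compiler version and the options used): with -grecord-gcc-switches this
   might produce something like "GNU C17 9.1.0 -march=x86-64 -O2 -g".  */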
24181
24182 static char *
24183 gen_producer_string (void)
24184 {
24185 size_t j;
24186 auto_vec<const char *> switches;
24187 const char *language_string = lang_hooks.name;
24188 char *producer, *tail;
24189 const char *p;
24190 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24191 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24192
24193 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24194 switch (save_decoded_options[j].opt_index)
24195 {
24196 case OPT_o:
24197 case OPT_d:
24198 case OPT_dumpbase:
24199 case OPT_dumpdir:
24200 case OPT_auxbase:
24201 case OPT_auxbase_strip:
24202 case OPT_quiet:
24203 case OPT_version:
24204 case OPT_v:
24205 case OPT_w:
24206 case OPT_L:
24207 case OPT_D:
24208 case OPT_I:
24209 case OPT_U:
24210 case OPT_SPECIAL_unknown:
24211 case OPT_SPECIAL_ignore:
24212 case OPT_SPECIAL_program_name:
24213 case OPT_SPECIAL_input_file:
24214 case OPT_grecord_gcc_switches:
24215 case OPT__output_pch_:
24216 case OPT_fdiagnostics_show_location_:
24217 case OPT_fdiagnostics_show_option:
24218 case OPT_fdiagnostics_show_caret:
24219 case OPT_fdiagnostics_color_:
24220 case OPT_fverbose_asm:
24221 case OPT____:
24222 case OPT__sysroot_:
24223 case OPT_nostdinc:
24224 case OPT_nostdinc__:
24225 case OPT_fpreprocessed:
24226 case OPT_fltrans_output_list_:
24227 case OPT_fresolution_:
24228 case OPT_fdebug_prefix_map_:
24229 case OPT_fmacro_prefix_map_:
24230 case OPT_ffile_prefix_map_:
24231 case OPT_fcompare_debug:
24232 /* Ignore these. */
24233 continue;
24234 default:
24235 if (cl_options[save_decoded_options[j].opt_index].flags
24236 & CL_NO_DWARF_RECORD)
24237 continue;
24238 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24239 == '-');
24240 switch (save_decoded_options[j].canonical_option[0][1])
24241 {
24242 case 'M':
24243 case 'i':
24244 case 'W':
24245 continue;
24246 case 'f':
24247 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24248 "dump", 4) == 0)
24249 continue;
24250 break;
24251 default:
24252 break;
24253 }
24254 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24255 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24256 break;
24257 }
24258
24259 producer = XNEWVEC (char, plen + 1 + len + 1);
24260 tail = producer;
24261 sprintf (tail, "%s %s", language_string, version_string);
24262 tail += plen;
24263
24264 FOR_EACH_VEC_ELT (switches, j, p)
24265 {
24266 len = strlen (p);
24267 *tail = ' ';
24268 memcpy (tail + 1, p, len);
24269 tail += len + 1;
24270 }
24271
24272 *tail = '\0';
24273 return producer;
24274 }
24275
24276 /* Given a C and/or C++ language/version string, return the "highest" one.
24277 C++ is assumed to be "higher" than C in this case. Used for merging
24278 LTO translation unit languages. */
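/* For instance (illustrative), merging "GNU C++14" with "GNU C11" yields
   "GNU C++14".  Both inputs must be among the known strings handled below;
   anything else ends up in gcc_unreachable.  */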
24279 static const char *
24280 highest_c_language (const char *lang1, const char *lang2)
24281 {
24282 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24283 return "GNU C++17";
24284 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24285 return "GNU C++14";
24286 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24287 return "GNU C++11";
24288 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24289 return "GNU C++98";
24290
24291 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24292 return "GNU C17";
24293 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24294 return "GNU C11";
24295 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24296 return "GNU C99";
24297 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24298 return "GNU C89";
24299
24300 gcc_unreachable ();
24301 }
24302
24303
24304 /* Generate the DIE for the compilation unit. */
24305
24306 static dw_die_ref
24307 gen_compile_unit_die (const char *filename)
24308 {
24309 dw_die_ref die;
24310 const char *language_string = lang_hooks.name;
24311 int language;
24312
24313 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24314
24315 if (filename)
24316 {
24317 add_name_attribute (die, filename);
24318 /* Don't add cwd for <built-in>. */
24319 if (filename[0] != '<')
24320 add_comp_dir_attribute (die);
24321 }
24322
24323 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24324
24325 /* If our producer is LTO, try to figure out a common language to use
24326 from the global list of translation units. */
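/* E.g. (illustrative) an LTO link of a "GNU C11" and a "GNU C++14"
   translation unit is described as "GNU C++14"; if a non-C-family language
   is mixed in, common_lang stays NULL and the DW_AT_language computed below
   falls back to plain DW_LANG_C.  */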
24327 if (strcmp (language_string, "GNU GIMPLE") == 0)
24328 {
24329 unsigned i;
24330 tree t;
24331 const char *common_lang = NULL;
24332
24333 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24334 {
24335 if (!TRANSLATION_UNIT_LANGUAGE (t))
24336 continue;
24337 if (!common_lang)
24338 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24339 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24340 ;
24341 else if (strncmp (common_lang, "GNU C", 5) == 0
24342 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24343 /* Mixing C and C++ is ok, use C++ in that case. */
24344 common_lang = highest_c_language (common_lang,
24345 TRANSLATION_UNIT_LANGUAGE (t));
24346 else
24347 {
24348 /* Fall back to C. */
24349 common_lang = NULL;
24350 break;
24351 }
24352 }
24353
24354 if (common_lang)
24355 language_string = common_lang;
24356 }
24357
24358 language = DW_LANG_C;
24359 if (strncmp (language_string, "GNU C", 5) == 0
24360 && ISDIGIT (language_string[5]))
24361 {
24362 language = DW_LANG_C89;
24363 if (dwarf_version >= 3 || !dwarf_strict)
24364 {
24365 if (strcmp (language_string, "GNU C89") != 0)
24366 language = DW_LANG_C99;
24367
24368 if (dwarf_version >= 5 /* || !dwarf_strict */)
24369 if (strcmp (language_string, "GNU C11") == 0
24370 || strcmp (language_string, "GNU C17") == 0)
24371 language = DW_LANG_C11;
24372 }
24373 }
24374 else if (strncmp (language_string, "GNU C++", 7) == 0)
24375 {
24376 language = DW_LANG_C_plus_plus;
24377 if (dwarf_version >= 5 /* || !dwarf_strict */)
24378 {
24379 if (strcmp (language_string, "GNU C++11") == 0)
24380 language = DW_LANG_C_plus_plus_11;
24381 else if (strcmp (language_string, "GNU C++14") == 0)
24382 language = DW_LANG_C_plus_plus_14;
24383 else if (strcmp (language_string, "GNU C++17") == 0)
24384 /* For now. */
24385 language = DW_LANG_C_plus_plus_14;
24386 }
24387 }
24388 else if (strcmp (language_string, "GNU F77") == 0)
24389 language = DW_LANG_Fortran77;
24390 else if (dwarf_version >= 3 || !dwarf_strict)
24391 {
24392 if (strcmp (language_string, "GNU Ada") == 0)
24393 language = DW_LANG_Ada95;
24394 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24395 {
24396 language = DW_LANG_Fortran95;
24397 if (dwarf_version >= 5 /* || !dwarf_strict */)
24398 {
24399 if (strcmp (language_string, "GNU Fortran2003") == 0)
24400 language = DW_LANG_Fortran03;
24401 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24402 language = DW_LANG_Fortran08;
24403 }
24404 }
24405 else if (strcmp (language_string, "GNU Objective-C") == 0)
24406 language = DW_LANG_ObjC;
24407 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24408 language = DW_LANG_ObjC_plus_plus;
24409 else if (dwarf_version >= 5 || !dwarf_strict)
24410 {
24411 if (strcmp (language_string, "GNU Go") == 0)
24412 language = DW_LANG_Go;
24413 }
24414 }
24415 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24416 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24417 language = DW_LANG_Fortran90;
24418
24419 add_AT_unsigned (die, DW_AT_language, language);
24420
24421 switch (language)
24422 {
24423 case DW_LANG_Fortran77:
24424 case DW_LANG_Fortran90:
24425 case DW_LANG_Fortran95:
24426 case DW_LANG_Fortran03:
24427 case DW_LANG_Fortran08:
24428 /* Fortran has case insensitive identifiers and the front-end
24429 lowercases everything. */
24430 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24431 break;
24432 default:
24433 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24434 break;
24435 }
24436 return die;
24437 }
24438
24439 /* Generate the DIE for a base class. */
24440
24441 static void
24442 gen_inheritance_die (tree binfo, tree access, tree type,
24443 dw_die_ref context_die)
24444 {
24445 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24446 struct vlr_context ctx = { type, NULL };
24447
24448 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24449 context_die);
24450 add_data_member_location_attribute (die, binfo, &ctx);
24451
24452 if (BINFO_VIRTUAL_P (binfo))
24453 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24454
24455 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24456 children, otherwise the default is DW_ACCESS_public. In DWARF2
24457 the default has always been DW_ACCESS_private. */
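/* Put differently (summarizing the code below): DW_AT_accessibility is only
   emitted when it would differ from the applicable default, which
   DW_ACCESS_protected always does.  */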
24458 if (access == access_public_node)
24459 {
24460 if (dwarf_version == 2
24461 || context_die->die_tag == DW_TAG_class_type)
24462 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24463 }
24464 else if (access == access_protected_node)
24465 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24466 else if (dwarf_version > 2
24467 && context_die->die_tag != DW_TAG_class_type)
24468 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24469 }
24470
24471 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24472 structure. */
24473 static bool
24474 is_variant_part (tree decl)
24475 {
24476 return (TREE_CODE (decl) == FIELD_DECL
24477 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24478 }
24479
24480 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24481 return the FIELD_DECL. Return NULL_TREE otherwise. */
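/* For example (illustrative), an operand that strips down to
   <PLACEHOLDER_EXPR of STRUCT_TYPE>.kind yields the FIELD_DECL for "kind";
   any other shape yields NULL_TREE.  */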
24482
24483 static tree
24484 analyze_discr_in_predicate (tree operand, tree struct_type)
24485 {
24486 bool continue_stripping = true;
24487 while (continue_stripping)
24488 switch (TREE_CODE (operand))
24489 {
24490 CASE_CONVERT:
24491 operand = TREE_OPERAND (operand, 0);
24492 break;
24493 default:
24494 continue_stripping = false;
24495 break;
24496 }
24497
24498 /* Match field access to members of struct_type only. */
24499 if (TREE_CODE (operand) == COMPONENT_REF
24500 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24501 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24502 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24503 return TREE_OPERAND (operand, 1);
24504 else
24505 return NULL_TREE;
24506 }
24507
24508 /* Check that SRC is a constant integer that can be represented as a native
24509 integer constant (either signed or unsigned). If so, store it into DEST and
24510 return true. Return false otherwise. */
24511
24512 static bool
24513 get_discr_value (tree src, dw_discr_value *dest)
24514 {
24515 tree discr_type = TREE_TYPE (src);
24516
24517 if (lang_hooks.types.get_debug_type)
24518 {
24519 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24520 if (debug_type != NULL)
24521 discr_type = debug_type;
24522 }
24523
24524 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24525 return false;
24526
24527 /* Signedness can vary between the original type and the debug type. This
24528 can happen for character types in Ada for instance: the character type
24529 used for code generation can be signed, to be compatible with the C one,
24530 but from a debugger point of view, it must be unsigned. */
24531 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24532 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24533
24534 if (is_orig_unsigned != is_debug_unsigned)
24535 src = fold_convert (discr_type, src);
24536
24537 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24538 return false;
24539
24540 dest->pos = is_debug_unsigned;
24541 if (is_debug_unsigned)
24542 dest->v.uval = tree_to_uhwi (src);
24543 else
24544 dest->v.sval = tree_to_shwi (src);
24545
24546 return true;
24547 }
24548
24549 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24550 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24551 store NULL_TREE in DISCR_DECL. Otherwise:
24552
24553 - store the discriminant field in STRUCT_TYPE that controls the variant
24554 part to *DISCR_DECL
24555
24556 - put in *DISCR_LISTS_P an array where for each variant, the item
24557 represents the corresponding matching list of discriminant values.
24558
24559 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24560 the above array.
24561
24562 Note that when the array is allocated (i.e. when the analysis is
24563 successful), it is up to the caller to free the array. */
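/* As an illustration (not taken from any particular front end), an Ada
   variant part "case Kind is when 1 .. 3 => ... when others => ..." would
   yield the Kind FIELD_DECL in *DISCR_DECL, *DISCR_LISTS_LENGTH == 2, a
   single range node (1 .. 3) for the first variant, and a NULL list for the
   default ("others") variant.  */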
24564
24565 static void
24566 analyze_variants_discr (tree variant_part_decl,
24567 tree struct_type,
24568 tree *discr_decl,
24569 dw_discr_list_ref **discr_lists_p,
24570 unsigned *discr_lists_length)
24571 {
24572 tree variant_part_type = TREE_TYPE (variant_part_decl);
24573 tree variant;
24574 dw_discr_list_ref *discr_lists;
24575 unsigned i;
24576
24577 /* Compute how many variants there are in this variant part. */
24578 *discr_lists_length = 0;
24579 for (variant = TYPE_FIELDS (variant_part_type);
24580 variant != NULL_TREE;
24581 variant = DECL_CHAIN (variant))
24582 ++*discr_lists_length;
24583
24584 *discr_decl = NULL_TREE;
24585 *discr_lists_p
24586 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24587 sizeof (**discr_lists_p));
24588 discr_lists = *discr_lists_p;
24589
24590 /* And then analyze all variants to extract discriminant information for all
24591 of them. This analysis is conservative: as soon as we detect something we
24592 do not support, abort everything and pretend we found nothing. */
24593 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24594 variant != NULL_TREE;
24595 variant = DECL_CHAIN (variant), ++i)
24596 {
24597 tree match_expr = DECL_QUALIFIER (variant);
24598
24599 /* Now, try to analyze the predicate and deduce a discriminant for
24600 it. */
24601 if (match_expr == boolean_true_node)
24602 /* Typically happens for the default variant: it matches all cases that
24603 previous variants rejected. Don't output any matching value for
24604 this one. */
24605 continue;
24606
24607 /* The following loop tries to iterate over each discriminant
24608 possibility: single values or ranges. */
24609 while (match_expr != NULL_TREE)
24610 {
24611 tree next_round_match_expr;
24612 tree candidate_discr = NULL_TREE;
24613 dw_discr_list_ref new_node = NULL;
24614
24615 /* Possibilities are matched one after the other by nested
24616 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24617 continue with the rest at next iteration. */
24618 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24619 {
24620 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24621 match_expr = TREE_OPERAND (match_expr, 1);
24622 }
24623 else
24624 next_round_match_expr = NULL_TREE;
24625
24626 if (match_expr == boolean_false_node)
24627 /* This sub-expression matches nothing: just wait for the next
24628 one. */
24629 ;
24630
24631 else if (TREE_CODE (match_expr) == EQ_EXPR)
24632 {
24633 /* We are matching: <discr_field> == <integer_cst>
24634 This sub-expression matches a single value. */
24635 tree integer_cst = TREE_OPERAND (match_expr, 1);
24636
24637 candidate_discr
24638 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24639 struct_type);
24640
24641 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24642 if (!get_discr_value (integer_cst,
24643 &new_node->dw_discr_lower_bound))
24644 goto abort;
24645 new_node->dw_discr_range = false;
24646 }
24647
24648 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24649 {
24650 /* We are matching:
24651 <discr_field> > <integer_cst>
24652 && <discr_field> < <integer_cst>.
24653 This sub-expression matches the range of values between the
24654 two matched integer constants. Note that comparisons can be
24655 inclusive or exclusive. */
24656 tree candidate_discr_1, candidate_discr_2;
24657 tree lower_cst, upper_cst;
24658 bool lower_cst_included, upper_cst_included;
24659 tree lower_op = TREE_OPERAND (match_expr, 0);
24660 tree upper_op = TREE_OPERAND (match_expr, 1);
24661
24662 /* When the comparison is exclusive, the integer constant is not
24663 the discriminant range bound we are looking for: we will have
24664 to increment or decrement it. */
24665 if (TREE_CODE (lower_op) == GE_EXPR)
24666 lower_cst_included = true;
24667 else if (TREE_CODE (lower_op) == GT_EXPR)
24668 lower_cst_included = false;
24669 else
24670 goto abort;
24671
24672 if (TREE_CODE (upper_op) == LE_EXPR)
24673 upper_cst_included = true;
24674 else if (TREE_CODE (upper_op) == LT_EXPR)
24675 upper_cst_included = false;
24676 else
24677 goto abort;
24678
24679 /* Extract the discriminant from the first operand and check it
24680 is consistant with the same analysis in the second
24681 operand. */
24682 candidate_discr_1
24683 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24684 struct_type);
24685 candidate_discr_2
24686 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24687 struct_type);
24688 if (candidate_discr_1 == candidate_discr_2)
24689 candidate_discr = candidate_discr_1;
24690 else
24691 goto abort;
24692
24693 /* Extract bounds from both. */
24694 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24695 lower_cst = TREE_OPERAND (lower_op, 1);
24696 upper_cst = TREE_OPERAND (upper_op, 1);
24697
24698 if (!lower_cst_included)
24699 lower_cst
24700 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24701 build_int_cst (TREE_TYPE (lower_cst), 1));
24702 if (!upper_cst_included)
24703 upper_cst
24704 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24705 build_int_cst (TREE_TYPE (upper_cst), 1));
24706
24707 if (!get_discr_value (lower_cst,
24708 &new_node->dw_discr_lower_bound)
24709 || !get_discr_value (upper_cst,
24710 &new_node->dw_discr_upper_bound))
24711 goto abort;
24712
24713 new_node->dw_discr_range = true;
24714 }
24715
24716 else
24717 /* Unsupported sub-expression: we cannot determine the set of
24718 matching discriminant values. Abort everything. */
24719 goto abort;
24720
24721 /* If the discriminant info is not consistent with what we saw so
24722 far, consider the analysis failed and abort everything. */
24723 if (candidate_discr == NULL_TREE
24724 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24725 goto abort;
24726 else
24727 *discr_decl = candidate_discr;
24728
24729 if (new_node != NULL)
24730 {
24731 new_node->dw_discr_next = discr_lists[i];
24732 discr_lists[i] = new_node;
24733 }
24734 match_expr = next_round_match_expr;
24735 }
24736 }
24737
24738 /* If we reach this point, we could match everything we were interested
24739 in. */
24740 return;
24741
24742 abort:
24743 /* Clean up all data structures and return no result. */
24744 free (*discr_lists_p);
24745 *discr_lists_p = NULL;
24746 *discr_decl = NULL_TREE;
24747 }
24748
24749 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24750 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24751 under CONTEXT_DIE.
24752
24753 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24754 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24755 this type, which are record types, represent the available variants and each
24756 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24757 values are inferred from these attributes.
24758
24759 In trees, the offsets for the fields inside these sub-records are relative
24760 to the variant part itself, whereas the corresponding DIEs should have
24761 offset attributes that are relative to the embedding record base address.
24762 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24763 must be an expression that computes the offset of the variant part to
24764 describe in DWARF. */
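/* Schematically (an illustrative summary of the code below), the output is:

     DW_TAG_variant_part
       DW_AT_discr            -> DIE of the discriminant FIELD_DECL
       DW_TAG_variant         (one per member of the QUAL_UNION_TYPE)
         DW_AT_discr_value or DW_AT_discr_list (omitted for the default)
         ... DIEs for the members of that variant ...  */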
24765
24766 static void
24767 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24768 dw_die_ref context_die)
24769 {
24770 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24771 tree variant_part_offset = vlr_ctx->variant_part_offset;
24772 struct loc_descr_context ctx = {
24773 vlr_ctx->struct_type, /* context_type */
24774 NULL_TREE, /* base_decl */
24775 NULL, /* dpi */
24776 false, /* placeholder_arg */
24777 false /* placeholder_seen */
24778 };
24779
24780 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24781 NULL_TREE if there is no such field. */
24782 tree discr_decl = NULL_TREE;
24783 dw_discr_list_ref *discr_lists;
24784 unsigned discr_lists_length = 0;
24785 unsigned i;
24786
24787 dw_die_ref dwarf_proc_die = NULL;
24788 dw_die_ref variant_part_die
24789 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24790
24791 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24792
24793 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24794 &discr_decl, &discr_lists, &discr_lists_length);
24795
24796 if (discr_decl != NULL_TREE)
24797 {
24798 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24799
24800 if (discr_die)
24801 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24802 else
24803 /* We have no DIE for the discriminant, so just discard all
24804 discriminant information in the output. */
24805 discr_decl = NULL_TREE;
24806 }
24807
24808 /* If the offset for this variant part is more complex than a constant,
24809 create a DWARF procedure for it so that we will not have to generate DWARF
24810 expressions for it for each member. */
24811 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24812 && (dwarf_version >= 3 || !dwarf_strict))
24813 {
24814 const tree dwarf_proc_fndecl
24815 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24816 build_function_type (TREE_TYPE (variant_part_offset),
24817 NULL_TREE));
24818 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24819 const dw_loc_descr_ref dwarf_proc_body
24820 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24821
24822 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24823 dwarf_proc_fndecl, context_die);
24824 if (dwarf_proc_die != NULL)
24825 variant_part_offset = dwarf_proc_call;
24826 }
24827
24828 /* Output DIEs for all variants. */
24829 i = 0;
24830 for (tree variant = TYPE_FIELDS (variant_part_type);
24831 variant != NULL_TREE;
24832 variant = DECL_CHAIN (variant), ++i)
24833 {
24834 tree variant_type = TREE_TYPE (variant);
24835 dw_die_ref variant_die;
24836
24837 /* All variants (i.e. members of a variant part) are supposed to be
24838 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24839 under these records. */
24840 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24841
24842 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24843 equate_decl_number_to_die (variant, variant_die);
24844
24845 /* Output discriminant values this variant matches, if any. */
24846 if (discr_decl == NULL || discr_lists[i] == NULL)
24847 /* In case we have no discriminant information at all, this is
24848 probably the default variant: as the standard says, don't
24849 output any discriminant value/list attribute. */
24850 ;
24851 else if (discr_lists[i]->dw_discr_next == NULL
24852 && !discr_lists[i]->dw_discr_range)
24853 /* If there is only one accepted value, don't bother outputting a
24854 list. */
24855 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24856 else
24857 add_discr_list (variant_die, discr_lists[i]);
24858
24859 for (tree member = TYPE_FIELDS (variant_type);
24860 member != NULL_TREE;
24861 member = DECL_CHAIN (member))
24862 {
24863 struct vlr_context vlr_sub_ctx = {
24864 vlr_ctx->struct_type, /* struct_type */
24865 NULL /* variant_part_offset */
24866 };
24867 if (is_variant_part (member))
24868 {
24869 /* All offsets for fields inside variant parts are relative to
24870 the top-level embedding RECORD_TYPE's base address. On the
24871 other hand, offsets in GCC's types are relative to the
24872 nested-most variant part. So we have to sum offsets each time
24873 we recurse. */
24874
24875 vlr_sub_ctx.variant_part_offset
24876 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24877 variant_part_offset, byte_position (member));
24878 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24879 }
24880 else
24881 {
24882 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24883 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24884 }
24885 }
24886 }
24887
24888 free (discr_lists);
24889 }
24890
24891 /* Generate a DIE for a class member. */
24892
24893 static void
24894 gen_member_die (tree type, dw_die_ref context_die)
24895 {
24896 tree member;
24897 tree binfo = TYPE_BINFO (type);
24898
24899 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24900
24901 /* If this is not an incomplete type, output descriptions of each of its
24902 members. Note that as we output the DIEs necessary to represent the
24903 members of this record or union type, we will also be trying to output
24904 DIEs to represent the *types* of those members. However the `type'
24905 function (above) will specifically avoid generating type DIEs for member
24906 types *within* the list of member DIEs for this (containing) type except
24907 for those types (of members) which are explicitly marked as also being
24908 members of this (containing) type themselves. The g++ front end can
24909 force any given type to be treated as a member of some other (containing)
24910 type by setting the TYPE_CONTEXT of the given (member) type to point to
24911 the TREE node representing the appropriate (containing) type. */
24912
24913 /* First output info about the base classes. */
24914 if (binfo)
24915 {
24916 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24917 int i;
24918 tree base;
24919
24920 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24921 gen_inheritance_die (base,
24922 (accesses ? (*accesses)[i] : access_public_node),
24923 type,
24924 context_die);
24925 }
24926
24927 /* Now output info about the data members and type members. */
24928 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24929 {
24930 struct vlr_context vlr_ctx = { type, NULL_TREE };
24931 bool static_inline_p
24932 = (TREE_STATIC (member)
24933 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24934 != -1));
24935
24936 /* Ignore clones. */
24937 if (DECL_ABSTRACT_ORIGIN (member))
24938 continue;
24939
24940 /* If we thought we were generating minimal debug info for TYPE
24941 and then changed our minds, some of the member declarations
24942 may have already been defined. Don't define them again, but
24943 do put them in the right order. */
24944
24945 if (dw_die_ref child = lookup_decl_die (member))
24946 {
24947 /* Handle inline static data members, which only have in-class
24948 declarations. */
24949 dw_die_ref ref = NULL;
24950 if (child->die_tag == DW_TAG_variable
24951 && child->die_parent == comp_unit_die ())
24952 {
24953 ref = get_AT_ref (child, DW_AT_specification);
24954 /* For C++17 inline static data members followed by redundant
24955 out of class redeclaration, we might get here with
24956 child being the DIE created for the out of class
24957 redeclaration and with its DW_AT_specification being
24958 the DIE created for in-class definition. We want to
24959 reparent the latter, and don't want to create another
24960 DIE with DW_AT_specification in that case, because
24961 we already have one. */
24962 if (ref
24963 && static_inline_p
24964 && ref->die_tag == DW_TAG_variable
24965 && ref->die_parent == comp_unit_die ()
24966 && get_AT (ref, DW_AT_specification) == NULL)
24967 {
24968 child = ref;
24969 ref = NULL;
24970 static_inline_p = false;
24971 }
24972 }
24973
24974 if (child->die_tag == DW_TAG_variable
24975 && child->die_parent == comp_unit_die ()
24976 && ref == NULL)
24977 {
24978 reparent_child (child, context_die);
24979 if (dwarf_version < 5)
24980 child->die_tag = DW_TAG_member;
24981 }
24982 else
24983 splice_child_die (context_die, child);
24984 }
24985
24986 /* Do not generate standard DWARF for variant parts if we are generating
24987 the corresponding GNAT encodings: DIEs generated for both would
24988 conflict in our mappings. */
24989 else if (is_variant_part (member)
24990 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24991 {
24992 vlr_ctx.variant_part_offset = byte_position (member);
24993 gen_variant_part (member, &vlr_ctx, context_die);
24994 }
24995 else
24996 {
24997 vlr_ctx.variant_part_offset = NULL_TREE;
24998 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24999 }
25000
25001 /* For C++ inline static data members emit immediately a DW_TAG_variable
25002 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25003 DW_AT_specification. */
25004 if (static_inline_p)
25005 {
25006 int old_extern = DECL_EXTERNAL (member);
25007 DECL_EXTERNAL (member) = 0;
25008 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25009 DECL_EXTERNAL (member) = old_extern;
25010 }
25011 }
25012 }
25013
25014 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25015 is set, we pretend that the type was never defined, so we only get the
25016 member DIEs needed by later specification DIEs. */
25017
25018 static void
25019 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25020 enum debug_info_usage usage)
25021 {
25022 if (TREE_ASM_WRITTEN (type))
25023 {
25024 /* Fill in the bounds of variable-length fields in late DWARF if
25025 still incomplete. */
25026 if (!early_dwarf && variably_modified_type_p (type, NULL))
25027 for (tree member = TYPE_FIELDS (type);
25028 member;
25029 member = DECL_CHAIN (member))
25030 fill_variable_array_bounds (TREE_TYPE (member));
25031 return;
25032 }
25033
25034 dw_die_ref type_die = lookup_type_die (type);
25035 dw_die_ref scope_die = 0;
25036 int nested = 0;
25037 int complete = (TYPE_SIZE (type)
25038 && (! TYPE_STUB_DECL (type)
25039 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25040 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25041 complete = complete && should_emit_struct_debug (type, usage);
25042
25043 if (type_die && ! complete)
25044 return;
25045
25046 if (TYPE_CONTEXT (type) != NULL_TREE
25047 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25048 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25049 nested = 1;
25050
25051 scope_die = scope_die_for (type, context_die);
25052
25053 /* Generate child DIEs for template parameters. */
25054 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25055 schedule_generic_params_dies_gen (type);
25056
25057 if (! type_die || (nested && is_cu_die (scope_die)))
25058 /* First occurrence of type or toplevel definition of nested class. */
25059 {
25060 dw_die_ref old_die = type_die;
25061
25062 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25063 ? record_type_tag (type) : DW_TAG_union_type,
25064 scope_die, type);
25065 equate_type_number_to_die (type, type_die);
25066 if (old_die)
25067 add_AT_specification (type_die, old_die);
25068 else
25069 add_name_attribute (type_die, type_tag (type));
25070 }
25071 else
25072 remove_AT (type_die, DW_AT_declaration);
25073
25074 /* If this type has been completed, then give it a byte_size attribute and
25075 then give a list of members. */
25076 if (complete && !ns_decl)
25077 {
25078 /* Prevent infinite recursion in cases where the type of some member of
25079 this type is expressed in terms of this type itself. */
25080 TREE_ASM_WRITTEN (type) = 1;
25081 add_byte_size_attribute (type_die, type);
25082 add_alignment_attribute (type_die, type);
25083 if (TYPE_STUB_DECL (type) != NULL_TREE)
25084 {
25085 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25086 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25087 }
25088
25089 /* If the first reference to this type was as the return type of an
25090 inline function, then it may not have a parent. Fix this now. */
25091 if (type_die->die_parent == NULL)
25092 add_child_die (scope_die, type_die);
25093
25094 push_decl_scope (type);
25095 gen_member_die (type, type_die);
25096 pop_decl_scope ();
25097
25098 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25099 if (TYPE_ARTIFICIAL (type))
25100 add_AT_flag (type_die, DW_AT_artificial, 1);
25101
25102 /* GNU extension: Record what type our vtable lives in. */
25103 if (TYPE_VFIELD (type))
25104 {
25105 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25106
25107 gen_type_die (vtype, context_die);
25108 add_AT_die_ref (type_die, DW_AT_containing_type,
25109 lookup_type_die (vtype));
25110 }
25111 }
25112 else
25113 {
25114 add_AT_flag (type_die, DW_AT_declaration, 1);
25115
25116 /* We don't need to do this for function-local types. */
25117 if (TYPE_STUB_DECL (type)
25118 && ! decl_function_context (TYPE_STUB_DECL (type)))
25119 vec_safe_push (incomplete_types, type);
25120 }
25121
25122 if (get_AT (type_die, DW_AT_name))
25123 add_pubtype (type, type_die);
25124 }
25125
25126 /* Generate a DIE for a subroutine _type_. */
25127
25128 static void
25129 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25130 {
25131 tree return_type = TREE_TYPE (type);
25132 dw_die_ref subr_die
25133 = new_die (DW_TAG_subroutine_type,
25134 scope_die_for (type, context_die), type);
25135
25136 equate_type_number_to_die (type, subr_die);
25137 add_prototyped_attribute (subr_die, type);
25138 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25139 context_die);
25140 add_alignment_attribute (subr_die, type);
25141 gen_formal_types_die (type, subr_die);
25142
25143 if (get_AT (subr_die, DW_AT_name))
25144 add_pubtype (type, subr_die);
25145 if ((dwarf_version >= 5 || !dwarf_strict)
25146 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25147 add_AT_flag (subr_die, DW_AT_reference, 1);
25148 if ((dwarf_version >= 5 || !dwarf_strict)
25149 && lang_hooks.types.type_dwarf_attribute (type,
25150 DW_AT_rvalue_reference) != -1)
25151 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25152 }
25153
25154 /* Generate a DIE for a type definition. */
25155
25156 static void
25157 gen_typedef_die (tree decl, dw_die_ref context_die)
25158 {
25159 dw_die_ref type_die;
25160 tree type;
25161
25162 if (TREE_ASM_WRITTEN (decl))
25163 {
25164 if (DECL_ORIGINAL_TYPE (decl))
25165 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25166 return;
25167 }
25168
25169 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25170 checks in process_scope_var and modified_type_die), this should be called
25171 only for original types. */
25172 gcc_assert (decl_ultimate_origin (decl) == NULL
25173 || decl_ultimate_origin (decl) == decl);
25174
25175 TREE_ASM_WRITTEN (decl) = 1;
25176 type_die = new_die (DW_TAG_typedef, context_die, decl);
25177
25178 add_name_and_src_coords_attributes (type_die, decl);
25179 if (DECL_ORIGINAL_TYPE (decl))
25180 {
25181 type = DECL_ORIGINAL_TYPE (decl);
25182 if (type == error_mark_node)
25183 return;
25184
25185 gcc_assert (type != TREE_TYPE (decl));
25186 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25187 }
25188 else
25189 {
25190 type = TREE_TYPE (decl);
25191 if (type == error_mark_node)
25192 return;
25193
25194 if (is_naming_typedef_decl (TYPE_NAME (type)))
25195 {
25196 /* Here, we are in the case of decl being a typedef naming
25197 an anonymous type, e.g.:
25198 typedef struct {...} foo;
25199 In that case TREE_TYPE (decl) is not a typedef variant
25200 type and TYPE_NAME of the anonymous type is set to the
25201 TYPE_DECL of the typedef. This construct is emitted by
25202 the C++ FE.
25203
25204 TYPE is the anonymous struct named by the typedef
25205 DECL. As we need the DW_AT_type attribute of the
25206 DW_TAG_typedef to point to the DIE of TYPE, let's
25207 generate that DIE right away. add_type_attribute
25208 called below will then pick (via lookup_type_die) that
25209 anonymous struct DIE. */
25210 if (!TREE_ASM_WRITTEN (type))
25211 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25212
25213 /* This is a GNU Extension. We are adding a
25214 DW_AT_linkage_name attribute to the DIE of the
25215 anonymous struct TYPE. The value of that attribute
25216 is the name of the typedef decl naming the anonymous
25217 struct. This greatly eases the work of consumers of
25218 this debug info. */
25219 add_linkage_name_raw (lookup_type_die (type), decl);
25220 }
25221 }
25222
25223 add_type_attribute (type_die, type, decl_quals (decl), false,
25224 context_die);
25225
25226 if (is_naming_typedef_decl (decl))
25227 /* We want that all subsequent calls to lookup_type_die with
25228 TYPE in argument yield the DW_TAG_typedef we have just
25229 created. */
25230 equate_type_number_to_die (type, type_die);
25231
25232 add_alignment_attribute (type_die, TREE_TYPE (decl));
25233
25234 add_accessibility_attribute (type_die, decl);
25235
25236 if (DECL_ABSTRACT_P (decl))
25237 equate_decl_number_to_die (decl, type_die);
25238
25239 if (get_AT (type_die, DW_AT_name))
25240 add_pubtype (decl, type_die);
25241 }
25242
25243 /* Generate a DIE for a struct, class, enum or union type. */
25244
25245 static void
25246 gen_tagged_type_die (tree type,
25247 dw_die_ref context_die,
25248 enum debug_info_usage usage)
25249 {
25250 int need_pop;
25251
25252 if (type == NULL_TREE
25253 || !is_tagged_type (type))
25254 return;
25255
25256 if (TREE_ASM_WRITTEN (type))
25257 need_pop = 0;
25258 /* If this is a nested type whose containing class hasn't been written
25259 out yet, writing it out will cover this one, too. This does not apply
25260 to instantiations of member class templates; they need to be added to
25261 the containing class as they are generated. FIXME: This hurts the
25262 idea of combining type decls from multiple TUs, since we can't predict
25263 what set of template instantiations we'll get. */
25264 else if (TYPE_CONTEXT (type)
25265 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25266 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25267 {
25268 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25269
25270 if (TREE_ASM_WRITTEN (type))
25271 return;
25272
25273 /* If that failed, attach ourselves to the stub. */
25274 push_decl_scope (TYPE_CONTEXT (type));
25275 context_die = lookup_type_die (TYPE_CONTEXT (type));
25276 need_pop = 1;
25277 }
25278 else if (TYPE_CONTEXT (type) != NULL_TREE
25279 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25280 {
25281 /* If this type is local to a function that hasn't been written
25282 out yet, use a NULL context for now; it will be fixed up in
25283 decls_for_scope. */
25284 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25285 /* A declaration DIE doesn't count; nested types need to go in the
25286 specification. */
25287 if (context_die && is_declaration_die (context_die))
25288 context_die = NULL;
25289 need_pop = 0;
25290 }
25291 else
25292 {
25293 context_die = declare_in_namespace (type, context_die);
25294 need_pop = 0;
25295 }
25296
25297 if (TREE_CODE (type) == ENUMERAL_TYPE)
25298 {
25299 /* This might have been written out by the call to
25300 declare_in_namespace. */
25301 if (!TREE_ASM_WRITTEN (type))
25302 gen_enumeration_type_die (type, context_die);
25303 }
25304 else
25305 gen_struct_or_union_type_die (type, context_die, usage);
25306
25307 if (need_pop)
25308 pop_decl_scope ();
25309
25310 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25311 it up if it is ever completed. gen_*_type_die will set it for us
25312 when appropriate. */
25313 }
25314
25315 /* Generate a type description DIE. */
25316
25317 static void
25318 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25319 enum debug_info_usage usage)
25320 {
25321 struct array_descr_info info;
25322
25323 if (type == NULL_TREE || type == error_mark_node)
25324 return;
25325
25326 if (flag_checking && type)
25327 verify_type (type);
25328
25329 if (TYPE_NAME (type) != NULL_TREE
25330 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25331 && is_redundant_typedef (TYPE_NAME (type))
25332 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25333 /* The DECL of this type is a typedef we don't want to emit debug
25334 info for but we want debug info for its underlying typedef.
25335 This can happen, e.g., for the injected-class-name of a C++
25336 type. */
25337 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25338
25339 /* If TYPE is a typedef type variant, let's generate debug info
25340 for the parent typedef which TYPE is a type of. */
25341 if (typedef_variant_p (type))
25342 {
25343 if (TREE_ASM_WRITTEN (type))
25344 return;
25345
25346 tree name = TYPE_NAME (type);
25347 tree origin = decl_ultimate_origin (name);
25348 if (origin != NULL && origin != name)
25349 {
25350 gen_decl_die (origin, NULL, NULL, context_die);
25351 return;
25352 }
25353
25354 /* Prevent broken recursion; we can't hand off to the same type. */
25355 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25356
25357 /* Give typedefs the right scope. */
25358 context_die = scope_die_for (type, context_die);
25359
25360 TREE_ASM_WRITTEN (type) = 1;
25361
25362 gen_decl_die (name, NULL, NULL, context_die);
25363 return;
25364 }
25365
25366 /* If type is an anonymous tagged type named by a typedef, let's
25367 generate debug info for the typedef. */
25368 if (is_naming_typedef_decl (TYPE_NAME (type)))
25369 {
25370 /* Use the DIE of the containing namespace as the parent DIE of
25371 the type description DIE we want to generate. */
25372 if (DECL_CONTEXT (TYPE_NAME (type))
25373 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25374 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25375
25376 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25377 return;
25378 }
25379
25380 if (lang_hooks.types.get_debug_type)
25381 {
25382 tree debug_type = lang_hooks.types.get_debug_type (type);
25383
25384 if (debug_type != NULL_TREE && debug_type != type)
25385 {
25386 gen_type_die_with_usage (debug_type, context_die, usage);
25387 return;
25388 }
25389 }
25390
25391 /* We are going to output a DIE to represent the unqualified version
25392 of this type (i.e. without any const or volatile qualifiers) so
25393 get the main variant (i.e. the unqualified version) of this type
25394 now. (Vectors and arrays are special because the debugging info is in the
25395 cloned type itself. Similarly function/method types can contain extra
25396 ref-qualification). */
25397 if (TREE_CODE (type) == FUNCTION_TYPE
25398 || TREE_CODE (type) == METHOD_TYPE)
25399 {
25400 /* For function/method types, can't use type_main_variant here,
25401 because that can have different ref-qualifiers for C++,
25402 but try to canonicalize. */
25403 tree main = TYPE_MAIN_VARIANT (type);
25404 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25405 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25406 && check_base_type (t, main)
25407 && check_lang_type (t, type))
25408 {
25409 type = t;
25410 break;
25411 }
25412 }
25413 else if (TREE_CODE (type) != VECTOR_TYPE
25414 && TREE_CODE (type) != ARRAY_TYPE)
25415 type = type_main_variant (type);
25416
25417 /* If this is an array type with hidden descriptor, handle it first. */
25418 if (!TREE_ASM_WRITTEN (type)
25419 && lang_hooks.types.get_array_descr_info)
25420 {
25421 memset (&info, 0, sizeof (info));
25422 if (lang_hooks.types.get_array_descr_info (type, &info))
25423 {
25424 /* Fortran sometimes emits array types with no dimension. */
25425 gcc_assert (info.ndimensions >= 0
25426 && (info.ndimensions
25427 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25428 gen_descr_array_type_die (type, &info, context_die);
25429 TREE_ASM_WRITTEN (type) = 1;
25430 return;
25431 }
25432 }
25433
25434 if (TREE_ASM_WRITTEN (type))
25435 {
25436 /* Variable-length types may be incomplete even if
25437 TREE_ASM_WRITTEN. For such types, fall through to
25438 gen_array_type_die() and possibly fill in
25439 DW_AT_{upper,lower}_bound attributes. */
25440 if ((TREE_CODE (type) != ARRAY_TYPE
25441 && TREE_CODE (type) != RECORD_TYPE
25442 && TREE_CODE (type) != UNION_TYPE
25443 && TREE_CODE (type) != QUAL_UNION_TYPE)
25444 || !variably_modified_type_p (type, NULL))
25445 return;
25446 }
25447
25448 switch (TREE_CODE (type))
25449 {
25450 case ERROR_MARK:
25451 break;
25452
25453 case POINTER_TYPE:
25454 case REFERENCE_TYPE:
25455 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25456 ensures that the gen_type_die recursion will terminate even if the
25457 type is recursive. Recursive types are possible in Ada. */
25458 /* ??? We could perhaps do this for all types before the switch
25459 statement. */
25460 TREE_ASM_WRITTEN (type) = 1;
25461
25462 /* For these types, all that is required is that we output a DIE (or a
25463 set of DIEs) to represent the "basis" type. */
25464 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25465 DINFO_USAGE_IND_USE);
25466 break;
25467
25468 case OFFSET_TYPE:
25469 /* This code is used for C++ pointer-to-data-member types.
25470 Output a description of the relevant class type. */
25471 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25472 DINFO_USAGE_IND_USE);
25473
25474 /* Output a description of the type of the object pointed to. */
25475 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25476 DINFO_USAGE_IND_USE);
25477
25478 /* Now output a DIE to represent this pointer-to-data-member type
25479 itself. */
25480 gen_ptr_to_mbr_type_die (type, context_die);
25481 break;
25482
25483 case FUNCTION_TYPE:
25484 /* Force out return type (in case it wasn't forced out already). */
25485 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25486 DINFO_USAGE_DIR_USE);
25487 gen_subroutine_type_die (type, context_die);
25488 break;
25489
25490 case METHOD_TYPE:
25491 /* Force out return type (in case it wasn't forced out already). */
25492 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25493 DINFO_USAGE_DIR_USE);
25494 gen_subroutine_type_die (type, context_die);
25495 break;
25496
25497 case ARRAY_TYPE:
25498 case VECTOR_TYPE:
25499 gen_array_type_die (type, context_die);
25500 break;
25501
25502 case ENUMERAL_TYPE:
25503 case RECORD_TYPE:
25504 case UNION_TYPE:
25505 case QUAL_UNION_TYPE:
25506 gen_tagged_type_die (type, context_die, usage);
25507 return;
25508
25509 case VOID_TYPE:
25510 case INTEGER_TYPE:
25511 case REAL_TYPE:
25512 case FIXED_POINT_TYPE:
25513 case COMPLEX_TYPE:
25514 case BOOLEAN_TYPE:
25515 case POINTER_BOUNDS_TYPE:
25516 /* No DIEs needed for fundamental types. */
25517 break;
25518
25519 case NULLPTR_TYPE:
25520 case LANG_TYPE:
25521 /* Just use DW_TAG_unspecified_type. */
25522 {
25523 dw_die_ref type_die = lookup_type_die (type);
25524 if (type_die == NULL)
25525 {
25526 tree name = TYPE_IDENTIFIER (type);
25527 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25528 type);
25529 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25530 equate_type_number_to_die (type, type_die);
25531 }
25532 }
25533 break;
25534
25535 default:
25536 if (is_cxx_auto (type))
25537 {
25538 tree name = TYPE_IDENTIFIER (type);
25539 dw_die_ref *die = (name == get_identifier ("auto")
25540 ? &auto_die : &decltype_auto_die);
25541 if (!*die)
25542 {
25543 *die = new_die (DW_TAG_unspecified_type,
25544 comp_unit_die (), NULL_TREE);
25545 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25546 }
25547 equate_type_number_to_die (type, *die);
25548 break;
25549 }
25550 gcc_unreachable ();
25551 }
25552
25553 TREE_ASM_WRITTEN (type) = 1;
25554 }
25555
25556 static void
25557 gen_type_die (tree type, dw_die_ref context_die)
25558 {
25559 if (type != error_mark_node)
25560 {
25561 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25562 if (flag_checking)
25563 {
25564 dw_die_ref die = lookup_type_die (type);
25565 if (die)
25566 check_die (die);
25567 }
25568 }
25569 }
25570
25571 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25572 things which are local to the given block. */
25573
25574 static void
25575 gen_block_die (tree stmt, dw_die_ref context_die)
25576 {
25577 int must_output_die = 0;
25578 bool inlined_func;
25579
25580 /* Ignore blocks that are NULL. */
25581 if (stmt == NULL_TREE)
25582 return;
25583
25584 inlined_func = inlined_function_outer_scope_p (stmt);
25585
25586 /* If the block is one fragment of a non-contiguous block, do not
25587 process the variables, since they will have been done by the
25588 origin block. Do process subblocks. */
25589 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25590 {
25591 tree sub;
25592
25593 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25594 gen_block_die (sub, context_die);
25595
25596 return;
25597 }
25598
25599 /* Determine if we need to output any Dwarf DIEs at all to represent this
25600 block. */
25601 if (inlined_func)
25602 /* The outer scopes for inlinings *must* always be represented. We
25603 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25604 must_output_die = 1;
25605 else
25606 {
25607 /* Determine if this block directly contains any "significant"
25608 local declarations which we will need to output DIEs for. */
25609 if (debug_info_level > DINFO_LEVEL_TERSE)
25610 /* We are not in terse mode so *any* local declaration counts
25611 as being a "significant" one. */
25612 must_output_die = ((BLOCK_VARS (stmt) != NULL
25613 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25614 && (TREE_USED (stmt)
25615 || TREE_ASM_WRITTEN (stmt)
25616 || BLOCK_ABSTRACT (stmt)));
25617 else if ((TREE_USED (stmt)
25618 || TREE_ASM_WRITTEN (stmt)
25619 || BLOCK_ABSTRACT (stmt))
25620 && !dwarf2out_ignore_block (stmt))
25621 must_output_die = 1;
25622 }
25623
25624 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25625 DIE for any block which contains no significant local declarations at
25626 all. Rather, in such cases we just call `decls_for_scope' so that any
25627 needed Dwarf info for any sub-blocks will get properly generated. Note
25628 that in terse mode, our definition of what constitutes a "significant"
25629 local declaration gets restricted to include only inlined function
25630 instances and local (nested) function definitions. */
25631 if (must_output_die)
25632 {
25633 if (inlined_func)
25634 {
25635 /* If STMT block is abstract, that means we have been called
25636 indirectly from dwarf2out_abstract_function.
25637 That function rightfully marks the descendant blocks (of
25638 the abstract function it is dealing with) as being abstract,
25639 precisely to prevent us from emitting any
25640 DW_TAG_inlined_subroutine DIE as a descendant
25641 of an abstract function instance. So in that case, we should
25642 not call gen_inlined_subroutine_die.
25643
25644 Later though, when cgraph asks dwarf2out to emit info
25645 for the concrete instance of the function decl into which
25646 the concrete instance of STMT got inlined, the latter will lead
25647 to the generation of a DW_TAG_inlined_subroutine DIE. */
25648 if (! BLOCK_ABSTRACT (stmt))
25649 gen_inlined_subroutine_die (stmt, context_die);
25650 }
25651 else
25652 gen_lexical_block_die (stmt, context_die);
25653 }
25654 else
25655 decls_for_scope (stmt, context_die);
25656 }
25657
25658 /* Process variable DECL (or variable with origin ORIGIN) within
25659 block STMT and add it to CONTEXT_DIE. */
25660 static void
25661 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25662 {
25663 dw_die_ref die;
25664 tree decl_or_origin = decl ? decl : origin;
25665
25666 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25667 die = lookup_decl_die (decl_or_origin);
25668 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25669 {
25670 if (TYPE_DECL_IS_STUB (decl_or_origin))
25671 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25672 else
25673 die = lookup_decl_die (decl_or_origin);
25674 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25675 if (! die && ! early_dwarf)
25676 return;
25677 }
25678 else
25679 die = NULL;
25680
25681 /* Avoid creating DIEs for local typedefs and concrete static variables that
25682 will only be pruned later. */
25683 if ((origin || decl_ultimate_origin (decl))
25684 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25685 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25686 {
25687 origin = decl_ultimate_origin (decl_or_origin);
25688 if (decl && VAR_P (decl) && die != NULL)
25689 {
25690 die = lookup_decl_die (origin);
25691 if (die != NULL)
25692 equate_decl_number_to_die (decl, die);
25693 }
25694 return;
25695 }
25696
25697 if (die != NULL && die->die_parent == NULL)
25698 add_child_die (context_die, die);
25699 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25700 {
25701 if (early_dwarf)
25702 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25703 stmt, context_die);
25704 }
25705 else
25706 {
25707 if (decl && DECL_P (decl))
25708 {
25709 die = lookup_decl_die (decl);
25710
25711 /* Early created DIEs do not have a parent as the decls refer
25712 to the function as DECL_CONTEXT rather than the BLOCK. */
25713 if (die && die->die_parent == NULL)
25714 {
25715 gcc_assert (in_lto_p);
25716 add_child_die (context_die, die);
25717 }
25718 }
25719
25720 gen_decl_die (decl, origin, NULL, context_die);
25721 }
25722 }
25723
25724 /* Generate all of the decls declared within a given scope and (recursively)
25725 all of its sub-blocks. */
25726
25727 static void
25728 decls_for_scope (tree stmt, dw_die_ref context_die)
25729 {
25730 tree decl;
25731 unsigned int i;
25732 tree subblocks;
25733
25734 /* Ignore NULL blocks. */
25735 if (stmt == NULL_TREE)
25736 return;
25737
25738 /* Output the DIEs to represent all of the data objects and typedefs
25739 declared directly within this block but not within any nested
25740 sub-blocks. Also, nested function and tag DIEs have been
25741 generated with a parent of NULL; fix that up now. We don't
25742 have to do this if we're at -g1. */
25743 if (debug_info_level > DINFO_LEVEL_TERSE)
25744 {
25745 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25746 process_scope_var (stmt, decl, NULL_TREE, context_die);
25747 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25748 origin - avoid doing this twice as we have no good way to see
25749 if we've done it once already. */
25750 if (! early_dwarf)
25751 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25752 {
25753 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25754 if (decl == current_function_decl)
25755 /* Ignore declarations of the current function: although they
25756 are declarations, gen_subprogram_die would treat them
25757 as definitions again because they are equal to
25758 current_function_decl and would recurse endlessly. */;
25759 else if (TREE_CODE (decl) == FUNCTION_DECL)
25760 process_scope_var (stmt, decl, NULL_TREE, context_die);
25761 else
25762 process_scope_var (stmt, NULL_TREE, decl, context_die);
25763 }
25764 }
25765
25766 /* Even if we're at -g1, we need to process the subblocks in order to get
25767 inlined call information. */
25768
25769 /* Output the DIEs to represent all sub-blocks (and the items declared
25770 therein) of this block. */
25771 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25772 subblocks != NULL;
25773 subblocks = BLOCK_CHAIN (subblocks))
25774 gen_block_die (subblocks, context_die);
25775 }
25776
25777 /* Is this a typedef we can avoid emitting? */
25778
25779 bool
25780 is_redundant_typedef (const_tree decl)
25781 {
25782 if (TYPE_DECL_IS_STUB (decl))
25783 return true;
25784
25785 if (DECL_ARTIFICIAL (decl)
25786 && DECL_CONTEXT (decl)
25787 && is_tagged_type (DECL_CONTEXT (decl))
25788 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25789 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25790 /* Also ignore the artificial member typedef for the class name. */
25791 return true;
25792
25793 return false;
25794 }
25795
25796 /* Return TRUE if TYPE is a typedef that names a type for linkage
25797 purposes. This kind of typedef is produced by the C++ FE for
25798 constructs like:
25799
25800 typedef struct {...} foo;
25801
25802 In that case, there is no typedef variant type produced for foo.
25803 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25804 struct type. */
25805
25806 static bool
25807 is_naming_typedef_decl (const_tree decl)
25808 {
25809 if (decl == NULL_TREE
25810 || TREE_CODE (decl) != TYPE_DECL
25811 || DECL_NAMELESS (decl)
25812 || !is_tagged_type (TREE_TYPE (decl))
25813 || DECL_IS_BUILTIN (decl)
25814 || is_redundant_typedef (decl)
25815 /* It looks like Ada produces TYPE_DECLs that are very similar
25816 to C++ naming typedefs but that have different
25817 semantics. Let's be specific to C++ for now. */
25818 || !is_cxx (decl))
25819 return FALSE;
25820
25821 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25822 && TYPE_NAME (TREE_TYPE (decl)) == decl
25823 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25824 != TYPE_NAME (TREE_TYPE (decl))));
25825 }
25826
25827 /* Looks up the DIE for a context, without creating it. */
25828
25829 static inline dw_die_ref
25830 lookup_context_die (tree context)
25831 {
25832 if (context)
25833 {
25834 /* Find die that represents this context. */
25835 if (TYPE_P (context))
25836 {
25837 context = TYPE_MAIN_VARIANT (context);
25838 dw_die_ref ctx = lookup_type_die (context);
25839 if (!ctx)
25840 return NULL;
25841 return strip_naming_typedef (context, ctx);
25842 }
25843 else
25844 return lookup_decl_die (context);
25845 }
25846 return comp_unit_die ();
25847 }
25848
25849 /* Returns the DIE for a context, creating it if necessary. */
25850
25851 static inline dw_die_ref
25852 get_context_die (tree context)
25853 {
25854 if (context)
25855 {
25856 /* Find die that represents this context. */
25857 if (TYPE_P (context))
25858 {
25859 context = TYPE_MAIN_VARIANT (context);
25860 return strip_naming_typedef (context, force_type_die (context));
25861 }
25862 else
25863 return force_decl_die (context);
25864 }
25865 return comp_unit_die ();
25866 }
25867
25868 /* Returns the DIE for DECL. A DIE will always be returned. */
25869
25870 static dw_die_ref
25871 force_decl_die (tree decl)
25872 {
25873 dw_die_ref decl_die;
25874 unsigned saved_external_flag;
25875 tree save_fn = NULL_TREE;
25876 decl_die = lookup_decl_die (decl);
25877 if (!decl_die)
25878 {
25879 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25880
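/* Emitting the context DIE above may already have created a DIE for DECL
as a side effect, so look it up again before generating a fresh one. */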
25881 decl_die = lookup_decl_die (decl);
25882 if (decl_die)
25883 return decl_die;
25884
25885 switch (TREE_CODE (decl))
25886 {
25887 case FUNCTION_DECL:
25888 /* Clear current_function_decl, so that gen_subprogram_die thinks
25889 that this is a declaration. At this point, we just want to force
25890 a declaration DIE. */
25891 save_fn = current_function_decl;
25892 current_function_decl = NULL_TREE;
25893 gen_subprogram_die (decl, context_die);
25894 current_function_decl = save_fn;
25895 break;
25896
25897 case VAR_DECL:
25898 /* Set the external flag to force a declaration DIE. Restore it after
25899 the gen_decl_die() call. */
25900 saved_external_flag = DECL_EXTERNAL (decl);
25901 DECL_EXTERNAL (decl) = 1;
25902 gen_decl_die (decl, NULL, NULL, context_die);
25903 DECL_EXTERNAL (decl) = saved_external_flag;
25904 break;
25905
25906 case NAMESPACE_DECL:
25907 if (dwarf_version >= 3 || !dwarf_strict)
25908 dwarf2out_decl (decl);
25909 else
25910 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25911 decl_die = comp_unit_die ();
25912 break;
25913
25914 case TRANSLATION_UNIT_DECL:
25915 decl_die = comp_unit_die ();
25916 break;
25917
25918 default:
25919 gcc_unreachable ();
25920 }
25921
25922 /* We should be able to find the DIE now. */
25923 if (!decl_die)
25924 decl_die = lookup_decl_die (decl);
25925 gcc_assert (decl_die);
25926 }
25927
25928 return decl_die;
25929 }
25930
25931 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25932 always returned. */
25933
25934 static dw_die_ref
25935 force_type_die (tree type)
25936 {
25937 dw_die_ref type_die;
25938
25939 type_die = lookup_type_die (type);
25940 if (!type_die)
25941 {
25942 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25943
25944 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25945 false, context_die);
25946 gcc_assert (type_die);
25947 }
25948 return type_die;
25949 }
25950
25951 /* Force out any required namespaces to be able to output DECL,
25952 and return the new context_die for it, if it's changed. */
25953
25954 static dw_die_ref
25955 setup_namespace_context (tree thing, dw_die_ref context_die)
25956 {
25957 tree context = (DECL_P (thing)
25958 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25959 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25960 /* Force out the namespace. */
25961 context_die = force_decl_die (context);
25962
25963 return context_die;
25964 }
25965
25966 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25967 type) within its namespace, if appropriate.
25968
25969 For compatibility with older debuggers, namespace DIEs only contain
25970 declarations; all definitions are emitted at CU scope, with
25971 DW_AT_specification pointing to the declaration (like with class
25972 members). */
25973
25974 static dw_die_ref
25975 declare_in_namespace (tree thing, dw_die_ref context_die)
25976 {
25977 dw_die_ref ns_context;
25978
25979 if (debug_info_level <= DINFO_LEVEL_TERSE)
25980 return context_die;
25981
25982 /* External declarations in the local scope only need to be emitted
25983 once, not once in the namespace and once in the scope.
25984
25985 This avoids declaring the `extern' below in the
25986 namespace DIE as well as in the innermost scope:
25987
25988 namespace S
25989 {
25990 int i=5;
25991 int foo()
25992 {
25993 int i=8;
25994 extern int i;
25995 return i;
25996 }
25997 }
25998 */
25999 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26000 return context_die;
26001
26002 /* If this decl is from an inlined function, then don't try to emit it in its
26003 namespace, as we will get confused. It would have already been emitted
26004 when the abstract instance of the inline function was emitted anyway. */
26005 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26006 return context_die;
26007
26008 ns_context = setup_namespace_context (thing, context_die);
26009
26010 if (ns_context != context_die)
26011 {
26012 if (is_fortran ())
26013 return ns_context;
26014 if (DECL_P (thing))
26015 gen_decl_die (thing, NULL, NULL, ns_context);
26016 else
26017 gen_type_die (thing, ns_context);
26018 }
26019 return context_die;
26020 }
26021
26022 /* Generate a DIE for a namespace or namespace alias. */
26023
26024 static void
26025 gen_namespace_die (tree decl, dw_die_ref context_die)
26026 {
26027 dw_die_ref namespace_die;
26028
26029 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26030 they are an alias of. */
26031 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26032 {
26033 /* Output a real namespace or module. */
26034 context_die = setup_namespace_context (decl, comp_unit_die ());
26035 namespace_die = new_die (is_fortran ()
26036 ? DW_TAG_module : DW_TAG_namespace,
26037 context_die, decl);
26038 /* For Fortran modules defined in a different CU, don't add src coords. */
26039 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26040 {
26041 const char *name = dwarf2_name (decl, 0);
26042 if (name)
26043 add_name_attribute (namespace_die, name);
26044 }
26045 else
26046 add_name_and_src_coords_attributes (namespace_die, decl);
26047 if (DECL_EXTERNAL (decl))
26048 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26049 equate_decl_number_to_die (decl, namespace_die);
26050 }
26051 else
26052 {
26053 /* Output a namespace alias. */
26054
26055 /* Force out the namespace we are an alias of, if necessary. */
26056 dw_die_ref origin_die
26057 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26058
26059 if (DECL_FILE_SCOPE_P (decl)
26060 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26061 context_die = setup_namespace_context (decl, comp_unit_die ());
26062 /* Now create the namespace alias DIE. */
26063 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26064 add_name_and_src_coords_attributes (namespace_die, decl);
26065 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26066 equate_decl_number_to_die (decl, namespace_die);
26067 }
26068 if ((dwarf_version >= 5 || !dwarf_strict)
26069 && lang_hooks.decls.decl_dwarf_attribute (decl,
26070 DW_AT_export_symbols) == 1)
26071 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26072
26073 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26074 if (want_pubnames ())
26075 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26076 }
26077
26078 /* Generate Dwarf debug information for a decl described by DECL.
26079 The return value is currently only meaningful for PARM_DECLs,
26080 for all other decls it returns NULL.
26081
26082 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26083 It can be NULL otherwise. */
26084
26085 static dw_die_ref
26086 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26087 dw_die_ref context_die)
26088 {
26089 tree decl_or_origin = decl ? decl : origin;
26090 tree class_origin = NULL, ultimate_origin;
26091
26092 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26093 return NULL;
26094
26095 /* Ignore pointer bounds decls. */
26096 if (DECL_P (decl_or_origin)
26097 && TREE_TYPE (decl_or_origin)
26098 && POINTER_BOUNDS_P (decl_or_origin))
26099 return NULL;
26100
26101 switch (TREE_CODE (decl_or_origin))
26102 {
26103 case ERROR_MARK:
26104 break;
26105
26106 case CONST_DECL:
26107 if (!is_fortran () && !is_ada ())
26108 {
26109 /* The individual enumerators of an enum type get output when we output
26110 the Dwarf representation of the relevant enum type itself. */
26111 break;
26112 }
26113
26114 /* Emit its type. */
26115 gen_type_die (TREE_TYPE (decl), context_die);
26116
26117 /* And its containing namespace. */
26118 context_die = declare_in_namespace (decl, context_die);
26119
26120 gen_const_die (decl, context_die);
26121 break;
26122
26123 case FUNCTION_DECL:
26124 #if 0
26125 /* FIXME */
26126 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26127 on local redeclarations of global functions. That seems broken. */
26128 if (current_function_decl != decl)
26129 /* This is only a declaration. */;
26130 #endif
26131
26132 /* We should have abstract copies already and should not generate
26133 stray type DIEs in late LTO dumping. */
26134 if (! early_dwarf)
26135 ;
26136
26137 /* If we're emitting a clone, emit info for the abstract instance. */
26138 else if (origin || DECL_ORIGIN (decl) != decl)
26139 dwarf2out_abstract_function (origin
26140 ? DECL_ORIGIN (origin)
26141 : DECL_ABSTRACT_ORIGIN (decl));
26142
26143 /* If we're emitting a possibly inlined function, emit it as an
26144 abstract instance. */
26145 else if (cgraph_function_possibly_inlined_p (decl)
26146 && ! DECL_ABSTRACT_P (decl)
26147 && ! class_or_namespace_scope_p (context_die)
26148 /* dwarf2out_abstract_function won't emit a die if this is just
26149 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26150 that case, because that works only if we have a die. */
26151 && DECL_INITIAL (decl) != NULL_TREE)
26152 dwarf2out_abstract_function (decl);
26153
26154 /* Otherwise we're emitting the primary DIE for this decl. */
26155 else if (debug_info_level > DINFO_LEVEL_TERSE)
26156 {
26157 /* Before we describe the FUNCTION_DECL itself, make sure that we
26158 have its containing type. */
26159 if (!origin)
26160 origin = decl_class_context (decl);
26161 if (origin != NULL_TREE)
26162 gen_type_die (origin, context_die);
26163
26164 /* And its return type. */
26165 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26166
26167 /* And its virtual context. */
26168 if (DECL_VINDEX (decl) != NULL_TREE)
26169 gen_type_die (DECL_CONTEXT (decl), context_die);
26170
26171 /* Make sure we have a member DIE for decl. */
26172 if (origin != NULL_TREE)
26173 gen_type_die_for_member (origin, decl, context_die);
26174
26175 /* And its containing namespace. */
26176 context_die = declare_in_namespace (decl, context_die);
26177 }
26178
26179 /* Now output a DIE to represent the function itself. */
26180 if (decl)
26181 gen_subprogram_die (decl, context_die);
26182 break;
26183
26184 case TYPE_DECL:
26185 /* If we are in terse mode, don't generate any DIEs to represent any
26186 actual typedefs. */
26187 if (debug_info_level <= DINFO_LEVEL_TERSE)
26188 break;
26189
26190 /* In the special case of a TYPE_DECL node representing the declaration
26191 of some type tag, if the given TYPE_DECL is marked as having been
26192 instantiated from some other (original) TYPE_DECL node (e.g. one which
26193 was generated within the original definition of an inline function) we
26194 used to generate a special (abbreviated) DW_TAG_structure_type,
26195 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26196 should actually be referencing those DIEs, as variable DIEs with that
26197 type would already be emitted in the abstract origin, so it was always
26198 removed during unused type pruning. Don't add anything in this
26199 case. */
26200 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26201 break;
26202
26203 if (is_redundant_typedef (decl))
26204 gen_type_die (TREE_TYPE (decl), context_die);
26205 else
26206 /* Output a DIE to represent the typedef itself. */
26207 gen_typedef_die (decl, context_die);
26208 break;
26209
26210 case LABEL_DECL:
26211 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26212 gen_label_die (decl, context_die);
26213 break;
26214
26215 case VAR_DECL:
26216 case RESULT_DECL:
26217 /* If we are in terse mode, don't generate any DIEs to represent any
26218 variable declarations or definitions. */
26219 if (debug_info_level <= DINFO_LEVEL_TERSE)
26220 break;
26221
26222 /* Avoid generating stray type DIEs during late dwarf dumping.
26223 All types have been dumped early. */
26224 if (early_dwarf
26225 /* ??? But in LTRANS we cannot annotate early created variably
26226 modified type DIEs without copying them and adjusting all
26227 references to them. Dump them again as happens for inlining
26228 which copies both the decl and the types. */
26229 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26230 in VLA bound information for example. */
26231 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26232 current_function_decl)))
26233 {
26234 /* Output any DIEs that are needed to specify the type of this data
26235 object. */
26236 if (decl_by_reference_p (decl_or_origin))
26237 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26238 else
26239 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26240 }
26241
26242 if (early_dwarf)
26243 {
26244 /* And its containing type. */
26245 class_origin = decl_class_context (decl_or_origin);
26246 if (class_origin != NULL_TREE)
26247 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26248
26249 /* And its containing namespace. */
26250 context_die = declare_in_namespace (decl_or_origin, context_die);
26251 }
26252
26253 /* Now output the DIE to represent the data object itself. This gets
26254 complicated because of the possibility that the VAR_DECL really
26255 represents an inlined instance of a formal parameter for an inline
26256 function. */
26257 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26258 if (ultimate_origin != NULL_TREE
26259 && TREE_CODE (ultimate_origin) == PARM_DECL)
26260 gen_formal_parameter_die (decl, origin,
26261 true /* Emit name attribute. */,
26262 context_die);
26263 else
26264 gen_variable_die (decl, origin, context_die);
26265 break;
26266
26267 case FIELD_DECL:
26268 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26269 /* Ignore the nameless fields that are used to skip bits but handle C++
26270 anonymous unions and structs. */
26271 if (DECL_NAME (decl) != NULL_TREE
26272 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26273 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26274 {
26275 gen_type_die (member_declared_type (decl), context_die);
26276 gen_field_die (decl, ctx, context_die);
26277 }
26278 break;
26279
26280 case PARM_DECL:
26281 /* Avoid generating stray type DIEs during late dwarf dumping.
26282 All types have been dumped early. */
26283 if (early_dwarf
26284 /* ??? But in LTRANS we cannot annotate early created variably
26285 modified type DIEs without copying them and adjusting all
26286 references to them. Dump them again as happens for inlining
26287 which copies both the decl and the types. */
26288 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26289 in VLA bound information for example. */
26290 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26291 current_function_decl)))
26292 {
26293 if (DECL_BY_REFERENCE (decl_or_origin))
26294 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26295 else
26296 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26297 }
26298 return gen_formal_parameter_die (decl, origin,
26299 true /* Emit name attribute. */,
26300 context_die);
26301
26302 case NAMESPACE_DECL:
26303 if (dwarf_version >= 3 || !dwarf_strict)
26304 gen_namespace_die (decl, context_die);
26305 break;
26306
26307 case IMPORTED_DECL:
26308 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26309 DECL_CONTEXT (decl), context_die);
26310 break;
26311
26312 case NAMELIST_DECL:
26313 gen_namelist_decl (DECL_NAME (decl), context_die,
26314 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26315 break;
26316
26317 default:
26318 /* Probably some frontend-internal decl. Assume we don't care. */
26319 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26320 break;
26321 }
26322
26323 return NULL;
26324 }
26325 \f
26326 /* Output initial debug information for global DECL. Called at the
26327 end of the parsing process.
26328
26329 This is the initial debug generation process. As such, the DIEs
26330 generated may be incomplete. A later debug generation pass
26331 (dwarf2out_late_global_decl) will augment the information generated
26332 in this pass (e.g., with complete location info). */
26333
26334 static void
26335 dwarf2out_early_global_decl (tree decl)
26336 {
26337 set_early_dwarf s;
26338
26339 /* gen_decl_die() will set DECL_ABSTRACT because
26340 cgraph_function_possibly_inlined_p() returns true. This in
26341 turn will cause DW_AT_inline attributes to be set.
26342
26343 This happens because at early dwarf generation, there is no
26344 cgraph information, causing cgraph_function_possibly_inlined_p()
26345 to return true. Trick cgraph_function_possibly_inlined_p()
26346 while we generate dwarf early. */
26347 bool save = symtab->global_info_ready;
26348 symtab->global_info_ready = true;
26349
26350 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26351 other DECLs and they can point to template types or other things
26352 that dwarf2out can't handle when done via dwarf2out_decl. */
26353 if (TREE_CODE (decl) != TYPE_DECL
26354 && TREE_CODE (decl) != PARM_DECL)
26355 {
26356 if (TREE_CODE (decl) == FUNCTION_DECL)
26357 {
26358 tree save_fndecl = current_function_decl;
26359
26360 /* For nested functions, make sure we have DIEs for the parents first
26361 so that all nested DIEs are generated at the proper scope in the
26362 first shot. */
26363 tree context = decl_function_context (decl);
26364 if (context != NULL)
26365 {
26366 dw_die_ref context_die = lookup_decl_die (context);
26367 current_function_decl = context;
26368
26369 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26370 enough so that it lands in its own context. This avoids type
26371 pruning issues later on. */
26372 if (context_die == NULL || is_declaration_die (context_die))
26373 dwarf2out_decl (context);
26374 }
26375
26376 /* Emit an abstract origin of a function first. This happens
26377 with C++ constructor clones for example and makes
26378 dwarf2out_abstract_function happy, which requires the early
26379 DIE of the abstract instance to be present. */
26380 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26381 dw_die_ref origin_die;
26382 if (origin != NULL
26383 /* Do not emit the DIE multiple times but make sure to
26384 process it fully here in case we just saw a declaration. */
26385 && ((origin_die = lookup_decl_die (origin)) == NULL
26386 || is_declaration_die (origin_die)))
26387 {
26388 current_function_decl = origin;
26389 dwarf2out_decl (origin);
26390 }
26391
26392 /* Emit the DIE for decl but avoid doing that multiple times. */
26393 dw_die_ref old_die;
26394 if ((old_die = lookup_decl_die (decl)) == NULL
26395 || is_declaration_die (old_die))
26396 {
26397 current_function_decl = decl;
26398 dwarf2out_decl (decl);
26399 }
26400
26401 current_function_decl = save_fndecl;
26402 }
26403 else
26404 dwarf2out_decl (decl);
26405 }
26406 symtab->global_info_ready = save;
26407 }
26408
26409 /* Return whether EXPR is an expression with the following pattern:
26410 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
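/* (In source terms this is a dereference of a constant address, e.g.
*(int *) 0x1234, which involves no relocation against a text symbol.) */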
26411
26412 static bool
26413 is_trivial_indirect_ref (tree expr)
26414 {
26415 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26416 return false;
26417
26418 tree nop = TREE_OPERAND (expr, 0);
26419 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26420 return false;
26421
26422 tree int_cst = TREE_OPERAND (nop, 0);
26423 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26424 }
26425
26426 /* Output debug information for global decl DECL. Called from
26427 toplev.c after compilation proper has finished. */
26428
26429 static void
26430 dwarf2out_late_global_decl (tree decl)
26431 {
26432 /* Fill in any location information we were unable to determine
26433 on the first pass. */
26434 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
26435 {
26436 dw_die_ref die = lookup_decl_die (decl);
26437
26438 /* We may have to generate early debug late for LTO in case debug
26439 was not enabled at compile-time or the target doesn't support
26440 the LTO early debug scheme. */
26441 if (! die && in_lto_p)
26442 {
26443 dwarf2out_decl (decl);
26444 die = lookup_decl_die (decl);
26445 }
26446
26447 if (die)
26448 {
26449 /* We get called via the symtab code invoking late_global_decl
26450 for symbols that are optimized out.
26451
26452 Do not add locations for those, except if they have a
26453 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26454 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26455 INDIRECT_REF expression, as this could generate relocations to
26456 text symbols in LTO object files, which is invalid. */
26457 varpool_node *node = varpool_node::get (decl);
26458 if ((! node || ! node->definition)
26459 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26460 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26461 tree_add_const_value_attribute_for_decl (die, decl);
26462 else
26463 add_location_or_const_value_attribute (die, decl, false);
26464 }
26465 }
26466 }
26467
26468 /* Output debug information for type decl DECL. Called from toplev.c
26469 and from language front ends (to record built-in types). */
26470 static void
26471 dwarf2out_type_decl (tree decl, int local)
26472 {
26473 if (!local)
26474 {
26475 set_early_dwarf s;
26476 dwarf2out_decl (decl);
26477 }
26478 }
26479
26480 /* Output debug information for imported module or decl DECL.
26481 NAME is non-NULL name in the lexical block if the decl has been renamed.
26482 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26483 that DECL belongs to.
26484 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26485 static void
26486 dwarf2out_imported_module_or_decl_1 (tree decl,
26487 tree name,
26488 tree lexical_block,
26489 dw_die_ref lexical_block_die)
26490 {
26491 expanded_location xloc;
26492 dw_die_ref imported_die = NULL;
26493 dw_die_ref at_import_die;
26494
26495 if (TREE_CODE (decl) == IMPORTED_DECL)
26496 {
26497 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26498 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26499 gcc_assert (decl);
26500 }
26501 else
26502 xloc = expand_location (input_location);
26503
26504 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26505 {
26506 at_import_die = force_type_die (TREE_TYPE (decl));
26507 /* For namespace N { typedef void T; } using N::T; base_type_die
26508 returns NULL, but DW_TAG_imported_declaration requires
26509 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26510 if (!at_import_die)
26511 {
26512 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26513 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26514 at_import_die = lookup_type_die (TREE_TYPE (decl));
26515 gcc_assert (at_import_die);
26516 }
26517 }
26518 else
26519 {
26520 at_import_die = lookup_decl_die (decl);
26521 if (!at_import_die)
26522 {
26523 /* If we're trying to avoid duplicate debug info, we may not have
26524 emitted the member decl for this field. Emit it now. */
26525 if (TREE_CODE (decl) == FIELD_DECL)
26526 {
26527 tree type = DECL_CONTEXT (decl);
26528
26529 if (TYPE_CONTEXT (type)
26530 && TYPE_P (TYPE_CONTEXT (type))
26531 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26532 DINFO_USAGE_DIR_USE))
26533 return;
26534 gen_type_die_for_member (type, decl,
26535 get_context_die (TYPE_CONTEXT (type)));
26536 }
26537 if (TREE_CODE (decl) == NAMELIST_DECL)
26538 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26539 get_context_die (DECL_CONTEXT (decl)),
26540 NULL_TREE);
26541 else
26542 at_import_die = force_decl_die (decl);
26543 }
26544 }
26545
26546 if (TREE_CODE (decl) == NAMESPACE_DECL)
26547 {
26548 if (dwarf_version >= 3 || !dwarf_strict)
26549 imported_die = new_die (DW_TAG_imported_module,
26550 lexical_block_die,
26551 lexical_block);
26552 else
26553 return;
26554 }
26555 else
26556 imported_die = new_die (DW_TAG_imported_declaration,
26557 lexical_block_die,
26558 lexical_block);
26559
26560 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26561 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26562 if (debug_column_info && xloc.column)
26563 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26564 if (name)
26565 add_AT_string (imported_die, DW_AT_name,
26566 IDENTIFIER_POINTER (name));
26567 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26568 }
26569
26570 /* Output debug information for imported module or decl DECL.
26571 NAME is non-NULL name in context if the decl has been renamed.
26572 CHILD is true if decl is one of the renamed decls as part of
26573 importing whole module.
26574 IMPLICIT is set if this hook is called for an implicit import
26575 such as inline namespace. */
26576
26577 static void
26578 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26579 bool child, bool implicit)
26580 {
26581 /* dw_die_ref at_import_die; */
26582 dw_die_ref scope_die;
26583
26584 if (debug_info_level <= DINFO_LEVEL_TERSE)
26585 return;
26586
26587 gcc_assert (decl);
26588
26589 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace should
26590 be enough; for DWARF4 and older, even if we emit DW_AT_export_symbols
26591 as an extension, add the implicit DW_TAG_imported_module anyway for the
26592 benefit of consumers unaware of DW_AT_export_symbols. */
26593 if (implicit
26594 && dwarf_version >= 5
26595 && lang_hooks.decls.decl_dwarf_attribute (decl,
26596 DW_AT_export_symbols) == 1)
26597 return;
26598
26599 set_early_dwarf s;
26600
26601 /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs:
26602 the decl's DIE (for the reference) and the scope DIE. First, get the DIE
26603 for the decl itself. */
26604 
26605 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26606 module or decl. If no DIE is found for a non-global, force a new one. */
26607 if (context
26608 && TYPE_P (context)
26609 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26610 return;
26611
26612 scope_die = get_context_die (context);
26613
26614 if (child)
26615 {
26616 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26617 there is nothing we can do here. */
26618 if (dwarf_version < 3 && dwarf_strict)
26619 return;
26620
26621 gcc_assert (scope_die->die_child);
26622 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26623 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26624 scope_die = scope_die->die_child;
26625 }
26626
26627 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26628 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26629 }
26630
26631 /* Output debug information for namelists. */
26632
26633 static dw_die_ref
26634 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26635 {
26636 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26637 tree value;
26638 unsigned i;
26639
26640 if (debug_info_level <= DINFO_LEVEL_TERSE)
26641 return NULL;
26642
26643 gcc_assert (scope_die != NULL);
26644 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26645 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26646
26647 /* If there are no item_decls, we have a nondefining namelist, e.g.
26648 with USE association; hence, set DW_AT_declaration. */
26649 if (item_decls == NULL_TREE)
26650 {
26651 add_AT_flag (nml_die, DW_AT_declaration, 1);
26652 return nml_die;
26653 }
26654
26655 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26656 {
26657 nml_item_ref_die = lookup_decl_die (value);
26658 if (!nml_item_ref_die)
26659 nml_item_ref_die = force_decl_die (value);
26660
26661 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26662 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26663 }
26664 return nml_die;
26665 }
26666
26667
26668 /* Write the debugging output for DECL. */
26669
26670 static void
26671 dwarf2out_decl (tree decl)
26672 {
26673 dw_die_ref context_die = comp_unit_die ();
26674
26675 switch (TREE_CODE (decl))
26676 {
26677 case ERROR_MARK:
26678 return;
26679
26680 case FUNCTION_DECL:
26681 /* If we're a nested function, initially use a parent of NULL; if we're
26682 a plain function, this will be fixed up in decls_for_scope. If
26683 we're a method, it will be ignored, since we already have a DIE. */
26684 if (decl_function_context (decl)
26685 /* But if we're in terse mode, we don't care about scope. */
26686 && debug_info_level > DINFO_LEVEL_TERSE)
26687 context_die = NULL;
26688 break;
26689
26690 case VAR_DECL:
26691 /* For local statics, look up the proper context DIE. */
26692 if (local_function_static (decl))
26693 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26694
26695 /* If we are in terse mode, don't generate any DIEs to represent any
26696 variable declarations or definitions. */
26697 if (debug_info_level <= DINFO_LEVEL_TERSE)
26698 return;
26699 break;
26700
26701 case CONST_DECL:
26702 if (debug_info_level <= DINFO_LEVEL_TERSE)
26703 return;
26704 if (!is_fortran () && !is_ada ())
26705 return;
26706 if (TREE_STATIC (decl) && decl_function_context (decl))
26707 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26708 break;
26709
26710 case NAMESPACE_DECL:
26711 case IMPORTED_DECL:
26712 if (debug_info_level <= DINFO_LEVEL_TERSE)
26713 return;
26714 if (lookup_decl_die (decl) != NULL)
26715 return;
26716 break;
26717
26718 case TYPE_DECL:
26719 /* Don't emit stubs for types unless they are needed by other DIEs. */
26720 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26721 return;
26722
26723 /* Don't bother trying to generate any DIEs to represent any of the
26724 normal built-in types for the language we are compiling. */
26725 if (DECL_IS_BUILTIN (decl))
26726 return;
26727
26728 /* If we are in terse mode, don't generate any DIEs for types. */
26729 if (debug_info_level <= DINFO_LEVEL_TERSE)
26730 return;
26731
26732 /* If we're a function-scope tag, initially use a parent of NULL;
26733 this will be fixed up in decls_for_scope. */
26734 if (decl_function_context (decl))
26735 context_die = NULL;
26736
26737 break;
26738
26739 case NAMELIST_DECL:
26740 break;
26741
26742 default:
26743 return;
26744 }
26745
26746 gen_decl_die (decl, NULL, NULL, context_die);
26747
26748 if (flag_checking)
26749 {
26750 dw_die_ref die = lookup_decl_die (decl);
26751 if (die)
26752 check_die (die);
26753 }
26754 }
26755
26756 /* Write the debugging output for DECL. */
26757
26758 static void
26759 dwarf2out_function_decl (tree decl)
26760 {
26761 dwarf2out_decl (decl);
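/* Reset the per-function state accumulated while scanning this function's
insns, ready for the next function. */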
26762 call_arg_locations = NULL;
26763 call_arg_loc_last = NULL;
26764 call_site_count = -1;
26765 tail_call_site_count = -1;
26766 decl_loc_table->empty ();
26767 cached_dw_loc_list_table->empty ();
26768 }
26769
26770 /* Output a marker (i.e. a label) for the beginning of the generated code for
26771 a lexical block. */
26772
26773 static void
26774 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26775 unsigned int blocknum)
26776 {
26777 switch_to_section (current_function_section ());
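/* BLOCK_BEGIN_LABEL is "LBB", so on typical targets this emits a label of
the form .LBB<blocknum>, which block DIEs later reference for the block's
code range. */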
26778 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26779 }
26780
26781 /* Output a marker (i.e. a label) for the end of the generated code for a
26782 lexical block. */
26783
26784 static void
26785 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26786 {
26787 switch_to_section (current_function_section ());
26788 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26789 }
26790
26791 /* Returns nonzero if it is appropriate not to emit any debugging
26792 information for BLOCK, because it doesn't contain any instructions.
26793
26794 Don't allow this for blocks with nested functions or local classes
26795 as we would end up with orphans, and in the presence of scheduling
26796 we may end up calling them anyway. */
26797
26798 static bool
26799 dwarf2out_ignore_block (const_tree block)
26800 {
26801 tree decl;
26802 unsigned int i;
26803
26804 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26805 if (TREE_CODE (decl) == FUNCTION_DECL
26806 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26807 return 0;
26808 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26809 {
26810 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26811 if (TREE_CODE (decl) == FUNCTION_DECL
26812 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26813 return 0;
26814 }
26815
26816 return 1;
26817 }
26818
26819 /* Hash table routines for file_hash. */
26820
26821 bool
26822 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26823 {
26824 return filename_cmp (p1->filename, p2) == 0;
26825 }
26826
26827 hashval_t
26828 dwarf_file_hasher::hash (dwarf_file_data *p)
26829 {
26830 return htab_hash_string (p->filename);
26831 }
26832
26833 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26834 dwarf2out.c) and return its "index". The index of each (known) filename is
26835 just a unique number which is associated with only that one filename. We
26836 need such numbers for the sake of generating labels (in the .debug_sfnames
26837 section) and references to those file numbers (in the .debug_srcinfo
26838 and .debug_macinfo sections). If the filename given as an argument is not
26839 found in our current list, add it to the list and assign it the next
26840 available unique index number. */
26841
26842 static struct dwarf_file_data *
26843 lookup_filename (const char *file_name)
26844 {
26845 struct dwarf_file_data * created;
26846
26847 if (!file_name)
26848 return NULL;
26849
26850 dwarf_file_data **slot
26851 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26852 INSERT);
26853 if (*slot)
26854 return *slot;
26855
26856 created = ggc_alloc<dwarf_file_data> ();
26857 created->filename = file_name;
26858 created->emitted_number = 0;
26859 *slot = created;
26860 return created;
26861 }
26862
26863 /* If the assembler will construct the file table, then translate the compiler
26864 internal file table number into the assembler file table number, and emit
26865 a .file directive if we haven't already emitted one yet. The file table
26866 numbers are different because we prune debug info for unused variables and
26867 types, which may include filenames. */
26868
26869 static int
26870 maybe_emit_file (struct dwarf_file_data * fd)
26871 {
26872 if (! fd->emitted_number)
26873 {
26874 if (last_emitted_file)
26875 fd->emitted_number = last_emitted_file->emitted_number + 1;
26876 else
26877 fd->emitted_number = 1;
26878 last_emitted_file = fd;
26879
26880 if (output_asm_line_debug_info ())
26881 {
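/* Emit the assembler file-table entry ourselves, for example:
.file 2 "src/foo.c" */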
26882 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26883 output_quoted_string (asm_out_file,
26884 remap_debug_filename (fd->filename));
26885 fputc ('\n', asm_out_file);
26886 }
26887 }
26888
26889 return fd->emitted_number;
26890 }
26891
26892 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26893 That generation should happen after function debug info has been
26894 generated. The value of the attribute is the constant value of ARG. */
26895
26896 static void
26897 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26898 {
26899 die_arg_entry entry;
26900
26901 if (!die || !arg)
26902 return;
26903
26904 gcc_assert (early_dwarf);
26905
26906 if (!tmpl_value_parm_die_table)
26907 vec_alloc (tmpl_value_parm_die_table, 32);
26908
26909 entry.die = die;
26910 entry.arg = arg;
26911 vec_safe_push (tmpl_value_parm_die_table, entry);
26912 }
26913
26914 /* Return TRUE if T is an instance of a generic type, FALSE
26915 otherwise. */
26916
26917 static bool
26918 generic_type_p (tree t)
26919 {
26920 if (t == NULL_TREE || !TYPE_P (t))
26921 return false;
26922 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26923 }
26924
26925 /* Schedule the generation of the generic parameter dies for the
26926 instance of generic type T. The proper generation itself is later
26927 done by gen_scheduled_generic_parms_dies. */
26928
26929 static void
26930 schedule_generic_params_dies_gen (tree t)
26931 {
26932 if (!generic_type_p (t))
26933 return;
26934
26935 gcc_assert (early_dwarf);
26936
26937 if (!generic_type_instances)
26938 vec_alloc (generic_type_instances, 256);
26939
26940 vec_safe_push (generic_type_instances, t);
26941 }
26942
26943 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26944 by append_entry_to_tmpl_value_parm_die_table. This function must
26945 be called after function DIEs have been generated. */
26946
26947 static void
26948 gen_remaining_tmpl_value_param_die_attribute (void)
26949 {
26950 if (tmpl_value_parm_die_table)
26951 {
26952 unsigned i, j;
26953 die_arg_entry *e;
26954
26955 /* We do this in two phases - first get the cases we can
26956 handle during early-finish, preserving those we cannot
26957 (containing symbolic constants where we don't yet know
26958 whether we are going to output the referenced symbols).
26959 For those we try again at late-finish. */
26960 j = 0;
26961 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26962 {
26963 if (!e->die->removed
26964 && !tree_add_const_value_attribute (e->die, e->arg))
26965 {
26966 dw_loc_descr_ref loc = NULL;
26967 if (! early_dwarf
26968 && (dwarf_version >= 5 || !dwarf_strict))
26969 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26970 if (loc)
26971 add_AT_loc (e->die, DW_AT_location, loc);
26972 else
26973 (*tmpl_value_parm_die_table)[j++] = *e;
26974 }
26975 }
26976 tmpl_value_parm_die_table->truncate (j);
26977 }
26978 }
26979
26980 /* Generate generic parameters DIEs for instances of generic types
26981 that have been previously scheduled by
26982 schedule_generic_params_dies_gen. This function must be called
26983 after all the types of the CU have been laid out. */
26984
26985 static void
26986 gen_scheduled_generic_parms_dies (void)
26987 {
26988 unsigned i;
26989 tree t;
26990
26991 if (!generic_type_instances)
26992 return;
26993
26994 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26995 if (COMPLETE_TYPE_P (t))
26996 gen_generic_params_dies (t);
26997
26998 generic_type_instances = NULL;
26999 }
27000
27001
27002 /* Replace DW_AT_name for the decl with name. */
27003
27004 static void
27005 dwarf2out_set_name (tree decl, tree name)
27006 {
27007 dw_die_ref die;
27008 dw_attr_node *attr;
27009 const char *dname;
27010
27011 die = TYPE_SYMTAB_DIE (decl);
27012 if (!die)
27013 return;
27014
27015 dname = dwarf2_name (name, 0);
27016 if (!dname)
27017 return;
27018
27019 attr = get_AT (die, DW_AT_name);
27020 if (attr)
27021 {
27022 struct indirect_string_node *node;
27023
27024 node = find_AT_string (dname);
27025 /* Replace the string. */
27026 attr->dw_attr_val.v.val_str = node;
27027 }
27028
27029 else
27030 add_name_attribute (die, dname);
27031 }
27032
27033 /* True if before or during processing of the first function being emitted. */
27034 static bool in_first_function_p = true;
27035 /* True if loc_note during dwarf2out_var_location call might still be
27036 before first real instruction at address equal to .Ltext0. */
27037 static bool maybe_at_text_label_p = true;
27038 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27039 static unsigned int first_loclabel_num_not_at_text_label;
27040
27041 /* Look ahead for a real insn, or for a begin stmt marker. */
27042
27043 static rtx_insn *
27044 dwarf2out_next_real_insn (rtx_insn *loc_note)
27045 {
27046 rtx_insn *next_real = NEXT_INSN (loc_note);
27047
27048 while (next_real)
27049 if (INSN_P (next_real))
27050 break;
27051 else
27052 next_real = NEXT_INSN (next_real);
27053
27054 return next_real;
27055 }
27056
27057 /* Called by the final INSN scan whenever we see a var location. We
27058 use it to drop labels in the right places, and throw the location in
27059 our lookup table. */
27060
27061 static void
27062 dwarf2out_var_location (rtx_insn *loc_note)
27063 {
27064 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27065 struct var_loc_node *newloc;
27066 rtx_insn *next_real, *next_note;
27067 rtx_insn *call_insn = NULL;
27068 static const char *last_label;
27069 static const char *last_postcall_label;
27070 static bool last_in_cold_section_p;
27071 static rtx_insn *expected_next_loc_note;
27072 tree decl;
27073 bool var_loc_p;
27074 var_loc_view view = 0;
27075
27076 if (!NOTE_P (loc_note))
27077 {
27078 if (CALL_P (loc_note))
27079 {
27080 maybe_reset_location_view (loc_note, cur_line_info_table);
27081 call_site_count++;
27082 if (SIBLING_CALL_P (loc_note))
27083 tail_call_site_count++;
27084 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27085 {
27086 call_insn = loc_note;
27087 loc_note = NULL;
27088 var_loc_p = false;
27089
27090 next_real = dwarf2out_next_real_insn (call_insn);
27091 next_note = NULL;
27092 cached_next_real_insn = NULL;
27093 goto create_label;
27094 }
27095 if (optimize == 0 && !flag_var_tracking)
27096 {
27097 /* When the var-tracking pass is not running, there is no note
27098 for indirect calls whose target is compile-time known. In this
27099 case, process such calls specifically so that we generate call
27100 sites for them anyway. */
27101 rtx x = PATTERN (loc_note);
27102 if (GET_CODE (x) == PARALLEL)
27103 x = XVECEXP (x, 0, 0);
27104 if (GET_CODE (x) == SET)
27105 x = SET_SRC (x);
27106 if (GET_CODE (x) == CALL)
27107 x = XEXP (x, 0);
27108 if (!MEM_P (x)
27109 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27110 || !SYMBOL_REF_DECL (XEXP (x, 0))
27111 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27112 != FUNCTION_DECL))
27113 {
27114 call_insn = loc_note;
27115 loc_note = NULL;
27116 var_loc_p = false;
27117
27118 next_real = dwarf2out_next_real_insn (call_insn);
27119 next_note = NULL;
27120 cached_next_real_insn = NULL;
27121 goto create_label;
27122 }
27123 }
27124 }
27125 else if (!debug_variable_location_views)
27126 gcc_unreachable ();
27127 else
27128 maybe_reset_location_view (loc_note, cur_line_info_table);
27129
27130 return;
27131 }
27132
27133 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27134 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27135 return;
27136
27137 /* Optimize processing a large consecutive sequence of location
27138 notes so we don't spend too much time in next_real_insn. If the
27139 next insn is another location note, remember the next_real_insn
27140 calculation for next time. */
27141 next_real = cached_next_real_insn;
27142 if (next_real)
27143 {
27144 if (expected_next_loc_note != loc_note)
27145 next_real = NULL;
27146 }
27147
27148 next_note = NEXT_INSN (loc_note);
27149 if (! next_note
27150 || next_note->deleted ()
27151 || ! NOTE_P (next_note)
27152 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27153 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27154 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27155 next_note = NULL;
27156
27157 if (! next_real)
27158 next_real = dwarf2out_next_real_insn (loc_note);
27159
27160 if (next_note)
27161 {
27162 expected_next_loc_note = next_note;
27163 cached_next_real_insn = next_real;
27164 }
27165 else
27166 cached_next_real_insn = NULL;
27167
27168 /* If there are no instructions which would be affected by this note,
27169 don't do anything. */
27170 if (var_loc_p
27171 && next_real == NULL_RTX
27172 && !NOTE_DURING_CALL_P (loc_note))
27173 return;
27174
27175 create_label:
27176
27177 if (next_real == NULL_RTX)
27178 next_real = get_last_insn ();
27179
27180 /* If there were any real insns between the note we processed last time
27181 and this note (or if it is the first note), clear
27182 last_{,postcall_}label so that they are not reused this time. */
27183 if (last_var_location_insn == NULL_RTX
27184 || last_var_location_insn != next_real
27185 || last_in_cold_section_p != in_cold_section_p)
27186 {
27187 last_label = NULL;
27188 last_postcall_label = NULL;
27189 }
27190
27191 if (var_loc_p)
27192 {
27193 const char *label
27194 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27195 view = cur_line_info_table->view;
27196 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27197 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27198 if (newloc == NULL)
27199 return;
27200 }
27201 else
27202 {
27203 decl = NULL_TREE;
27204 newloc = NULL;
27205 }
27206
27207 /* If there were no real insns between the note we processed last time
27208 and this note, use the label we emitted last time. Otherwise
27209 create a new label and emit it. */
27210 if (last_label == NULL)
27211 {
27212 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27213 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27214 loclabel_num++;
27215 last_label = ggc_strdup (loclabel);
27216 /* See if loclabel might be equal to .Ltext0. If yes,
27217 bump first_loclabel_num_not_at_text_label. */
27218 if (!have_multiple_function_sections
27219 && in_first_function_p
27220 && maybe_at_text_label_p)
27221 {
27222 static rtx_insn *last_start;
27223 rtx_insn *insn;
27224 for (insn = loc_note; insn; insn = previous_insn (insn))
27225 if (insn == last_start)
27226 break;
27227 else if (!NONDEBUG_INSN_P (insn))
27228 continue;
27229 else
27230 {
27231 rtx body = PATTERN (insn);
27232 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27233 continue;
27234 /* Inline asm could occupy zero bytes. */
27235 else if (GET_CODE (body) == ASM_INPUT
27236 || asm_noperands (body) >= 0)
27237 continue;
27238 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27239 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27240 continue;
27241 #endif
27242 else
27243 {
27244 /* Assume insn has non-zero length. */
27245 maybe_at_text_label_p = false;
27246 break;
27247 }
27248 }
27249 if (maybe_at_text_label_p)
27250 {
27251 last_start = loc_note;
27252 first_loclabel_num_not_at_text_label = loclabel_num;
27253 }
27254 }
27255 }
27256
27257 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27258 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27259
27260 if (!var_loc_p)
27261 {
27262 struct call_arg_loc_node *ca_loc
27263 = ggc_cleared_alloc<call_arg_loc_node> ();
27264 rtx_insn *prev = call_insn;
27265
27266 ca_loc->call_arg_loc_note
27267 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27268 ca_loc->next = NULL;
27269 ca_loc->label = last_label;
27270 gcc_assert (prev
27271 && (CALL_P (prev)
27272 || (NONJUMP_INSN_P (prev)
27273 && GET_CODE (PATTERN (prev)) == SEQUENCE
27274 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27275 if (!CALL_P (prev))
27276 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27277 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27278
27279 /* Look for a SYMBOL_REF in the "prev" instruction. */
27280 rtx x = get_call_rtx_from (PATTERN (prev));
27281 if (x)
27282 {
27283 /* Try to get the call symbol, if any. */
27284 if (MEM_P (XEXP (x, 0)))
27285 x = XEXP (x, 0);
27286 /* First, look for a memory access to a symbol_ref. */
27287 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27288 && SYMBOL_REF_DECL (XEXP (x, 0))
27289 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27290 ca_loc->symbol_ref = XEXP (x, 0);
27291 /* Otherwise, look at a compile-time known user-level function
27292 declaration. */
27293 else if (MEM_P (x)
27294 && MEM_EXPR (x)
27295 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27296 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27297 }
27298
27299 ca_loc->block = insn_scope (prev);
27300 if (call_arg_locations)
27301 call_arg_loc_last->next = ca_loc;
27302 else
27303 call_arg_locations = ca_loc;
27304 call_arg_loc_last = ca_loc;
27305 }
27306 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27307 {
27308 newloc->label = last_label;
27309 newloc->view = view;
27310 }
27311 else
27312 {
27313 if (!last_postcall_label)
27314 {
27315 sprintf (loclabel, "%s-1", last_label);
27316 last_postcall_label = ggc_strdup (loclabel);
27317 }
27318 newloc->label = last_postcall_label;
27319 /* ??? This view is at last_label, not last_label-1, but we
27320 could only assume view at last_label-1 is zero if we could
27321 assume calls always have length greater than one. This is
27322 probably true in general, though there might be a rare
27323 exception to this rule, e.g. if a call insn is optimized out
27324 by target magic. Then, even the -1 in the label will be
27325 wrong, which might invalidate the range. Anyway, using view,
27326 though technically possibly incorrect, will work as far as
27327 ranges go: since L-1 is in the middle of the call insn,
27328 (L-1).0 and (L-1).V shouldn't make any difference, and having
27329 the loclist entry refer to the .loc entry might be useful, so
27330 leave it like this. */
27331 newloc->view = view;
27332 }
27333
27334 if (var_loc_p && flag_debug_asm)
27335 {
27336 const char *name, *sep, *patstr;
27337 if (decl && DECL_NAME (decl))
27338 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27339 else
27340 name = "";
27341 if (NOTE_VAR_LOCATION_LOC (loc_note))
27342 {
27343 sep = " => ";
27344 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27345 }
27346 else
27347 {
27348 sep = " ";
27349 patstr = "RESET";
27350 }
27351 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27352 name, sep, patstr);
27353 }
27354
27355 last_var_location_insn = next_real;
27356 last_in_cold_section_p = in_cold_section_p;
27357 }
27358
27359 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27360 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27361 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27362 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27363 BLOCK_FRAGMENT_ORIGIN links. */
27364 static bool
27365 block_within_block_p (tree block, tree outer, bool bothways)
27366 {
27367 if (block == outer)
27368 return true;
27369
27370 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27371 for (tree context = BLOCK_SUPERCONTEXT (block);
27372 context != outer;
27373 context = BLOCK_SUPERCONTEXT (context))
27374 if (!context || TREE_CODE (context) != BLOCK)
27375 return false;
27376
27377 if (!bothways)
27378 return true;
27379
27380 /* Now check that each block is actually referenced by its
27381 parent. */
27382 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27383 context = BLOCK_SUPERCONTEXT (context))
27384 {
27385 if (BLOCK_FRAGMENT_ORIGIN (context))
27386 {
27387 gcc_assert (!BLOCK_SUBBLOCKS (context));
27388 context = BLOCK_FRAGMENT_ORIGIN (context);
27389 }
27390 for (tree sub = BLOCK_SUBBLOCKS (context);
27391 sub != block;
27392 sub = BLOCK_CHAIN (sub))
27393 if (!sub)
27394 return false;
27395 if (context == outer)
27396 return true;
27397 else
27398 block = context;
27399 }
27400 }
27401
27402 /* Called during final while assembling the marker of the entry point
27403 for an inlined function. */
27404
27405 static void
27406 dwarf2out_inline_entry (tree block)
27407 {
27408 gcc_assert (debug_inline_points);
27409
27410 /* If we can't represent it, don't bother. */
27411 if (!(dwarf_version >= 3 || !dwarf_strict))
27412 return;
27413
27414 gcc_assert (DECL_P (block_ultimate_origin (block)));
27415
27416 /* Sanity check the block tree. This would catch a case in which
27417 BLOCK got removed from the tree reachable from the outermost
27418 lexical block, but got retained in markers. It would still link
27419 back to its parents, but some ancestor would be missing a link
27420 down the path to the sub BLOCK. If the block got removed, its
27421 BLOCK_NUMBER will not be a usable value. */
27422 if (flag_checking)
27423 gcc_assert (block_within_block_p (block,
27424 DECL_INITIAL (current_function_decl),
27425 true));
27426
27427 gcc_assert (inlined_function_outer_scope_p (block));
27428 gcc_assert (!BLOCK_DIE (block));
27429
27430 if (BLOCK_FRAGMENT_ORIGIN (block))
27431 block = BLOCK_FRAGMENT_ORIGIN (block);
27432 /* Can the entry point ever not be at the beginning of an
27433 unfragmented lexical block? */
27434 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27435 || (cur_line_info_table
27436 && !ZERO_VIEW_P (cur_line_info_table->view))))
27437 return;
27438
27439 if (!inline_entry_data_table)
27440 inline_entry_data_table
27441 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27442
27443
27444 inline_entry_data **iedp
27445 = inline_entry_data_table->find_slot_with_hash (block,
27446 htab_hash_pointer (block),
27447 INSERT);
27448 if (*iedp)
27449 /* ??? Ideally, we'd record all entry points for the same inlined
27450 function (some may have been duplicated by e.g. unrolling), but
27451 we have no way to represent that ATM. */
27452 return;
27453
27454 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27455 ied->block = block;
27456 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27457 ied->label_num = BLOCK_NUMBER (block);
27458 if (cur_line_info_table)
27459 ied->view = cur_line_info_table->view;
27460
27461 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27462
27463 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27464 BLOCK_NUMBER (block));
27465 ASM_OUTPUT_LABEL (asm_out_file, label);
27466 }
27467
27468 /* Called from finalize_size_functions for size functions so that their body
27469 can be encoded in the debug info to describe the layout of variable-length
27470 structures. */
27471
27472 static void
27473 dwarf2out_size_function (tree decl)
27474 {
27475 function_to_dwarf_procedure (decl);
27476 }
27477
27478 /* Note in one location list that the text section has changed.  */
27479
27480 int
27481 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27482 {
27483 var_loc_list *list = *slot;
27484 if (list->first)
27485 list->last_before_switch
27486 = list->last->next ? list->last->next : list->last;
27487 return 1;
27488 }
27489
27490 /* Note in all location lists that the text section has changed.  */
27491
27492 static void
27493 var_location_switch_text_section (void)
27494 {
27495 if (decl_loc_table == NULL)
27496 return;
27497
27498 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27499 }
27500
27501 /* Create a new line number table. */
27502
27503 static dw_line_info_table *
27504 new_line_info_table (void)
27505 {
27506 dw_line_info_table *table;
27507
27508 table = ggc_cleared_alloc<dw_line_info_table> ();
27509 table->file_num = 1;
27510 table->line_num = 1;
27511 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27512 FORCE_RESET_NEXT_VIEW (table->view);
27513 table->symviews_since_reset = 0;
27514
27515 return table;
27516 }
27517
27518 /* Look up the "current" table into which we emit line info, so
27519 that we don't have to do it for every source line. */
27520
27521 static void
27522 set_cur_line_info_table (section *sec)
27523 {
27524 dw_line_info_table *table;
27525
27526 if (sec == text_section)
27527 table = text_section_line_info;
27528 else if (sec == cold_text_section)
27529 {
27530 table = cold_text_section_line_info;
27531 if (!table)
27532 {
27533 cold_text_section_line_info = table = new_line_info_table ();
27534 table->end_label = cold_end_label;
27535 }
27536 }
27537 else
27538 {
27539 const char *end_label;
27540
27541 if (crtl->has_bb_partition)
27542 {
27543 if (in_cold_section_p)
27544 end_label = crtl->subsections.cold_section_end_label;
27545 else
27546 end_label = crtl->subsections.hot_section_end_label;
27547 }
27548 else
27549 {
27550 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27551 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27552 current_function_funcdef_no);
27553 end_label = ggc_strdup (label);
27554 }
27555
27556 table = new_line_info_table ();
27557 table->end_label = end_label;
27558
27559 vec_safe_push (separate_line_info, table);
27560 }
27561
27562 if (output_asm_line_debug_info ())
27563 table->is_stmt = (cur_line_info_table
27564 ? cur_line_info_table->is_stmt
27565 : DWARF_LINE_DEFAULT_IS_STMT_START);
27566 cur_line_info_table = table;
27567 }
27568
27569
27570 /* We need to reset the locations at the beginning of each
27571 function. We can't do this in the end_function hook, because the
27572 declarations that use the locations won't have been output when
27573 that hook is called. Also compute have_multiple_function_sections here. */
27574
27575 static void
27576 dwarf2out_begin_function (tree fun)
27577 {
27578 section *sec = function_section (fun);
27579
27580 if (sec != text_section)
27581 have_multiple_function_sections = true;
27582
27583 if (crtl->has_bb_partition && !cold_text_section)
27584 {
27585 gcc_assert (current_function_decl == fun);
27586 cold_text_section = unlikely_text_section ();
27587 switch_to_section (cold_text_section);
27588 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27589 switch_to_section (sec);
27590 }
27591
27592 dwarf2out_note_section_used ();
27593 call_site_count = 0;
27594 tail_call_site_count = 0;
27595
27596 set_cur_line_info_table (sec);
27597 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27598 }
27599
27600 /* Helper function of dwarf2out_end_function, called only after emitting
27601 the very first function into assembly. Check if some .debug_loc range
27602 might end with a .LVL* label that could be equal to .Ltext0.
27603 In that case we must force using absolute addresses in .debug_loc ranges,
27604 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27605 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27606 list terminator.
27607 Set have_multiple_function_sections to true in that case and
27608 terminate htab traversal. */
27609
27610 int
27611 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27612 {
27613 var_loc_list *entry = *slot;
27614 struct var_loc_node *node;
27615
27616 node = entry->first;
27617 if (node && node->next && node->next->label)
27618 {
27619 unsigned int i;
27620 const char *label = node->next->label;
27621 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27622
27623 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27624 {
27625 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27626 if (strcmp (label, loclabel) == 0)
27627 {
27628 have_multiple_function_sections = true;
27629 return 0;
27630 }
27631 }
27632 }
27633 return 1;
27634 }
27635
27636 /* Hook called after emitting a function into assembly.
27637 This does something only for the very first function emitted. */
27638
27639 static void
27640 dwarf2out_end_function (unsigned int)
27641 {
27642 if (in_first_function_p
27643 && !have_multiple_function_sections
27644 && first_loclabel_num_not_at_text_label
27645 && decl_loc_table)
27646 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27647 in_first_function_p = false;
27648 maybe_at_text_label_p = false;
27649 }
27650
27651 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27652 front-ends register a translation unit even before dwarf2out_init is
27653 called. */
27654 static tree main_translation_unit = NULL_TREE;
27655
27656 /* Hook called by front-ends after they built their main translation unit.
27657 Associate comp_unit_die to UNIT. */
27658
27659 static void
27660 dwarf2out_register_main_translation_unit (tree unit)
27661 {
27662 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27663 && main_translation_unit == NULL_TREE);
27664 main_translation_unit = unit;
27665 /* If dwarf2out_init has not been called yet, it will perform the association
27666 itself looking at main_translation_unit. */
27667 if (decl_die_table != NULL)
27668 equate_decl_number_to_die (unit, comp_unit_die ());
27669 }
27670
27671 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27672
27673 static void
27674 push_dw_line_info_entry (dw_line_info_table *table,
27675 enum dw_line_info_opcode opcode, unsigned int val)
27676 {
27677 dw_line_info_entry e;
27678 e.opcode = opcode;
27679 e.val = val;
27680 vec_safe_push (table->entries, e);
27681 }
27682
27683 /* Output a label to mark the beginning of a source code line entry
27684 and record information relating to this source line, in
27685 'line_info_table' for later output of the .debug_line section. */
27686 /* ??? The discriminator parameter ought to be unsigned. */
27687
27688 static void
27689 dwarf2out_source_line (unsigned int line, unsigned int column,
27690 const char *filename,
27691 int discriminator, bool is_stmt)
27692 {
27693 unsigned int file_num;
27694 dw_line_info_table *table;
27695 static var_loc_view lvugid;
27696
27697 if (debug_info_level < DINFO_LEVEL_TERSE)
27698 return;
27699
27700 table = cur_line_info_table;
27701
27702 if (line == 0)
27703 {
27704 if (debug_variable_location_views
27705 && output_asm_line_debug_info ()
27706 && table && !RESETTING_VIEW_P (table->view))
27707 {
27708 /* If we're using the assembler to compute view numbers, we
27709 can't issue a .loc directive for line zero, so we can't
27710 get a view number at this point. We might attempt to
27711 compute it from the previous view, or equate it to a
27712 subsequent view (though it might not be there!), but
27713 since we're omitting the line number entry, we might as
27714 well omit the view number as well. That means pretending
27715 it's a view number zero, which might very well turn out
27716 to be correct. ??? Extend the assembler so that the
27717 compiler could emit e.g. ".locview .LVU#", to output a
27718 view without changing line number information. We'd then
27719 have to count it in symviews_since_reset; when it's omitted,
27720 it doesn't count. */
27721 if (!zero_view_p)
27722 zero_view_p = BITMAP_GGC_ALLOC ();
27723 bitmap_set_bit (zero_view_p, table->view);
27724 if (flag_debug_asm)
27725 {
27726 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27727 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27728 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27729 ASM_COMMENT_START);
27730 assemble_name (asm_out_file, label);
27731 putc ('\n', asm_out_file);
27732 }
27733 table->view = ++lvugid;
27734 }
27735 return;
27736 }
27737
27738   /* The discriminator column was added in DWARF 4.  Simplify the code
27739      below by clearing it if we're not supposed to output it.  */
27740 if (dwarf_version < 4 && dwarf_strict)
27741 discriminator = 0;
27742
27743 if (!debug_column_info)
27744 column = 0;
27745
27746 file_num = maybe_emit_file (lookup_filename (filename));
27747
27748 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27749 the debugger has used the second (possibly duplicate) line number
27750 at the beginning of the function to mark the end of the prologue.
27751 We could eliminate any other duplicates within the function. For
27752 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27753 that second line number entry. */
27754 /* Recall that this end-of-prologue indication is *not* the same thing
27755 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27756 to which the hook corresponds, follows the last insn that was
27757 emitted by gen_prologue. What we need is to precede the first insn
27758 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27759 insn that corresponds to something the user wrote. These may be
27760 very different locations once scheduling is enabled. */
27761
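  /* The duplicate-entry elision below is intentionally disabled (the
     "0 &&" in the condition); see the ??? comment above.  */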
27762 if (0 && file_num == table->file_num
27763 && line == table->line_num
27764 && column == table->column_num
27765 && discriminator == table->discrim_num
27766 && is_stmt == table->is_stmt)
27767 return;
27768
27769 switch_to_section (current_function_section ());
27770
27771 /* If requested, emit something human-readable. */
27772 if (flag_debug_asm)
27773 {
27774 if (debug_column_info)
27775 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27776 filename, line, column);
27777 else
27778 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27779 filename, line);
27780 }
27781
27782 if (output_asm_line_debug_info ())
27783 {
27784 /* Emit the .loc directive understood by GNU as. */
27785       /* "\t.loc %u %u %u is_stmt %u discriminator %u",
27786 	 file_num, line, column, is_stmt, discriminator  */
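      /* For example, with view numbering enabled this might emit something
	 like ".loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5"; the values
	 here are purely illustrative.  */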
27787 fputs ("\t.loc ", asm_out_file);
27788 fprint_ul (asm_out_file, file_num);
27789 putc (' ', asm_out_file);
27790 fprint_ul (asm_out_file, line);
27791 putc (' ', asm_out_file);
27792 fprint_ul (asm_out_file, column);
27793
27794 if (is_stmt != table->is_stmt)
27795 {
27796 fputs (" is_stmt ", asm_out_file);
27797 putc (is_stmt ? '1' : '0', asm_out_file);
27798 }
27799 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27800 {
27801 gcc_assert (discriminator > 0);
27802 fputs (" discriminator ", asm_out_file);
27803 fprint_ul (asm_out_file, (unsigned long) discriminator);
27804 }
27805 if (debug_variable_location_views)
27806 {
27807 if (!RESETTING_VIEW_P (table->view))
27808 {
27809 table->symviews_since_reset++;
27810 if (table->symviews_since_reset > symview_upper_bound)
27811 symview_upper_bound = table->symviews_since_reset;
27812 /* When we're using the assembler to compute view
27813 numbers, we output symbolic labels after "view" in
27814 .loc directives, and the assembler will set them for
27815 us, so that we can refer to the view numbers in
27816 location lists. The only exceptions are when we know
27817 a view will be zero: "-0" is a forced reset, used
27818 e.g. in the beginning of functions, whereas "0" tells
27819 the assembler to check that there was a PC change
27820 since the previous view, in a way that implicitly
27821 resets the next view. */
27822 fputs (" view ", asm_out_file);
27823 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27824 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27825 assemble_name (asm_out_file, label);
27826 table->view = ++lvugid;
27827 }
27828 else
27829 {
27830 table->symviews_since_reset = 0;
27831 if (FORCE_RESETTING_VIEW_P (table->view))
27832 fputs (" view -0", asm_out_file);
27833 else
27834 fputs (" view 0", asm_out_file);
27835 /* Mark the present view as a zero view. Earlier debug
27836 binds may have already added its id to loclists to be
27837 emitted later, so we can't reuse the id for something
27838 else. However, it's good to know whether a view is
27839 known to be zero, because then we may be able to
27840 optimize out locviews that are all zeros, so take
27841 note of it in zero_view_p. */
27842 if (!zero_view_p)
27843 zero_view_p = BITMAP_GGC_ALLOC ();
27844 bitmap_set_bit (zero_view_p, lvugid);
27845 table->view = ++lvugid;
27846 }
27847 }
27848 putc ('\n', asm_out_file);
27849 }
27850 else
27851 {
27852 unsigned int label_num = ++line_info_label_num;
27853
27854 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27855
27856 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27857 push_dw_line_info_entry (table, LI_adv_address, label_num);
27858 else
27859 push_dw_line_info_entry (table, LI_set_address, label_num);
27860 if (debug_variable_location_views)
27861 {
27862 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27863 if (resetting)
27864 table->view = 0;
27865
27866 if (flag_debug_asm)
27867 fprintf (asm_out_file, "\t%s view %s%d\n",
27868 ASM_COMMENT_START,
27869 resetting ? "-" : "",
27870 table->view);
27871
27872 table->view++;
27873 }
27874 if (file_num != table->file_num)
27875 push_dw_line_info_entry (table, LI_set_file, file_num);
27876 if (discriminator != table->discrim_num)
27877 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27878 if (is_stmt != table->is_stmt)
27879 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27880 push_dw_line_info_entry (table, LI_set_line, line);
27881 if (debug_column_info)
27882 push_dw_line_info_entry (table, LI_set_column, column);
27883 }
27884
27885 table->file_num = file_num;
27886 table->line_num = line;
27887 table->column_num = column;
27888 table->discrim_num = discriminator;
27889 table->is_stmt = is_stmt;
27890 table->in_use = true;
27891 }
27892
27893 /* Record the beginning of a new source file. */
27894
27895 static void
27896 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27897 {
27898 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27899 {
27900 macinfo_entry e;
27901 e.code = DW_MACINFO_start_file;
27902 e.lineno = lineno;
27903 e.info = ggc_strdup (filename);
27904 vec_safe_push (macinfo_table, e);
27905 }
27906 }
27907
27908 /* Record the end of a source file. */
27909
27910 static void
27911 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27912 {
27913 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27914 {
27915 macinfo_entry e;
27916 e.code = DW_MACINFO_end_file;
27917 e.lineno = lineno;
27918 e.info = NULL;
27919 vec_safe_push (macinfo_table, e);
27920 }
27921 }
27922
27923 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27924    the tail part of the directive line, i.e. the part past the initial
27925    whitespace, #, whitespace, directive-name and whitespace.  */
27926
27927 static void
27928 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27929 const char *buffer ATTRIBUTE_UNUSED)
27930 {
27931 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27932 {
27933 macinfo_entry e;
27934 /* Insert a dummy first entry to be able to optimize the whole
27935 predefined macro block using DW_MACRO_import. */
27936 if (macinfo_table->is_empty () && lineno <= 1)
27937 {
27938 e.code = 0;
27939 e.lineno = 0;
27940 e.info = NULL;
27941 vec_safe_push (macinfo_table, e);
27942 }
27943 e.code = DW_MACINFO_define;
27944 e.lineno = lineno;
27945 e.info = ggc_strdup (buffer);
27946 vec_safe_push (macinfo_table, e);
27947 }
27948 }
27949
27950 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27951    the tail part of the directive line, i.e. the part past the initial
27952    whitespace, #, whitespace, directive-name and whitespace.  */
27953
27954 static void
27955 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27956 const char *buffer ATTRIBUTE_UNUSED)
27957 {
27958 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27959 {
27960 macinfo_entry e;
27961 /* Insert a dummy first entry to be able to optimize the whole
27962 predefined macro block using DW_MACRO_import. */
27963 if (macinfo_table->is_empty () && lineno <= 1)
27964 {
27965 e.code = 0;
27966 e.lineno = 0;
27967 e.info = NULL;
27968 vec_safe_push (macinfo_table, e);
27969 }
27970 e.code = DW_MACINFO_undef;
27971 e.lineno = lineno;
27972 e.info = ggc_strdup (buffer);
27973 vec_safe_push (macinfo_table, e);
27974 }
27975 }
27976
27977 /* Helpers to manipulate hash table of CUs. */
27978
27979 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
27980 {
27981 static inline hashval_t hash (const macinfo_entry *);
27982 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
27983 };
27984
27985 inline hashval_t
27986 macinfo_entry_hasher::hash (const macinfo_entry *entry)
27987 {
27988 return htab_hash_string (entry->info);
27989 }
27990
27991 inline bool
27992 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
27993 const macinfo_entry *entry2)
27994 {
27995 return !strcmp (entry1->info, entry2->info);
27996 }
27997
27998 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
27999
28000 /* Output a single .debug_macinfo entry. */
28001
28002 static void
28003 output_macinfo_op (macinfo_entry *ref)
28004 {
28005 int file_num;
28006 size_t len;
28007 struct indirect_string_node *node;
28008 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28009 struct dwarf_file_data *fd;
28010
28011 switch (ref->code)
28012 {
28013 case DW_MACINFO_start_file:
28014 fd = lookup_filename (ref->info);
28015 file_num = maybe_emit_file (fd);
28016 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28017 dw2_asm_output_data_uleb128 (ref->lineno,
28018 "Included from line number %lu",
28019 (unsigned long) ref->lineno);
28020 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28021 break;
28022 case DW_MACINFO_end_file:
28023 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28024 break;
28025 case DW_MACINFO_define:
28026 case DW_MACINFO_undef:
28027 len = strlen (ref->info) + 1;
28028 if (!dwarf_strict
28029 && len > DWARF_OFFSET_SIZE
28030 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28031 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28032 {
28033 ref->code = ref->code == DW_MACINFO_define
28034 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28035 output_macinfo_op (ref);
28036 return;
28037 }
28038 dw2_asm_output_data (1, ref->code,
28039 ref->code == DW_MACINFO_define
28040 ? "Define macro" : "Undefine macro");
28041 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28042 (unsigned long) ref->lineno);
28043 dw2_asm_output_nstring (ref->info, -1, "The macro");
28044 break;
28045 case DW_MACRO_define_strp:
28046 case DW_MACRO_undef_strp:
28047 node = find_AT_string (ref->info);
28048 gcc_assert (node
28049 && (node->form == DW_FORM_strp
28050 || node->form == DW_FORM_GNU_str_index));
28051 dw2_asm_output_data (1, ref->code,
28052 ref->code == DW_MACRO_define_strp
28053 ? "Define macro strp"
28054 : "Undefine macro strp");
28055 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28056 (unsigned long) ref->lineno);
28057 if (node->form == DW_FORM_strp)
28058 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28059 debug_str_section, "The macro: \"%s\"",
28060 ref->info);
28061 else
28062 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28063 ref->info);
28064 break;
28065 case DW_MACRO_import:
28066 dw2_asm_output_data (1, ref->code, "Import");
28067 ASM_GENERATE_INTERNAL_LABEL (label,
28068 DEBUG_MACRO_SECTION_LABEL,
28069 ref->lineno + macinfo_label_base);
28070 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28071 break;
28072 default:
28073 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28074 ASM_COMMENT_START, (unsigned long) ref->code);
28075 break;
28076 }
28077 }
28078
28079 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28080    other compilation units' .debug_macinfo sections.  IDX is the index of
28081    the first define/undef op; return the number of ops that should be
28082    emitted in a comdat .debug_macinfo section and emit
28083    a DW_MACRO_import entry referencing it.
28084    If the define/undef entry should be emitted normally, return 0.  */
28085
28086 static unsigned
28087 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28088 macinfo_hash_type **macinfo_htab)
28089 {
28090 macinfo_entry *first, *second, *cur, *inc;
28091 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28092 unsigned char checksum[16];
28093 struct md5_ctx ctx;
28094 char *grp_name, *tail;
28095 const char *base;
28096 unsigned int i, count, encoded_filename_len, linebuf_len;
28097 macinfo_entry **slot;
28098
28099 first = &(*macinfo_table)[idx];
28100 second = &(*macinfo_table)[idx + 1];
28101
28102 /* Optimize only if there are at least two consecutive define/undef ops,
28103      and either all of them are before the first DW_MACINFO_start_file
28104 with lineno {0,1} (i.e. predefined macro block), or all of them are
28105 in some included header file. */
28106 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28107 return 0;
28108 if (vec_safe_is_empty (files))
28109 {
28110 if (first->lineno > 1 || second->lineno > 1)
28111 return 0;
28112 }
28113 else if (first->lineno == 0)
28114 return 0;
28115
28116 /* Find the last define/undef entry that can be grouped together
28117 with first and at the same time compute md5 checksum of their
28118 codes, linenumbers and strings. */
28119 md5_init_ctx (&ctx);
28120 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28121 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28122 break;
28123 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28124 break;
28125 else
28126 {
28127 unsigned char code = cur->code;
28128 md5_process_bytes (&code, 1, &ctx);
28129 checksum_uleb128 (cur->lineno, &ctx);
28130 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28131 }
28132 md5_finish_ctx (&ctx, checksum);
28133 count = i - idx;
28134
28135 /* From the containing include filename (if any) pick up just
28136 usable characters from its basename. */
28137 if (vec_safe_is_empty (files))
28138 base = "";
28139 else
28140 base = lbasename (files->last ().info);
28141 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28142 if (ISIDNUM (base[i]) || base[i] == '.')
28143 encoded_filename_len++;
28144   /* Account for the '.' separator that follows the filename.  */
28145 if (encoded_filename_len)
28146 encoded_filename_len++;
28147
28148 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28149 linebuf_len = strlen (linebuf);
28150
28151 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
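  /* For instance, a run of defines starting at line 3 of a header foo.h
     might produce a group name such as (hypothetical values):
	wm4.foo.h.3.d41d8cd98f00b204e9800998ecf8427e  */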
28152 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28153 + 16 * 2 + 1);
28154 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28155 tail = grp_name + 4;
28156 if (encoded_filename_len)
28157 {
28158 for (i = 0; base[i]; i++)
28159 if (ISIDNUM (base[i]) || base[i] == '.')
28160 *tail++ = base[i];
28161 *tail++ = '.';
28162 }
28163 memcpy (tail, linebuf, linebuf_len);
28164 tail += linebuf_len;
28165 *tail++ = '.';
28166 for (i = 0; i < 16; i++)
28167 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28168
28169 /* Construct a macinfo_entry for DW_MACRO_import
28170 in the empty vector entry before the first define/undef. */
28171 inc = &(*macinfo_table)[idx - 1];
28172 inc->code = DW_MACRO_import;
28173 inc->lineno = 0;
28174 inc->info = ggc_strdup (grp_name);
28175 if (!*macinfo_htab)
28176 *macinfo_htab = new macinfo_hash_type (10);
28177 /* Avoid emitting duplicates. */
28178 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28179 if (*slot != NULL)
28180 {
28181 inc->code = 0;
28182 inc->info = NULL;
28183 /* If such an entry has been used before, just emit
28184 a DW_MACRO_import op. */
28185 inc = *slot;
28186 output_macinfo_op (inc);
28187 /* And clear all macinfo_entry in the range to avoid emitting them
28188 in the second pass. */
28189 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28190 {
28191 cur->code = 0;
28192 cur->info = NULL;
28193 }
28194 }
28195 else
28196 {
28197 *slot = inc;
28198 inc->lineno = (*macinfo_htab)->elements ();
28199 output_macinfo_op (inc);
28200 }
28201 return count;
28202 }
28203
28204 /* Save any strings needed by the macinfo table in the debug str
28205 table. All strings must be collected into the table by the time
28206 index_string is called. */
28207
28208 static void
28209 save_macinfo_strings (void)
28210 {
28211 unsigned len;
28212 unsigned i;
28213 macinfo_entry *ref;
28214
28215 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28216 {
28217 switch (ref->code)
28218 {
28219 /* Match the logic in output_macinfo_op to decide on
28220 indirect strings. */
28221 case DW_MACINFO_define:
28222 case DW_MACINFO_undef:
28223 len = strlen (ref->info) + 1;
28224 if (!dwarf_strict
28225 && len > DWARF_OFFSET_SIZE
28226 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28227 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28228 set_indirect_string (find_AT_string (ref->info));
28229 break;
28230 case DW_MACRO_define_strp:
28231 case DW_MACRO_undef_strp:
28232 set_indirect_string (find_AT_string (ref->info));
28233 break;
28234 default:
28235 break;
28236 }
28237 }
28238 }
28239
28240 /* Output macinfo section(s). */
28241
28242 static void
28243 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28244 {
28245 unsigned i;
28246 unsigned long length = vec_safe_length (macinfo_table);
28247 macinfo_entry *ref;
28248 vec<macinfo_entry, va_gc> *files = NULL;
28249 macinfo_hash_type *macinfo_htab = NULL;
28250 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28251
28252 if (! length)
28253 return;
28254
28255 /* output_macinfo* uses these interchangeably. */
28256 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28257 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28258 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28259 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28260
28261 /* AIX Assembler inserts the length, so adjust the reference to match the
28262 offset expected by debuggers. */
28263 strcpy (dl_section_ref, debug_line_label);
28264 if (XCOFF_DEBUGGING_INFO)
28265 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28266
28267 /* For .debug_macro emit the section header. */
28268 if (!dwarf_strict || dwarf_version >= 5)
28269 {
28270 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28271 "DWARF macro version number");
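      /* Flags byte of the macro unit header: bit 0 set selects 64-bit
	 offsets, bit 1 set means an offset into .debug_line follows
	 (a brief summary of the DWARF 5 header flags, for reference).  */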
28272 if (DWARF_OFFSET_SIZE == 8)
28273 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28274 else
28275 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28276 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28277 debug_line_section, NULL);
28278 }
28279
28280   /* The first loop emits the primary .debug_macinfo section and clears
28281      each macinfo_entry after its op has been emitted.
28282      If a longer range of define/undef ops can be optimized using
28283      DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28284      the vector slot before the first define/undef in the range, while the
28285      define/undef ops themselves are kept unemitted for the second loop.  */
28286 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28287 {
28288 switch (ref->code)
28289 {
28290 case DW_MACINFO_start_file:
28291 vec_safe_push (files, *ref);
28292 break;
28293 case DW_MACINFO_end_file:
28294 if (!vec_safe_is_empty (files))
28295 files->pop ();
28296 break;
28297 case DW_MACINFO_define:
28298 case DW_MACINFO_undef:
28299 if ((!dwarf_strict || dwarf_version >= 5)
28300 && HAVE_COMDAT_GROUP
28301 && vec_safe_length (files) != 1
28302 && i > 0
28303 && i + 1 < length
28304 && (*macinfo_table)[i - 1].code == 0)
28305 {
28306 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28307 if (count)
28308 {
28309 i += count - 1;
28310 continue;
28311 }
28312 }
28313 break;
28314 case 0:
28315 /* A dummy entry may be inserted at the beginning to be able
28316 to optimize the whole block of predefined macros. */
28317 if (i == 0)
28318 continue;
28319 default:
28320 break;
28321 }
28322 output_macinfo_op (ref);
28323 ref->info = NULL;
28324 ref->code = 0;
28325 }
28326
28327 if (!macinfo_htab)
28328 return;
28329
28330 /* Save the number of transparent includes so we can adjust the
28331 label number for the fat LTO object DWARF. */
28332 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28333
28334 delete macinfo_htab;
28335 macinfo_htab = NULL;
28336
28337   /* If any DW_MACRO_import entries were used, terminate the current chain
28338      at each of them, switch to a new comdat .debug_macinfo section and
28339      emit the corresponding define/undef entries within it.  */
28340 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28341 switch (ref->code)
28342 {
28343 case 0:
28344 continue;
28345 case DW_MACRO_import:
28346 {
28347 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28348 tree comdat_key = get_identifier (ref->info);
28349 /* Terminate the previous .debug_macinfo section. */
28350 dw2_asm_output_data (1, 0, "End compilation unit");
28351 targetm.asm_out.named_section (debug_macinfo_section_name,
28352 SECTION_DEBUG
28353 | SECTION_LINKONCE
28354 | (early_lto_debug
28355 ? SECTION_EXCLUDE : 0),
28356 comdat_key);
28357 ASM_GENERATE_INTERNAL_LABEL (label,
28358 DEBUG_MACRO_SECTION_LABEL,
28359 ref->lineno + macinfo_label_base);
28360 ASM_OUTPUT_LABEL (asm_out_file, label);
28361 ref->code = 0;
28362 ref->info = NULL;
28363 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28364 "DWARF macro version number");
28365 if (DWARF_OFFSET_SIZE == 8)
28366 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28367 else
28368 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28369 }
28370 break;
28371 case DW_MACINFO_define:
28372 case DW_MACINFO_undef:
28373 output_macinfo_op (ref);
28374 ref->code = 0;
28375 ref->info = NULL;
28376 break;
28377 default:
28378 gcc_unreachable ();
28379 }
28380
28381 macinfo_label_base += macinfo_label_base_adj;
28382 }
28383
28384 /* Initialize the various sections and labels for dwarf output, using
28385    the early LTO debug sections if EARLY_LTO_DEBUG.  Returns the
28386    generation (zero-based number of times the function was called).  */
28387
28388 static unsigned
28389 init_sections_and_labels (bool early_lto_debug)
28390 {
28391   /* As we may get called multiple times, keep a generation count for
28392      labels.  */
28393 static unsigned generation = 0;
28394
28395 if (early_lto_debug)
28396 {
28397 if (!dwarf_split_debug_info)
28398 {
28399 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28400 SECTION_DEBUG | SECTION_EXCLUDE,
28401 NULL);
28402 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28403 SECTION_DEBUG | SECTION_EXCLUDE,
28404 NULL);
28405 debug_macinfo_section_name
28406 = ((dwarf_strict && dwarf_version < 5)
28407 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28408 debug_macinfo_section = get_section (debug_macinfo_section_name,
28409 SECTION_DEBUG
28410 | SECTION_EXCLUDE, NULL);
28411 }
28412 else
28413 {
28414 /* ??? Which of the following do we need early? */
28415 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28416 SECTION_DEBUG | SECTION_EXCLUDE,
28417 NULL);
28418 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28419 SECTION_DEBUG | SECTION_EXCLUDE,
28420 NULL);
28421 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28422 SECTION_DEBUG
28423 | SECTION_EXCLUDE, NULL);
28424 debug_skeleton_abbrev_section
28425 = get_section (DEBUG_LTO_ABBREV_SECTION,
28426 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28427 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28428 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28429 generation);
28430
28431 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28432 stay in the main .o, but the skeleton_line goes into the split
28433 off dwo. */
28434 debug_skeleton_line_section
28435 = get_section (DEBUG_LTO_LINE_SECTION,
28436 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28437 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28438 DEBUG_SKELETON_LINE_SECTION_LABEL,
28439 generation);
28440 debug_str_offsets_section
28441 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28442 SECTION_DEBUG | SECTION_EXCLUDE,
28443 NULL);
28444 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28445 DEBUG_SKELETON_INFO_SECTION_LABEL,
28446 generation);
28447 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28448 DEBUG_STR_DWO_SECTION_FLAGS,
28449 NULL);
28450 debug_macinfo_section_name
28451 = ((dwarf_strict && dwarf_version < 5)
28452 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28453 debug_macinfo_section = get_section (debug_macinfo_section_name,
28454 SECTION_DEBUG | SECTION_EXCLUDE,
28455 NULL);
28456 }
28457 /* For macro info and the file table we have to refer to a
28458 debug_line section. */
28459 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28460 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28461 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28462 DEBUG_LINE_SECTION_LABEL, generation);
28463
28464 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28465 DEBUG_STR_SECTION_FLAGS
28466 | SECTION_EXCLUDE, NULL);
28467 if (!dwarf_split_debug_info && !dwarf2out_as_loc_support)
28468 debug_line_str_section
28469 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28470 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28471 }
28472 else
28473 {
28474 if (!dwarf_split_debug_info)
28475 {
28476 debug_info_section = get_section (DEBUG_INFO_SECTION,
28477 SECTION_DEBUG, NULL);
28478 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28479 SECTION_DEBUG, NULL);
28480 debug_loc_section = get_section (dwarf_version >= 5
28481 ? DEBUG_LOCLISTS_SECTION
28482 : DEBUG_LOC_SECTION,
28483 SECTION_DEBUG, NULL);
28484 debug_macinfo_section_name
28485 = ((dwarf_strict && dwarf_version < 5)
28486 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28487 debug_macinfo_section = get_section (debug_macinfo_section_name,
28488 SECTION_DEBUG, NULL);
28489 }
28490 else
28491 {
28492 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28493 SECTION_DEBUG | SECTION_EXCLUDE,
28494 NULL);
28495 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28496 SECTION_DEBUG | SECTION_EXCLUDE,
28497 NULL);
28498 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28499 SECTION_DEBUG, NULL);
28500 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28501 SECTION_DEBUG, NULL);
28502 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28503 SECTION_DEBUG, NULL);
28504 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28505 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28506 generation);
28507
28508 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28509 stay in the main .o, but the skeleton_line goes into the
28510 split off dwo. */
28511 debug_skeleton_line_section
28512 = get_section (DEBUG_DWO_LINE_SECTION,
28513 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28514 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28515 DEBUG_SKELETON_LINE_SECTION_LABEL,
28516 generation);
28517 debug_str_offsets_section
28518 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28519 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28520 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28521 DEBUG_SKELETON_INFO_SECTION_LABEL,
28522 generation);
28523 debug_loc_section = get_section (dwarf_version >= 5
28524 ? DEBUG_DWO_LOCLISTS_SECTION
28525 : DEBUG_DWO_LOC_SECTION,
28526 SECTION_DEBUG | SECTION_EXCLUDE,
28527 NULL);
28528 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28529 DEBUG_STR_DWO_SECTION_FLAGS,
28530 NULL);
28531 debug_macinfo_section_name
28532 = ((dwarf_strict && dwarf_version < 5)
28533 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28534 debug_macinfo_section = get_section (debug_macinfo_section_name,
28535 SECTION_DEBUG | SECTION_EXCLUDE,
28536 NULL);
28537 }
28538 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28539 SECTION_DEBUG, NULL);
28540 debug_line_section = get_section (DEBUG_LINE_SECTION,
28541 SECTION_DEBUG, NULL);
28542 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28543 SECTION_DEBUG, NULL);
28544 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28545 SECTION_DEBUG, NULL);
28546 debug_str_section = get_section (DEBUG_STR_SECTION,
28547 DEBUG_STR_SECTION_FLAGS, NULL);
28548 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28549 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28550 DEBUG_STR_SECTION_FLAGS, NULL);
28551
28552 debug_ranges_section = get_section (dwarf_version >= 5
28553 ? DEBUG_RNGLISTS_SECTION
28554 : DEBUG_RANGES_SECTION,
28555 SECTION_DEBUG, NULL);
28556 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28557 SECTION_DEBUG, NULL);
28558 }
28559
28560 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28561 DEBUG_ABBREV_SECTION_LABEL, generation);
28562 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28563 DEBUG_INFO_SECTION_LABEL, generation);
28564 info_section_emitted = false;
28565 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28566 DEBUG_LINE_SECTION_LABEL, generation);
28567 /* There are up to 4 unique ranges labels per generation.
28568 See also output_rnglists. */
28569 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28570 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28571 if (dwarf_version >= 5 && dwarf_split_debug_info)
28572 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28573 DEBUG_RANGES_SECTION_LABEL,
28574 1 + generation * 4);
28575 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28576 DEBUG_ADDR_SECTION_LABEL, generation);
28577 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28578 (dwarf_strict && dwarf_version < 5)
28579 ? DEBUG_MACINFO_SECTION_LABEL
28580 : DEBUG_MACRO_SECTION_LABEL, generation);
28581 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28582 generation);
28583
28584 ++generation;
28585 return generation - 1;
28586 }
28587
28588 /* Set up for Dwarf output at the start of compilation. */
28589
28590 static void
28591 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28592 {
28593 /* Allocate the file_table. */
28594 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28595
28596 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28597 /* Allocate the decl_die_table. */
28598 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28599
28600 /* Allocate the decl_loc_table. */
28601 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28602
28603 /* Allocate the cached_dw_loc_list_table. */
28604 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28605
28606 /* Allocate the initial hunk of the decl_scope_table. */
28607 vec_alloc (decl_scope_table, 256);
28608
28609 /* Allocate the initial hunk of the abbrev_die_table. */
28610 vec_alloc (abbrev_die_table, 256);
28611 /* Zero-th entry is allocated, but unused. */
28612 abbrev_die_table->quick_push (NULL);
28613
28614 /* Allocate the dwarf_proc_stack_usage_map. */
28615 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28616
28617 /* Allocate the pubtypes and pubnames vectors. */
28618 vec_alloc (pubname_table, 32);
28619 vec_alloc (pubtype_table, 32);
28620
28621 vec_alloc (incomplete_types, 64);
28622
28623 vec_alloc (used_rtx_array, 32);
28624
28625 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28626 vec_alloc (macinfo_table, 64);
28627 #endif
28628
28629 /* If front-ends already registered a main translation unit but we were not
28630 ready to perform the association, do this now. */
28631 if (main_translation_unit != NULL_TREE)
28632 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28633 }
28634
28635 /* Called before compile () starts outputting functions, variables
28636 and toplevel asms into assembly. */
28637
28638 static void
28639 dwarf2out_assembly_start (void)
28640 {
28641 if (text_section_line_info)
28642 return;
28643
28644 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28645 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28646 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28647 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28648 COLD_TEXT_SECTION_LABEL, 0);
28649 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28650
28651 switch_to_section (text_section);
28652 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28653 #endif
28654
28655 /* Make sure the line number table for .text always exists. */
28656 text_section_line_info = new_line_info_table ();
28657 text_section_line_info->end_label = text_end_label;
28658
28659 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28660 cur_line_info_table = text_section_line_info;
28661 #endif
28662
28663 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28664 && dwarf2out_do_cfi_asm ()
28665 && !dwarf2out_do_eh_frame ())
28666 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28667 }
28668
28669 /* A helper function for dwarf2out_finish called through
28670 htab_traverse. Assign a string its index. All strings must be
28671 collected into the table by the time index_string is called,
28672 because the indexing code relies on htab_traverse to traverse nodes
28673 in the same order for each run. */
28674
28675 int
28676 index_string (indirect_string_node **h, unsigned int *index)
28677 {
28678 indirect_string_node *node = *h;
28679
28680 find_string_form (node);
28681 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28682 {
28683 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28684 node->index = *index;
28685 *index += 1;
28686 }
28687 return 1;
28688 }
28689
28690 /* A helper function for output_indirect_strings called through
28691 htab_traverse. Output the offset to a string and update the
28692 current offset. */
28693
28694 int
28695 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28696 {
28697 indirect_string_node *node = *h;
28698
28699 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28700 {
28701 /* Assert that this node has been assigned an index. */
28702 gcc_assert (node->index != NO_INDEX_ASSIGNED
28703 && node->index != NOT_INDEXED);
28704 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28705 "indexed string 0x%x: %s", node->index, node->str);
28706 *offset += strlen (node->str) + 1;
28707 }
28708 return 1;
28709 }
28710
28711 /* A helper function for dwarf2out_finish called through
28712 htab_traverse. Output the indexed string. */
28713
28714 int
28715 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28716 {
28717 struct indirect_string_node *node = *h;
28718
28719 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28720 {
28721 /* Assert that the strings are output in the same order as their
28722 indexes were assigned. */
28723 gcc_assert (*cur_idx == node->index);
28724 assemble_string (node->str, strlen (node->str) + 1);
28725 *cur_idx += 1;
28726 }
28727 return 1;
28728 }
28729
28730 /* A helper function for dwarf2out_finish called through
28731 htab_traverse. Emit one queued .debug_str string. */
28732
28733 int
28734 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28735 {
28736 struct indirect_string_node *node = *h;
28737
28738 node->form = find_string_form (node);
28739 if (node->form == form && node->refcount > 0)
28740 {
28741 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28742 assemble_string (node->str, strlen (node->str) + 1);
28743 }
28744
28745 return 1;
28746 }
28747
28748 /* Output the indexed string table. */
28749
28750 static void
28751 output_indirect_strings (void)
28752 {
28753 switch_to_section (debug_str_section);
28754 if (!dwarf_split_debug_info)
28755 debug_str_hash->traverse<enum dwarf_form,
28756 output_indirect_string> (DW_FORM_strp);
28757 else
28758 {
28759 unsigned int offset = 0;
28760 unsigned int cur_idx = 0;
28761
28762 if (skeleton_debug_str_hash)
28763 skeleton_debug_str_hash->traverse<enum dwarf_form,
28764 output_indirect_string> (DW_FORM_strp);
28765
28766 switch_to_section (debug_str_offsets_section);
28767 debug_str_hash->traverse_noresize
28768 <unsigned int *, output_index_string_offset> (&offset);
28769 switch_to_section (debug_str_dwo_section);
28770 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28771 (&cur_idx);
28772 }
28773 }
28774
28775 /* Callback for htab_traverse to assign an index to an entry in the
28776 table, and to write that entry to the .debug_addr section. */
28777
28778 int
28779 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28780 {
28781 addr_table_entry *entry = *slot;
28782
28783 if (entry->refcount == 0)
28784 {
28785 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28786 || entry->index == NOT_INDEXED);
28787 return 1;
28788 }
28789
28790 gcc_assert (entry->index == *cur_index);
28791 (*cur_index)++;
28792
28793 switch (entry->kind)
28794 {
28795 case ate_kind_rtx:
28796 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28797 "0x%x", entry->index);
28798 break;
28799 case ate_kind_rtx_dtprel:
28800 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28801 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28802 DWARF2_ADDR_SIZE,
28803 entry->addr.rtl);
28804 fputc ('\n', asm_out_file);
28805 break;
28806 case ate_kind_label:
28807 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28808 "0x%x", entry->index);
28809 break;
28810 default:
28811 gcc_unreachable ();
28812 }
28813 return 1;
28814 }
28815
28816 /* Produce the .debug_addr section. */
28817
28818 static void
28819 output_addr_table (void)
28820 {
28821 unsigned int index = 0;
28822 if (addr_index_table == NULL || addr_index_table->size () == 0)
28823 return;
28824
28825 switch_to_section (debug_addr_section);
28826 addr_index_table
28827 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28828 }
28829
28830 #if ENABLE_ASSERT_CHECKING
28831 /* Verify that all marks are clear. */
28832
28833 static void
28834 verify_marks_clear (dw_die_ref die)
28835 {
28836 dw_die_ref c;
28837
28838 gcc_assert (! die->die_mark);
28839 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28840 }
28841 #endif /* ENABLE_ASSERT_CHECKING */
28842
28843 /* Clear the marks for a die and its children.
28844 Be cool if the mark isn't set. */
28845
28846 static void
28847 prune_unmark_dies (dw_die_ref die)
28848 {
28849 dw_die_ref c;
28850
28851 if (die->die_mark)
28852 die->die_mark = 0;
28853 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28854 }
28855
28856 /* Given LOC that is referenced by a DIE we're marking as used, find all
28857    DWARF procedures it references and mark them as used.  */
28858
28859 static void
28860 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28861 {
28862 for (; loc != NULL; loc = loc->dw_loc_next)
28863 switch (loc->dw_loc_opc)
28864 {
28865 case DW_OP_implicit_pointer:
28866 case DW_OP_convert:
28867 case DW_OP_reinterpret:
28868 case DW_OP_GNU_implicit_pointer:
28869 case DW_OP_GNU_convert:
28870 case DW_OP_GNU_reinterpret:
28871 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28872 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28873 break;
28874 case DW_OP_GNU_variable_value:
28875 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28876 {
28877 dw_die_ref ref
28878 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28879 if (ref == NULL)
28880 break;
28881 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28882 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28883 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28884 }
28885 /* FALLTHRU */
28886 case DW_OP_call2:
28887 case DW_OP_call4:
28888 case DW_OP_call_ref:
28889 case DW_OP_const_type:
28890 case DW_OP_GNU_const_type:
28891 case DW_OP_GNU_parameter_ref:
28892 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28893 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28894 break;
28895 case DW_OP_regval_type:
28896 case DW_OP_deref_type:
28897 case DW_OP_GNU_regval_type:
28898 case DW_OP_GNU_deref_type:
28899 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28900 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28901 break;
28902 case DW_OP_entry_value:
28903 case DW_OP_GNU_entry_value:
28904 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28905 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28906 break;
28907 default:
28908 break;
28909 }
28910 }
28911
28912 /* Given DIE that we're marking as used, find any other dies
28913 it references as attributes and mark them as used. */
28914
28915 static void
28916 prune_unused_types_walk_attribs (dw_die_ref die)
28917 {
28918 dw_attr_node *a;
28919 unsigned ix;
28920
28921 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28922 {
28923 switch (AT_class (a))
28924 {
28925 /* Make sure DWARF procedures referenced by location descriptions will
28926 get emitted. */
28927 case dw_val_class_loc:
28928 prune_unused_types_walk_loc_descr (AT_loc (a));
28929 break;
28930 case dw_val_class_loc_list:
28931 for (dw_loc_list_ref list = AT_loc_list (a);
28932 list != NULL;
28933 list = list->dw_loc_next)
28934 prune_unused_types_walk_loc_descr (list->expr);
28935 break;
28936
28937 case dw_val_class_view_list:
28938 /* This points to a loc_list in another attribute, so it's
28939 already covered. */
28940 break;
28941
28942 case dw_val_class_die_ref:
28943 /* A reference to another DIE.
28944 Make sure that it will get emitted.
28945 If it was broken out into a comdat group, don't follow it. */
28946 if (! AT_ref (a)->comdat_type_p
28947 || a->dw_attr == DW_AT_specification)
28948 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
28949 break;
28950
28951 case dw_val_class_str:
28952 /* Set the string's refcount to 0 so that prune_unused_types_mark
28953 accounts properly for it. */
28954 a->dw_attr_val.v.val_str->refcount = 0;
28955 break;
28956
28957 default:
28958 break;
28959 }
28960 }
28961 }
28962
28963 /* Mark the children DIEs of DIE that describe its generic parameters and arguments.  */
28964
28965 static void
28966 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
28967 {
28968 dw_die_ref c;
28969
28970 if (die == NULL || die->die_child == NULL)
28971 return;
28972 c = die->die_child;
28973 do
28974 {
28975 if (is_template_parameter (c))
28976 prune_unused_types_mark (c, 1);
28977 c = c->die_sib;
28978 } while (c && c != die->die_child);
28979 }
28980
28981 /* Mark DIE as being used. If DOKIDS is true, then walk down
28982 to DIE's children. */
28983
28984 static void
28985 prune_unused_types_mark (dw_die_ref die, int dokids)
28986 {
28987 dw_die_ref c;
28988
28989 if (die->die_mark == 0)
28990 {
28991 /* We haven't done this node yet. Mark it as used. */
28992 die->die_mark = 1;
28993 /* If this is the DIE of a generic type instantiation,
28994 mark the children DIEs that describe its generic parms and
28995 args. */
28996 prune_unused_types_mark_generic_parms_dies (die);
28997
28998 /* We also have to mark its parents as used.
28999 (But we don't want to mark our parent's kids due to this,
29000 unless it is a class.) */
29001 if (die->die_parent)
29002 prune_unused_types_mark (die->die_parent,
29003 class_scope_p (die->die_parent));
29004
29005 /* Mark any referenced nodes. */
29006 prune_unused_types_walk_attribs (die);
29007
29008 /* If this node is a specification,
29009 also mark the definition, if it exists. */
29010 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29011 prune_unused_types_mark (die->die_definition, 1);
29012 }
29013
29014 if (dokids && die->die_mark != 2)
29015 {
29016 /* We need to walk the children, but haven't done so yet.
29017 Remember that we've walked the kids. */
29018 die->die_mark = 2;
29019
29020 /* If this is an array type, we need to make sure our
29021 kids get marked, even if they're types. If we're
29022 breaking out types into comdat sections, do this
29023 for all type definitions. */
29024 if (die->die_tag == DW_TAG_array_type
29025 || (use_debug_types
29026 && is_type_die (die) && ! is_declaration_die (die)))
29027 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29028 else
29029 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29030 }
29031 }
29032
29033 /* For local classes, check whether any static member functions were
29034    emitted and, if so, mark them.  */
29035
29036 static void
29037 prune_unused_types_walk_local_classes (dw_die_ref die)
29038 {
29039 dw_die_ref c;
29040
29041 if (die->die_mark == 2)
29042 return;
29043
29044 switch (die->die_tag)
29045 {
29046 case DW_TAG_structure_type:
29047 case DW_TAG_union_type:
29048 case DW_TAG_class_type:
29049 break;
29050
29051 case DW_TAG_subprogram:
29052 if (!get_AT_flag (die, DW_AT_declaration)
29053 || die->die_definition != NULL)
29054 prune_unused_types_mark (die, 1);
29055 return;
29056
29057 default:
29058 return;
29059 }
29060
29061 /* Mark children. */
29062 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29063 }
29064
29065 /* Walk the tree DIE and mark types that we actually use. */
29066
29067 static void
29068 prune_unused_types_walk (dw_die_ref die)
29069 {
29070 dw_die_ref c;
29071
29072 /* Don't do anything if this node is already marked and
29073 children have been marked as well. */
29074 if (die->die_mark == 2)
29075 return;
29076
29077 switch (die->die_tag)
29078 {
29079 case DW_TAG_structure_type:
29080 case DW_TAG_union_type:
29081 case DW_TAG_class_type:
29082 if (die->die_perennial_p)
29083 break;
29084
29085 for (c = die->die_parent; c; c = c->die_parent)
29086 if (c->die_tag == DW_TAG_subprogram)
29087 break;
29088
29089 /* Finding used static member functions inside of classes
29090 is needed just for local classes, because for other classes
29091 static member function DIEs with DW_AT_specification
29092 are emitted outside of the DW_TAG_*_type. If we ever change
29093 it, we'd need to call this even for non-local classes. */
29094 if (c)
29095 prune_unused_types_walk_local_classes (die);
29096
29097 /* It's a type node --- don't mark it. */
29098 return;
29099
29100 case DW_TAG_const_type:
29101 case DW_TAG_packed_type:
29102 case DW_TAG_pointer_type:
29103 case DW_TAG_reference_type:
29104 case DW_TAG_rvalue_reference_type:
29105 case DW_TAG_volatile_type:
29106 case DW_TAG_typedef:
29107 case DW_TAG_array_type:
29108 case DW_TAG_interface_type:
29109 case DW_TAG_friend:
29110 case DW_TAG_enumeration_type:
29111 case DW_TAG_subroutine_type:
29112 case DW_TAG_string_type:
29113 case DW_TAG_set_type:
29114 case DW_TAG_subrange_type:
29115 case DW_TAG_ptr_to_member_type:
29116 case DW_TAG_file_type:
29117 /* Type nodes are useful only when other DIEs reference them --- don't
29118 mark them. */
29119 /* FALLTHROUGH */
29120
29121 case DW_TAG_dwarf_procedure:
29122 /* Likewise for DWARF procedures. */
29123
29124 if (die->die_perennial_p)
29125 break;
29126
29127 return;
29128
29129 default:
29130 /* Mark everything else. */
29131 break;
29132 }
29133
29134 if (die->die_mark == 0)
29135 {
29136 die->die_mark = 1;
29137
29138 /* Now, mark any dies referenced from here. */
29139 prune_unused_types_walk_attribs (die);
29140 }
29141
29142 die->die_mark = 2;
29143
29144 /* Mark children. */
29145 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29146 }
29147
29148 /* Increment the string counts on strings referred to from DIE's
29149 attributes. */
29150
29151 static void
29152 prune_unused_types_update_strings (dw_die_ref die)
29153 {
29154 dw_attr_node *a;
29155 unsigned ix;
29156
29157 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29158 if (AT_class (a) == dw_val_class_str)
29159 {
29160 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29161 s->refcount++;
29162 /* Avoid unnecessarily putting strings that are used less than
29163 twice into the hash table.  */
29164 if (s->refcount
29165 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29166 {
29167 indirect_string_node **slot
29168 = debug_str_hash->find_slot_with_hash (s->str,
29169 htab_hash_string (s->str),
29170 INSERT);
29171 gcc_assert (*slot == NULL);
29172 *slot = s;
29173 }
29174 }
29175 }
29176
29177 /* Mark DIE and its children as removed. */
29178
29179 static void
29180 mark_removed (dw_die_ref die)
29181 {
29182 dw_die_ref c;
29183 die->removed = true;
29184 FOR_EACH_CHILD (die, c, mark_removed (c));
29185 }
29186
29187 /* Remove from the tree DIE any dies that aren't marked. */
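/* (Editorial note: in this file a parent's die_child points to its
   *last* child, and the children form a circular singly-linked list
   through die_sib, which is why the list surgery below walks around the
   ring starting from die->die_child and may reset die->die_child
   itself.)  */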
29188
29189 static void
29190 prune_unused_types_prune (dw_die_ref die)
29191 {
29192 dw_die_ref c;
29193
29194 gcc_assert (die->die_mark);
29195 prune_unused_types_update_strings (die);
29196
29197 if (! die->die_child)
29198 return;
29199
29200 c = die->die_child;
29201 do {
29202 dw_die_ref prev = c, next;
29203 for (c = c->die_sib; ! c->die_mark; c = next)
29204 if (c == die->die_child)
29205 {
29206 /* No marked children between 'prev' and the end of the list. */
29207 if (prev == c)
29208 /* No marked children at all. */
29209 die->die_child = NULL;
29210 else
29211 {
29212 prev->die_sib = c->die_sib;
29213 die->die_child = prev;
29214 }
29215 c->die_sib = NULL;
29216 mark_removed (c);
29217 return;
29218 }
29219 else
29220 {
29221 next = c->die_sib;
29222 c->die_sib = NULL;
29223 mark_removed (c);
29224 }
29225
29226 if (c != prev->die_sib)
29227 prev->die_sib = c;
29228 prune_unused_types_prune (c);
29229 } while (c != die->die_child);
29230 }
29231
29232 /* Remove dies representing declarations that we never use. */
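/* (Editorial overview of the pass below: premark types used by global
   variables; walk the compilation unit, limbo and comdat trees setting
   marks; additionally mark pubnames, marked base types and potential
   DW_AT_call_origin targets; then prune every unmarked DIE and finally
   clear all marks again.)  */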
29233
29234 static void
29235 prune_unused_types (void)
29236 {
29237 unsigned int i;
29238 limbo_die_node *node;
29239 comdat_type_node *ctnode;
29240 pubname_entry *pub;
29241 dw_die_ref base_type;
29242
29243 #if ENABLE_ASSERT_CHECKING
29244 /* All the marks should already be clear. */
29245 verify_marks_clear (comp_unit_die ());
29246 for (node = limbo_die_list; node; node = node->next)
29247 verify_marks_clear (node->die);
29248 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29249 verify_marks_clear (ctnode->root_die);
29250 #endif /* ENABLE_ASSERT_CHECKING */
29251
29252 /* Mark types that are used in global variables. */
29253 premark_types_used_by_global_vars ();
29254
29255 /* Set the mark on nodes that are actually used. */
29256 prune_unused_types_walk (comp_unit_die ());
29257 for (node = limbo_die_list; node; node = node->next)
29258 prune_unused_types_walk (node->die);
29259 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29260 {
29261 prune_unused_types_walk (ctnode->root_die);
29262 prune_unused_types_mark (ctnode->type_die, 1);
29263 }
29264
29265 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29266 are unusual in that they are pubnames that are the children of pubtypes.
29267 They should only be marked via their parent DW_TAG_enumeration_type die,
29268 not as roots in themselves. */
29269 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29270 if (pub->die->die_tag != DW_TAG_enumerator)
29271 prune_unused_types_mark (pub->die, 1);
29272 for (i = 0; base_types.iterate (i, &base_type); i++)
29273 prune_unused_types_mark (base_type, 1);
29274
29275 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29276 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29277 callees). */
29278 cgraph_node *cnode;
29279 FOR_EACH_FUNCTION (cnode)
29280 if (cnode->referred_to_p (false))
29281 {
29282 dw_die_ref die = lookup_decl_die (cnode->decl);
29283 if (die == NULL || die->die_mark)
29284 continue;
29285 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29286 if (e->caller != cnode
29287 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29288 {
29289 prune_unused_types_mark (die, 1);
29290 break;
29291 }
29292 }
29293
29294 if (debug_str_hash)
29295 debug_str_hash->empty ();
29296 if (skeleton_debug_str_hash)
29297 skeleton_debug_str_hash->empty ();
29298 prune_unused_types_prune (comp_unit_die ());
29299 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29300 {
29301 node = *pnode;
29302 if (!node->die->die_mark)
29303 *pnode = node->next;
29304 else
29305 {
29306 prune_unused_types_prune (node->die);
29307 pnode = &node->next;
29308 }
29309 }
29310 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29311 prune_unused_types_prune (ctnode->root_die);
29312
29313 /* Leave the marks clear. */
29314 prune_unmark_dies (comp_unit_die ());
29315 for (node = limbo_die_list; node; node = node->next)
29316 prune_unmark_dies (node->die);
29317 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29318 prune_unmark_dies (ctnode->root_die);
29319 }
29320
29321 /* Helpers to manipulate hash table of comdat type units. */
29322
29323 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29324 {
29325 static inline hashval_t hash (const comdat_type_node *);
29326 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29327 };
29328
29329 inline hashval_t
29330 comdat_type_hasher::hash (const comdat_type_node *type_node)
29331 {
29332 hashval_t h;
29333 memcpy (&h, type_node->signature, sizeof (h));
29334 return h;
29335 }
29336
29337 inline bool
29338 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29339 const comdat_type_node *type_node_2)
29340 {
29341 return (! memcmp (type_node_1->signature, type_node_2->signature,
29342 DWARF_TYPE_SIGNATURE_SIZE));
29343 }
29344
29345 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29346 to the location where it would have been added had we known its
29347 DECL_ASSEMBLER_NAME when we added the other attributes.  This will
29348 probably improve the compactness of the debug info by removing
29349 equivalent abbrevs, and hide any differences caused by deferring the
29350 computation of the assembler name, triggered by e.g. PCH.  */
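/* For instance (purely illustrative attribute ordering, not taken from
   any particular testcase): if the attribute vector currently ends with
     ... DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, ...,
     DW_AT_linkage_name
   the loop below walks backwards until it finds DW_AT_name,
   DW_AT_decl_line or DW_AT_decl_column and re-inserts the linkage name
   right after that attribute, so equivalent DIEs end up with identical
   attribute orderings and can share abbreviation entries.  */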
29351
29352 static inline void
29353 move_linkage_attr (dw_die_ref die)
29354 {
29355 unsigned ix = vec_safe_length (die->die_attr);
29356 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29357
29358 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29359 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29360
29361 while (--ix > 0)
29362 {
29363 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29364
29365 if (prev->dw_attr == DW_AT_decl_line
29366 || prev->dw_attr == DW_AT_decl_column
29367 || prev->dw_attr == DW_AT_name)
29368 break;
29369 }
29370
29371 if (ix != vec_safe_length (die->die_attr) - 1)
29372 {
29373 die->die_attr->pop ();
29374 die->die_attr->quick_insert (ix, linkage);
29375 }
29376 }
29377
29378 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29379 referenced from typed stack ops and count how often they are used. */
29380
29381 static void
29382 mark_base_types (dw_loc_descr_ref loc)
29383 {
29384 dw_die_ref base_type = NULL;
29385
29386 for (; loc; loc = loc->dw_loc_next)
29387 {
29388 switch (loc->dw_loc_opc)
29389 {
29390 case DW_OP_regval_type:
29391 case DW_OP_deref_type:
29392 case DW_OP_GNU_regval_type:
29393 case DW_OP_GNU_deref_type:
29394 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29395 break;
29396 case DW_OP_convert:
29397 case DW_OP_reinterpret:
29398 case DW_OP_GNU_convert:
29399 case DW_OP_GNU_reinterpret:
29400 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29401 continue;
29402 /* FALLTHRU */
29403 case DW_OP_const_type:
29404 case DW_OP_GNU_const_type:
29405 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29406 break;
29407 case DW_OP_entry_value:
29408 case DW_OP_GNU_entry_value:
29409 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29410 continue;
29411 default:
29412 continue;
29413 }
29414 gcc_assert (base_type->die_parent == comp_unit_die ());
29415 if (base_type->die_mark)
29416 base_type->die_mark++;
29417 else
29418 {
29419 base_types.safe_push (base_type);
29420 base_type->die_mark = 1;
29421 }
29422 }
29423 }
29424
29425 /* Comparison function for sorting marked base types. */
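/* (Editorial note: the primary sort key below is the usage count kept
   in die_mark, in decreasing order; ties are broken by byte size,
   encoding and alignment, so the resulting order depends only on the
   DIEs' contents and not on their addresses in memory.)  */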
29426
29427 static int
29428 base_type_cmp (const void *x, const void *y)
29429 {
29430 dw_die_ref dx = *(const dw_die_ref *) x;
29431 dw_die_ref dy = *(const dw_die_ref *) y;
29432 unsigned int byte_size1, byte_size2;
29433 unsigned int encoding1, encoding2;
29434 unsigned int align1, align2;
29435 if (dx->die_mark > dy->die_mark)
29436 return -1;
29437 if (dx->die_mark < dy->die_mark)
29438 return 1;
29439 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29440 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29441 if (byte_size1 < byte_size2)
29442 return 1;
29443 if (byte_size1 > byte_size2)
29444 return -1;
29445 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29446 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29447 if (encoding1 < encoding2)
29448 return 1;
29449 if (encoding1 > encoding2)
29450 return -1;
29451 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29452 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29453 if (align1 < align2)
29454 return 1;
29455 if (align1 > align2)
29456 return -1;
29457 return 0;
29458 }
29459
29460 /* Move base types marked by mark_base_types as early as possible
29461 in the CU, sorted by decreasing usage count both to make the
29462 uleb128 references as small as possible and to make sure they
29463 will have die_offset already computed by calc_die_sizes when
29464 sizes of typed stack loc ops are computed.  */
29465
29466 static void
29467 move_marked_base_types (void)
29468 {
29469 unsigned int i;
29470 dw_die_ref base_type, die, c;
29471
29472 if (base_types.is_empty ())
29473 return;
29474
29475 /* Sort by decreasing usage count, they will be added again in that
29476 order later on. */
29477 base_types.qsort (base_type_cmp);
29478 die = comp_unit_die ();
29479 c = die->die_child;
29480 do
29481 {
29482 dw_die_ref prev = c;
29483 c = c->die_sib;
29484 while (c->die_mark)
29485 {
29486 remove_child_with_prev (c, prev);
29487 /* As base types got marked, there must be at least
29488 one node other than DW_TAG_base_type. */
29489 gcc_assert (die->die_child != NULL);
29490 c = prev->die_sib;
29491 }
29492 }
29493 while (c != die->die_child);
29494 gcc_assert (die->die_child);
29495 c = die->die_child;
29496 for (i = 0; base_types.iterate (i, &base_type); i++)
29497 {
29498 base_type->die_mark = 0;
29499 base_type->die_sib = c->die_sib;
29500 c->die_sib = base_type;
29501 c = base_type;
29502 }
29503 }
29504
29505 /* Helper function for resolve_addr.  Attempt to resolve
29506 one CONST_STRING and return true if successful.  Similarly, verify that
29507 SYMBOL_REFs refer to variables emitted in the current CU.  */
29508
29509 static bool
29510 resolve_one_addr (rtx *addr)
29511 {
29512 rtx rtl = *addr;
29513
29514 if (GET_CODE (rtl) == CONST_STRING)
29515 {
29516 size_t len = strlen (XSTR (rtl, 0)) + 1;
29517 tree t = build_string (len, XSTR (rtl, 0));
29518 tree tlen = size_int (len - 1);
29519 TREE_TYPE (t)
29520 = build_array_type (char_type_node, build_index_type (tlen));
29521 rtl = lookup_constant_def (t);
29522 if (!rtl || !MEM_P (rtl))
29523 return false;
29524 rtl = XEXP (rtl, 0);
29525 if (GET_CODE (rtl) == SYMBOL_REF
29526 && SYMBOL_REF_DECL (rtl)
29527 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29528 return false;
29529 vec_safe_push (used_rtx_array, rtl);
29530 *addr = rtl;
29531 return true;
29532 }
29533
29534 if (GET_CODE (rtl) == SYMBOL_REF
29535 && SYMBOL_REF_DECL (rtl))
29536 {
29537 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29538 {
29539 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29540 return false;
29541 }
29542 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29543 return false;
29544 }
29545
29546 if (GET_CODE (rtl) == CONST)
29547 {
29548 subrtx_ptr_iterator::array_type array;
29549 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29550 if (!resolve_one_addr (*iter))
29551 return false;
29552 }
29553
29554 return true;
29555 }
29556
29557 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29558 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29559 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
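/* (Editorial note: the DIE created below is a DW_TAG_dwarf_procedure
   whose DW_AT_location is a single DW_OP_implicit_value holding the raw
   bytes of the string, so a later DW_OP_implicit_pointer can refer to
   the string's contents through that DIE.)  */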
29560
29561 static rtx
29562 string_cst_pool_decl (tree t)
29563 {
29564 rtx rtl = output_constant_def (t, 1);
29565 unsigned char *array;
29566 dw_loc_descr_ref l;
29567 tree decl;
29568 size_t len;
29569 dw_die_ref ref;
29570
29571 if (!rtl || !MEM_P (rtl))
29572 return NULL_RTX;
29573 rtl = XEXP (rtl, 0);
29574 if (GET_CODE (rtl) != SYMBOL_REF
29575 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29576 return NULL_RTX;
29577
29578 decl = SYMBOL_REF_DECL (rtl);
29579 if (!lookup_decl_die (decl))
29580 {
29581 len = TREE_STRING_LENGTH (t);
29582 vec_safe_push (used_rtx_array, rtl);
29583 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29584 array = ggc_vec_alloc<unsigned char> (len);
29585 memcpy (array, TREE_STRING_POINTER (t), len);
29586 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29587 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29588 l->dw_loc_oprnd2.v.val_vec.length = len;
29589 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29590 l->dw_loc_oprnd2.v.val_vec.array = array;
29591 add_AT_loc (ref, DW_AT_location, l);
29592 equate_decl_number_to_die (decl, ref);
29593 }
29594 return rtl;
29595 }
29596
29597 /* Helper function of resolve_addr_in_expr. LOC is
29598 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29599 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29600 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29601 with DW_OP_implicit_pointer if possible and return true;
29602 if unsuccessful, return false.  */
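/* A rough illustration (hypothetical source, not from the testsuite):
     static int x = 42;          // never emitted to the object file
     static int *p = &x;
   p's value might be described as DW_OP_addr <x> DW_OP_stack_value,
   which cannot be resolved once x has no address; provided x's DIE has
   DW_AT_location or DW_AT_const_value, the code below rewrites the
   expression as DW_OP_implicit_pointer <DIE of x> <offset>.  */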
29603
29604 static bool
29605 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29606 {
29607 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29608 HOST_WIDE_INT offset = 0;
29609 dw_die_ref ref = NULL;
29610 tree decl;
29611
29612 if (GET_CODE (rtl) == CONST
29613 && GET_CODE (XEXP (rtl, 0)) == PLUS
29614 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29615 {
29616 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29617 rtl = XEXP (XEXP (rtl, 0), 0);
29618 }
29619 if (GET_CODE (rtl) == CONST_STRING)
29620 {
29621 size_t len = strlen (XSTR (rtl, 0)) + 1;
29622 tree t = build_string (len, XSTR (rtl, 0));
29623 tree tlen = size_int (len - 1);
29624
29625 TREE_TYPE (t)
29626 = build_array_type (char_type_node, build_index_type (tlen));
29627 rtl = string_cst_pool_decl (t);
29628 if (!rtl)
29629 return false;
29630 }
29631 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29632 {
29633 decl = SYMBOL_REF_DECL (rtl);
29634 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29635 {
29636 ref = lookup_decl_die (decl);
29637 if (ref && (get_AT (ref, DW_AT_location)
29638 || get_AT (ref, DW_AT_const_value)))
29639 {
29640 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29641 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29642 loc->dw_loc_oprnd1.val_entry = NULL;
29643 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29644 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29645 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29646 loc->dw_loc_oprnd2.v.val_int = offset;
29647 return true;
29648 }
29649 }
29650 }
29651 return false;
29652 }
29653
29654 /* Helper function for resolve_addr.  Handle one location
29655 expression and return false if at least one CONST_STRING or SYMBOL_REF
29656 in the location list couldn't be resolved.  */
29657
29658 static bool
29659 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29660 {
29661 dw_loc_descr_ref keep = NULL;
29662 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29663 switch (loc->dw_loc_opc)
29664 {
29665 case DW_OP_addr:
29666 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29667 {
29668 if ((prev == NULL
29669 || prev->dw_loc_opc == DW_OP_piece
29670 || prev->dw_loc_opc == DW_OP_bit_piece)
29671 && loc->dw_loc_next
29672 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29673 && (!dwarf_strict || dwarf_version >= 5)
29674 && optimize_one_addr_into_implicit_ptr (loc))
29675 break;
29676 return false;
29677 }
29678 break;
29679 case DW_OP_GNU_addr_index:
29680 case DW_OP_GNU_const_index:
29681 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
29682 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
29683 {
29684 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29685 if (!resolve_one_addr (&rtl))
29686 return false;
29687 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29688 loc->dw_loc_oprnd1.val_entry
29689 = add_addr_table_entry (rtl, ate_kind_rtx);
29690 }
29691 break;
29692 case DW_OP_const4u:
29693 case DW_OP_const8u:
29694 if (loc->dtprel
29695 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29696 return false;
29697 break;
29698 case DW_OP_plus_uconst:
29699 if (size_of_loc_descr (loc)
29700 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29701 + 1
29702 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29703 {
29704 dw_loc_descr_ref repl
29705 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29706 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29707 add_loc_descr (&repl, loc->dw_loc_next);
29708 *loc = *repl;
29709 }
29710 break;
29711 case DW_OP_implicit_value:
29712 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29713 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29714 return false;
29715 break;
29716 case DW_OP_implicit_pointer:
29717 case DW_OP_GNU_implicit_pointer:
29718 case DW_OP_GNU_parameter_ref:
29719 case DW_OP_GNU_variable_value:
29720 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29721 {
29722 dw_die_ref ref
29723 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29724 if (ref == NULL)
29725 return false;
29726 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29727 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29728 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29729 }
29730 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29731 {
29732 if (prev == NULL
29733 && loc->dw_loc_next == NULL
29734 && AT_class (a) == dw_val_class_loc)
29735 switch (a->dw_attr)
29736 {
29737 /* The following attributes allow both exprloc and reference
29738 classes, so if the whole expression is DW_OP_GNU_variable_value
29739 alone we can transform it into a reference. */
29740 case DW_AT_byte_size:
29741 case DW_AT_bit_size:
29742 case DW_AT_lower_bound:
29743 case DW_AT_upper_bound:
29744 case DW_AT_bit_stride:
29745 case DW_AT_count:
29746 case DW_AT_allocated:
29747 case DW_AT_associated:
29748 case DW_AT_byte_stride:
29749 a->dw_attr_val.val_class = dw_val_class_die_ref;
29750 a->dw_attr_val.val_entry = NULL;
29751 a->dw_attr_val.v.val_die_ref.die
29752 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29753 a->dw_attr_val.v.val_die_ref.external = 0;
29754 return true;
29755 default:
29756 break;
29757 }
29758 if (dwarf_strict)
29759 return false;
29760 }
29761 break;
29762 case DW_OP_const_type:
29763 case DW_OP_regval_type:
29764 case DW_OP_deref_type:
29765 case DW_OP_convert:
29766 case DW_OP_reinterpret:
29767 case DW_OP_GNU_const_type:
29768 case DW_OP_GNU_regval_type:
29769 case DW_OP_GNU_deref_type:
29770 case DW_OP_GNU_convert:
29771 case DW_OP_GNU_reinterpret:
29772 while (loc->dw_loc_next
29773 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29774 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29775 {
29776 dw_die_ref base1, base2;
29777 unsigned enc1, enc2, size1, size2;
29778 if (loc->dw_loc_opc == DW_OP_regval_type
29779 || loc->dw_loc_opc == DW_OP_deref_type
29780 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29781 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29782 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29783 else if (loc->dw_loc_oprnd1.val_class
29784 == dw_val_class_unsigned_const)
29785 break;
29786 else
29787 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29788 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29789 == dw_val_class_unsigned_const)
29790 break;
29791 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29792 gcc_assert (base1->die_tag == DW_TAG_base_type
29793 && base2->die_tag == DW_TAG_base_type);
29794 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29795 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29796 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29797 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29798 if (size1 == size2
29799 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29800 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29801 && loc != keep)
29802 || enc1 == enc2))
29803 {
29804 /* Optimize away next DW_OP_convert after
29805 adjusting LOC's base type die reference. */
29806 if (loc->dw_loc_opc == DW_OP_regval_type
29807 || loc->dw_loc_opc == DW_OP_deref_type
29808 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29809 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29810 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29811 else
29812 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29813 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29814 continue;
29815 }
29816 /* Don't change integer DW_OP_convert after e.g. floating
29817 point typed stack entry. */
29818 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29819 keep = loc->dw_loc_next;
29820 break;
29821 }
29822 break;
29823 default:
29824 break;
29825 }
29826 return true;
29827 }
29828
29829 /* Helper function of resolve_addr.  DIE had a DW_AT_location of
29830 DW_OP_addr alone, whose operand referred to DECL, and that
29831 DW_OP_addr couldn't be resolved.  resolve_addr has already
29832 removed the DW_AT_location attribute.  This function attempts to
29833 add to DIE a new DW_AT_location attribute with DW_OP_implicit_pointer,
29834 or a DW_AT_const_value attribute, if possible.  */
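/* As a rough example (hypothetical names): for
     static const char msg[] = "hi";
     static const char *q = &msg[0];
   where q itself was optimized away, q's DW_OP_addr location cannot be
   resolved; tree_add_const_value_attribute may still describe q via
   DW_AT_const_value, and failing that the ADDR_EXPR in q's initializer
   lets us emit DW_OP_implicit_pointer into msg's DIE (or into a
   DW_TAG_dwarf_procedure created for a string constant).  */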
29835
29836 static void
29837 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29838 {
29839 if (!VAR_P (decl)
29840 || lookup_decl_die (decl) != die
29841 || DECL_EXTERNAL (decl)
29842 || !TREE_STATIC (decl)
29843 || DECL_INITIAL (decl) == NULL_TREE
29844 || DECL_P (DECL_INITIAL (decl))
29845 || get_AT (die, DW_AT_const_value))
29846 return;
29847
29848 tree init = DECL_INITIAL (decl);
29849 HOST_WIDE_INT offset = 0;
29850 /* For variables that have been optimized away and thus
29851 don't have a memory location, see if we can emit
29852 DW_AT_const_value instead. */
29853 if (tree_add_const_value_attribute (die, init))
29854 return;
29855 if (dwarf_strict && dwarf_version < 5)
29856 return;
29857 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29858 and ADDR_EXPR refers to a decl that has DW_AT_location or
29859 DW_AT_const_value (but isn't addressable, otherwise
29860 resolving the original DW_OP_addr wouldn't fail), see if
29861 we can add DW_OP_implicit_pointer. */
29862 STRIP_NOPS (init);
29863 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29864 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29865 {
29866 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29867 init = TREE_OPERAND (init, 0);
29868 STRIP_NOPS (init);
29869 }
29870 if (TREE_CODE (init) != ADDR_EXPR)
29871 return;
29872 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29873 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29874 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29875 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29876 && TREE_OPERAND (init, 0) != decl))
29877 {
29878 dw_die_ref ref;
29879 dw_loc_descr_ref l;
29880
29881 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29882 {
29883 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29884 if (!rtl)
29885 return;
29886 decl = SYMBOL_REF_DECL (rtl);
29887 }
29888 else
29889 decl = TREE_OPERAND (init, 0);
29890 ref = lookup_decl_die (decl);
29891 if (ref == NULL
29892 || (!get_AT (ref, DW_AT_location)
29893 && !get_AT (ref, DW_AT_const_value)))
29894 return;
29895 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29896 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29897 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29898 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29899 add_AT_loc (die, DW_AT_location, l);
29900 }
29901 }
29902
29903 /* Return NULL if L is a valid DWARF expression; otherwise return the
29904 first op that is not a valid DWARF expression op.  */
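/* (Editorial gloss: "not a valid DWARF expression op" here means a
   location-description-only operator such as DW_OP_reg<n>, DW_OP_regx,
   DW_OP_stack_value, DW_OP_implicit_value or DW_OP_*piece; e.g. for
   DW_OP_fbreg <-8>, DW_OP_stack_value this returns the
   DW_OP_stack_value descriptor.)  */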
29905
29906 static dw_loc_descr_ref
29907 non_dwarf_expression (dw_loc_descr_ref l)
29908 {
29909 while (l)
29910 {
29911 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29912 return l;
29913 switch (l->dw_loc_opc)
29914 {
29915 case DW_OP_regx:
29916 case DW_OP_implicit_value:
29917 case DW_OP_stack_value:
29918 case DW_OP_implicit_pointer:
29919 case DW_OP_GNU_implicit_pointer:
29920 case DW_OP_GNU_parameter_ref:
29921 case DW_OP_piece:
29922 case DW_OP_bit_piece:
29923 return l;
29924 default:
29925 break;
29926 }
29927 l = l->dw_loc_next;
29928 }
29929 return NULL;
29930 }
29931
29932 /* Return adjusted copy of EXPR:
29933 If it is empty DWARF expression, return it.
29934 If it is valid non-empty DWARF expression,
29935 return copy of EXPR with DW_OP_deref appended to it.
29936 If it is DWARF expression followed by DW_OP_reg{N,x}, return
29937 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
29938 If it is DWARF expression followed by DW_OP_stack_value, return
29939 copy of the DWARF expression without anything appended.
29940 Otherwise, return NULL. */
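/* A few illustrative cases (editorial, not exhaustive):
     DW_OP_fbreg <-16>                -> DW_OP_fbreg <-16>, DW_OP_deref
     DW_OP_reg3                       -> DW_OP_breg3 <0>
     DW_OP_lit5, DW_OP_stack_value    -> DW_OP_lit5
   i.e. "dereferencing" a memory location appends a real DW_OP_deref,
   a register location becomes a read of that register, and a computed
   value is used as-is.  */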
29941
29942 static dw_loc_descr_ref
29943 copy_deref_exprloc (dw_loc_descr_ref expr)
29944 {
29945 dw_loc_descr_ref tail = NULL;
29946
29947 if (expr == NULL)
29948 return NULL;
29949
29950 dw_loc_descr_ref l = non_dwarf_expression (expr);
29951 if (l && l->dw_loc_next)
29952 return NULL;
29953
29954 if (l)
29955 {
29956 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29957 tail = new_loc_descr ((enum dwarf_location_atom)
29958 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
29959 0, 0);
29960 else
29961 switch (l->dw_loc_opc)
29962 {
29963 case DW_OP_regx:
29964 tail = new_loc_descr (DW_OP_bregx,
29965 l->dw_loc_oprnd1.v.val_unsigned, 0);
29966 break;
29967 case DW_OP_stack_value:
29968 break;
29969 default:
29970 return NULL;
29971 }
29972 }
29973 else
29974 tail = new_loc_descr (DW_OP_deref, 0, 0);
29975
29976 dw_loc_descr_ref ret = NULL, *p = &ret;
29977 while (expr != l)
29978 {
29979 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
29980 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
29981 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
29982 p = &(*p)->dw_loc_next;
29983 expr = expr->dw_loc_next;
29984 }
29985 *p = tail;
29986 return ret;
29987 }
29988
29989 /* For DW_AT_string_length attribute with DW_OP_GNU_variable_value
29990 reference to a variable or argument, adjust it if needed and return:
29991 -1 if the DW_AT_string_length attribute and DW_AT_{string_length_,}byte_size
29992 attribute if present should be removed
29993 0 keep the attribute perhaps with minor modifications, no need to rescan
29994 1 if the attribute has been successfully adjusted. */
29995
29996 static int
29997 optimize_string_length (dw_attr_node *a)
29998 {
29999 dw_loc_descr_ref l = AT_loc (a), lv;
30000 dw_die_ref die;
30001 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30002 {
30003 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30004 die = lookup_decl_die (decl);
30005 if (die)
30006 {
30007 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30008 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30009 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30010 }
30011 else
30012 return -1;
30013 }
30014 else
30015 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30016
30017 /* DWARF5 allows reference class, so we can then reference the DIE.
30018 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30019 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30020 {
30021 a->dw_attr_val.val_class = dw_val_class_die_ref;
30022 a->dw_attr_val.val_entry = NULL;
30023 a->dw_attr_val.v.val_die_ref.die = die;
30024 a->dw_attr_val.v.val_die_ref.external = 0;
30025 return 0;
30026 }
30027
30028 dw_attr_node *av = get_AT (die, DW_AT_location);
30029 dw_loc_list_ref d;
30030 bool non_dwarf_expr = false;
30031
30032 if (av == NULL)
30033 return dwarf_strict ? -1 : 0;
30034 switch (AT_class (av))
30035 {
30036 case dw_val_class_loc_list:
30037 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30038 if (d->expr && non_dwarf_expression (d->expr))
30039 non_dwarf_expr = true;
30040 break;
30041 case dw_val_class_view_list:
30042 gcc_unreachable ();
30043 case dw_val_class_loc:
30044 lv = AT_loc (av);
30045 if (lv == NULL)
30046 return dwarf_strict ? -1 : 0;
30047 if (non_dwarf_expression (lv))
30048 non_dwarf_expr = true;
30049 break;
30050 default:
30051 return dwarf_strict ? -1 : 0;
30052 }
30053
30054 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30055 into DW_OP_call4 or DW_OP_GNU_variable_value into
30056 DW_OP_call4 DW_OP_deref, do so. */
30057 if (!non_dwarf_expr
30058 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30059 {
30060 l->dw_loc_opc = DW_OP_call4;
30061 if (l->dw_loc_next)
30062 l->dw_loc_next = NULL;
30063 else
30064 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30065 return 0;
30066 }
30067
30068 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30069 copy over the DW_AT_location attribute from die to a. */
30070 if (l->dw_loc_next != NULL)
30071 {
30072 a->dw_attr_val = av->dw_attr_val;
30073 return 1;
30074 }
30075
30076 dw_loc_list_ref list, *p;
30077 switch (AT_class (av))
30078 {
30079 case dw_val_class_loc_list:
30080 p = &list;
30081 list = NULL;
30082 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30083 {
30084 lv = copy_deref_exprloc (d->expr);
30085 if (lv)
30086 {
30087 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30088 p = &(*p)->dw_loc_next;
30089 }
30090 else if (!dwarf_strict && d->expr)
30091 return 0;
30092 }
30093 if (list == NULL)
30094 return dwarf_strict ? -1 : 0;
30095 a->dw_attr_val.val_class = dw_val_class_loc_list;
30096 gen_llsym (list);
30097 *AT_loc_list_ptr (a) = list;
30098 return 1;
30099 case dw_val_class_loc:
30100 lv = copy_deref_exprloc (AT_loc (av));
30101 if (lv == NULL)
30102 return dwarf_strict ? -1 : 0;
30103 a->dw_attr_val.v.val_loc = lv;
30104 return 1;
30105 default:
30106 gcc_unreachable ();
30107 }
30108 }
30109
30110 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30111 an address in .rodata section if the string literal is emitted there,
30112 or remove the containing location list or replace DW_AT_const_value
30113 with DW_AT_location and empty location expression, if it isn't found
30114 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30115 to something that has been emitted in the current CU. */
30116
30117 static void
30118 resolve_addr (dw_die_ref die)
30119 {
30120 dw_die_ref c;
30121 dw_attr_node *a;
30122 dw_loc_list_ref *curr, *start, loc;
30123 unsigned ix;
30124 bool remove_AT_byte_size = false;
30125
30126 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30127 switch (AT_class (a))
30128 {
30129 case dw_val_class_loc_list:
30130 start = curr = AT_loc_list_ptr (a);
30131 loc = *curr;
30132 gcc_assert (loc);
30133 /* The same list can be referenced more than once. See if we have
30134 already recorded the result from a previous pass. */
30135 if (loc->replaced)
30136 *curr = loc->dw_loc_next;
30137 else if (!loc->resolved_addr)
30138 {
30139 /* As things stand, we do not expect or allow one die to
30140 reference a suffix of another die's location list chain.
30141 References must be identical or completely separate.
30142 There is therefore no need to cache the result of this
30143 pass on any list other than the first; doing so
30144 would lead to unnecessary writes. */
30145 while (*curr)
30146 {
30147 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30148 if (!resolve_addr_in_expr (a, (*curr)->expr))
30149 {
30150 dw_loc_list_ref next = (*curr)->dw_loc_next;
30151 dw_loc_descr_ref l = (*curr)->expr;
30152
30153 if (next && (*curr)->ll_symbol)
30154 {
30155 gcc_assert (!next->ll_symbol);
30156 next->ll_symbol = (*curr)->ll_symbol;
30157 next->vl_symbol = (*curr)->vl_symbol;
30158 }
30159 if (dwarf_split_debug_info)
30160 remove_loc_list_addr_table_entries (l);
30161 *curr = next;
30162 }
30163 else
30164 {
30165 mark_base_types ((*curr)->expr);
30166 curr = &(*curr)->dw_loc_next;
30167 }
30168 }
30169 if (loc == *start)
30170 loc->resolved_addr = 1;
30171 else
30172 {
30173 loc->replaced = 1;
30174 loc->dw_loc_next = *start;
30175 }
30176 }
30177 if (!*start)
30178 {
30179 remove_AT (die, a->dw_attr);
30180 ix--;
30181 }
30182 break;
30183 case dw_val_class_view_list:
30184 {
30185 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30186 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30187 dw_val_node *llnode
30188 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30189 /* If we no longer have a loclist, or it no longer needs
30190 views, drop this attribute. */
30191 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30192 {
30193 remove_AT (die, a->dw_attr);
30194 ix--;
30195 }
30196 break;
30197 }
30198 case dw_val_class_loc:
30199 {
30200 dw_loc_descr_ref l = AT_loc (a);
30201 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30202 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30203 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30204 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30205 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30206 with DW_FORM_ref referencing the same DIE as
30207 DW_OP_GNU_variable_value used to reference. */
30208 if (a->dw_attr == DW_AT_string_length
30209 && l
30210 && l->dw_loc_opc == DW_OP_GNU_variable_value
30211 && (l->dw_loc_next == NULL
30212 || (l->dw_loc_next->dw_loc_next == NULL
30213 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30214 {
30215 switch (optimize_string_length (a))
30216 {
30217 case -1:
30218 remove_AT (die, a->dw_attr);
30219 ix--;
30220 /* If we drop DW_AT_string_length, we need to drop also
30221 DW_AT_{string_length_,}byte_size. */
30222 remove_AT_byte_size = true;
30223 continue;
30224 default:
30225 break;
30226 case 1:
30227 /* Even if we keep the optimized DW_AT_string_length,
30228 it might have changed AT_class, so process it again. */
30229 ix--;
30230 continue;
30231 }
30232 }
30233 /* For -gdwarf-2 don't attempt to optimize
30234 DW_AT_data_member_location containing
30235 DW_OP_plus_uconst - older consumers might
30236 rely on it being that op instead of a more complex,
30237 but shorter, location description. */
30238 if ((dwarf_version > 2
30239 || a->dw_attr != DW_AT_data_member_location
30240 || l == NULL
30241 || l->dw_loc_opc != DW_OP_plus_uconst
30242 || l->dw_loc_next != NULL)
30243 && !resolve_addr_in_expr (a, l))
30244 {
30245 if (dwarf_split_debug_info)
30246 remove_loc_list_addr_table_entries (l);
30247 if (l != NULL
30248 && l->dw_loc_next == NULL
30249 && l->dw_loc_opc == DW_OP_addr
30250 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30251 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30252 && a->dw_attr == DW_AT_location)
30253 {
30254 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30255 remove_AT (die, a->dw_attr);
30256 ix--;
30257 optimize_location_into_implicit_ptr (die, decl);
30258 break;
30259 }
30260 if (a->dw_attr == DW_AT_string_length)
30261 /* If we drop DW_AT_string_length, we need to drop also
30262 DW_AT_{string_length_,}byte_size. */
30263 remove_AT_byte_size = true;
30264 remove_AT (die, a->dw_attr);
30265 ix--;
30266 }
30267 else
30268 mark_base_types (l);
30269 }
30270 break;
30271 case dw_val_class_addr:
30272 if (a->dw_attr == DW_AT_const_value
30273 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30274 {
30275 if (AT_index (a) != NOT_INDEXED)
30276 remove_addr_table_entry (a->dw_attr_val.val_entry);
30277 remove_AT (die, a->dw_attr);
30278 ix--;
30279 }
30280 if ((die->die_tag == DW_TAG_call_site
30281 && a->dw_attr == DW_AT_call_origin)
30282 || (die->die_tag == DW_TAG_GNU_call_site
30283 && a->dw_attr == DW_AT_abstract_origin))
30284 {
30285 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30286 dw_die_ref tdie = lookup_decl_die (tdecl);
30287 dw_die_ref cdie;
30288 if (tdie == NULL
30289 && DECL_EXTERNAL (tdecl)
30290 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30291 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30292 {
30293 dw_die_ref pdie = cdie;
30294 /* Make sure we don't add these DIEs into type units.
30295 We could emit skeleton DIEs for context (namespaces,
30296 outer structs/classes) and a skeleton DIE for the
30297 innermost context with DW_AT_signature pointing to the
30298 type unit. See PR78835. */
30299 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30300 pdie = pdie->die_parent;
30301 if (pdie == NULL)
30302 {
30303 /* Creating a full DIE for tdecl is overly expensive and
30304 at this point even wrong when in the LTO phase
30305 as it can end up generating new type DIEs we didn't
30306 output and thus optimize_external_refs will crash. */
30307 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30308 add_AT_flag (tdie, DW_AT_external, 1);
30309 add_AT_flag (tdie, DW_AT_declaration, 1);
30310 add_linkage_attr (tdie, tdecl);
30311 add_name_and_src_coords_attributes (tdie, tdecl, true);
30312 equate_decl_number_to_die (tdecl, tdie);
30313 }
30314 }
30315 if (tdie)
30316 {
30317 a->dw_attr_val.val_class = dw_val_class_die_ref;
30318 a->dw_attr_val.v.val_die_ref.die = tdie;
30319 a->dw_attr_val.v.val_die_ref.external = 0;
30320 }
30321 else
30322 {
30323 if (AT_index (a) != NOT_INDEXED)
30324 remove_addr_table_entry (a->dw_attr_val.val_entry);
30325 remove_AT (die, a->dw_attr);
30326 ix--;
30327 }
30328 }
30329 break;
30330 default:
30331 break;
30332 }
30333
30334 if (remove_AT_byte_size)
30335 remove_AT (die, dwarf_version >= 5
30336 ? DW_AT_string_length_byte_size
30337 : DW_AT_byte_size);
30338
30339 FOR_EACH_CHILD (die, c, resolve_addr (c));
30340 }
30341 \f
30342 /* Helper routines for optimize_location_lists.
30343 This pass tries to share identical location lists in the .debug_loc
30344 section.  */
30345
30346 /* Iteratively hash operands of LOC opcode into HSTATE. */
30347
30348 static void
30349 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30350 {
30351 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30352 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30353
30354 switch (loc->dw_loc_opc)
30355 {
30356 case DW_OP_const4u:
30357 case DW_OP_const8u:
30358 if (loc->dtprel)
30359 goto hash_addr;
30360 /* FALLTHRU */
30361 case DW_OP_const1u:
30362 case DW_OP_const1s:
30363 case DW_OP_const2u:
30364 case DW_OP_const2s:
30365 case DW_OP_const4s:
30366 case DW_OP_const8s:
30367 case DW_OP_constu:
30368 case DW_OP_consts:
30369 case DW_OP_pick:
30370 case DW_OP_plus_uconst:
30371 case DW_OP_breg0:
30372 case DW_OP_breg1:
30373 case DW_OP_breg2:
30374 case DW_OP_breg3:
30375 case DW_OP_breg4:
30376 case DW_OP_breg5:
30377 case DW_OP_breg6:
30378 case DW_OP_breg7:
30379 case DW_OP_breg8:
30380 case DW_OP_breg9:
30381 case DW_OP_breg10:
30382 case DW_OP_breg11:
30383 case DW_OP_breg12:
30384 case DW_OP_breg13:
30385 case DW_OP_breg14:
30386 case DW_OP_breg15:
30387 case DW_OP_breg16:
30388 case DW_OP_breg17:
30389 case DW_OP_breg18:
30390 case DW_OP_breg19:
30391 case DW_OP_breg20:
30392 case DW_OP_breg21:
30393 case DW_OP_breg22:
30394 case DW_OP_breg23:
30395 case DW_OP_breg24:
30396 case DW_OP_breg25:
30397 case DW_OP_breg26:
30398 case DW_OP_breg27:
30399 case DW_OP_breg28:
30400 case DW_OP_breg29:
30401 case DW_OP_breg30:
30402 case DW_OP_breg31:
30403 case DW_OP_regx:
30404 case DW_OP_fbreg:
30405 case DW_OP_piece:
30406 case DW_OP_deref_size:
30407 case DW_OP_xderef_size:
30408 hstate.add_object (val1->v.val_int);
30409 break;
30410 case DW_OP_skip:
30411 case DW_OP_bra:
30412 {
30413 int offset;
30414
30415 gcc_assert (val1->val_class == dw_val_class_loc);
30416 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30417 hstate.add_object (offset);
30418 }
30419 break;
30420 case DW_OP_implicit_value:
30421 hstate.add_object (val1->v.val_unsigned);
30422 switch (val2->val_class)
30423 {
30424 case dw_val_class_const:
30425 hstate.add_object (val2->v.val_int);
30426 break;
30427 case dw_val_class_vec:
30428 {
30429 unsigned int elt_size = val2->v.val_vec.elt_size;
30430 unsigned int len = val2->v.val_vec.length;
30431
30432 hstate.add_int (elt_size);
30433 hstate.add_int (len);
30434 hstate.add (val2->v.val_vec.array, len * elt_size);
30435 }
30436 break;
30437 case dw_val_class_const_double:
30438 hstate.add_object (val2->v.val_double.low);
30439 hstate.add_object (val2->v.val_double.high);
30440 break;
30441 case dw_val_class_wide_int:
30442 hstate.add (val2->v.val_wide->get_val (),
30443 get_full_len (*val2->v.val_wide)
30444 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30445 break;
30446 case dw_val_class_addr:
30447 inchash::add_rtx (val2->v.val_addr, hstate);
30448 break;
30449 default:
30450 gcc_unreachable ();
30451 }
30452 break;
30453 case DW_OP_bregx:
30454 case DW_OP_bit_piece:
30455 hstate.add_object (val1->v.val_int);
30456 hstate.add_object (val2->v.val_int);
30457 break;
30458 case DW_OP_addr:
30459 hash_addr:
30460 if (loc->dtprel)
30461 {
30462 unsigned char dtprel = 0xd1;
30463 hstate.add_object (dtprel);
30464 }
30465 inchash::add_rtx (val1->v.val_addr, hstate);
30466 break;
30467 case DW_OP_GNU_addr_index:
30468 case DW_OP_GNU_const_index:
30469 {
30470 if (loc->dtprel)
30471 {
30472 unsigned char dtprel = 0xd1;
30473 hstate.add_object (dtprel);
30474 }
30475 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30476 }
30477 break;
30478 case DW_OP_implicit_pointer:
30479 case DW_OP_GNU_implicit_pointer:
30480 hstate.add_int (val2->v.val_int);
30481 break;
30482 case DW_OP_entry_value:
30483 case DW_OP_GNU_entry_value:
30484 hstate.add_object (val1->v.val_loc);
30485 break;
30486 case DW_OP_regval_type:
30487 case DW_OP_deref_type:
30488 case DW_OP_GNU_regval_type:
30489 case DW_OP_GNU_deref_type:
30490 {
30491 unsigned int byte_size
30492 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30493 unsigned int encoding
30494 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30495 hstate.add_object (val1->v.val_int);
30496 hstate.add_object (byte_size);
30497 hstate.add_object (encoding);
30498 }
30499 break;
30500 case DW_OP_convert:
30501 case DW_OP_reinterpret:
30502 case DW_OP_GNU_convert:
30503 case DW_OP_GNU_reinterpret:
30504 if (val1->val_class == dw_val_class_unsigned_const)
30505 {
30506 hstate.add_object (val1->v.val_unsigned);
30507 break;
30508 }
30509 /* FALLTHRU */
30510 case DW_OP_const_type:
30511 case DW_OP_GNU_const_type:
30512 {
30513 unsigned int byte_size
30514 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30515 unsigned int encoding
30516 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30517 hstate.add_object (byte_size);
30518 hstate.add_object (encoding);
30519 if (loc->dw_loc_opc != DW_OP_const_type
30520 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30521 break;
30522 hstate.add_object (val2->val_class);
30523 switch (val2->val_class)
30524 {
30525 case dw_val_class_const:
30526 hstate.add_object (val2->v.val_int);
30527 break;
30528 case dw_val_class_vec:
30529 {
30530 unsigned int elt_size = val2->v.val_vec.elt_size;
30531 unsigned int len = val2->v.val_vec.length;
30532
30533 hstate.add_object (elt_size);
30534 hstate.add_object (len);
30535 hstate.add (val2->v.val_vec.array, len * elt_size);
30536 }
30537 break;
30538 case dw_val_class_const_double:
30539 hstate.add_object (val2->v.val_double.low);
30540 hstate.add_object (val2->v.val_double.high);
30541 break;
30542 case dw_val_class_wide_int:
30543 hstate.add (val2->v.val_wide->get_val (),
30544 get_full_len (*val2->v.val_wide)
30545 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30546 break;
30547 default:
30548 gcc_unreachable ();
30549 }
30550 }
30551 break;
30552
30553 default:
30554 /* Other codes have no operands. */
30555 break;
30556 }
30557 }
30558
30559 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30560
30561 static inline void
30562 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30563 {
30564 dw_loc_descr_ref l;
30565 bool sizes_computed = false;
30566 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30567 size_of_locs (loc);
30568
30569 for (l = loc; l != NULL; l = l->dw_loc_next)
30570 {
30571 enum dwarf_location_atom opc = l->dw_loc_opc;
30572 hstate.add_object (opc);
30573 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30574 {
30575 size_of_locs (loc);
30576 sizes_computed = true;
30577 }
30578 hash_loc_operands (l, hstate);
30579 }
30580 }
30581
30582 /* Compute hash of the whole location list LIST_HEAD. */
30583
30584 static inline void
30585 hash_loc_list (dw_loc_list_ref list_head)
30586 {
30587 dw_loc_list_ref curr = list_head;
30588 inchash::hash hstate;
30589
30590 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30591 {
30592 hstate.add (curr->begin, strlen (curr->begin) + 1);
30593 hstate.add (curr->end, strlen (curr->end) + 1);
30594 hstate.add_object (curr->vbegin);
30595 hstate.add_object (curr->vend);
30596 if (curr->section)
30597 hstate.add (curr->section, strlen (curr->section) + 1);
30598 hash_locs (curr->expr, hstate);
30599 }
30600 list_head->hash = hstate.end ();
30601 }
30602
30603 /* Return true if X and Y opcodes have the same operands. */
30604
30605 static inline bool
30606 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30607 {
30608 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30609 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30610 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30611 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30612
30613 switch (x->dw_loc_opc)
30614 {
30615 case DW_OP_const4u:
30616 case DW_OP_const8u:
30617 if (x->dtprel)
30618 goto hash_addr;
30619 /* FALLTHRU */
30620 case DW_OP_const1u:
30621 case DW_OP_const1s:
30622 case DW_OP_const2u:
30623 case DW_OP_const2s:
30624 case DW_OP_const4s:
30625 case DW_OP_const8s:
30626 case DW_OP_constu:
30627 case DW_OP_consts:
30628 case DW_OP_pick:
30629 case DW_OP_plus_uconst:
30630 case DW_OP_breg0:
30631 case DW_OP_breg1:
30632 case DW_OP_breg2:
30633 case DW_OP_breg3:
30634 case DW_OP_breg4:
30635 case DW_OP_breg5:
30636 case DW_OP_breg6:
30637 case DW_OP_breg7:
30638 case DW_OP_breg8:
30639 case DW_OP_breg9:
30640 case DW_OP_breg10:
30641 case DW_OP_breg11:
30642 case DW_OP_breg12:
30643 case DW_OP_breg13:
30644 case DW_OP_breg14:
30645 case DW_OP_breg15:
30646 case DW_OP_breg16:
30647 case DW_OP_breg17:
30648 case DW_OP_breg18:
30649 case DW_OP_breg19:
30650 case DW_OP_breg20:
30651 case DW_OP_breg21:
30652 case DW_OP_breg22:
30653 case DW_OP_breg23:
30654 case DW_OP_breg24:
30655 case DW_OP_breg25:
30656 case DW_OP_breg26:
30657 case DW_OP_breg27:
30658 case DW_OP_breg28:
30659 case DW_OP_breg29:
30660 case DW_OP_breg30:
30661 case DW_OP_breg31:
30662 case DW_OP_regx:
30663 case DW_OP_fbreg:
30664 case DW_OP_piece:
30665 case DW_OP_deref_size:
30666 case DW_OP_xderef_size:
30667 return valx1->v.val_int == valy1->v.val_int;
30668 case DW_OP_skip:
30669 case DW_OP_bra:
30670 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30671 can cause irrelevant differences in dw_loc_addr. */
30672 gcc_assert (valx1->val_class == dw_val_class_loc
30673 && valy1->val_class == dw_val_class_loc
30674 && (dwarf_split_debug_info
30675 || x->dw_loc_addr == y->dw_loc_addr));
30676 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30677 case DW_OP_implicit_value:
30678 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30679 || valx2->val_class != valy2->val_class)
30680 return false;
30681 switch (valx2->val_class)
30682 {
30683 case dw_val_class_const:
30684 return valx2->v.val_int == valy2->v.val_int;
30685 case dw_val_class_vec:
30686 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30687 && valx2->v.val_vec.length == valy2->v.val_vec.length
30688 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30689 valx2->v.val_vec.elt_size
30690 * valx2->v.val_vec.length) == 0;
30691 case dw_val_class_const_double:
30692 return valx2->v.val_double.low == valy2->v.val_double.low
30693 && valx2->v.val_double.high == valy2->v.val_double.high;
30694 case dw_val_class_wide_int:
30695 return *valx2->v.val_wide == *valy2->v.val_wide;
30696 case dw_val_class_addr:
30697 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30698 default:
30699 gcc_unreachable ();
30700 }
30701 case DW_OP_bregx:
30702 case DW_OP_bit_piece:
30703 return valx1->v.val_int == valy1->v.val_int
30704 && valx2->v.val_int == valy2->v.val_int;
30705 case DW_OP_addr:
30706 hash_addr:
30707 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30708 case DW_OP_GNU_addr_index:
30709 case DW_OP_GNU_const_index:
30710 {
30711 rtx ax1 = valx1->val_entry->addr.rtl;
30712 rtx ay1 = valy1->val_entry->addr.rtl;
30713 return rtx_equal_p (ax1, ay1);
30714 }
30715 case DW_OP_implicit_pointer:
30716 case DW_OP_GNU_implicit_pointer:
30717 return valx1->val_class == dw_val_class_die_ref
30718 && valx1->val_class == valy1->val_class
30719 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30720 && valx2->v.val_int == valy2->v.val_int;
30721 case DW_OP_entry_value:
30722 case DW_OP_GNU_entry_value:
30723 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30724 case DW_OP_const_type:
30725 case DW_OP_GNU_const_type:
30726 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30727 || valx2->val_class != valy2->val_class)
30728 return false;
30729 switch (valx2->val_class)
30730 {
30731 case dw_val_class_const:
30732 return valx2->v.val_int == valy2->v.val_int;
30733 case dw_val_class_vec:
30734 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30735 && valx2->v.val_vec.length == valy2->v.val_vec.length
30736 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30737 valx2->v.val_vec.elt_size
30738 * valx2->v.val_vec.length) == 0;
30739 case dw_val_class_const_double:
30740 return valx2->v.val_double.low == valy2->v.val_double.low
30741 && valx2->v.val_double.high == valy2->v.val_double.high;
30742 case dw_val_class_wide_int:
30743 return *valx2->v.val_wide == *valy2->v.val_wide;
30744 default:
30745 gcc_unreachable ();
30746 }
30747 case DW_OP_regval_type:
30748 case DW_OP_deref_type:
30749 case DW_OP_GNU_regval_type:
30750 case DW_OP_GNU_deref_type:
30751 return valx1->v.val_int == valy1->v.val_int
30752 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30753 case DW_OP_convert:
30754 case DW_OP_reinterpret:
30755 case DW_OP_GNU_convert:
30756 case DW_OP_GNU_reinterpret:
30757 if (valx1->val_class != valy1->val_class)
30758 return false;
30759 if (valx1->val_class == dw_val_class_unsigned_const)
30760 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30761 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30762 case DW_OP_GNU_parameter_ref:
30763 return valx1->val_class == dw_val_class_die_ref
30764 && valx1->val_class == valy1->val_class
30765 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30766 default:
30767 /* Other codes have no operands. */
30768 return true;
30769 }
30770 }
30771
30772 /* Return true if DWARF location expressions X and Y are the same. */
30773
30774 static inline bool
30775 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30776 {
30777 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30778 if (x->dw_loc_opc != y->dw_loc_opc
30779 || x->dtprel != y->dtprel
30780 || !compare_loc_operands (x, y))
30781 break;
30782 return x == NULL && y == NULL;
30783 }
30784
30785 /* Hashtable helpers. */
30786
30787 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30788 {
30789 static inline hashval_t hash (const dw_loc_list_struct *);
30790 static inline bool equal (const dw_loc_list_struct *,
30791 const dw_loc_list_struct *);
30792 };
30793
30794 /* Return precomputed hash of location list X. */
30795
30796 inline hashval_t
30797 loc_list_hasher::hash (const dw_loc_list_struct *x)
30798 {
30799 return x->hash;
30800 }
30801
30802 /* Return true if location lists A and B are the same. */
30803
30804 inline bool
30805 loc_list_hasher::equal (const dw_loc_list_struct *a,
30806 const dw_loc_list_struct *b)
30807 {
30808 if (a == b)
30809 return 1;
30810 if (a->hash != b->hash)
30811 return 0;
30812 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30813 if (strcmp (a->begin, b->begin) != 0
30814 || strcmp (a->end, b->end) != 0
30815 || (a->section == NULL) != (b->section == NULL)
30816 || (a->section && strcmp (a->section, b->section) != 0)
30817 || a->vbegin != b->vbegin || a->vend != b->vend
30818 || !compare_locs (a->expr, b->expr))
30819 break;
30820 return a == NULL && b == NULL;
30821 }
30822
30823 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30824
30825
30826 /* Recursively optimize location lists referenced from DIE
30827 children and share them whenever possible. */
30828
30829 static void
30830 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30831 {
30832 dw_die_ref c;
30833 dw_attr_node *a;
30834 unsigned ix;
30835 dw_loc_list_struct **slot;
30836 bool drop_locviews = false;
30837 bool has_locviews = false;
30838
30839 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30840 if (AT_class (a) == dw_val_class_loc_list)
30841 {
30842 dw_loc_list_ref list = AT_loc_list (a);
30843 /* TODO: perform some optimizations here, before hashing
30844 it and storing into the hash table. */
30845 hash_loc_list (list);
30846 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30847 if (*slot == NULL)
30848 {
30849 *slot = list;
30850 if (loc_list_has_views (list))
30851 gcc_assert (list->vl_symbol);
30852 else if (list->vl_symbol)
30853 {
30854 drop_locviews = true;
30855 list->vl_symbol = NULL;
30856 }
30857 }
30858 else
30859 {
30860 if (list->vl_symbol && !(*slot)->vl_symbol)
30861 drop_locviews = true;
30862 a->dw_attr_val.v.val_loc_list = *slot;
30863 }
30864 }
30865 else if (AT_class (a) == dw_val_class_view_list)
30866 {
30867 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30868 has_locviews = true;
30869 }
30870
30871
30872 if (drop_locviews && has_locviews)
30873 remove_AT (die, DW_AT_GNU_locviews);
30874
30875 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30876 }
30877
30878
30879 /* Recursively assign each location list a unique index into the debug_addr
30880 section. */
30881
30882 static void
30883 index_location_lists (dw_die_ref die)
30884 {
30885 dw_die_ref c;
30886 dw_attr_node *a;
30887 unsigned ix;
30888
30889 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30890 if (AT_class (a) == dw_val_class_loc_list)
30891 {
30892 dw_loc_list_ref list = AT_loc_list (a);
30893 dw_loc_list_ref curr;
30894 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30895 {
30896 	      /* Don't index an entry that has already been indexed
30897 		 or won't be output.  Make sure skip_loc_list_entry doesn't
30898 		 call size_of_locs, because that might cause a circular
30899 		 dependency: index_location_lists would then need address
30900 		 table indexes to be computed, yet it also adds new entries
30901 		 through add_addr_table_entry, and address table index
30902 		 computation requires no new additions to the hash table.
30903 		 In the rare case of a DWARF[234] location expression >= 64KB,
30904 		 we'll just waste an unused address table entry for it.  */
30905 if (curr->begin_entry != NULL
30906 || skip_loc_list_entry (curr))
30907 continue;
30908
30909 curr->begin_entry
30910 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30911 }
30912 }
30913
30914 FOR_EACH_CHILD (die, c, index_location_lists (c));
30915 }
30916
30917 /* Optimize location lists referenced from DIE
30918 children and share them whenever possible. */
30919
30920 static void
30921 optimize_location_lists (dw_die_ref die)
30922 {
30923 loc_list_hash_type htab (500);
30924 optimize_location_lists_1 (die, &htab);
30925 }
30926 \f
30927 /* Traverse the limbo die list, and add parent/child links. The only
30928 dies without parents that should be here are concrete instances of
30929 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
30930 For concrete instances, we can get the parent die from the abstract
30931 instance. */
30932
30933 static void
30934 flush_limbo_die_list (void)
30935 {
30936 limbo_die_node *node;
30937
30938 /* get_context_die calls force_decl_die, which can put new DIEs on the
30939 limbo list in LTO mode when nested functions are put in a different
30940 partition than that of their parent function. */
30941 while ((node = limbo_die_list))
30942 {
30943 dw_die_ref die = node->die;
30944 limbo_die_list = node->next;
30945
30946 if (die->die_parent == NULL)
30947 {
30948 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
30949
30950 if (origin && origin->die_parent)
30951 add_child_die (origin->die_parent, die);
30952 else if (is_cu_die (die))
30953 ;
30954 else if (seen_error ())
30955 /* It's OK to be confused by errors in the input. */
30956 add_child_die (comp_unit_die (), die);
30957 else
30958 {
30959 /* In certain situations, the lexical block containing a
30960 nested function can be optimized away, which results
30961 in the nested function die being orphaned. Likewise
30962 with the return type of that nested function. Force
30963 this to be a child of the containing function.
30964
30965 It may happen that even the containing function got fully
30966 inlined and optimized out. In that case we are lost and
30967 	     assign the empty child.  This should not be a big issue as
30968 the function is likely unreachable too. */
30969 gcc_assert (node->created_for);
30970
30971 if (DECL_P (node->created_for))
30972 origin = get_context_die (DECL_CONTEXT (node->created_for));
30973 else if (TYPE_P (node->created_for))
30974 origin = scope_die_for (node->created_for, comp_unit_die ());
30975 else
30976 origin = comp_unit_die ();
30977
30978 add_child_die (origin, die);
30979 }
30980 }
30981 }
30982 }
30983
30984 /* Reset DIEs so we can output them again. */
30985
30986 static void
30987 reset_dies (dw_die_ref die)
30988 {
30989 dw_die_ref c;
30990
30991 /* Remove stuff we re-generate. */
30992 die->die_mark = 0;
30993 die->die_offset = 0;
30994 die->die_abbrev = 0;
30995 remove_AT (die, DW_AT_sibling);
30996
30997 FOR_EACH_CHILD (die, c, reset_dies (c));
30998 }
30999
31000 /* Output stuff that dwarf requires at the end of every file,
31001 and generate the DWARF-2 debugging info. */
31002
31003 static void
31004 dwarf2out_finish (const char *)
31005 {
31006 comdat_type_node *ctnode;
31007 dw_die_ref main_comp_unit_die;
31008 unsigned char checksum[16];
31009 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31010
31011 /* Flush out any latecomers to the limbo party. */
31012 flush_limbo_die_list ();
31013
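  /* Any queued inline entry points should have been consumed while
     emitting the functions they belong to.  */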
31014 if (inline_entry_data_table)
31015 gcc_assert (inline_entry_data_table->elements () == 0);
31016
31017 if (flag_checking)
31018 {
31019 verify_die (comp_unit_die ());
31020 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31021 verify_die (node->die);
31022 }
31023
31024 /* We shouldn't have any symbols with delayed asm names for
31025 DIEs generated after early finish. */
31026 gcc_assert (deferred_asm_name == NULL);
31027
31028 gen_remaining_tmpl_value_param_die_attribute ();
31029
31030 if (flag_generate_lto || flag_generate_offload)
31031 {
31032 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31033
31034 /* Prune stuff so that dwarf2out_finish runs successfully
31035 for the fat part of the object. */
31036 reset_dies (comp_unit_die ());
31037 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31038 reset_dies (node->die);
31039
31040 hash_table<comdat_type_hasher> comdat_type_table (100);
31041 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31042 {
31043 comdat_type_node **slot
31044 = comdat_type_table.find_slot (ctnode, INSERT);
31045
31046 /* Don't reset types twice. */
31047 if (*slot != HTAB_EMPTY_ENTRY)
31048 continue;
31049
31050 	  /* Reset the comdat type unit DIEs as well so that they can be
31051 	     output again for the fat part of the object, matching the
31052 	     reset of the compile unit DIEs above.  */
31053 if (debug_info_level >= DINFO_LEVEL_TERSE)
31054 reset_dies (ctnode->root_die);
31055
31056 *slot = ctnode;
31057 }
31058
31059 /* Reset die CU symbol so we don't output it twice. */
31060 comp_unit_die ()->die_id.die_symbol = NULL;
31061
31062 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31063 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31064 if (have_macinfo)
31065 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31066
31067 /* Remove indirect string decisions. */
31068 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31069 }
31070
31071 #if ENABLE_ASSERT_CHECKING
31072 {
31073 dw_die_ref die = comp_unit_die (), c;
31074 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31075 }
31076 #endif
31077 resolve_addr (comp_unit_die ());
31078 move_marked_base_types ();
31079
31080 /* Initialize sections and labels used for actual assembler output. */
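  /* The returned generation number is used when emitting range lists so
     that their labels stay unique if sections are initialized more than
     once (LTO early debug followed by the fat object's output).  */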
31081 unsigned generation = init_sections_and_labels (false);
31082
31083 /* Traverse the DIE's and add sibling attributes to those DIE's that
31084 have children. */
31085 add_sibling_attributes (comp_unit_die ());
31086 limbo_die_node *node;
31087 for (node = cu_die_list; node; node = node->next)
31088 add_sibling_attributes (node->die);
31089 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31090 add_sibling_attributes (ctnode->root_die);
31091
31092 /* When splitting DWARF info, we put some attributes in the
31093 skeleton compile_unit DIE that remains in the .o, while
31094 most attributes go in the DWO compile_unit_die. */
31095 if (dwarf_split_debug_info)
31096 {
31097 limbo_die_node *cu;
31098 main_comp_unit_die = gen_compile_unit_die (NULL);
31099 if (dwarf_version >= 5)
31100 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31101 cu = limbo_die_list;
31102 gcc_assert (cu->die == main_comp_unit_die);
31103 limbo_die_list = limbo_die_list->next;
31104 cu->next = cu_die_list;
31105 cu_die_list = cu;
31106 }
31107 else
31108 main_comp_unit_die = comp_unit_die ();
31109
31110 /* Output a terminator label for the .text section. */
31111 switch_to_section (text_section);
31112 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31113 if (cold_text_section)
31114 {
31115 switch_to_section (cold_text_section);
31116 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31117 }
31118
31119 /* We can only use the low/high_pc attributes if all of the code was
31120 in .text. */
31121 if (!have_multiple_function_sections
31122 || (dwarf_version < 3 && dwarf_strict))
31123 {
31124 /* Don't add if the CU has no associated code. */
31125 if (text_section_used)
31126 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31127 text_end_label, true);
31128 }
31129 else
31130 {
31131 unsigned fde_idx;
31132 dw_fde_ref fde;
31133 bool range_list_added = false;
31134
31135 if (text_section_used)
31136 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31137 text_end_label, &range_list_added, true);
31138 if (cold_text_section_used)
31139 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31140 cold_end_label, &range_list_added, true);
31141
31142 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31143 {
31144 if (DECL_IGNORED_P (fde->decl))
31145 continue;
31146 if (!fde->in_std_section)
31147 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31148 fde->dw_fde_end, &range_list_added,
31149 true);
31150 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31151 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31152 fde->dw_fde_second_end, &range_list_added,
31153 true);
31154 }
31155
31156 if (range_list_added)
31157 {
31158 /* We need to give .debug_loc and .debug_ranges an appropriate
31159 "base address". Use zero so that these addresses become
31160 absolute. Historically, we've emitted the unexpected
31161 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31162 Emit both to give time for other tools to adapt. */
31163 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31164 if (! dwarf_strict && dwarf_version < 4)
31165 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31166
31167 add_ranges (NULL);
31168 }
31169 }
31170
31171 /* AIX Assembler inserts the length, so adjust the reference to match the
31172 offset expected by debuggers. */
31173 strcpy (dl_section_ref, debug_line_section_label);
31174 if (XCOFF_DEBUGGING_INFO)
31175 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31176
31177 if (debug_info_level >= DINFO_LEVEL_TERSE)
31178 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31179 dl_section_ref);
31180
31181 if (have_macinfo)
31182 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31183 macinfo_section_label);
31184
31185 if (dwarf_split_debug_info)
31186 {
31187 if (have_location_lists)
31188 {
31189 if (dwarf_version >= 5)
31190 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
31191 loc_section_label);
31192 /* optimize_location_lists calculates the size of the lists,
31193 so index them first, and assign indices to the entries.
31194 Although optimize_location_lists will remove entries from
31195 the table, it only does so for duplicates, and therefore
31196 only reduces ref_counts to 1. */
31197 index_location_lists (comp_unit_die ());
31198 }
31199
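      /* Assign consecutive indexes to the referenced address table
	 entries; these are the indexes that the address index forms
	 (DW_FORM_GNU_addr_index and friends) refer to.  */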
31200 if (addr_index_table != NULL)
31201 {
31202 unsigned int index = 0;
31203 addr_index_table
31204 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31205 (&index);
31206 }
31207 }
31208
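  /* Counter for the indexes handed out to location lists when emitting
     DWARF 5 split debug info (DW_FORM_loclistx); reset before they are
     (re-)assigned below.  */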
31209 loc_list_idx = 0;
31210 if (have_location_lists)
31211 {
31212 optimize_location_lists (comp_unit_die ());
31213 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31214 if (dwarf_version >= 5 && dwarf_split_debug_info)
31215 assign_location_list_indexes (comp_unit_die ());
31216 }
31217
31218 save_macinfo_strings ();
31219
31220 if (dwarf_split_debug_info)
31221 {
31222 unsigned int index = 0;
31223
31224 /* Add attributes common to skeleton compile_units and
31225 type_units. Because these attributes include strings, it
31226 must be done before freezing the string table. Top-level
31227 skeleton die attrs are added when the skeleton type unit is
31228 created, so ensure it is created by this point. */
31229 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31230 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31231 }
31232
31233 /* Output all of the compilation units. We put the main one last so that
31234 the offsets are available to output_pubnames. */
31235 for (node = cu_die_list; node; node = node->next)
31236 output_comp_unit (node->die, 0, NULL);
31237
31238 hash_table<comdat_type_hasher> comdat_type_table (100);
31239 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31240 {
31241 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31242
31243 /* Don't output duplicate types. */
31244 if (*slot != HTAB_EMPTY_ENTRY)
31245 continue;
31246
31247 /* Add a pointer to the line table for the main compilation unit
31248 so that the debugger can make sense of DW_AT_decl_file
31249 attributes. */
31250 if (debug_info_level >= DINFO_LEVEL_TERSE)
31251 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31252 (!dwarf_split_debug_info
31253 ? dl_section_ref
31254 : debug_skeleton_line_section_label));
31255
31256 output_comdat_type_unit (ctnode);
31257 *slot = ctnode;
31258 }
31259
31260 if (dwarf_split_debug_info)
31261 {
31262 int mark;
31263 struct md5_ctx ctx;
31264
31265 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31266 index_rnglists ();
31267
31268 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31269 md5_init_ctx (&ctx);
31270 mark = 0;
31271 die_checksum (comp_unit_die (), &ctx, &mark);
31272 unmark_all_dies (comp_unit_die ());
31273 md5_finish_ctx (&ctx, checksum);
31274
31275 if (dwarf_version < 5)
31276 {
31277 /* Use the first 8 bytes of the checksum as the dwo_id,
31278 and add it to both comp-unit DIEs. */
31279 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31280 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31281 }
31282
31283 /* Add the base offset of the ranges table to the skeleton
31284 comp-unit DIE. */
31285 if (!vec_safe_is_empty (ranges_table))
31286 {
31287 if (dwarf_version >= 5)
31288 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31289 ranges_base_label);
31290 else
31291 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31292 ranges_section_label);
31293 }
31294
31295 switch_to_section (debug_addr_section);
31296 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31297 output_addr_table ();
31298 }
31299
31300 /* Output the main compilation unit if non-empty or if .debug_macinfo
31301 or .debug_macro will be emitted. */
31302 output_comp_unit (comp_unit_die (), have_macinfo,
31303 dwarf_split_debug_info ? checksum : NULL);
31304
31305 if (dwarf_split_debug_info && info_section_emitted)
31306 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31307
31308 /* Output the abbreviation table. */
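  /* Entry zero of abbrev_die_table is reserved, so a length of one means
     that no abbreviations were created.  */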
31309 if (vec_safe_length (abbrev_die_table) != 1)
31310 {
31311 switch_to_section (debug_abbrev_section);
31312 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31313 output_abbrev_section ();
31314 }
31315
31316 /* Output location list section if necessary. */
31317 if (have_location_lists)
31318 {
31319 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31320 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31321 /* Output the location lists info. */
31322 switch_to_section (debug_loc_section);
31323 if (dwarf_version >= 5)
31324 {
31325 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31326 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31327 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31328 dw2_asm_output_data (4, 0xffffffff,
31329 "Initial length escape value indicating "
31330 "64-bit DWARF extension");
31331 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31332 "Length of Location Lists");
31333 ASM_OUTPUT_LABEL (asm_out_file, l1);
31334 output_dwarf_version ();
31335 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31336 dw2_asm_output_data (1, 0, "Segment Size");
31337 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31338 "Offset Entry Count");
31339 }
31340 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31341 if (dwarf_version >= 5 && dwarf_split_debug_info)
31342 {
31343 unsigned int save_loc_list_idx = loc_list_idx;
31344 loc_list_idx = 0;
31345 output_loclists_offsets (comp_unit_die ());
31346 gcc_assert (save_loc_list_idx == loc_list_idx);
31347 }
31348 output_location_lists (comp_unit_die ());
31349 if (dwarf_version >= 5)
31350 ASM_OUTPUT_LABEL (asm_out_file, l2);
31351 }
31352
31353 output_pubtables ();
31354
31355 /* Output the address range information if a CU (.debug_info section)
31356 was emitted. We output an empty table even if we had no functions
31357    to put in it.  This is because the consumer has no way to tell the
31358 difference between an empty table that we omitted and failure to
31359 generate a table that would have contained data. */
31360 if (info_section_emitted)
31361 {
31362 switch_to_section (debug_aranges_section);
31363 output_aranges ();
31364 }
31365
31366 /* Output ranges section if necessary. */
31367 if (!vec_safe_is_empty (ranges_table))
31368 {
31369 if (dwarf_version >= 5)
31370 output_rnglists (generation);
31371 else
31372 output_ranges ();
31373 }
31374
31375 /* Have to end the macro section. */
31376 if (have_macinfo)
31377 {
31378 switch_to_section (debug_macinfo_section);
31379 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31380 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31381 : debug_skeleton_line_section_label, false);
31382 dw2_asm_output_data (1, 0, "End compilation unit");
31383 }
31384
31385 /* Output the source line correspondence table. We must do this
31386 even if there is no line information. Otherwise, on an empty
31387 translation unit, we will generate a present, but empty,
31388 .debug_info section. IRIX 6.5 `nm' will then complain when
31389 examining the file. This is done late so that any filenames
31390 used by the debug_info section are marked as 'used'. */
31391 switch_to_section (debug_line_section);
31392 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31393 if (! output_asm_line_debug_info ())
31394 output_line_info (false);
31395
31396 if (dwarf_split_debug_info && info_section_emitted)
31397 {
31398 switch_to_section (debug_skeleton_line_section);
31399 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31400 output_line_info (true);
31401 }
31402
31403 /* If we emitted any indirect strings, output the string table too. */
31404 if (debug_str_hash || skeleton_debug_str_hash)
31405 output_indirect_strings ();
31406 if (debug_line_str_hash)
31407 {
31408 switch_to_section (debug_line_str_section);
31409 const enum dwarf_form form = DW_FORM_line_strp;
31410 debug_line_str_hash->traverse<enum dwarf_form,
31411 output_indirect_string> (form);
31412 }
31413
31414 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31415 symview_upper_bound = 0;
31416 if (zero_view_p)
31417 bitmap_clear (zero_view_p);
31418 }
31419
31420 /* Returns a hash value for X (which really is a variable_value_struct). */
31421
31422 inline hashval_t
31423 variable_value_hasher::hash (variable_value_struct *x)
31424 {
31425 return (hashval_t) x->decl_id;
31426 }
31427
31428 /* Return nonzero if decl_id of variable_value_struct X is the same as
31429 UID of decl Y. */
31430
31431 inline bool
31432 variable_value_hasher::equal (variable_value_struct *x, tree y)
31433 {
31434 return x->decl_id == DECL_UID (y);
31435 }
31436
31437 /* Helper function for resolve_variable_value, handle
31438 DW_OP_GNU_variable_value in one location expression.
31439 Return true if exprloc has been changed into loclist. */
31440
31441 static bool
31442 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31443 {
31444 dw_loc_descr_ref next;
31445 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31446 {
31447 next = loc->dw_loc_next;
31448 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31449 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31450 continue;
31451
31452 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31453 if (DECL_CONTEXT (decl) != current_function_decl)
31454 continue;
31455
31456 dw_die_ref ref = lookup_decl_die (decl);
31457 if (ref)
31458 {
31459 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31460 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31461 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31462 continue;
31463 }
31464 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31465 if (l == NULL)
31466 continue;
31467 if (l->dw_loc_next)
31468 {
31469 if (AT_class (a) != dw_val_class_loc)
31470 continue;
31471 switch (a->dw_attr)
31472 {
31473 /* Following attributes allow both exprloc and loclist
31474 classes, so we can change them into a loclist. */
31475 case DW_AT_location:
31476 case DW_AT_string_length:
31477 case DW_AT_return_addr:
31478 case DW_AT_data_member_location:
31479 case DW_AT_frame_base:
31480 case DW_AT_segment:
31481 case DW_AT_static_link:
31482 case DW_AT_use_location:
31483 case DW_AT_vtable_elem_location:
31484 if (prev)
31485 {
31486 prev->dw_loc_next = NULL;
31487 prepend_loc_descr_to_each (l, AT_loc (a));
31488 }
31489 if (next)
31490 add_loc_descr_to_each (l, next);
31491 a->dw_attr_val.val_class = dw_val_class_loc_list;
31492 a->dw_attr_val.val_entry = NULL;
31493 a->dw_attr_val.v.val_loc_list = l;
31494 have_location_lists = true;
31495 return true;
31496 /* Following attributes allow both exprloc and reference,
31497 so if the whole expression is DW_OP_GNU_variable_value alone
31498 we could transform it into reference. */
31499 case DW_AT_byte_size:
31500 case DW_AT_bit_size:
31501 case DW_AT_lower_bound:
31502 case DW_AT_upper_bound:
31503 case DW_AT_bit_stride:
31504 case DW_AT_count:
31505 case DW_AT_allocated:
31506 case DW_AT_associated:
31507 case DW_AT_byte_stride:
31508 if (prev == NULL && next == NULL)
31509 break;
31510 /* FALLTHRU */
31511 default:
31512 if (dwarf_strict)
31513 continue;
31514 break;
31515 }
31516 /* Create DW_TAG_variable that we can refer to. */
31517 gen_decl_die (decl, NULL_TREE, NULL,
31518 lookup_decl_die (current_function_decl));
31519 ref = lookup_decl_die (decl);
31520 if (ref)
31521 {
31522 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31523 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31524 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31525 }
31526 continue;
31527 }
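      /* The resolved location is a single expression: splice it in place
	 of the DW_OP_GNU_variable_value operation and keep the rest of the
	 original expression after it.  */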
31528 if (prev)
31529 {
31530 prev->dw_loc_next = l->expr;
31531 add_loc_descr (&prev->dw_loc_next, next);
31532 free_loc_descr (loc, NULL);
31533 next = prev->dw_loc_next;
31534 }
31535 else
31536 {
31537 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31538 add_loc_descr (&loc, next);
31539 next = loc;
31540 }
31541 loc = prev;
31542 }
31543 return false;
31544 }
31545
31546 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31547
31548 static void
31549 resolve_variable_value (dw_die_ref die)
31550 {
31551 dw_attr_node *a;
31552 dw_loc_list_ref loc;
31553 unsigned ix;
31554
31555 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31556 switch (AT_class (a))
31557 {
31558 case dw_val_class_loc:
31559 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31560 break;
31561 /* FALLTHRU */
31562 case dw_val_class_loc_list:
31563 loc = AT_loc_list (a);
31564 gcc_assert (loc);
31565 for (; loc; loc = loc->dw_loc_next)
31566 resolve_variable_value_in_expr (a, loc->expr);
31567 break;
31568 default:
31569 break;
31570 }
31571 }
31572
31573 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31574 temporaries in the current function. */
31575
31576 static void
31577 resolve_variable_values (void)
31578 {
31579 if (!variable_value_hash || !current_function_decl)
31580 return;
31581
31582 struct variable_value_struct *node
31583 = variable_value_hash->find_with_hash (current_function_decl,
31584 DECL_UID (current_function_decl));
31585
31586 if (node == NULL)
31587 return;
31588
31589 unsigned int i;
31590 dw_die_ref die;
31591 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31592 resolve_variable_value (die);
31593 }
31594
31595 /* Helper function for note_variable_value, handle one location
31596 expression. */
31597
31598 static void
31599 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31600 {
31601 for (; loc; loc = loc->dw_loc_next)
31602 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31603 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31604 {
31605 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31606 dw_die_ref ref = lookup_decl_die (decl);
31607 if (! ref && (flag_generate_lto || flag_generate_offload))
31608 {
31609 	    /* ??? This is somewhat of a hack because we do not create DIEs
31610 	       for variables not in BLOCK trees early, but when generating
31611 early LTO output we need the dw_val_class_decl_ref to be
31612 fully resolved. For fat LTO objects we'd also like to
31613 undo this after LTO dwarf output. */
31614 gcc_assert (DECL_CONTEXT (decl));
31615 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31616 gcc_assert (ctx != NULL);
31617 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31618 ref = lookup_decl_die (decl);
31619 gcc_assert (ref != NULL);
31620 }
31621 if (ref)
31622 {
31623 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31624 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31625 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31626 continue;
31627 }
31628 if (VAR_P (decl)
31629 && DECL_CONTEXT (decl)
31630 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31631 && lookup_decl_die (DECL_CONTEXT (decl)))
31632 {
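	  /* No DIE for DECL is available yet; remember DIE so that
	     resolve_variable_values can retry the lookup when debug info
	     for the containing function is generated.  */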
31633 if (!variable_value_hash)
31634 variable_value_hash
31635 = hash_table<variable_value_hasher>::create_ggc (10);
31636
31637 tree fndecl = DECL_CONTEXT (decl);
31638 struct variable_value_struct *node;
31639 struct variable_value_struct **slot
31640 = variable_value_hash->find_slot_with_hash (fndecl,
31641 DECL_UID (fndecl),
31642 INSERT);
31643 if (*slot == NULL)
31644 {
31645 node = ggc_cleared_alloc<variable_value_struct> ();
31646 node->decl_id = DECL_UID (fndecl);
31647 *slot = node;
31648 }
31649 else
31650 node = *slot;
31651
31652 vec_safe_push (node->dies, die);
31653 }
31654 }
31655 }
31656
31657 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31658 with dw_val_class_decl_ref operand. */
31659
31660 static void
31661 note_variable_value (dw_die_ref die)
31662 {
31663 dw_die_ref c;
31664 dw_attr_node *a;
31665 dw_loc_list_ref loc;
31666 unsigned ix;
31667
31668 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31669 switch (AT_class (a))
31670 {
31671 case dw_val_class_loc_list:
31672 loc = AT_loc_list (a);
31673 gcc_assert (loc);
31674 if (!loc->noted_variable_value)
31675 {
31676 loc->noted_variable_value = 1;
31677 for (; loc; loc = loc->dw_loc_next)
31678 note_variable_value_in_expr (die, loc->expr);
31679 }
31680 break;
31681 case dw_val_class_loc:
31682 note_variable_value_in_expr (die, AT_loc (a));
31683 break;
31684 default:
31685 break;
31686 }
31687
31688 /* Mark children. */
31689 FOR_EACH_CHILD (die, c, note_variable_value (c));
31690 }
31691
31692 /* Perform any cleanups needed after the early debug generation pass
31693 has run. */
31694
31695 static void
31696 dwarf2out_early_finish (const char *filename)
31697 {
31698 set_early_dwarf s;
31699 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31700
31701 /* PCH might result in DW_AT_producer string being restored from the
31702 header compilation, so always fill it with empty string initially
31703 and overwrite only here. */
31704 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31705 producer_string = gen_producer_string ();
31706 producer->dw_attr_val.v.val_str->refcount--;
31707 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31708
31709 /* Add the name for the main input file now. We delayed this from
31710 dwarf2out_init to avoid complications with PCH. */
31711 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31712 add_comp_dir_attribute (comp_unit_die ());
31713
31714 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31715 DW_AT_comp_dir into .debug_line_str section. */
31716 if (!dwarf2out_as_loc_support
31717 && dwarf_version >= 5
31718 && DWARF5_USE_DEBUG_LINE_STR)
31719 {
31720 for (int i = 0; i < 2; i++)
31721 {
31722 dw_attr_node *a = get_AT (comp_unit_die (),
31723 i ? DW_AT_comp_dir : DW_AT_name);
31724 if (a == NULL
31725 || AT_class (a) != dw_val_class_str
31726 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31727 continue;
31728
31729 if (! debug_line_str_hash)
31730 debug_line_str_hash
31731 = hash_table<indirect_string_hasher>::create_ggc (10);
31732
31733 struct indirect_string_node *node
31734 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31735 set_indirect_string (node);
31736 node->form = DW_FORM_line_strp;
31737 a->dw_attr_val.v.val_str->refcount--;
31738 a->dw_attr_val.v.val_str = node;
31739 }
31740 }
31741
31742 /* With LTO early dwarf was really finished at compile-time, so make
31743 sure to adjust the phase after annotating the LTRANS CU DIE. */
31744 if (in_lto_p)
31745 {
31746 early_dwarf_finished = true;
31747 return;
31748 }
31749
31750 /* Walk through the list of incomplete types again, trying once more to
31751 emit full debugging info for them. */
31752 retry_incomplete_types ();
31753
31754 /* The point here is to flush out the limbo list so that it is empty
31755 and we don't need to stream it for LTO. */
31756 flush_limbo_die_list ();
31757
31758 gen_scheduled_generic_parms_dies ();
31759 gen_remaining_tmpl_value_param_die_attribute ();
31760
31761 /* Add DW_AT_linkage_name for all deferred DIEs. */
31762 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31763 {
31764 tree decl = node->created_for;
31765 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31766 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31767 ended up in deferred_asm_name before we knew it was
31768 constant and never written to disk. */
31769 && DECL_ASSEMBLER_NAME (decl))
31770 {
31771 add_linkage_attr (node->die, decl);
31772 move_linkage_attr (node->die);
31773 }
31774 }
31775 deferred_asm_name = NULL;
31776
31777 if (flag_eliminate_unused_debug_types)
31778 prune_unused_types ();
31779
31780 /* Generate separate COMDAT sections for type DIEs. */
31781 if (use_debug_types)
31782 {
31783 break_out_comdat_types (comp_unit_die ());
31784
31785 /* Each new type_unit DIE was added to the limbo die list when created.
31786 Since these have all been added to comdat_type_list, clear the
31787 limbo die list. */
31788 limbo_die_list = NULL;
31789
31790 /* For each new comdat type unit, copy declarations for incomplete
31791 types to make the new unit self-contained (i.e., no direct
31792 references to the main compile unit). */
31793 for (comdat_type_node *ctnode = comdat_type_list;
31794 ctnode != NULL; ctnode = ctnode->next)
31795 copy_decls_for_unworthy_types (ctnode->root_die);
31796 copy_decls_for_unworthy_types (comp_unit_die ());
31797
31798 /* In the process of copying declarations from one unit to another,
31799 we may have left some declarations behind that are no longer
31800 referenced. Prune them. */
31801 prune_unused_types ();
31802 }
31803
31804 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31805 with dw_val_class_decl_ref operand. */
31806 note_variable_value (comp_unit_die ());
31807 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31808 note_variable_value (node->die);
31809 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31810 ctnode = ctnode->next)
31811 note_variable_value (ctnode->root_die);
31812 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31813 note_variable_value (node->die);
31814
31815 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31816 both the main_cu and all skeleton TUs. Making this call unconditional
31817 would end up either adding a second copy of the AT_pubnames attribute, or
31818 requiring a special case in add_top_level_skeleton_die_attrs. */
31819 if (!dwarf_split_debug_info)
31820 add_AT_pubnames (comp_unit_die ());
31821
31822 /* The early debug phase is now finished. */
31823 early_dwarf_finished = true;
31824
31825 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31826 if ((!flag_generate_lto && !flag_generate_offload)
31827 /* FIXME: Disable debug info generation for PE-COFF targets since the
31828 copy_lto_debug_sections operation of the simple object support in
31829 libiberty is not implemented for them yet. */
31830 || TARGET_PECOFF)
31831 return;
31832
31833   /* Now that we are going to output for LTO, initialize sections and
31834      labels to the LTO variants.  We don't need a random-seed postfix
31835      like other LTO sections have, since linking the LTO debug sections
31836      into one in a partial link is fine.  */
31837 init_sections_and_labels (true);
31838
31839 /* The output below is modeled after dwarf2out_finish with all
31840 location related output removed and some LTO specific changes.
31841 Some refactoring might make both smaller and easier to match up. */
31842
31843   /* Traverse the DIE's and add sibling attributes to those DIE's
31844 that have children. */
31845 add_sibling_attributes (comp_unit_die ());
31846 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31847 add_sibling_attributes (node->die);
31848 for (comdat_type_node *ctnode = comdat_type_list;
31849 ctnode != NULL; ctnode = ctnode->next)
31850 add_sibling_attributes (ctnode->root_die);
31851
31852 /* AIX Assembler inserts the length, so adjust the reference to match the
31853 offset expected by debuggers. */
31854 strcpy (dl_section_ref, debug_line_section_label);
31855 if (XCOFF_DEBUGGING_INFO)
31856 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31857
31858 if (debug_info_level >= DINFO_LEVEL_TERSE)
31859 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
31860
31861 if (have_macinfo)
31862 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31863 macinfo_section_label);
31864
31865 save_macinfo_strings ();
31866
31867 if (dwarf_split_debug_info)
31868 {
31869 unsigned int index = 0;
31870 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31871 }
31872
31873 /* Output all of the compilation units. We put the main one last so that
31874 the offsets are available to output_pubnames. */
31875 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31876 output_comp_unit (node->die, 0, NULL);
31877
31878 hash_table<comdat_type_hasher> comdat_type_table (100);
31879 for (comdat_type_node *ctnode = comdat_type_list;
31880 ctnode != NULL; ctnode = ctnode->next)
31881 {
31882 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31883
31884 /* Don't output duplicate types. */
31885 if (*slot != HTAB_EMPTY_ENTRY)
31886 continue;
31887
31888 /* Add a pointer to the line table for the main compilation unit
31889 so that the debugger can make sense of DW_AT_decl_file
31890 attributes. */
31891 if (debug_info_level >= DINFO_LEVEL_TERSE)
31892 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31893 (!dwarf_split_debug_info
31894 ? debug_line_section_label
31895 : debug_skeleton_line_section_label));
31896
31897 output_comdat_type_unit (ctnode);
31898 *slot = ctnode;
31899 }
31900
31901 /* Stick a unique symbol to the main debuginfo section. */
31902 compute_comp_unit_symbol (comp_unit_die ());
31903
31904 /* Output the main compilation unit. We always need it if only for
31905 the CU symbol. */
31906 output_comp_unit (comp_unit_die (), true, NULL);
31907
31908 /* Output the abbreviation table. */
31909 if (vec_safe_length (abbrev_die_table) != 1)
31910 {
31911 switch_to_section (debug_abbrev_section);
31912 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31913 output_abbrev_section ();
31914 }
31915
31916 /* Have to end the macro section. */
31917 if (have_macinfo)
31918 {
31919 /* We have to save macinfo state if we need to output it again
31920 for the FAT part of the object. */
31921 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
31922 if (flag_fat_lto_objects)
31923 macinfo_table = macinfo_table->copy ();
31924
31925 switch_to_section (debug_macinfo_section);
31926 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31927 output_macinfo (debug_line_section_label, true);
31928 dw2_asm_output_data (1, 0, "End compilation unit");
31929
31930 if (flag_fat_lto_objects)
31931 {
31932 vec_free (macinfo_table);
31933 macinfo_table = saved_macinfo_table;
31934 }
31935 }
31936
31937 /* Emit a skeleton debug_line section. */
31938 switch_to_section (debug_line_section);
31939 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31940 output_line_info (true);
31941
31942 /* If we emitted any indirect strings, output the string table too. */
31943 if (debug_str_hash || skeleton_debug_str_hash)
31944 output_indirect_strings ();
31945
31946 /* Switch back to the text section. */
31947 switch_to_section (text_section);
31948 }
31949
31950 /* Reset all state within dwarf2out.c so that we can rerun the compiler
31951 within the same process. For use by toplev::finalize. */
31952
31953 void
31954 dwarf2out_c_finalize (void)
31955 {
31956 last_var_location_insn = NULL;
31957 cached_next_real_insn = NULL;
31958 used_rtx_array = NULL;
31959 incomplete_types = NULL;
31960 decl_scope_table = NULL;
31961 debug_info_section = NULL;
31962 debug_skeleton_info_section = NULL;
31963 debug_abbrev_section = NULL;
31964 debug_skeleton_abbrev_section = NULL;
31965 debug_aranges_section = NULL;
31966 debug_addr_section = NULL;
31967 debug_macinfo_section = NULL;
31968 debug_line_section = NULL;
31969 debug_skeleton_line_section = NULL;
31970 debug_loc_section = NULL;
31971 debug_pubnames_section = NULL;
31972 debug_pubtypes_section = NULL;
31973 debug_str_section = NULL;
31974 debug_line_str_section = NULL;
31975 debug_str_dwo_section = NULL;
31976 debug_str_offsets_section = NULL;
31977 debug_ranges_section = NULL;
31978 debug_frame_section = NULL;
31979 fde_vec = NULL;
31980 debug_str_hash = NULL;
31981 debug_line_str_hash = NULL;
31982 skeleton_debug_str_hash = NULL;
31983 dw2_string_counter = 0;
31984 have_multiple_function_sections = false;
31985 text_section_used = false;
31986 cold_text_section_used = false;
31987 cold_text_section = NULL;
31988 current_unit_personality = NULL;
31989
31990 early_dwarf = false;
31991 early_dwarf_finished = false;
31992
31993 next_die_offset = 0;
31994 single_comp_unit_die = NULL;
31995 comdat_type_list = NULL;
31996 limbo_die_list = NULL;
31997 file_table = NULL;
31998 decl_die_table = NULL;
31999 common_block_die_table = NULL;
32000 decl_loc_table = NULL;
32001 call_arg_locations = NULL;
32002 call_arg_loc_last = NULL;
32003 call_site_count = -1;
32004 tail_call_site_count = -1;
32005 cached_dw_loc_list_table = NULL;
32006 abbrev_die_table = NULL;
32007 delete dwarf_proc_stack_usage_map;
32008 dwarf_proc_stack_usage_map = NULL;
32009 line_info_label_num = 0;
32010 cur_line_info_table = NULL;
32011 text_section_line_info = NULL;
32012 cold_text_section_line_info = NULL;
32013 separate_line_info = NULL;
32014 info_section_emitted = false;
32015 pubname_table = NULL;
32016 pubtype_table = NULL;
32017 macinfo_table = NULL;
32018 ranges_table = NULL;
32019 ranges_by_label = NULL;
32020 rnglist_idx = 0;
32021 have_location_lists = false;
32022 loclabel_num = 0;
32023 poc_label_num = 0;
32024 last_emitted_file = NULL;
32025 label_num = 0;
32026 tmpl_value_parm_die_table = NULL;
32027 generic_type_instances = NULL;
32028 frame_pointer_fb_offset = 0;
32029 frame_pointer_fb_offset_valid = false;
32030 base_types.release ();
32031 XDELETEVEC (producer_string);
32032 producer_string = NULL;
32033 }
32034
32035 #include "gt-dwarf2out.h"