gcc/dwarf2out.c
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2021 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
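/* Worked example (illustrative, not part of the original source): on
   x86_64 the call instruction pushes the return address, so on entry to
   the callee the CFA -- the value SP had just before the call -- is
   RSP + 8.  After "push %rbp; mov %rsp,%rbp" in the prologue the rule
   becomes RBP + 16, yet the address it denotes stays fixed for the whole
   life of the frame.  */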
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types_list needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 dwarf_offset_size. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (dwarf_offset_size == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (dwarf_offset_size == 4 ? "-4" : "-12")
193 #endif
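/* For example (illustrative, not part of the original source), a 64-bit
   DWARF unit whose length is 0x20 begins with the 4-byte escape
   0xffffffff followed by the 8-byte length, i.e. the bytes
   ff ff ff ff 20 00 00 00 00 00 00 00 on a little-endian target --
   12 bytes of initial length in total.  */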
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
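/* A minimal sketch (not part of the original source, kept under "#if 0"
   so it is never compiled) of what DWARF_ROUND computes for a few
   representative inputs.  */
#if 0
static void
dwarf_round_examples (void)
{
  /* 13 bytes rounded up to a 4-byte boundary is 16.  */
  gcc_assert (DWARF_ROUND (13, 4) == 16);
  /* An exact multiple is left unchanged.  */
  gcc_assert (DWARF_ROUND (16, 4) == 16);
  /* Padding out to an 8-byte (address sized) boundary.  */
  gcc_assert (DWARF_ROUND (21, 8) == 24);
}
#endif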
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (dwarf_offset_size == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (dwarf_offset_size == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (dwarf_offset_size == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
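/* A minimal sketch (not part of the original source, kept under "#if 0")
   of get_full_len on a host with a 64-bit HOST_WIDE_INT: only the bits
   needed for the actual value count, not the wide_int's storage
   precision.  */
#if 0
static void
get_full_len_examples (void)
{
  /* 0xff needs 8 significant bits even when stored with 128-bit
     precision, hence a single HOST_WIDE_INT.  */
  gcc_assert (get_full_len (wi::uhwi (0xff, 128)) == 1);
  /* All-ones in 128-bit precision is a 128-bit unsigned value, hence
     two HOST_WIDE_INTs.  */
  gcc_assert (get_full_len (wi::shwi (-1, 128)) == 2);
}
#endif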
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 if (debug_info_level <= DINFO_LEVEL_TERSE)
403 return false;
404
405 enum debug_struct_file criterion;
406 tree type_decl;
407 bool generic = lang_hooks.types.generic_p (type);
408
409 if (generic)
410 criterion = debug_struct_generic[usage];
411 else
412 criterion = debug_struct_ordinary[usage];
413
414 if (criterion == DINFO_STRUCT_FILE_NONE)
415 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
416 if (criterion == DINFO_STRUCT_FILE_ANY)
417 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
418
419 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
420
421 if (type_decl != NULL)
422 {
423 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
425
426 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
427 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
428 }
429
430 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
431 }
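/* For instance (illustrative, not part of the original source), under
   -femit-struct-debug-baseonly the ordinary criterion only accepts types
   declared in a file whose base name matches that of the main input
   file, so a struct coming from an unrelated header is dropped here,
   while -femit-struct-debug-detailed=... can widen or narrow the choice
   per usage kind.  */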
432 \f
433 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
434 switch to the data section instead, and write out a synthetic start label
435 for collect2 the first time around. */
436
437 static void
438 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
439 {
440 if (eh_frame_section == 0)
441 {
442 int flags;
443
444 if (EH_TABLES_CAN_BE_READ_ONLY)
445 {
446 int fde_encoding;
447 int per_encoding;
448 int lsda_encoding;
449
450 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
451 /*global=*/0);
452 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
453 /*global=*/1);
454 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
455 /*global=*/0);
456 flags = ((! flag_pic
457 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
458 && (fde_encoding & 0x70) != DW_EH_PE_aligned
459 && (per_encoding & 0x70) != DW_EH_PE_absptr
460 && (per_encoding & 0x70) != DW_EH_PE_aligned
461 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
462 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
463 ? 0 : SECTION_WRITE);
464 }
465 else
466 flags = SECTION_WRITE;
467
468 #ifdef EH_FRAME_SECTION_NAME
469 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
470 #else
471 eh_frame_section = ((flags == SECTION_WRITE)
472 ? data_section : readonly_data_section);
473 #endif /* EH_FRAME_SECTION_NAME */
474 }
475
476 switch_to_section (eh_frame_section);
477
478 #ifdef EH_FRAME_THROUGH_COLLECT2
479 /* We have no special eh_frame section. Emit special labels to guide
480 collect2. */
481 if (!back)
482 {
483 tree label = get_file_function_name ("F");
484 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
485 targetm.asm_out.globalize_label (asm_out_file,
486 IDENTIFIER_POINTER (label));
487 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
488 }
489 #endif
490 }
491
492 /* Switch [BACK] to the eh or debug frame table section, depending on
493 FOR_EH. */
494
495 static void
496 switch_to_frame_table_section (int for_eh, bool back)
497 {
498 if (for_eh)
499 switch_to_eh_frame_section (back);
500 else
501 {
502 if (!debug_frame_section)
503 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
504 SECTION_DEBUG, NULL);
505 switch_to_section (debug_frame_section);
506 }
507 }
508
509 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
510
511 enum dw_cfi_oprnd_type
512 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
513 {
514 switch (cfi)
515 {
516 case DW_CFA_nop:
517 case DW_CFA_GNU_window_save:
518 case DW_CFA_remember_state:
519 case DW_CFA_restore_state:
520 return dw_cfi_oprnd_unused;
521
522 case DW_CFA_set_loc:
523 case DW_CFA_advance_loc1:
524 case DW_CFA_advance_loc2:
525 case DW_CFA_advance_loc4:
526 case DW_CFA_MIPS_advance_loc8:
527 return dw_cfi_oprnd_addr;
528
529 case DW_CFA_offset:
530 case DW_CFA_offset_extended:
531 case DW_CFA_def_cfa:
532 case DW_CFA_offset_extended_sf:
533 case DW_CFA_def_cfa_sf:
534 case DW_CFA_restore:
535 case DW_CFA_restore_extended:
536 case DW_CFA_undefined:
537 case DW_CFA_same_value:
538 case DW_CFA_def_cfa_register:
539 case DW_CFA_register:
540 case DW_CFA_expression:
541 case DW_CFA_val_expression:
542 return dw_cfi_oprnd_reg_num;
543
544 case DW_CFA_def_cfa_offset:
545 case DW_CFA_GNU_args_size:
546 case DW_CFA_def_cfa_offset_sf:
547 return dw_cfi_oprnd_offset;
548
549 case DW_CFA_def_cfa_expression:
550 return dw_cfi_oprnd_loc;
551
552 default:
553 gcc_unreachable ();
554 }
555 }
556
557 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
558
559 enum dw_cfi_oprnd_type
560 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
561 {
562 switch (cfi)
563 {
564 case DW_CFA_def_cfa:
565 case DW_CFA_def_cfa_sf:
566 case DW_CFA_offset:
567 case DW_CFA_offset_extended_sf:
568 case DW_CFA_offset_extended:
569 return dw_cfi_oprnd_offset;
570
571 case DW_CFA_register:
572 return dw_cfi_oprnd_reg_num;
573
574 case DW_CFA_expression:
575 case DW_CFA_val_expression:
576 return dw_cfi_oprnd_loc;
577
578 case DW_CFA_def_cfa_expression:
579 return dw_cfi_oprnd_cfa_loc;
580
581 default:
582 return dw_cfi_oprnd_unused;
583 }
584 }
585
586 /* Output one FDE. */
587
588 static void
589 output_fde (dw_fde_ref fde, bool for_eh, bool second,
590 char *section_start_label, int fde_encoding, char *augmentation,
591 bool any_lsda_needed, int lsda_encoding)
592 {
593 const char *begin, *end;
594 static unsigned int j;
595 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
596
597 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
598 /* empty */ 0);
599 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
600 for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
602 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
603 if (!XCOFF_DEBUGGING_INFO || for_eh)
604 {
605 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh)
606 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
607 " indicating 64-bit DWARF extension");
608 dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1,
609 "FDE Length");
610 }
611 ASM_OUTPUT_LABEL (asm_out_file, l1);
612
613 if (for_eh)
614 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
615 else
616 dw2_asm_output_offset (dwarf_offset_size, section_start_label,
617 debug_frame_section, "FDE CIE offset");
618
619 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
620 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
621
622 if (for_eh)
623 {
624 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
625 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
626 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
627 "FDE initial location");
628 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
629 end, begin, "FDE address range");
630 }
631 else
632 {
633 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
634 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
635 }
636
637 if (augmentation[0])
638 {
639 if (any_lsda_needed)
640 {
641 int size = size_of_encoded_value (lsda_encoding);
642
643 if (lsda_encoding == DW_EH_PE_aligned)
644 {
645 int offset = ( 4 /* Length */
646 + 4 /* CIE offset */
647 + 2 * size_of_encoded_value (fde_encoding)
648 + 1 /* Augmentation size */ );
649 int pad = -offset & (PTR_SIZE - 1);
650
651 size += pad;
652 gcc_assert (size_of_uleb128 (size) == 1);
653 }
654
655 dw2_asm_output_data_uleb128 (size, "Augmentation size");
656
657 if (fde->uses_eh_lsda)
658 {
659 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
660 fde->funcdef_number);
661 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
662 gen_rtx_SYMBOL_REF (Pmode, l1),
663 false,
664 "Language Specific Data Area");
665 }
666 else
667 {
668 if (lsda_encoding == DW_EH_PE_aligned)
669 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
670 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
671 "Language Specific Data Area (none)");
672 }
673 }
674 else
675 dw2_asm_output_data_uleb128 (0, "Augmentation size");
676 }
677
678 /* Loop through the Call Frame Instructions associated with this FDE. */
679 fde->dw_fde_current_label = begin;
680 {
681 size_t from, until, i;
682
683 from = 0;
684 until = vec_safe_length (fde->dw_fde_cfi);
685
686 if (fde->dw_fde_second_begin == NULL)
687 ;
688 else if (!second)
689 until = fde->dw_fde_switch_cfi_index;
690 else
691 from = fde->dw_fde_switch_cfi_index;
692
693 for (i = from; i < until; i++)
694 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
695 }
696
697 /* If we are to emit a ref/link from function bodies to their frame tables,
698 do it now. This is typically performed to make sure that tables
699 associated with functions are dragged with them and not discarded in
700 garbage collecting links. We need to do this on a per function basis to
701 cope with -ffunction-sections. */
702
703 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
704 /* Switch to the function section, emit the ref to the tables, and
705 switch *back* into the table section. */
706 switch_to_section (function_section (fde->decl));
707 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
708 switch_to_frame_table_section (for_eh, true);
709 #endif
710
711 /* Pad the FDE out to an address sized boundary. */
712 ASM_OUTPUT_ALIGN (asm_out_file,
713 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
714 ASM_OUTPUT_LABEL (asm_out_file, l2);
715
716 j += 2;
717 }
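/* Rough shape of the record emitted above for the EH case (illustrative,
   not part of the original source; label numbers are made up):

	.long	.LEFDE1-.LASFDE1	FDE Length
   .LASFDE1:
	.long	.LASFDE1-.Lframe1	FDE CIE offset
	<encoded initial location>
	<encoded address range>
	<uleb128 augmentation size, plus the LSDA reference when the CIE
	 augmentation carries 'z'/'L'>
	<call frame instructions for this function>
	<padding to an address sized boundary>
   .LEFDE1:  */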
718
719 /* Return true if frame description entry FDE is needed for EH. */
720
721 static bool
722 fde_needed_for_eh_p (dw_fde_ref fde)
723 {
724 if (flag_asynchronous_unwind_tables)
725 return true;
726
727 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
728 return true;
729
730 if (fde->uses_eh_lsda)
731 return true;
732
733 /* If exceptions are enabled, we have collected nothrow info. */
734 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
735 return false;
736
737 return true;
738 }
739
740 /* Output the call frame information used to record information
741 that relates to calculating the frame pointer, and records the
742 location of saved registers. */
743
744 static void
745 output_call_frame_info (int for_eh)
746 {
747 unsigned int i;
748 dw_fde_ref fde;
749 dw_cfi_ref cfi;
750 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
751 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
752 bool any_lsda_needed = false;
753 char augmentation[6];
754 int augmentation_size;
755 int fde_encoding = DW_EH_PE_absptr;
756 int per_encoding = DW_EH_PE_absptr;
757 int lsda_encoding = DW_EH_PE_absptr;
758 int return_reg;
759 rtx personality = NULL;
760 int dw_cie_version;
761
762 /* Don't emit a CIE if there won't be any FDEs. */
763 if (!fde_vec)
764 return;
765
766 /* Nothing to do if the assembler's doing it all. */
767 if (dwarf2out_do_cfi_asm ())
768 return;
769
770 /* If we don't have any functions we'll want to unwind out of, don't emit
771 any EH unwind information. If we make FDEs linkonce, we may have to
772 emit an empty label for an FDE that wouldn't otherwise be emitted. We
773 want to avoid having an FDE kept around when the function it refers to
774 is discarded. Example where this matters: a primary function template
775 in C++ requires EH information, an explicit specialization doesn't. */
776 if (for_eh)
777 {
778 bool any_eh_needed = false;
779
780 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
781 {
782 if (fde->uses_eh_lsda)
783 any_eh_needed = any_lsda_needed = true;
784 else if (fde_needed_for_eh_p (fde))
785 any_eh_needed = true;
786 else if (TARGET_USES_WEAK_UNWIND_INFO)
787 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
788 }
789
790 if (!any_eh_needed)
791 return;
792 }
793
794 /* We're going to be generating comments, so turn on app. */
795 if (flag_debug_asm)
796 app_enable ();
797
798 /* Switch to the proper frame section, first time. */
799 switch_to_frame_table_section (for_eh, false);
800
801 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
802 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
803
804 /* Output the CIE. */
805 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
806 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
807 if (!XCOFF_DEBUGGING_INFO || for_eh)
808 {
809 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4 && !for_eh)
810 dw2_asm_output_data (4, 0xffffffff,
811 "Initial length escape value indicating 64-bit DWARF extension");
812 dw2_asm_output_delta (for_eh ? 4 : dwarf_offset_size, l2, l1,
813 "Length of Common Information Entry");
814 }
815 ASM_OUTPUT_LABEL (asm_out_file, l1);
816
817 /* Now that the CIE pointer is PC-relative for EH,
818 use 0 to identify the CIE. */
819 dw2_asm_output_data ((for_eh ? 4 : dwarf_offset_size),
820 (for_eh ? 0 : DWARF_CIE_ID),
821 "CIE Identifier Tag");
822
823 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
824 use CIE version 1, unless that would produce incorrect results
825 due to overflowing the return register column. */
826 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
827 dw_cie_version = 1;
828 if (return_reg >= 256 || dwarf_version > 2)
829 dw_cie_version = 3;
830 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
831
832 augmentation[0] = 0;
833 augmentation_size = 0;
834
835 personality = current_unit_personality;
836 if (for_eh)
837 {
838 char *p;
839
840 /* Augmentation:
841 z Indicates that a uleb128 is present to size the
842 augmentation section.
843 L Indicates the encoding (and thus presence) of
844 an LSDA pointer in the FDE augmentation.
845 R Indicates a non-default pointer encoding for
846 FDE code pointers.
847 P Indicates the presence of an encoding + language
848 personality routine in the CIE augmentation. */
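      /* A typical C++ frame with a personality routine, an LSDA and a
	 non-absolute FDE encoding therefore ends up with the augmentation
	 string "zPLR" (illustrative example, not part of the original
	 source).  */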
849
850 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
851 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
852 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
853
854 p = augmentation + 1;
855 if (personality)
856 {
857 *p++ = 'P';
858 augmentation_size += 1 + size_of_encoded_value (per_encoding);
859 assemble_external_libcall (personality);
860 }
861 if (any_lsda_needed)
862 {
863 *p++ = 'L';
864 augmentation_size += 1;
865 }
866 if (fde_encoding != DW_EH_PE_absptr)
867 {
868 *p++ = 'R';
869 augmentation_size += 1;
870 }
871 if (p > augmentation + 1)
872 {
873 augmentation[0] = 'z';
874 *p = '\0';
875 }
876
877 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
878 if (personality && per_encoding == DW_EH_PE_aligned)
879 {
880 int offset = ( 4 /* Length */
881 + 4 /* CIE Id */
882 + 1 /* CIE version */
883 + strlen (augmentation) + 1 /* Augmentation */
884 + size_of_uleb128 (1) /* Code alignment */
885 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
886 + 1 /* RA column */
887 + 1 /* Augmentation size */
888 + 1 /* Personality encoding */ );
889 int pad = -offset & (PTR_SIZE - 1);
890
891 augmentation_size += pad;
892
893 /* Augmentations should be small, so there's scarce need to
894 iterate for a solution. Die if we exceed one uleb128 byte. */
895 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
896 }
897 }
898
899 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
900 if (dw_cie_version >= 4)
901 {
902 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
903 dw2_asm_output_data (1, 0, "CIE Segment Size");
904 }
905 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
906 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
907 "CIE Data Alignment Factor");
908
909 if (dw_cie_version == 1)
910 dw2_asm_output_data (1, return_reg, "CIE RA Column");
911 else
912 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
913
914 if (augmentation[0])
915 {
916 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
917 if (personality)
918 {
919 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
920 eh_data_format_name (per_encoding));
921 dw2_asm_output_encoded_addr_rtx (per_encoding,
922 personality,
923 true, NULL);
924 }
925
926 if (any_lsda_needed)
927 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
928 eh_data_format_name (lsda_encoding));
929
930 if (fde_encoding != DW_EH_PE_absptr)
931 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
932 eh_data_format_name (fde_encoding));
933 }
934
935 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
936 output_cfi (cfi, NULL, for_eh);
937
938 /* Pad the CIE out to an address sized boundary. */
939 ASM_OUTPUT_ALIGN (asm_out_file,
940 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
941 ASM_OUTPUT_LABEL (asm_out_file, l2);
942
943 /* Loop through all of the FDEs. */
944 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
945 {
946 unsigned int k;
947
948 /* Don't emit EH unwind info for leaf functions that don't need it. */
949 if (for_eh && !fde_needed_for_eh_p (fde))
950 continue;
951
952 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
953 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
954 augmentation, any_lsda_needed, lsda_encoding);
955 }
956
957 if (for_eh && targetm.terminate_dw2_eh_frame_info)
958 dw2_asm_output_data (4, 0, "End of Table");
959
960 /* Turn off app to make assembly quicker. */
961 if (flag_debug_asm)
962 app_disable ();
963 }
964
965 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
966
967 static void
968 dwarf2out_do_cfi_startproc (bool second)
969 {
970 int enc;
971 rtx ref;
972
973 fprintf (asm_out_file, "\t.cfi_startproc\n");
974
975 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
976
977 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
978 eh unwinders. */
979 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
980 return;
981
982 rtx personality = get_personality_function (current_function_decl);
983
984 if (personality)
985 {
986 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
987 ref = personality;
988
989 /* ??? The GAS support isn't entirely consistent. We have to
990 handle indirect support ourselves, but PC-relative is done
991 in the assembler. Further, the assembler can't handle any
992 of the weirder relocation types. */
993 if (enc & DW_EH_PE_indirect)
994 {
995 if (targetm.asm_out.make_eh_symbol_indirect != NULL)
996 ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
997 else
998 ref = dw2_force_const_mem (ref, true);
999 }
1000
1001 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
1002 output_addr_const (asm_out_file, ref);
1003 fputc ('\n', asm_out_file);
1004 }
1005
1006 if (crtl->uses_eh_lsda)
1007 {
1008 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1009
1010 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1011 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1012 current_function_funcdef_no);
1013 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1014 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1015
1016 if (enc & DW_EH_PE_indirect)
1017 {
1018 if (targetm.asm_out.make_eh_symbol_indirect != NULL)
1019 ref = targetm.asm_out.make_eh_symbol_indirect (ref, true);
1020 else
1021 ref = dw2_force_const_mem (ref, true);
1022 }
1023
1024 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1025 output_addr_const (asm_out_file, ref);
1026 fputc ('\n', asm_out_file);
1027 }
1028 }
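/* For a function that has both a personality routine and an LSDA, the
   directives emitted above look roughly like this on an ELF target
   (illustrative, not part of the original source; the encodings and the
   funcdef number are made up):

	.cfi_startproc
	.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
	.cfi_lsda 0x1b,.LLSDA42

   where 0x9b and 0x1b are whatever ASM_PREFERRED_EH_DATA_FORMAT picked
   for the personality and LSDA references.  */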
1029
1030 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1031 this allocation may be done before pass_final. */
1032
1033 dw_fde_ref
1034 dwarf2out_alloc_current_fde (void)
1035 {
1036 dw_fde_ref fde;
1037
1038 fde = ggc_cleared_alloc<dw_fde_node> ();
1039 fde->decl = current_function_decl;
1040 fde->funcdef_number = current_function_funcdef_no;
1041 fde->fde_index = vec_safe_length (fde_vec);
1042 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1043 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1044 fde->nothrow = crtl->nothrow;
1045 fde->drap_reg = INVALID_REGNUM;
1046 fde->vdrap_reg = INVALID_REGNUM;
1047
1048 /* Record the FDE associated with this function. */
1049 cfun->fde = fde;
1050 vec_safe_push (fde_vec, fde);
1051
1052 return fde;
1053 }
1054
1055 /* Output a marker (i.e. a label) for the beginning of a function, before
1056 the prologue. */
1057
1058 void
1059 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1060 unsigned int column ATTRIBUTE_UNUSED,
1061 const char *file ATTRIBUTE_UNUSED)
1062 {
1063 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1064 char * dup_label;
1065 dw_fde_ref fde;
1066 section *fnsec;
1067 bool do_frame;
1068
1069 current_function_func_begin_label = NULL;
1070
1071 do_frame = dwarf2out_do_frame ();
1072
1073 /* ??? current_function_func_begin_label is also used by except.c for
1074 call-site information. We must emit this label if it might be used. */
1075 if (!do_frame
1076 && (!flag_exceptions
1077 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1078 return;
1079
1080 fnsec = function_section (current_function_decl);
1081 switch_to_section (fnsec);
1082 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1083 current_function_funcdef_no);
1084 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1085 current_function_funcdef_no);
1086 dup_label = xstrdup (label);
1087 current_function_func_begin_label = dup_label;
1088
1089 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1090 if (!do_frame)
1091 return;
1092
1093 /* Unlike the debug version, the EH version of frame unwind info is a per-
1094 function setting so we need to record whether we need it for the unit. */
1095 do_eh_frame |= dwarf2out_do_eh_frame ();
1096
1097 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1098 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1099 would include pass_dwarf2_frame. If we've not created the FDE yet,
1100 do so now. */
1101 fde = cfun->fde;
1102 if (fde == NULL)
1103 fde = dwarf2out_alloc_current_fde ();
1104
1105 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1106 fde->dw_fde_begin = dup_label;
1107 fde->dw_fde_current_label = dup_label;
1108 fde->in_std_section = (fnsec == text_section
1109 || (cold_text_section && fnsec == cold_text_section));
1110
1111 /* We only want to output line number information for the genuine dwarf2
1112 prologue case, not the eh frame case. */
1113 #ifdef DWARF2_DEBUGGING_INFO
1114 if (file)
1115 dwarf2out_source_line (line, column, file, 0, true);
1116 #endif
1117
1118 if (dwarf2out_do_cfi_asm ())
1119 dwarf2out_do_cfi_startproc (false);
1120 else
1121 {
1122 rtx personality = get_personality_function (current_function_decl);
1123 if (!current_unit_personality)
1124 current_unit_personality = personality;
1125
1126 /* We cannot keep a current personality per function as without CFI
1127 asm, at the point where we emit the CFI data, there is no current
1128 function anymore. */
1129 if (personality && current_unit_personality != personality)
1130 sorry ("multiple EH personalities are supported only with assemblers "
1131 "supporting %<.cfi_personality%> directive");
1132 }
1133 }
1134
1135 /* Output a marker (i.e. a label) for the end of the generated code
1136 for a function prologue. This gets called *after* the prologue code has
1137 been generated. */
1138
1139 void
1140 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1141 const char *file ATTRIBUTE_UNUSED)
1142 {
1143 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1144
1145 /* Output a label to mark the end of the code generated for the
1146 prologue of this function. */
1147 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1148 current_function_funcdef_no);
1149 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1150 current_function_funcdef_no);
1151 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1152 }
1153
1154 /* Output a marker (i.e. a label) for the beginning of the generated code
1155 for a function epilogue. This gets called *before* the epilogue code has
1156 been generated. */
1157
1158 void
1159 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1160 const char *file ATTRIBUTE_UNUSED)
1161 {
1162 dw_fde_ref fde = cfun->fde;
1163 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1164
1165 if (fde->dw_fde_vms_begin_epilogue)
1166 return;
1167
1168 /* Output a label to mark the point where the code generated for the
1169 epilogue of this function begins. */
1170 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1171 current_function_funcdef_no);
1172 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1173 current_function_funcdef_no);
1174 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1175 }
1176
1177 /* Output a marker (i.e. a label) for the absolute end of the generated code
1178 for a function definition. This gets called *after* the epilogue code has
1179 been generated. */
1180
1181 void
1182 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1183 const char *file ATTRIBUTE_UNUSED)
1184 {
1185 dw_fde_ref fde;
1186 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1187
1188 last_var_location_insn = NULL;
1189 cached_next_real_insn = NULL;
1190
1191 if (dwarf2out_do_cfi_asm ())
1192 fprintf (asm_out_file, "\t.cfi_endproc\n");
1193
1194 /* Output a label to mark the endpoint of the code generated for this
1195 function. */
1196 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1197 current_function_funcdef_no);
1198 ASM_OUTPUT_LABEL (asm_out_file, label);
1199 fde = cfun->fde;
1200 gcc_assert (fde != NULL);
1201 if (fde->dw_fde_second_begin == NULL)
1202 fde->dw_fde_end = xstrdup (label);
1203 }
1204
1205 void
1206 dwarf2out_frame_finish (void)
1207 {
1208 /* Output call frame information. */
1209 if (targetm.debug_unwind_info () == UI_DWARF2)
1210 output_call_frame_info (0);
1211
1212 /* Output another copy for the unwinder. */
1213 if (do_eh_frame)
1214 output_call_frame_info (1);
1215 }
1216
1217 /* Note that the current function section is being used for code. */
1218
1219 static void
1220 dwarf2out_note_section_used (void)
1221 {
1222 section *sec = current_function_section ();
1223 if (sec == text_section)
1224 text_section_used = true;
1225 else if (sec == cold_text_section)
1226 cold_text_section_used = true;
1227 }
1228
1229 static void var_location_switch_text_section (void);
1230 static void set_cur_line_info_table (section *);
1231
1232 void
1233 dwarf2out_switch_text_section (void)
1234 {
1235 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1236 section *sect;
1237 dw_fde_ref fde = cfun->fde;
1238
1239 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1240
1241 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1242 current_function_funcdef_no);
1243
1244 fde->dw_fde_second_begin = ggc_strdup (label);
1245 if (!in_cold_section_p)
1246 {
1247 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1248 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1249 }
1250 else
1251 {
1252 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1253 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1254 }
1255 have_multiple_function_sections = true;
1256
1257 /* There is no need to mark used sections when not debugging. */
1258 if (cold_text_section != NULL)
1259 dwarf2out_note_section_used ();
1260
1261 if (dwarf2out_do_cfi_asm ())
1262 fprintf (asm_out_file, "\t.cfi_endproc\n");
1263
1264 /* Now do the real section switch. */
1265 sect = current_function_section ();
1266 switch_to_section (sect);
1267
1268 fde->second_in_std_section
1269 = (sect == text_section
1270 || (cold_text_section && sect == cold_text_section));
1271
1272 if (dwarf2out_do_cfi_asm ())
1273 dwarf2out_do_cfi_startproc (true);
1274
1275 var_location_switch_text_section ();
1276
1277 if (cold_text_section != NULL)
1278 set_cur_line_info_table (sect);
1279 }
1280 \f
1281 /* And now, the subset of the debugging information support code necessary
1282 for emitting location expressions. */
1283
1284 /* Data about a single source file. */
1285 struct GTY((for_user)) dwarf_file_data {
1286 const char * filename;
1287 int emitted_number;
1288 };
1289
1290 /* Describe an entry into the .debug_addr section. */
1291
1292 enum ate_kind {
1293 ate_kind_rtx,
1294 ate_kind_rtx_dtprel,
1295 ate_kind_label
1296 };
1297
1298 struct GTY((for_user)) addr_table_entry {
1299 enum ate_kind kind;
1300 unsigned int refcount;
1301 unsigned int index;
1302 union addr_table_entry_struct_union
1303 {
1304 rtx GTY ((tag ("0"))) rtl;
1305 char * GTY ((tag ("1"))) label;
1306 }
1307 GTY ((desc ("%1.kind"))) addr;
1308 };
1309
1310 typedef unsigned int var_loc_view;
1311
1312 /* Location lists are ranges + location descriptions for that range,
1313 so you can track variables that are in different places over
1314 their entire life. */
1315 typedef struct GTY(()) dw_loc_list_struct {
1316 dw_loc_list_ref dw_loc_next;
1317 const char *begin; /* Label and addr_entry for start of range */
1318 addr_table_entry *begin_entry;
1319 const char *end; /* Label for end of range */
1320 char *ll_symbol; /* Label for beginning of location list.
1321 Only on head of list. */
1322 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1323 const char *section; /* Section this loclist is relative to */
1324 dw_loc_descr_ref expr;
1325 var_loc_view vbegin, vend;
1326 hashval_t hash;
1327 /* True if all addresses in this and subsequent lists are known to be
1328 resolved. */
1329 bool resolved_addr;
1330 /* True if this list has been replaced by dw_loc_next. */
1331 bool replaced;
1332 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1333 section. */
1334 unsigned char emitted : 1;
1335 /* True if hash field is index rather than hash value. */
1336 unsigned char num_assigned : 1;
1337 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1338 unsigned char offset_emitted : 1;
1339 /* True if note_variable_value_in_expr has been called on it. */
1340 unsigned char noted_variable_value : 1;
1341 /* True if the range should be emitted even if begin and end
1342 are the same. */
1343 bool force;
1344 } dw_loc_list_node;
1345
1346 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1347 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1348
1349 /* Convert a DWARF stack opcode into its string name. */
1350
1351 static const char *
1352 dwarf_stack_op_name (unsigned int op)
1353 {
1354 const char *name = get_DW_OP_name (op);
1355
1356 if (name != NULL)
1357 return name;
1358
1359 return "OP_<unknown>";
1360 }
1361
1362 /* Return TRUE iff we're to output location view lists as a separate
1363 attribute next to the location lists, as an extension compatible
1364 with DWARF 2 and above. */
1365
1366 static inline bool
1367 dwarf2out_locviews_in_attribute ()
1368 {
1369 return debug_variable_location_views == 1;
1370 }
1371
1372 /* Return TRUE iff we're to output location view lists as part of the
1373 location lists, as proposed for standardization after DWARF 5. */
1374
1375 static inline bool
1376 dwarf2out_locviews_in_loclist ()
1377 {
1378 #ifndef DW_LLE_view_pair
1379 return false;
1380 #else
1381 return debug_variable_location_views == -1;
1382 #endif
1383 }
1384
1385 /* Return a pointer to a newly allocated location description. Location
1386 descriptions are simple expression terms that can be strung
1387 together to form more complicated location (address) descriptions. */
1388
1389 static inline dw_loc_descr_ref
1390 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1391 unsigned HOST_WIDE_INT oprnd2)
1392 {
1393 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1394
1395 descr->dw_loc_opc = op;
1396 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1397 descr->dw_loc_oprnd1.val_entry = NULL;
1398 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1399 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1400 descr->dw_loc_oprnd2.val_entry = NULL;
1401 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1402
1403 return descr;
1404 }
1405
1406 /* Add a location description term to a location description expression. */
1407
1408 static inline void
1409 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1410 {
1411 dw_loc_descr_ref *d;
1412
1413 /* Find the end of the chain. */
1414 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1415 ;
1416
1417 *d = descr;
1418 }
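/* A minimal sketch (not part of the original source, kept under "#if 0")
   of building a two-term location expression with the helpers above:
   "DW_OP_breg6 -8; DW_OP_deref", i.e. the value stored 8 bytes below
   whatever register 6 holds.  */
#if 0
static dw_loc_descr_ref
example_loc_expr (void)
{
  /* Register 6 with offset -8 as the base term.  */
  dw_loc_descr_ref head = new_loc_descr (DW_OP_breg6, -8, 0);
  /* Append a dereference of that address.  */
  add_loc_descr (&head, new_loc_descr (DW_OP_deref, 0, 0));
  return head;
}
#endif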
1419
1420 /* Compare two location operands for exact equality. */
1421
1422 static bool
1423 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1424 {
1425 if (a->val_class != b->val_class)
1426 return false;
1427 switch (a->val_class)
1428 {
1429 case dw_val_class_none:
1430 return true;
1431 case dw_val_class_addr:
1432 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1433
1434 case dw_val_class_offset:
1435 case dw_val_class_unsigned_const:
1436 case dw_val_class_const:
1437 case dw_val_class_unsigned_const_implicit:
1438 case dw_val_class_const_implicit:
1439 case dw_val_class_range_list:
1440 /* These are all HOST_WIDE_INT, signed or unsigned. */
1441 return a->v.val_unsigned == b->v.val_unsigned;
1442
1443 case dw_val_class_loc:
1444 return a->v.val_loc == b->v.val_loc;
1445 case dw_val_class_loc_list:
1446 return a->v.val_loc_list == b->v.val_loc_list;
1447 case dw_val_class_view_list:
1448 return a->v.val_view_list == b->v.val_view_list;
1449 case dw_val_class_die_ref:
1450 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1451 case dw_val_class_fde_ref:
1452 return a->v.val_fde_index == b->v.val_fde_index;
1453 case dw_val_class_symview:
1454 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1455 case dw_val_class_lbl_id:
1456 case dw_val_class_lineptr:
1457 case dw_val_class_macptr:
1458 case dw_val_class_loclistsptr:
1459 case dw_val_class_high_pc:
1460 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1461 case dw_val_class_str:
1462 return a->v.val_str == b->v.val_str;
1463 case dw_val_class_flag:
1464 return a->v.val_flag == b->v.val_flag;
1465 case dw_val_class_file:
1466 case dw_val_class_file_implicit:
1467 return a->v.val_file == b->v.val_file;
1468 case dw_val_class_decl_ref:
1469 return a->v.val_decl_ref == b->v.val_decl_ref;
1470
1471 case dw_val_class_const_double:
1472 return (a->v.val_double.high == b->v.val_double.high
1473 && a->v.val_double.low == b->v.val_double.low);
1474
1475 case dw_val_class_wide_int:
1476 return *a->v.val_wide == *b->v.val_wide;
1477
1478 case dw_val_class_vec:
1479 {
1480 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1481 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1482
1483 return (a_len == b_len
1484 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1485 }
1486
1487 case dw_val_class_data8:
1488 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1489
1490 case dw_val_class_vms_delta:
1491 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1492 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1493
1494 case dw_val_class_discr_value:
1495 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1496 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1497 case dw_val_class_discr_list:
1498 /* It makes no sense comparing two discriminant value lists. */
1499 return false;
1500 }
1501 gcc_unreachable ();
1502 }
1503
1504 /* Compare two location atoms for exact equality. */
1505
1506 static bool
1507 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1508 {
1509 if (a->dw_loc_opc != b->dw_loc_opc)
1510 return false;
1511
1512 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1513 address size, but since we always allocate cleared storage it
1514 should be zero for other types of locations. */
1515 if (a->dtprel != b->dtprel)
1516 return false;
1517
1518 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1519 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1520 }
1521
1522 /* Compare two complete location expressions for exact equality. */
1523
1524 bool
1525 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1526 {
1527 while (1)
1528 {
1529 if (a == b)
1530 return true;
1531 if (a == NULL || b == NULL)
1532 return false;
1533 if (!loc_descr_equal_p_1 (a, b))
1534 return false;
1535
1536 a = a->dw_loc_next;
1537 b = b->dw_loc_next;
1538 }
1539 }
1540
1541
1542 /* Add a constant POLY_OFFSET to a location expression. */
1543
1544 static void
1545 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1546 {
1547 dw_loc_descr_ref loc;
1548 HOST_WIDE_INT *p;
1549
1550 gcc_assert (*list_head != NULL);
1551
1552 if (known_eq (poly_offset, 0))
1553 return;
1554
1555 /* Find the end of the chain. */
1556 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1557 ;
1558
1559 HOST_WIDE_INT offset;
1560 if (!poly_offset.is_constant (&offset))
1561 {
1562 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1563 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1564 return;
1565 }
1566
1567 p = NULL;
1568 if (loc->dw_loc_opc == DW_OP_fbreg
1569 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1570 p = &loc->dw_loc_oprnd1.v.val_int;
1571 else if (loc->dw_loc_opc == DW_OP_bregx)
1572 p = &loc->dw_loc_oprnd2.v.val_int;
1573
1574 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1575 offset. Don't optimize if a signed integer overflow would happen. */
1576 if (p != NULL
1577 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1578 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1579 *p += offset;
1580
1581 else if (offset > 0)
1582 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1583
1584 else
1585 {
1586 loc->dw_loc_next
1587 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1588 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1589 }
1590 }
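/* A minimal sketch (not part of the original source, kept under "#if 0")
   of the folding performed above: when the expression already ends in
   DW_OP_fbreg or DW_OP_breg0..31, the constant is merged into that
   operand instead of appending a DW_OP_plus_uconst term.  */
#if 0
static void
loc_descr_plus_const_example (void)
{
  dw_loc_descr_ref l = new_loc_descr (DW_OP_fbreg, -16, 0);
  loc_descr_plus_const (&l, 8);
  /* The offset was folded into the DW_OP_fbreg operand...  */
  gcc_assert (l->dw_loc_oprnd1.v.val_int == -8);
  /* ... and no extra term was appended.  */
  gcc_assert (l->dw_loc_next == NULL);
}
#endif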
1591
1592 /* Return a pointer to a newly allocated location description for
1593 REG and OFFSET. */
1594
1595 static inline dw_loc_descr_ref
1596 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1597 {
1598 HOST_WIDE_INT const_offset;
1599 if (offset.is_constant (&const_offset))
1600 {
1601 if (reg <= 31)
1602 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1603 const_offset, 0);
1604 else
1605 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1606 }
1607 else
1608 {
1609 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1610 loc_descr_plus_const (&ret, offset);
1611 return ret;
1612 }
1613 }
1614
1615 /* Add a constant OFFSET to a location list. */
1616
1617 static void
1618 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1619 {
1620 dw_loc_list_ref d;
1621 for (d = list_head; d != NULL; d = d->dw_loc_next)
1622 loc_descr_plus_const (&d->expr, offset);
1623 }
1624
1625 #define DWARF_REF_SIZE \
1626 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : dwarf_offset_size)
1627
1628 /* The number of bits that can be encoded by largest DW_FORM_dataN.
1629 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1630 DW_FORM_data16 with 128 bits. */
1631 #define DWARF_LARGEST_DATA_FORM_BITS \
1632 (dwarf_version >= 5 ? 128 : 64)
1633
1634 /* Utility inline function for construction of ops that were GNU extension
1635 before DWARF 5. */
1636 static inline enum dwarf_location_atom
1637 dwarf_OP (enum dwarf_location_atom op)
1638 {
1639 switch (op)
1640 {
1641 case DW_OP_implicit_pointer:
1642 if (dwarf_version < 5)
1643 return DW_OP_GNU_implicit_pointer;
1644 break;
1645
1646 case DW_OP_entry_value:
1647 if (dwarf_version < 5)
1648 return DW_OP_GNU_entry_value;
1649 break;
1650
1651 case DW_OP_const_type:
1652 if (dwarf_version < 5)
1653 return DW_OP_GNU_const_type;
1654 break;
1655
1656 case DW_OP_regval_type:
1657 if (dwarf_version < 5)
1658 return DW_OP_GNU_regval_type;
1659 break;
1660
1661 case DW_OP_deref_type:
1662 if (dwarf_version < 5)
1663 return DW_OP_GNU_deref_type;
1664 break;
1665
1666 case DW_OP_convert:
1667 if (dwarf_version < 5)
1668 return DW_OP_GNU_convert;
1669 break;
1670
1671 case DW_OP_reinterpret:
1672 if (dwarf_version < 5)
1673 return DW_OP_GNU_reinterpret;
1674 break;
1675
1676 case DW_OP_addrx:
1677 if (dwarf_version < 5)
1678 return DW_OP_GNU_addr_index;
1679 break;
1680
1681 case DW_OP_constx:
1682 if (dwarf_version < 5)
1683 return DW_OP_GNU_const_index;
1684 break;
1685
1686 default:
1687 break;
1688 }
1689 return op;
1690 }
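/* For example, with -gdwarf-4 dwarf_OP (DW_OP_entry_value) degrades to the
   pre-standard DW_OP_GNU_entry_value, while with -gdwarf-5 the standard
   opcode is passed through unchanged.  */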
1691
1692 /* Similarly for attributes. */
1693 static inline enum dwarf_attribute
1694 dwarf_AT (enum dwarf_attribute at)
1695 {
1696 switch (at)
1697 {
1698 case DW_AT_call_return_pc:
1699 if (dwarf_version < 5)
1700 return DW_AT_low_pc;
1701 break;
1702
1703 case DW_AT_call_tail_call:
1704 if (dwarf_version < 5)
1705 return DW_AT_GNU_tail_call;
1706 break;
1707
1708 case DW_AT_call_origin:
1709 if (dwarf_version < 5)
1710 return DW_AT_abstract_origin;
1711 break;
1712
1713 case DW_AT_call_target:
1714 if (dwarf_version < 5)
1715 return DW_AT_GNU_call_site_target;
1716 break;
1717
1718 case DW_AT_call_target_clobbered:
1719 if (dwarf_version < 5)
1720 return DW_AT_GNU_call_site_target_clobbered;
1721 break;
1722
1723 case DW_AT_call_parameter:
1724 if (dwarf_version < 5)
1725 return DW_AT_abstract_origin;
1726 break;
1727
1728 case DW_AT_call_value:
1729 if (dwarf_version < 5)
1730 return DW_AT_GNU_call_site_value;
1731 break;
1732
1733 case DW_AT_call_data_value:
1734 if (dwarf_version < 5)
1735 return DW_AT_GNU_call_site_data_value;
1736 break;
1737
1738 case DW_AT_call_all_calls:
1739 if (dwarf_version < 5)
1740 return DW_AT_GNU_all_call_sites;
1741 break;
1742
1743 case DW_AT_call_all_tail_calls:
1744 if (dwarf_version < 5)
1745 return DW_AT_GNU_all_tail_call_sites;
1746 break;
1747
1748 case DW_AT_dwo_name:
1749 if (dwarf_version < 5)
1750 return DW_AT_GNU_dwo_name;
1751 break;
1752
1753 case DW_AT_addr_base:
1754 if (dwarf_version < 5)
1755 return DW_AT_GNU_addr_base;
1756 break;
1757
1758 default:
1759 break;
1760 }
1761 return at;
1762 }
1763
1764 /* And similarly for tags. */
1765 static inline enum dwarf_tag
1766 dwarf_TAG (enum dwarf_tag tag)
1767 {
1768 switch (tag)
1769 {
1770 case DW_TAG_call_site:
1771 if (dwarf_version < 5)
1772 return DW_TAG_GNU_call_site;
1773 break;
1774
1775 case DW_TAG_call_site_parameter:
1776 if (dwarf_version < 5)
1777 return DW_TAG_GNU_call_site_parameter;
1778 break;
1779
1780 default:
1781 break;
1782 }
1783 return tag;
1784 }
1785
1786 /* And similarly for forms. */
1787 static inline enum dwarf_form
1788 dwarf_FORM (enum dwarf_form form)
1789 {
1790 switch (form)
1791 {
1792 case DW_FORM_addrx:
1793 if (dwarf_version < 5)
1794 return DW_FORM_GNU_addr_index;
1795 break;
1796
1797 case DW_FORM_strx:
1798 if (dwarf_version < 5)
1799 return DW_FORM_GNU_str_index;
1800 break;
1801
1802 default:
1803 break;
1804 }
1805 return form;
1806 }
1807
1808 static unsigned long int get_base_type_offset (dw_die_ref);
1809
1810 /* Return the size of a location descriptor. */
1811
1812 static unsigned long
1813 size_of_loc_descr (dw_loc_descr_ref loc)
1814 {
1815 unsigned long size = 1;
1816
1817 switch (loc->dw_loc_opc)
1818 {
1819 case DW_OP_addr:
1820 size += DWARF2_ADDR_SIZE;
1821 break;
1822 case DW_OP_GNU_addr_index:
1823 case DW_OP_addrx:
1824 case DW_OP_GNU_const_index:
1825 case DW_OP_constx:
1826 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1827 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1828 break;
1829 case DW_OP_const1u:
1830 case DW_OP_const1s:
1831 size += 1;
1832 break;
1833 case DW_OP_const2u:
1834 case DW_OP_const2s:
1835 size += 2;
1836 break;
1837 case DW_OP_const4u:
1838 case DW_OP_const4s:
1839 size += 4;
1840 break;
1841 case DW_OP_const8u:
1842 case DW_OP_const8s:
1843 size += 8;
1844 break;
1845 case DW_OP_constu:
1846 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1847 break;
1848 case DW_OP_consts:
1849 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1850 break;
1851 case DW_OP_pick:
1852 size += 1;
1853 break;
1854 case DW_OP_plus_uconst:
1855 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1856 break;
1857 case DW_OP_skip:
1858 case DW_OP_bra:
1859 size += 2;
1860 break;
1861 case DW_OP_breg0:
1862 case DW_OP_breg1:
1863 case DW_OP_breg2:
1864 case DW_OP_breg3:
1865 case DW_OP_breg4:
1866 case DW_OP_breg5:
1867 case DW_OP_breg6:
1868 case DW_OP_breg7:
1869 case DW_OP_breg8:
1870 case DW_OP_breg9:
1871 case DW_OP_breg10:
1872 case DW_OP_breg11:
1873 case DW_OP_breg12:
1874 case DW_OP_breg13:
1875 case DW_OP_breg14:
1876 case DW_OP_breg15:
1877 case DW_OP_breg16:
1878 case DW_OP_breg17:
1879 case DW_OP_breg18:
1880 case DW_OP_breg19:
1881 case DW_OP_breg20:
1882 case DW_OP_breg21:
1883 case DW_OP_breg22:
1884 case DW_OP_breg23:
1885 case DW_OP_breg24:
1886 case DW_OP_breg25:
1887 case DW_OP_breg26:
1888 case DW_OP_breg27:
1889 case DW_OP_breg28:
1890 case DW_OP_breg29:
1891 case DW_OP_breg30:
1892 case DW_OP_breg31:
1893 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1894 break;
1895 case DW_OP_regx:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 break;
1898 case DW_OP_fbreg:
1899 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1900 break;
1901 case DW_OP_bregx:
1902 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1903 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1904 break;
1905 case DW_OP_piece:
1906 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1907 break;
1908 case DW_OP_bit_piece:
1909 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1910 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1911 break;
1912 case DW_OP_deref_size:
1913 case DW_OP_xderef_size:
1914 size += 1;
1915 break;
1916 case DW_OP_call2:
1917 size += 2;
1918 break;
1919 case DW_OP_call4:
1920 size += 4;
1921 break;
1922 case DW_OP_call_ref:
1923 case DW_OP_GNU_variable_value:
1924 size += DWARF_REF_SIZE;
1925 break;
1926 case DW_OP_implicit_value:
1927 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1928 + loc->dw_loc_oprnd1.v.val_unsigned;
1929 break;
1930 case DW_OP_implicit_pointer:
1931 case DW_OP_GNU_implicit_pointer:
1932 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1933 break;
1934 case DW_OP_entry_value:
1935 case DW_OP_GNU_entry_value:
1936 {
1937 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1938 size += size_of_uleb128 (op_size) + op_size;
1939 break;
1940 }
1941 case DW_OP_const_type:
1942 case DW_OP_GNU_const_type:
1943 {
1944 unsigned long o
1945 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1946 size += size_of_uleb128 (o) + 1;
1947 switch (loc->dw_loc_oprnd2.val_class)
1948 {
1949 case dw_val_class_vec:
1950 size += loc->dw_loc_oprnd2.v.val_vec.length
1951 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1952 break;
1953 case dw_val_class_const:
1954 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1955 break;
1956 case dw_val_class_const_double:
1957 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1958 break;
1959 case dw_val_class_wide_int:
1960 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1961 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1962 break;
1963 default:
1964 gcc_unreachable ();
1965 }
1966 break;
1967 }
1968 case DW_OP_regval_type:
1969 case DW_OP_GNU_regval_type:
1970 {
1971 unsigned long o
1972 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1973 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1974 + size_of_uleb128 (o);
1975 }
1976 break;
1977 case DW_OP_deref_type:
1978 case DW_OP_GNU_deref_type:
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1982 size += 1 + size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_convert:
1986 case DW_OP_reinterpret:
1987 case DW_OP_GNU_convert:
1988 case DW_OP_GNU_reinterpret:
1989 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1990 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1991 else
1992 {
1993 unsigned long o
1994 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1995 size += size_of_uleb128 (o);
1996 }
1997 break;
1998 case DW_OP_GNU_parameter_ref:
1999 size += 4;
2000 break;
2001 default:
2002 break;
2003 }
2004
2005 return size;
2006 }
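
/* A worked example of the size computation above: DW_OP_plus_uconst with
   operand 400 takes 1 byte for the opcode plus size_of_uleb128 (400) == 2
   bytes (400 needs 9 value bits and ULEB128 carries 7 bits per byte),
   i.e. 3 bytes total; DW_OP_bregx with register 32 and offset -8 takes
   1 + size_of_uleb128 (32) + size_of_sleb128 (-8) == 3 bytes.  */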
2007
2008 /* Return the size of a series of location descriptors. */
2009
2010 unsigned long
2011 size_of_locs (dw_loc_descr_ref loc)
2012 {
2013 dw_loc_descr_ref l;
2014 unsigned long size;
2015
2016 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2017 field, to avoid writing to a PCH file. */
2018 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2019 {
2020 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2021 break;
2022 size += size_of_loc_descr (l);
2023 }
2024 if (! l)
2025 return size;
2026
2027 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2028 {
2029 l->dw_loc_addr = size;
2030 size += size_of_loc_descr (l);
2031 }
2032
2033 return size;
2034 }
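
/* Note that when a DW_OP_skip or DW_OP_bra is present, the second loop
   above records each operation's byte offset in dw_loc_addr; that is what
   lets output_loc_operands later compute the 2-byte branch displacement as
   val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3).  */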
2035
2036 /* Return the size of the value in a DW_AT_discr_value attribute. */
2037
2038 static int
2039 size_of_discr_value (dw_discr_value *discr_value)
2040 {
2041 if (discr_value->pos)
2042 return size_of_uleb128 (discr_value->v.uval);
2043 else
2044 return size_of_sleb128 (discr_value->v.sval);
2045 }
2046
2047 /* Return the size of the value in a DW_AT_discr_list attribute. */
2048
2049 static int
2050 size_of_discr_list (dw_discr_list_ref discr_list)
2051 {
2052 int size = 0;
2053
2054 for (dw_discr_list_ref list = discr_list;
2055 list != NULL;
2056 list = list->dw_discr_next)
2057 {
2058 /* One byte for the discriminant value descriptor, and then one or two
2059 LEB128 numbers, depending on whether it's a single case label or a
2060 range label. */
2061 size += 1;
2062 size += size_of_discr_value (&list->dw_discr_lower_bound);
2063 if (list->dw_discr_range != 0)
2064 size += size_of_discr_value (&list->dw_discr_upper_bound);
2065 }
2066 return size;
2067 }
2068
2069 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2070 static void get_ref_die_offset_label (char *, dw_die_ref);
2071 static unsigned long int get_ref_die_offset (dw_die_ref);
2072
2073 /* Output location description stack opcode's operands (if any).
2074 The for_eh_or_skip parameter controls whether register numbers are
2075 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2076 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2077 info). This should be suppressed for the cases that have not been converted
2078 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2079
2080 static void
2081 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2082 {
2083 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2084 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2085
2086 switch (loc->dw_loc_opc)
2087 {
2088 #ifdef DWARF2_DEBUGGING_INFO
2089 case DW_OP_const2u:
2090 case DW_OP_const2s:
2091 dw2_asm_output_data (2, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const4u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const4s:
2104 dw2_asm_output_data (4, val1->v.val_int, NULL);
2105 break;
2106 case DW_OP_const8u:
2107 if (loc->dtprel)
2108 {
2109 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2110 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2111 val1->v.val_addr);
2112 fputc ('\n', asm_out_file);
2113 break;
2114 }
2115 /* FALLTHRU */
2116 case DW_OP_const8s:
2117 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2118 dw2_asm_output_data (8, val1->v.val_int, NULL);
2119 break;
2120 case DW_OP_skip:
2121 case DW_OP_bra:
2122 {
2123 int offset;
2124
2125 gcc_assert (val1->val_class == dw_val_class_loc);
2126 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2127
2128 dw2_asm_output_data (2, offset, NULL);
2129 }
2130 break;
2131 case DW_OP_implicit_value:
2132 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2133 switch (val2->val_class)
2134 {
2135 case dw_val_class_const:
2136 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2137 break;
2138 case dw_val_class_vec:
2139 {
2140 unsigned int elt_size = val2->v.val_vec.elt_size;
2141 unsigned int len = val2->v.val_vec.length;
2142 unsigned int i;
2143 unsigned char *p;
2144
2145 if (elt_size > sizeof (HOST_WIDE_INT))
2146 {
2147 elt_size /= 2;
2148 len *= 2;
2149 }
2150 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2151 i < len;
2152 i++, p += elt_size)
2153 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2154 "fp or vector constant word %u", i);
2155 }
2156 break;
2157 case dw_val_class_const_double:
2158 {
2159 unsigned HOST_WIDE_INT first, second;
2160
2161 if (WORDS_BIG_ENDIAN)
2162 {
2163 first = val2->v.val_double.high;
2164 second = val2->v.val_double.low;
2165 }
2166 else
2167 {
2168 first = val2->v.val_double.low;
2169 second = val2->v.val_double.high;
2170 }
2171 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2172 first, NULL);
2173 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2174 second, NULL);
2175 }
2176 break;
2177 case dw_val_class_wide_int:
2178 {
2179 int i;
2180 int len = get_full_len (*val2->v.val_wide);
2181 if (WORDS_BIG_ENDIAN)
2182 for (i = len - 1; i >= 0; --i)
2183 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2184 val2->v.val_wide->elt (i), NULL);
2185 else
2186 for (i = 0; i < len; ++i)
2187 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2188 val2->v.val_wide->elt (i), NULL);
2189 }
2190 break;
2191 case dw_val_class_addr:
2192 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2193 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2194 break;
2195 default:
2196 gcc_unreachable ();
2197 }
2198 break;
2199 #else
2200 case DW_OP_const2u:
2201 case DW_OP_const2s:
2202 case DW_OP_const4u:
2203 case DW_OP_const4s:
2204 case DW_OP_const8u:
2205 case DW_OP_const8s:
2206 case DW_OP_skip:
2207 case DW_OP_bra:
2208 case DW_OP_implicit_value:
2209 /* We currently don't make any attempt to make sure these are
2210 aligned properly like we do for the main unwind info, so
2211 don't support emitting things larger than a byte if we're
2212 only doing unwinding. */
2213 gcc_unreachable ();
2214 #endif
2215 case DW_OP_const1u:
2216 case DW_OP_const1s:
2217 dw2_asm_output_data (1, val1->v.val_int, NULL);
2218 break;
2219 case DW_OP_constu:
2220 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2221 break;
2222 case DW_OP_consts:
2223 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2224 break;
2225 case DW_OP_pick:
2226 dw2_asm_output_data (1, val1->v.val_int, NULL);
2227 break;
2228 case DW_OP_plus_uconst:
2229 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2230 break;
2231 case DW_OP_breg0:
2232 case DW_OP_breg1:
2233 case DW_OP_breg2:
2234 case DW_OP_breg3:
2235 case DW_OP_breg4:
2236 case DW_OP_breg5:
2237 case DW_OP_breg6:
2238 case DW_OP_breg7:
2239 case DW_OP_breg8:
2240 case DW_OP_breg9:
2241 case DW_OP_breg10:
2242 case DW_OP_breg11:
2243 case DW_OP_breg12:
2244 case DW_OP_breg13:
2245 case DW_OP_breg14:
2246 case DW_OP_breg15:
2247 case DW_OP_breg16:
2248 case DW_OP_breg17:
2249 case DW_OP_breg18:
2250 case DW_OP_breg19:
2251 case DW_OP_breg20:
2252 case DW_OP_breg21:
2253 case DW_OP_breg22:
2254 case DW_OP_breg23:
2255 case DW_OP_breg24:
2256 case DW_OP_breg25:
2257 case DW_OP_breg26:
2258 case DW_OP_breg27:
2259 case DW_OP_breg28:
2260 case DW_OP_breg29:
2261 case DW_OP_breg30:
2262 case DW_OP_breg31:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_regx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 }
2274 break;
2275 case DW_OP_fbreg:
2276 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2277 break;
2278 case DW_OP_bregx:
2279 {
2280 unsigned r = val1->v.val_unsigned;
2281 if (for_eh_or_skip >= 0)
2282 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2283 gcc_assert (size_of_uleb128 (r)
2284 == size_of_uleb128 (val1->v.val_unsigned));
2285 dw2_asm_output_data_uleb128 (r, NULL);
2286 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2287 }
2288 break;
2289 case DW_OP_piece:
2290 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2291 break;
2292 case DW_OP_bit_piece:
2293 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2294 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2295 break;
2296 case DW_OP_deref_size:
2297 case DW_OP_xderef_size:
2298 dw2_asm_output_data (1, val1->v.val_int, NULL);
2299 break;
2300
2301 case DW_OP_addr:
2302 if (loc->dtprel)
2303 {
2304 if (targetm.asm_out.output_dwarf_dtprel)
2305 {
2306 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2307 DWARF2_ADDR_SIZE,
2308 val1->v.val_addr);
2309 fputc ('\n', asm_out_file);
2310 }
2311 else
2312 gcc_unreachable ();
2313 }
2314 else
2315 {
2316 #ifdef DWARF2_DEBUGGING_INFO
2317 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2318 #else
2319 gcc_unreachable ();
2320 #endif
2321 }
2322 break;
2323
2324 case DW_OP_GNU_addr_index:
2325 case DW_OP_addrx:
2326 case DW_OP_GNU_const_index:
2327 case DW_OP_constx:
2328 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2329 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2330 "(index into .debug_addr)");
2331 break;
2332
2333 case DW_OP_call2:
2334 case DW_OP_call4:
2335 {
2336 unsigned long die_offset
2337 = get_ref_die_offset (val1->v.val_die_ref.die);
2338 /* Make sure the offset has been computed and that we can encode it as
2339 an operand. */
2340 gcc_assert (die_offset > 0
2341 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2342 ? 0xffff
2343 : 0xffffffff));
2344 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2345 die_offset, NULL);
2346 }
2347 break;
2348
2349 case DW_OP_call_ref:
2350 case DW_OP_GNU_variable_value:
2351 {
2352 char label[MAX_ARTIFICIAL_LABEL_BYTES
2353 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2354 gcc_assert (val1->val_class == dw_val_class_die_ref);
2355 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2356 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2357 }
2358 break;
2359
2360 case DW_OP_implicit_pointer:
2361 case DW_OP_GNU_implicit_pointer:
2362 {
2363 char label[MAX_ARTIFICIAL_LABEL_BYTES
2364 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2365 gcc_assert (val1->val_class == dw_val_class_die_ref);
2366 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2367 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2368 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2369 }
2370 break;
2371
2372 case DW_OP_entry_value:
2373 case DW_OP_GNU_entry_value:
2374 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2375 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2376 break;
2377
2378 case DW_OP_const_type:
2379 case DW_OP_GNU_const_type:
2380 {
2381 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2382 gcc_assert (o);
2383 dw2_asm_output_data_uleb128 (o, NULL);
2384 switch (val2->val_class)
2385 {
2386 case dw_val_class_const:
2387 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2388 dw2_asm_output_data (1, l, NULL);
2389 dw2_asm_output_data (l, val2->v.val_int, NULL);
2390 break;
2391 case dw_val_class_vec:
2392 {
2393 unsigned int elt_size = val2->v.val_vec.elt_size;
2394 unsigned int len = val2->v.val_vec.length;
2395 unsigned int i;
2396 unsigned char *p;
2397
2398 l = len * elt_size;
2399 dw2_asm_output_data (1, l, NULL);
2400 if (elt_size > sizeof (HOST_WIDE_INT))
2401 {
2402 elt_size /= 2;
2403 len *= 2;
2404 }
2405 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2406 i < len;
2407 i++, p += elt_size)
2408 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2409 "fp or vector constant word %u", i);
2410 }
2411 break;
2412 case dw_val_class_const_double:
2413 {
2414 unsigned HOST_WIDE_INT first, second;
2415 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2416
2417 dw2_asm_output_data (1, 2 * l, NULL);
2418 if (WORDS_BIG_ENDIAN)
2419 {
2420 first = val2->v.val_double.high;
2421 second = val2->v.val_double.low;
2422 }
2423 else
2424 {
2425 first = val2->v.val_double.low;
2426 second = val2->v.val_double.high;
2427 }
2428 dw2_asm_output_data (l, first, NULL);
2429 dw2_asm_output_data (l, second, NULL);
2430 }
2431 break;
2432 case dw_val_class_wide_int:
2433 {
2434 int i;
2435 int len = get_full_len (*val2->v.val_wide);
2436 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2437
2438 dw2_asm_output_data (1, len * l, NULL);
2439 if (WORDS_BIG_ENDIAN)
2440 for (i = len - 1; i >= 0; --i)
2441 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2442 else
2443 for (i = 0; i < len; ++i)
2444 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2445 }
2446 break;
2447 default:
2448 gcc_unreachable ();
2449 }
2450 }
2451 break;
2452 case DW_OP_regval_type:
2453 case DW_OP_GNU_regval_type:
2454 {
2455 unsigned r = val1->v.val_unsigned;
2456 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2457 gcc_assert (o);
2458 if (for_eh_or_skip >= 0)
2459 {
2460 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2461 gcc_assert (size_of_uleb128 (r)
2462 == size_of_uleb128 (val1->v.val_unsigned));
2463 }
2464 dw2_asm_output_data_uleb128 (r, NULL);
2465 dw2_asm_output_data_uleb128 (o, NULL);
2466 }
2467 break;
2468 case DW_OP_deref_type:
2469 case DW_OP_GNU_deref_type:
2470 {
2471 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2472 gcc_assert (o);
2473 dw2_asm_output_data (1, val1->v.val_int, NULL);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477 case DW_OP_convert:
2478 case DW_OP_reinterpret:
2479 case DW_OP_GNU_convert:
2480 case DW_OP_GNU_reinterpret:
2481 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2482 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2483 else
2484 {
2485 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2486 gcc_assert (o);
2487 dw2_asm_output_data_uleb128 (o, NULL);
2488 }
2489 break;
2490
2491 case DW_OP_GNU_parameter_ref:
2492 {
2493 unsigned long o;
2494 gcc_assert (val1->val_class == dw_val_class_die_ref);
2495 o = get_ref_die_offset (val1->v.val_die_ref.die);
2496 dw2_asm_output_data (4, o, NULL);
2497 }
2498 break;
2499
2500 default:
2501 /* Other codes have no operands. */
2502 break;
2503 }
2504 }
2505
2506 /* Output a sequence of location operations.
2507 The for_eh_or_skip parameter controls whether register numbers are
2508 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2509 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2510 info). This should be suppressed for the cases that have not been converted
2511 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2512
2513 void
2514 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2515 {
2516 for (; loc != NULL; loc = loc->dw_loc_next)
2517 {
2518 enum dwarf_location_atom opc = loc->dw_loc_opc;
2519 /* Output the opcode. */
2520 if (for_eh_or_skip >= 0
2521 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2522 {
2523 unsigned r = (opc - DW_OP_breg0);
2524 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2525 gcc_assert (r <= 31);
2526 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2527 }
2528 else if (for_eh_or_skip >= 0
2529 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2530 {
2531 unsigned r = (opc - DW_OP_reg0);
2532 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2533 gcc_assert (r <= 31);
2534 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2535 }
2536
2537 dw2_asm_output_data (1, opc,
2538 "%s", dwarf_stack_op_name (opc));
2539
2540 /* Output the operand(s) (if any). */
2541 output_loc_operands (loc, for_eh_or_skip);
2542 }
2543 }
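
/* For example, emitting the two-element list DW_OP_fbreg -16; DW_OP_deref
   through this routine produces the opcode byte 0x91 followed by the
   SLEB128 encoding of -16 (the single byte 0x70), then the operand-less
   opcode byte 0x06.  */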
2544
2545 /* Output location description stack opcode's operands (if any).
2546 The output is single bytes on a line, suitable for .cfi_escape. */
2547
2548 static void
2549 output_loc_operands_raw (dw_loc_descr_ref loc)
2550 {
2551 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2552 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2553
2554 switch (loc->dw_loc_opc)
2555 {
2556 case DW_OP_addr:
2557 case DW_OP_GNU_addr_index:
2558 case DW_OP_addrx:
2559 case DW_OP_GNU_const_index:
2560 case DW_OP_constx:
2561 case DW_OP_implicit_value:
2562 /* We cannot output addresses in .cfi_escape, only bytes. */
2563 gcc_unreachable ();
2564
2565 case DW_OP_const1u:
2566 case DW_OP_const1s:
2567 case DW_OP_pick:
2568 case DW_OP_deref_size:
2569 case DW_OP_xderef_size:
2570 fputc (',', asm_out_file);
2571 dw2_asm_output_data_raw (1, val1->v.val_int);
2572 break;
2573
2574 case DW_OP_const2u:
2575 case DW_OP_const2s:
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (2, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_const4u:
2581 case DW_OP_const4s:
2582 fputc (',', asm_out_file);
2583 dw2_asm_output_data_raw (4, val1->v.val_int);
2584 break;
2585
2586 case DW_OP_const8u:
2587 case DW_OP_const8s:
2588 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2589 fputc (',', asm_out_file);
2590 dw2_asm_output_data_raw (8, val1->v.val_int);
2591 break;
2592
2593 case DW_OP_skip:
2594 case DW_OP_bra:
2595 {
2596 int offset;
2597
2598 gcc_assert (val1->val_class == dw_val_class_loc);
2599 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2600
2601 fputc (',', asm_out_file);
2602 dw2_asm_output_data_raw (2, offset);
2603 }
2604 break;
2605
2606 case DW_OP_regx:
2607 {
2608 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2609 gcc_assert (size_of_uleb128 (r)
2610 == size_of_uleb128 (val1->v.val_unsigned));
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (r);
2613 }
2614 break;
2615
2616 case DW_OP_constu:
2617 case DW_OP_plus_uconst:
2618 case DW_OP_piece:
2619 fputc (',', asm_out_file);
2620 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2621 break;
2622
2623 case DW_OP_bit_piece:
2624 fputc (',', asm_out_file);
2625 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2626 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2627 break;
2628
2629 case DW_OP_consts:
2630 case DW_OP_breg0:
2631 case DW_OP_breg1:
2632 case DW_OP_breg2:
2633 case DW_OP_breg3:
2634 case DW_OP_breg4:
2635 case DW_OP_breg5:
2636 case DW_OP_breg6:
2637 case DW_OP_breg7:
2638 case DW_OP_breg8:
2639 case DW_OP_breg9:
2640 case DW_OP_breg10:
2641 case DW_OP_breg11:
2642 case DW_OP_breg12:
2643 case DW_OP_breg13:
2644 case DW_OP_breg14:
2645 case DW_OP_breg15:
2646 case DW_OP_breg16:
2647 case DW_OP_breg17:
2648 case DW_OP_breg18:
2649 case DW_OP_breg19:
2650 case DW_OP_breg20:
2651 case DW_OP_breg21:
2652 case DW_OP_breg22:
2653 case DW_OP_breg23:
2654 case DW_OP_breg24:
2655 case DW_OP_breg25:
2656 case DW_OP_breg26:
2657 case DW_OP_breg27:
2658 case DW_OP_breg28:
2659 case DW_OP_breg29:
2660 case DW_OP_breg30:
2661 case DW_OP_breg31:
2662 case DW_OP_fbreg:
2663 fputc (',', asm_out_file);
2664 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2665 break;
2666
2667 case DW_OP_bregx:
2668 {
2669 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2670 gcc_assert (size_of_uleb128 (r)
2671 == size_of_uleb128 (val1->v.val_unsigned));
2672 fputc (',', asm_out_file);
2673 dw2_asm_output_data_uleb128_raw (r);
2674 fputc (',', asm_out_file);
2675 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2676 }
2677 break;
2678
2679 case DW_OP_implicit_pointer:
2680 case DW_OP_entry_value:
2681 case DW_OP_const_type:
2682 case DW_OP_regval_type:
2683 case DW_OP_deref_type:
2684 case DW_OP_convert:
2685 case DW_OP_reinterpret:
2686 case DW_OP_GNU_implicit_pointer:
2687 case DW_OP_GNU_entry_value:
2688 case DW_OP_GNU_const_type:
2689 case DW_OP_GNU_regval_type:
2690 case DW_OP_GNU_deref_type:
2691 case DW_OP_GNU_convert:
2692 case DW_OP_GNU_reinterpret:
2693 case DW_OP_GNU_parameter_ref:
2694 gcc_unreachable ();
2695 break;
2696
2697 default:
2698 /* Other codes have no operands. */
2699 break;
2700 }
2701 }
2702
2703 void
2704 output_loc_sequence_raw (dw_loc_descr_ref loc)
2705 {
2706 while (1)
2707 {
2708 enum dwarf_location_atom opc = loc->dw_loc_opc;
2709 /* Output the opcode. */
2710 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2711 {
2712 unsigned r = (opc - DW_OP_breg0);
2713 r = DWARF2_FRAME_REG_OUT (r, 1);
2714 gcc_assert (r <= 31);
2715 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2716 }
2717 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2718 {
2719 unsigned r = (opc - DW_OP_reg0);
2720 r = DWARF2_FRAME_REG_OUT (r, 1);
2721 gcc_assert (r <= 31);
2722 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2723 }
2724 /* Output the opcode. */
2725 fprintf (asm_out_file, "%#x", opc);
2726 output_loc_operands_raw (loc);
2727
2728 if (!loc->dw_loc_next)
2729 break;
2730 loc = loc->dw_loc_next;
2731
2732 fputc (',', asm_out_file);
2733 }
2734 }
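
/* A minimal sketch of the raw form: the same DW_OP_fbreg -16; DW_OP_deref
   list comes out roughly as the comma-separated byte list 0x91,0x70,0x6,
   which the caller can append to a .cfi_escape directive.  */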
2735
2736 /* This function builds a dwarf location descriptor sequence from a
2737 dw_cfa_location, adding the given OFFSET to the result of the
2738 expression. */
2739
2740 struct dw_loc_descr_node *
2741 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2742 {
2743 struct dw_loc_descr_node *head, *tmp;
2744
2745 offset += cfa->offset;
2746
2747 if (cfa->indirect)
2748 {
2749 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2750 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2751 head->dw_loc_oprnd1.val_entry = NULL;
2752 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2753 add_loc_descr (&head, tmp);
2754 loc_descr_plus_const (&head, offset);
2755 }
2756 else
2757 head = new_reg_loc_descr (cfa->reg, offset);
2758
2759 return head;
2760 }
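
/* For instance, with a non-indirect CFA described as DWARF register 7
   plus 16, build_cfa_loc (&cfa, 8) reduces to new_reg_loc_descr (7, 24),
   i.e. (assuming new_reg_loc_descr maps registers 0-31 to DW_OP_breg0..31)
   DW_OP_breg7 24; the indirect case instead dereferences reg+base_offset
   first and then adds the offset.  */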
2761
2762 /* This function builds a dwarf location descriptor sequence for
2763    the address at OFFSET from the CFA when the stack is aligned to
2764    ALIGNMENT bytes. */
2765
2766 struct dw_loc_descr_node *
2767 build_cfa_aligned_loc (dw_cfa_location *cfa,
2768 poly_int64 offset, HOST_WIDE_INT alignment)
2769 {
2770 struct dw_loc_descr_node *head;
2771 unsigned int dwarf_fp
2772 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2773
2774 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2775 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2776 {
2777 head = new_reg_loc_descr (dwarf_fp, 0);
2778 add_loc_descr (&head, int_loc_descriptor (alignment));
2779 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2780 loc_descr_plus_const (&head, offset);
2781 }
2782 else
2783 head = new_reg_loc_descr (dwarf_fp, offset);
2784 return head;
2785 }
2786 \f
2787 /* And now, the support for symbolic debugging information. */
2788
2789 /* .debug_str support. */
2790
2791 static void dwarf2out_init (const char *);
2792 static void dwarf2out_finish (const char *);
2793 static void dwarf2out_early_finish (const char *);
2794 static void dwarf2out_assembly_start (void);
2795 static void dwarf2out_define (unsigned int, const char *);
2796 static void dwarf2out_undef (unsigned int, const char *);
2797 static void dwarf2out_start_source_file (unsigned, const char *);
2798 static void dwarf2out_end_source_file (unsigned);
2799 static void dwarf2out_function_decl (tree);
2800 static void dwarf2out_begin_block (unsigned, unsigned);
2801 static void dwarf2out_end_block (unsigned, unsigned);
2802 static bool dwarf2out_ignore_block (const_tree);
2803 static void dwarf2out_early_global_decl (tree);
2804 static void dwarf2out_late_global_decl (tree);
2805 static void dwarf2out_type_decl (tree, int);
2806 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2807 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2808 dw_die_ref);
2809 static void dwarf2out_abstract_function (tree);
2810 static void dwarf2out_var_location (rtx_insn *);
2811 static void dwarf2out_inline_entry (tree);
2812 static void dwarf2out_size_function (tree);
2813 static void dwarf2out_begin_function (tree);
2814 static void dwarf2out_end_function (unsigned int);
2815 static void dwarf2out_register_main_translation_unit (tree unit);
2816 static void dwarf2out_set_name (tree, tree);
2817 static void dwarf2out_register_external_die (tree decl, const char *sym,
2818 unsigned HOST_WIDE_INT off);
2819 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2820 unsigned HOST_WIDE_INT *off);
2821
2822 /* The debug hooks structure. */
2823
2824 const struct gcc_debug_hooks dwarf2_debug_hooks =
2825 {
2826 dwarf2out_init,
2827 dwarf2out_finish,
2828 dwarf2out_early_finish,
2829 dwarf2out_assembly_start,
2830 dwarf2out_define,
2831 dwarf2out_undef,
2832 dwarf2out_start_source_file,
2833 dwarf2out_end_source_file,
2834 dwarf2out_begin_block,
2835 dwarf2out_end_block,
2836 dwarf2out_ignore_block,
2837 dwarf2out_source_line,
2838 dwarf2out_begin_prologue,
2839 #if VMS_DEBUGGING_INFO
2840 dwarf2out_vms_end_prologue,
2841 dwarf2out_vms_begin_epilogue,
2842 #else
2843 debug_nothing_int_charstar,
2844 debug_nothing_int_charstar,
2845 #endif
2846 dwarf2out_end_epilogue,
2847 dwarf2out_begin_function,
2848 dwarf2out_end_function, /* end_function */
2849 dwarf2out_register_main_translation_unit,
2850 dwarf2out_function_decl, /* function_decl */
2851 dwarf2out_early_global_decl,
2852 dwarf2out_late_global_decl,
2853 dwarf2out_type_decl, /* type_decl */
2854 dwarf2out_imported_module_or_decl,
2855 dwarf2out_die_ref_for_decl,
2856 dwarf2out_register_external_die,
2857 debug_nothing_tree, /* deferred_inline_function */
2858 /* The DWARF 2 backend tries to reduce debugging bloat by not
2859 emitting the abstract description of inline functions until
2860 something tries to reference them. */
2861 dwarf2out_abstract_function, /* outlining_inline_function */
2862 debug_nothing_rtx_code_label, /* label */
2863 debug_nothing_int, /* handle_pch */
2864 dwarf2out_var_location,
2865 dwarf2out_inline_entry, /* inline_entry */
2866 dwarf2out_size_function, /* size_function */
2867 dwarf2out_switch_text_section,
2868 dwarf2out_set_name,
2869 1, /* start_end_main_source_file */
2870 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2871 };
2872
2873 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2874 {
2875 dwarf2out_init,
2876 debug_nothing_charstar,
2877 debug_nothing_charstar,
2878 dwarf2out_assembly_start,
2879 debug_nothing_int_charstar,
2880 debug_nothing_int_charstar,
2881 debug_nothing_int_charstar,
2882 debug_nothing_int,
2883 debug_nothing_int_int, /* begin_block */
2884 debug_nothing_int_int, /* end_block */
2885 debug_true_const_tree, /* ignore_block */
2886 dwarf2out_source_line, /* source_line */
2887 debug_nothing_int_int_charstar, /* begin_prologue */
2888 debug_nothing_int_charstar, /* end_prologue */
2889 debug_nothing_int_charstar, /* begin_epilogue */
2890 debug_nothing_int_charstar, /* end_epilogue */
2891 debug_nothing_tree, /* begin_function */
2892 debug_nothing_int, /* end_function */
2893 debug_nothing_tree, /* register_main_translation_unit */
2894 debug_nothing_tree, /* function_decl */
2895 debug_nothing_tree, /* early_global_decl */
2896 debug_nothing_tree, /* late_global_decl */
2897 debug_nothing_tree_int, /* type_decl */
2898 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2899 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2900 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2901 debug_nothing_tree, /* deferred_inline_function */
2902 debug_nothing_tree, /* outlining_inline_function */
2903 debug_nothing_rtx_code_label, /* label */
2904 debug_nothing_int, /* handle_pch */
2905 debug_nothing_rtx_insn, /* var_location */
2906 debug_nothing_tree, /* inline_entry */
2907 debug_nothing_tree, /* size_function */
2908 debug_nothing_void, /* switch_text_section */
2909 debug_nothing_tree_tree, /* set_name */
2910 0, /* start_end_main_source_file */
2911 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2912 };
2913 \f
2914 /* NOTE: In the comments in this file, many references are made to
2915 "Debugging Information Entries". This term is abbreviated as `DIE'
2916 throughout the remainder of this file. */
2917
2918 /* An internal representation of the DWARF output is built, and then
2919 walked to generate the DWARF debugging info. The walk of the internal
2920 representation is done after the entire program has been compiled.
2921 The types below are used to describe the internal representation. */
2922
2923 /* Whether to put type DIEs into their own section .debug_types instead
2924    of making them part of the .debug_info section.  Only supported for
2925    DWARF 4 or higher, and only when the user didn't disable it through
2926    -fno-debug-types-section.  It is more efficient to put types in
2927    separate comdat sections since the linker will then be able to
2928    remove duplicates.  But not all tools support .debug_types sections
2929    yet.  For DWARF 5 or higher .debug_types doesn't exist any more;
2930    type units use the DW_UT_type unit type in the .debug_info section.
2931    For late LTO debug there should be almost no types emitted, so avoid
2932    enabling -fdebug-types-section there. */
2933
2934 #define use_debug_types (dwarf_version >= 4 \
2935 && flag_debug_types_section \
2936 && !in_lto_p)
2937
2938 /* Various DIE's use offsets relative to the beginning of the
2939 .debug_info section to refer to each other. */
2940
2941 typedef long int dw_offset;
2942
2943 struct comdat_type_node;
2944
2945 /* The entries in the line_info table more-or-less mirror the opcodes
2946 that are used in the real dwarf line table. Arrays of these entries
2947 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2948 supported. */
2949
2950 enum dw_line_info_opcode {
2951 /* Emit DW_LNE_set_address; the operand is the label index. */
2952 LI_set_address,
2953
2954 /* Emit a row to the matrix with the given line. This may be done
2955 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2956 special opcodes. */
2957 LI_set_line,
2958
2959 /* Emit a DW_LNS_set_file. */
2960 LI_set_file,
2961
2962 /* Emit a DW_LNS_set_column. */
2963 LI_set_column,
2964
2965 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2966 LI_negate_stmt,
2967
2968 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2969 LI_set_prologue_end,
2970 LI_set_epilogue_begin,
2971
2972 /* Emit a DW_LNE_set_discriminator. */
2973 LI_set_discriminator,
2974
2975 /* Output a Fixed Advance PC; the target PC is the label index; the
2976 base PC is the previous LI_adv_address or LI_set_address entry.
2977 We only use this when emitting debug views without assembler
2978 support, at explicit user request. Ideally, we should only use
2979 it when the offset might be zero but we can't tell: it's the only
2980 way to maybe change the PC without resetting the view number. */
2981 LI_adv_address
2982 };
2983
2984 typedef struct GTY(()) dw_line_info_struct {
2985 enum dw_line_info_opcode opcode;
2986 unsigned int val;
2987 } dw_line_info_entry;
2988
2989
2990 struct GTY(()) dw_line_info_table {
2991 /* The label that marks the end of this section. */
2992 const char *end_label;
2993
2994 /* The values for the last row of the matrix, as collected in the table.
2995 These are used to minimize the changes to the next row. */
2996 unsigned int file_num;
2997 unsigned int line_num;
2998 unsigned int column_num;
2999 int discrim_num;
3000 bool is_stmt;
3001 bool in_use;
3002
3003 /* This denotes the NEXT view number.
3004
3005 If it is 0, it is known that the NEXT view will be the first view
3006 at the given PC.
3007
3008 If it is -1, we're forcing the view number to be reset, e.g. at a
3009 function entry.
3010
3011 The meaning of other nonzero values depends on whether we're
3012 computing views internally or leaving it for the assembler to do
3013 so. If we're emitting them internally, view denotes the view
3014 number since the last known advance of PC. If we're leaving it
3015 for the assembler, it denotes the LVU label number that we're
3016 going to ask the assembler to assign. */
3017 var_loc_view view;
3018
3019 /* This counts the number of symbolic views emitted in this table
3020 since the latest view reset. Its max value, over all tables,
3021 sets symview_upper_bound. */
3022 var_loc_view symviews_since_reset;
3023
3024 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3025 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3026 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3027 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3028
3029 vec<dw_line_info_entry, va_gc> *entries;
3030 };
3031
3032 /* This is an upper bound for view numbers that the assembler may
3033 assign to symbolic views output in this translation. It is used to
3034 decide how big a field to use to represent view numbers in
3035 symview-classed attributes. */
3036
3037 static var_loc_view symview_upper_bound;
3038
3039 /* If we're keeping track of location views and their reset points, and
3040 INSN is a reset point (i.e., it necessarily advances the PC), mark
3041 the next view in TABLE as reset. */
3042
3043 static void
3044 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3045 {
3046 if (!debug_internal_reset_location_views)
3047 return;
3048
3049 /* Maybe turn (part of?) this test into a default target hook. */
3050 int reset = 0;
3051
3052 if (targetm.reset_location_view)
3053 reset = targetm.reset_location_view (insn);
3054
3055 if (reset)
3056 ;
3057 else if (JUMP_TABLE_DATA_P (insn))
3058 reset = 1;
3059 else if (GET_CODE (insn) == USE
3060 || GET_CODE (insn) == CLOBBER
3061 || GET_CODE (insn) == ASM_INPUT
3062 || asm_noperands (insn) >= 0)
3063 ;
3064 else if (get_attr_min_length (insn) > 0)
3065 reset = 1;
3066
3067 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3068 RESET_NEXT_VIEW (table->view);
3069 }
3070
3071 /* Each DIE attribute has a field specifying the attribute kind,
3072 a link to the next attribute in the chain, and an attribute value.
3073 Attributes are typically linked below the DIE they modify. */
3074
3075 typedef struct GTY(()) dw_attr_struct {
3076 enum dwarf_attribute dw_attr;
3077 dw_val_node dw_attr_val;
3078 }
3079 dw_attr_node;
3080
3081
3082 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3083 The children of each node form a circular list linked by
3084 die_sib. die_child points to the node *before* the "first" child node. */
3085
3086 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3087 union die_symbol_or_type_node
3088 {
3089 const char * GTY ((tag ("0"))) die_symbol;
3090 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3091 }
3092 GTY ((desc ("%0.comdat_type_p"))) die_id;
3093 vec<dw_attr_node, va_gc> *die_attr;
3094 dw_die_ref die_parent;
3095 dw_die_ref die_child;
3096 dw_die_ref die_sib;
3097 dw_die_ref die_definition; /* ref from a specification to its definition */
3098 dw_offset die_offset;
3099 unsigned long die_abbrev;
3100 int die_mark;
3101 unsigned int decl_id;
3102 enum dwarf_tag die_tag;
3103 /* Die is used and must not be pruned as unused. */
3104 BOOL_BITFIELD die_perennial_p : 1;
3105 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3106 /* For an external ref to die_symbol: set if die_offset contains an
3107    extra offset to that symbol. */
3108 BOOL_BITFIELD with_offset : 1;
3109 /* Whether this DIE was removed from the DIE tree, for example via
3110 prune_unused_types. We don't consider those present from the
3111 DIE lookup routines. */
3112 BOOL_BITFIELD removed : 1;
3113 /* Lots of spare bits. */
3114 }
3115 die_node;
3116
3117 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3118 static bool early_dwarf;
3119 static bool early_dwarf_finished;
3120 class set_early_dwarf {
3121 public:
3122 bool saved;
3123 set_early_dwarf () : saved(early_dwarf)
3124 {
3125 gcc_assert (! early_dwarf_finished);
3126 early_dwarf = true;
3127 }
3128 ~set_early_dwarf () { early_dwarf = saved; }
3129 };
3130
3131 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3132 #define FOR_EACH_CHILD(die, c, expr) do { \
3133 c = die->die_child; \
3134 if (c) do { \
3135 c = c->die_sib; \
3136 expr; \
3137 } while (c != die->die_child); \
3138 } while (0)
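
/* A minimal usage sketch of the iterator above: counting the children of
   a DIE.

     dw_die_ref c;
     unsigned count = 0;
     FOR_EACH_CHILD (die, c, count++);
 */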
3139
3140 /* The pubname structure */
3141
3142 typedef struct GTY(()) pubname_struct {
3143 dw_die_ref die;
3144 const char *name;
3145 }
3146 pubname_entry;
3147
3148
3149 struct GTY(()) dw_ranges {
3150 const char *label;
3151 /* If this is positive, it's a block number, otherwise it's a
3152 bitwise-negated index into dw_ranges_by_label. */
3153 int num;
3154 /* Index for the range list for DW_FORM_rnglistx. */
3155 unsigned int idx : 31;
3156   /* True if this range might be in a different section
3157      from the previous entry. */
3158 unsigned int maybe_new_sec : 1;
3159 };
3160
3161 /* A structure to hold a macinfo entry. */
3162
3163 typedef struct GTY(()) macinfo_struct {
3164 unsigned char code;
3165 unsigned HOST_WIDE_INT lineno;
3166 const char *info;
3167 }
3168 macinfo_entry;
3169
3170
3171 struct GTY(()) dw_ranges_by_label {
3172 const char *begin;
3173 const char *end;
3174 };
3175
3176 /* The comdat type node structure. */
3177 struct GTY(()) comdat_type_node
3178 {
3179 dw_die_ref root_die;
3180 dw_die_ref type_die;
3181 dw_die_ref skeleton_die;
3182 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3183 comdat_type_node *next;
3184 };
3185
3186 /* A list of DIEs for which we can't determine ancestry (parent_die
3187 field) just yet. Later in dwarf2out_finish we will fill in the
3188 missing bits. */
3189 typedef struct GTY(()) limbo_die_struct {
3190 dw_die_ref die;
3191 /* The tree for which this DIE was created. We use this to
3192 determine ancestry later. */
3193 tree created_for;
3194 struct limbo_die_struct *next;
3195 }
3196 limbo_die_node;
3197
3198 typedef struct skeleton_chain_struct
3199 {
3200 dw_die_ref old_die;
3201 dw_die_ref new_die;
3202 struct skeleton_chain_struct *parent;
3203 }
3204 skeleton_chain_node;
3205
3206 /* Define a macro which returns nonzero for a TYPE_DECL which was
3207 implicitly generated for a type.
3208
3209 Note that, unlike the C front-end (which generates a NULL named
3210 TYPE_DECL node for each complete tagged type, each array type,
3211 and each function type node created) the C++ front-end generates
3212 a _named_ TYPE_DECL node for each tagged type node created.
3213 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3214 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3215 front-end, but for each type, tagged or not. */
3216
3217 #define TYPE_DECL_IS_STUB(decl) \
3218 (DECL_NAME (decl) == NULL_TREE \
3219 || (DECL_ARTIFICIAL (decl) \
3220 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3221 /* This is necessary for stub decls that \
3222 appear in nested inline functions. */ \
3223 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3224 && (decl_ultimate_origin (decl) \
3225 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3226
3227 /* Information concerning the compilation unit's programming
3228 language, and compiler version. */
3229
3230 /* Fixed size portion of the DWARF compilation unit header. */
3231 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3232 (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size \
3233 + (dwarf_version >= 5 ? 4 : 3))
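
/* As a worked example, with 32-bit DWARF (4-byte initial length and
   4-byte offsets) this is 4 + 4 + 3 == 11 bytes for DWARF 4
   (unit_length, version, debug_abbrev_offset, address_size) and
   4 + 4 + 4 == 12 bytes for DWARF 5, which adds a unit_type byte.  */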
3234
3235 /* Fixed size portion of the DWARF comdat type unit header. */
3236 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3237 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3238 + DWARF_TYPE_SIGNATURE_SIZE + dwarf_offset_size)
3239
3240 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3241 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3242 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3243
3244 /* Fixed size portion of public names info. */
3245 #define DWARF_PUBNAMES_HEADER_SIZE (2 * dwarf_offset_size + 2)
3246
3247 /* Fixed size portion of the address range info. */
3248 #define DWARF_ARANGES_HEADER_SIZE \
3249 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4, \
3250 DWARF2_ADDR_SIZE * 2) \
3251 - DWARF_INITIAL_LENGTH_SIZE)
3252
3253 /* Size of padding portion in the address range info. It must be
3254 aligned to twice the pointer size. */
3255 #define DWARF_ARANGES_PAD_SIZE \
3256 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4, \
3257 DWARF2_ADDR_SIZE * 2) \
3258 - (DWARF_INITIAL_LENGTH_SIZE + dwarf_offset_size + 4))
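
/* Worked example: with 32-bit DWARF and 8-byte addresses the unpadded
   header is 4 + 4 + 4 == 12 bytes, DWARF_ROUND rounds it up to 16 (twice
   the address size), so DWARF_ARANGES_HEADER_SIZE is 16 - 4 == 12 and
   DWARF_ARANGES_PAD_SIZE is 16 - 12 == 4 bytes of padding.  */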
3259
3260 /* Use assembler line directives if available. */
3261 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3262 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3263 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3264 #else
3265 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3266 #endif
3267 #endif
3268
3269 /* Use assembler views in line directives if available. */
3270 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3271 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3272 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3273 #else
3274 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3275 #endif
3276 #endif
3277
3278 /* Return true if GCC configure detected assembler support for .loc. */
3279
3280 bool
3281 dwarf2out_default_as_loc_support (void)
3282 {
3283 return DWARF2_ASM_LINE_DEBUG_INFO;
3284 #if (GCC_VERSION >= 3000)
3285 # undef DWARF2_ASM_LINE_DEBUG_INFO
3286 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3287 #endif
3288 }
3289
3290 /* Return true if GCC configure detected assembler support for views
3291 in .loc directives. */
3292
3293 bool
3294 dwarf2out_default_as_locview_support (void)
3295 {
3296 return DWARF2_ASM_VIEW_DEBUG_INFO;
3297 #if (GCC_VERSION >= 3000)
3298 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3299 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3300 #endif
3301 }
3302
3303 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3304 view computation, and it refers to a view identifier for which we
3305 will not emit a label because it is known to map to a view number
3306 zero. We won't allocate the bitmap if we're not using assembler
3307 support for location views, but we have to make the variable
3308 visible for GGC and for code that will be optimized out for lack of
3309 support but that's still parsed and compiled. We could abstract it
3310 out with macros, but it's not worth it. */
3311 static GTY(()) bitmap zero_view_p;
3312
3313 /* Evaluate to TRUE iff N is known to identify the first location view
3314 at its PC. When not using assembler location view computation,
3315    that must be view number zero.  Otherwise, the zero_view_p bitmap is
3316    allocated and the view label numbers recorded in it are the ones
3317    known to be zero. */
3318 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3319 || (N) == (var_loc_view)-1 \
3320 || (zero_view_p \
3321 && bitmap_bit_p (zero_view_p, (N))))
3322
3323 /* Return true iff we're to emit .loc directives for the assembler to
3324 generate line number sections.
3325
3326 When we're not emitting views, all we need from the assembler is
3327 support for .loc directives.
3328
3329 If we are emitting views, we can only use the assembler's .loc
3330 support if it also supports views.
3331
3332 When the compiler is emitting the line number programs and
3333 computing view numbers itself, it resets view numbers at known PC
3334 changes and counts from that, and then it emits view numbers as
3335 literal constants in locviewlists. There are cases in which the
3336 compiler is not sure about PC changes, e.g. when extra alignment is
3337 requested for a label. In these cases, the compiler may not reset
3338 the view counter, and the potential PC advance in the line number
3339 program will use an opcode that does not reset the view counter
3340 even if the PC actually changes, so that compiler and debug info
3341 consumer can keep view numbers in sync.
3342
3343 When the compiler defers view computation to the assembler, it
3344 emits symbolic view numbers in locviewlists, with the exception of
3345 views known to be zero (forced resets, or reset after
3346 compiler-visible PC changes): instead of emitting symbols for
3347 these, we emit literal zero and assert the assembler agrees with
3348 the compiler's assessment. We could use symbolic views everywhere,
3349 instead of special-casing zero views, but then we'd be unable to
3350 optimize out locviewlists that contain only zeros. */
3351
3352 static bool
3353 output_asm_line_debug_info (void)
3354 {
3355 return (dwarf2out_as_loc_support
3356 && (dwarf2out_as_locview_support
3357 || !debug_variable_location_views));
3358 }
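
/* Reading the predicate above: with an assembler that supports .loc but
   not views, .loc directives are still used while location views are
   disabled, but once views are requested (-gvariable-location-views) the
   compiler falls back to emitting the line number program, and the view
   numbers, itself, as described in the comment before this function.  */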
3359
3360 static bool asm_outputs_debug_line_str (void);
3361
3362 /* Minimum line offset in a special line info. opcode.
3363 This value was chosen to give a reasonable range of values. */
3364 #define DWARF_LINE_BASE -10
3365
3366 /* First special line opcode - leave room for the standard opcodes. */
3367 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3368
3369 /* Range of line offsets in a special line info. opcode. */
3370 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
3371
3372 /* Flag that indicates the initial value of the is_stmt_start flag.
3373 In the present implementation, we do not mark any lines as
3374 the beginning of a source statement, because that information
3375 is not made available by the GCC front-end. */
3376 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3377
3378 /* Maximum number of operations per instruction bundle. */
3379 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3380 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3381 #endif
3382
3383 /* This location is used by calc_die_sizes() to keep track of
3384    the offset of each DIE within the .debug_info section. */
3385 static unsigned long next_die_offset;
3386
3387 /* Record the root of the DIE's built for the current compilation unit. */
3388 static GTY(()) dw_die_ref single_comp_unit_die;
3389
3390 /* A list of type DIEs that have been separated into comdat sections. */
3391 static GTY(()) comdat_type_node *comdat_type_list;
3392
3393 /* A list of CU DIEs that have been separated. */
3394 static GTY(()) limbo_die_node *cu_die_list;
3395
3396 /* A list of DIEs with a NULL parent waiting to be relocated. */
3397 static GTY(()) limbo_die_node *limbo_die_list;
3398
3399 /* A list of DIEs for which we may have to generate
3400 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3401 static GTY(()) limbo_die_node *deferred_asm_name;
3402
3403 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3404 {
3405 typedef const char *compare_type;
3406
3407 static hashval_t hash (dwarf_file_data *);
3408 static bool equal (dwarf_file_data *, const char *);
3409 };
3410
3411 /* Filenames referenced by this compilation unit. */
3412 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3413
3414 struct decl_die_hasher : ggc_ptr_hash<die_node>
3415 {
3416 typedef tree compare_type;
3417
3418 static hashval_t hash (die_node *);
3419 static bool equal (die_node *, tree);
3420 };
3421 /* A hash table of references to DIE's that describe declarations.
3422 The key is a DECL_UID() which is a unique number identifying each decl. */
3423 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3424
3425 struct GTY ((for_user)) variable_value_struct {
3426 unsigned int decl_id;
3427 vec<dw_die_ref, va_gc> *dies;
3428 };
3429
3430 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3431 {
3432 typedef tree compare_type;
3433
3434 static hashval_t hash (variable_value_struct *);
3435 static bool equal (variable_value_struct *, tree);
3436 };
3437 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3438    dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3439    the DECL_CONTEXT of the referenced VAR_DECLs. */
3440 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3441
3442 struct block_die_hasher : ggc_ptr_hash<die_struct>
3443 {
3444 static hashval_t hash (die_struct *);
3445 static bool equal (die_struct *, die_struct *);
3446 };
3447
3448 /* A hash table of references to DIE's that describe COMMON blocks.
3449 The key is DECL_UID() ^ die_parent. */
3450 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3451
3452 typedef struct GTY(()) die_arg_entry_struct {
3453 dw_die_ref die;
3454 tree arg;
3455 } die_arg_entry;
3456
3457
3458 /* Node of the variable location list. */
3459 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3460 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3461 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3462 in mode of the EXPR_LIST node and first EXPR_LIST operand
3463 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3464 location or NULL for padding. For larger bitsizes,
3465 mode is 0 and first operand is a CONCAT with bitsize
3466 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3467 NULL as second operand. */
3468 rtx GTY (()) loc;
3469 const char * GTY (()) label;
3470 struct var_loc_node * GTY (()) next;
3471 var_loc_view view;
3472 };
3473
3474 /* Variable location list. */
3475 struct GTY ((for_user)) var_loc_list_def {
3476 struct var_loc_node * GTY (()) first;
3477
3478   /* Pointer to the last or last-but-one element of the
3479      chained list.  If the list is empty, both first and
3480      last are NULL.  If the list contains just one node,
3481      or the last node is certainly not redundant, this points
3482      to the last node; otherwise it points to the last but one.
3483      Do not mark it for GC because it is marked through the chain. */
3484 struct var_loc_node * GTY ((skip ("%h"))) last;
3485
3486   /* Pointer to the last element before a section switch;
3487      if NULL, either sections weren't switched or first
3488      is after the section switch. */
3489 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3490
3491 /* DECL_UID of the variable decl. */
3492 unsigned int decl_id;
3493 };
3494 typedef struct var_loc_list_def var_loc_list;
3495
3496 /* Call argument location list. */
3497 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3498 rtx GTY (()) call_arg_loc_note;
3499 const char * GTY (()) label;
3500 tree GTY (()) block;
3501 bool tail_call_p;
3502 rtx GTY (()) symbol_ref;
3503 struct call_arg_loc_node * GTY (()) next;
3504 };
3505
3506
3507 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3508 {
3509 typedef const_tree compare_type;
3510
3511 static hashval_t hash (var_loc_list *);
3512 static bool equal (var_loc_list *, const_tree);
3513 };
3514
3515 /* Table of decl location linked lists. */
3516 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3517
3518 /* Head and tail of call_arg_loc chain. */
3519 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3520 static struct call_arg_loc_node *call_arg_loc_last;
3521
3522 /* Number of call sites in the current function. */
3523 static int call_site_count = -1;
3524 /* Number of tail call sites in the current function. */
3525 static int tail_call_site_count = -1;
3526
3527 /* A cached location list. */
3528 struct GTY ((for_user)) cached_dw_loc_list_def {
3529 /* The DECL_UID of the decl that this entry describes. */
3530 unsigned int decl_id;
3531
3532 /* The cached location list. */
3533 dw_loc_list_ref loc_list;
3534 };
3535 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3536
3537 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3538 {
3539
3540 typedef const_tree compare_type;
3541
3542 static hashval_t hash (cached_dw_loc_list *);
3543 static bool equal (cached_dw_loc_list *, const_tree);
3544 };
3545
3546 /* Table of cached location lists. */
3547 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3548
3549 /* A vector of references to DIE's that are uniquely identified by their tag,
3550 presence/absence of children DIE's, and list of attribute/value pairs. */
3551 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3552
3553 /* A hash map to remember the stack usage for DWARF procedures. The value
3554    stored is the difference in stack size between just before the DWARF
3555    procedure is invoked and just after it returns.  In other words, for a
3556    DWARF procedure that consumes N stack slots and pushes M, this stores M - N.  */
3557 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3558
3559 /* A global counter for generating labels for line number data. */
3560 static unsigned int line_info_label_num;
3561
3562 /* The current table to which we should emit line number information
3563 for the current function. This will be set up at the beginning of
3564 assembly for the function. */
3565 static GTY(()) dw_line_info_table *cur_line_info_table;
3566
3567 /* The two default tables of line number info. */
3568 static GTY(()) dw_line_info_table *text_section_line_info;
3569 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3570
3571 /* The set of all non-default tables of line number info. */
3572 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3573
3574 /* A flag to tell pubnames/types export if there is an info section to
3575 refer to. */
3576 static bool info_section_emitted;
3577
3578 /* A pointer to the base of a table that contains a list of publicly
3579 accessible names. */
3580 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3581
3582 /* A pointer to the base of a table that contains a list of publicly
3583 accessible types. */
3584 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3585
3586 /* A pointer to the base of a table that contains a list of macro
3587 defines/undefines (and file start/end markers). */
3588 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3589
3590 /* True if .debug_macinfo or .debug_macros section is going to be
3591 emitted. */
3592 #define have_macinfo \
3593 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3594 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3595 && !macinfo_table->is_empty ())
3596
3597 /* Vector of dies for which we should generate .debug_ranges info. */
3598 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3599
3600 /* Vector of pairs of labels referenced in ranges_table. */
3601 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3602
3603 /* Whether we have location lists that need outputting.  */
3604 static GTY(()) bool have_location_lists;
3605
3606 /* Unique label counter. */
3607 static GTY(()) unsigned int loclabel_num;
3608
3609 /* Unique label counter for point-of-call tables. */
3610 static GTY(()) unsigned int poc_label_num;
3611
3612 /* The last file entry emitted by maybe_emit_file(). */
3613 static GTY(()) struct dwarf_file_data * last_emitted_file;
3614
3615 /* Number of internal labels generated by gen_internal_sym(). */
3616 static GTY(()) int label_num;
3617
3618 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3619
3620 /* Instances of generic types for which we need to generate debug
3621 info that describe their generic parameters and arguments. That
3622 generation needs to happen once all types are properly laid out so
3623 we do it at the end of compilation. */
3624 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3625
3626 /* Offset from the "steady-state frame pointer" to the frame base,
3627 within the current function. */
3628 static poly_int64 frame_pointer_fb_offset;
3629 static bool frame_pointer_fb_offset_valid;
3630
3631 static vec<dw_die_ref> base_types;
3632
3633 /* Flags to represent a set of attribute classes for attributes that represent
3634 a scalar value (bounds, pointers, ...). */
3635 enum dw_scalar_form
3636 {
3637 dw_scalar_form_constant = 0x01,
3638 dw_scalar_form_exprloc = 0x02,
3639 dw_scalar_form_reference = 0x04
3640 };
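
/* These are bitmask values, so a caller can accept several classes at once,
   e.g. (dw_scalar_form_constant | dw_scalar_form_exprloc) for an attribute
   that may be emitted either as a constant or as an expression location.
   This combination is given for illustration only.  */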
3641
3642 /* Forward declarations for functions defined in this file. */
3643
3644 static int is_pseudo_reg (const_rtx);
3645 static tree type_main_variant (tree);
3646 static int is_tagged_type (const_tree);
3647 static const char *dwarf_tag_name (unsigned);
3648 static const char *dwarf_attr_name (unsigned);
3649 static const char *dwarf_form_name (unsigned);
3650 static tree decl_ultimate_origin (const_tree);
3651 static tree decl_class_context (tree);
3652 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3653 static inline enum dw_val_class AT_class (dw_attr_node *);
3654 static inline unsigned int AT_index (dw_attr_node *);
3655 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3656 static inline unsigned AT_flag (dw_attr_node *);
3657 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3658 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3659 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3660 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3661 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3662 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3663 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3664 unsigned int, unsigned char *);
3665 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3666 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3667 static inline const char *AT_string (dw_attr_node *);
3668 static enum dwarf_form AT_string_form (dw_attr_node *);
3669 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3670 static void add_AT_specification (dw_die_ref, dw_die_ref);
3671 static inline dw_die_ref AT_ref (dw_attr_node *);
3672 static inline int AT_ref_external (dw_attr_node *);
3673 static inline void set_AT_ref_external (dw_attr_node *, int);
3674 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3675 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3676 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3677 dw_loc_list_ref);
3678 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3679 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3680 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3681 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3682 static void remove_addr_table_entry (addr_table_entry *);
3683 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3684 static inline rtx AT_addr (dw_attr_node *);
3685 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3686 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3687 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3688 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3689 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3690 unsigned long, bool);
3691 static inline const char *AT_lbl (dw_attr_node *);
3692 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3693 static const char *get_AT_low_pc (dw_die_ref);
3694 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3695 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3696 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3697 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3698 static bool is_c (void);
3699 static bool is_cxx (void);
3700 static bool is_cxx (const_tree);
3701 static bool is_fortran (void);
3702 static bool is_ada (void);
3703 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3704 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3705 static void add_child_die (dw_die_ref, dw_die_ref);
3706 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3707 static dw_die_ref lookup_type_die (tree);
3708 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3709 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3710 static void equate_type_number_to_die (tree, dw_die_ref);
3711 static dw_die_ref lookup_decl_die (tree);
3712 static var_loc_list *lookup_decl_loc (const_tree);
3713 static void equate_decl_number_to_die (tree, dw_die_ref);
3714 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3715 static void print_spaces (FILE *);
3716 static void print_die (dw_die_ref, FILE *);
3717 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3718 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3719 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3720 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3721 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3722 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3723 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3724 struct md5_ctx *, int *);
3725 struct checksum_attributes;
3726 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3727 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3728 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3729 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3730 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3731 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3732 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3733 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3734 static int is_type_die (dw_die_ref);
3735 static inline bool is_template_instantiation (dw_die_ref);
3736 static int is_declaration_die (dw_die_ref);
3737 static int should_move_die_to_comdat (dw_die_ref);
3738 static dw_die_ref clone_as_declaration (dw_die_ref);
3739 static dw_die_ref clone_die (dw_die_ref);
3740 static dw_die_ref clone_tree (dw_die_ref);
3741 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3742 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3743 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3744 static dw_die_ref generate_skeleton (dw_die_ref);
3745 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3746 dw_die_ref,
3747 dw_die_ref);
3748 static void break_out_comdat_types (dw_die_ref);
3749 static void copy_decls_for_unworthy_types (dw_die_ref);
3750
3751 static void add_sibling_attributes (dw_die_ref);
3752 static void output_location_lists (dw_die_ref);
3753 static int constant_size (unsigned HOST_WIDE_INT);
3754 static unsigned long size_of_die (dw_die_ref);
3755 static void calc_die_sizes (dw_die_ref);
3756 static void calc_base_type_die_sizes (void);
3757 static void mark_dies (dw_die_ref);
3758 static void unmark_dies (dw_die_ref);
3759 static void unmark_all_dies (dw_die_ref);
3760 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3761 static unsigned long size_of_aranges (void);
3762 static enum dwarf_form value_format (dw_attr_node *);
3763 static void output_value_format (dw_attr_node *);
3764 static void output_abbrev_section (void);
3765 static void output_die_abbrevs (unsigned long, dw_die_ref);
3766 static void output_die (dw_die_ref);
3767 static void output_compilation_unit_header (enum dwarf_unit_type);
3768 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3769 static void output_comdat_type_unit (comdat_type_node *, bool);
3770 static const char *dwarf2_name (tree, int);
3771 static void add_pubname (tree, dw_die_ref);
3772 static void add_enumerator_pubname (const char *, dw_die_ref);
3773 static void add_pubname_string (const char *, dw_die_ref);
3774 static void add_pubtype (tree, dw_die_ref);
3775 static void output_pubnames (vec<pubname_entry, va_gc> *);
3776 static void output_aranges (void);
3777 static unsigned int add_ranges (const_tree, bool = false);
3778 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3779 bool *, bool);
3780 static void output_ranges (void);
3781 static dw_line_info_table *new_line_info_table (void);
3782 static void output_line_info (bool);
3783 static void output_file_names (void);
3784 static dw_die_ref base_type_die (tree, bool);
3785 static int is_base_type (tree);
3786 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3787 static int decl_quals (const_tree);
3788 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3789 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3790 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3791 static unsigned int dbx_reg_number (const_rtx);
3792 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3793 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3794 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3795 enum var_init_status);
3796 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3797 enum var_init_status);
3798 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3799 enum var_init_status);
3800 static int is_based_loc (const_rtx);
3801 static bool resolve_one_addr (rtx *);
3802 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3803 enum var_init_status);
3804 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3805 enum var_init_status);
3806 struct loc_descr_context;
3807 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3808 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3809 static dw_loc_list_ref loc_list_from_tree (tree, int,
3810 struct loc_descr_context *);
3811 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3812 struct loc_descr_context *);
3813 static tree field_type (const_tree);
3814 static unsigned int simple_type_align_in_bits (const_tree);
3815 static unsigned int simple_decl_align_in_bits (const_tree);
3816 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3817 struct vlr_context;
3818 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3819 HOST_WIDE_INT *);
3820 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3821 dw_loc_list_ref);
3822 static void add_data_member_location_attribute (dw_die_ref, tree,
3823 struct vlr_context *);
3824 static bool add_const_value_attribute (dw_die_ref, rtx);
3825 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3826 static void insert_wide_int (const wide_int &, unsigned char *, int);
3827 static void insert_float (const_rtx, unsigned char *);
3828 static rtx rtl_for_decl_location (tree);
3829 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3830 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3831 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3832 static void add_name_attribute (dw_die_ref, const char *);
3833 static void add_desc_attribute (dw_die_ref, tree);
3834 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3835 static void add_comp_dir_attribute (dw_die_ref);
3836 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3837 struct loc_descr_context *);
3838 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3839 struct loc_descr_context *);
3840 static void add_subscript_info (dw_die_ref, tree, bool);
3841 static void add_byte_size_attribute (dw_die_ref, tree);
3842 static void add_alignment_attribute (dw_die_ref, tree);
3843 static void add_bit_offset_attribute (dw_die_ref, tree);
3844 static void add_bit_size_attribute (dw_die_ref, tree);
3845 static void add_prototyped_attribute (dw_die_ref, tree);
3846 static void add_abstract_origin_attribute (dw_die_ref, tree);
3847 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3848 static void add_src_coords_attributes (dw_die_ref, tree);
3849 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3850 static void add_discr_value (dw_die_ref, dw_discr_value *);
3851 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3852 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3853 static dw_die_ref scope_die_for (tree, dw_die_ref);
3854 static inline int local_scope_p (dw_die_ref);
3855 static inline int class_scope_p (dw_die_ref);
3856 static inline int class_or_namespace_scope_p (dw_die_ref);
3857 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3858 static void add_calling_convention_attribute (dw_die_ref, tree);
3859 static const char *type_tag (const_tree);
3860 static tree member_declared_type (const_tree);
3861 #if 0
3862 static const char *decl_start_label (tree);
3863 #endif
3864 static void gen_array_type_die (tree, dw_die_ref);
3865 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3866 #if 0
3867 static void gen_entry_point_die (tree, dw_die_ref);
3868 #endif
3869 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3870 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3871 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3872 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3873 static void gen_formal_types_die (tree, dw_die_ref);
3874 static void gen_subprogram_die (tree, dw_die_ref);
3875 static void gen_variable_die (tree, tree, dw_die_ref);
3876 static void gen_const_die (tree, dw_die_ref);
3877 static void gen_label_die (tree, dw_die_ref);
3878 static void gen_lexical_block_die (tree, dw_die_ref);
3879 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3880 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3881 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3882 static dw_die_ref gen_compile_unit_die (const char *);
3883 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3884 static void gen_member_die (tree, dw_die_ref);
3885 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3886 enum debug_info_usage);
3887 static void gen_subroutine_type_die (tree, dw_die_ref);
3888 static void gen_typedef_die (tree, dw_die_ref);
3889 static void gen_type_die (tree, dw_die_ref);
3890 static void gen_block_die (tree, dw_die_ref);
3891 static void decls_for_scope (tree, dw_die_ref, bool = true);
3892 static bool is_naming_typedef_decl (const_tree);
3893 static inline dw_die_ref get_context_die (tree);
3894 static void gen_namespace_die (tree, dw_die_ref);
3895 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3896 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3897 static dw_die_ref force_decl_die (tree);
3898 static dw_die_ref force_type_die (tree);
3899 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3900 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3901 static struct dwarf_file_data * lookup_filename (const char *);
3902 static void retry_incomplete_types (void);
3903 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3904 static void gen_generic_params_dies (tree);
3905 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3906 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3907 static void splice_child_die (dw_die_ref, dw_die_ref);
3908 static int file_info_cmp (const void *, const void *);
3909 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3910 const char *, var_loc_view, const char *);
3911 static void output_loc_list (dw_loc_list_ref);
3912 static char *gen_internal_sym (const char *);
3913 static bool want_pubnames (void);
3914
3915 static void prune_unmark_dies (dw_die_ref);
3916 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3917 static void prune_unused_types_mark (dw_die_ref, int);
3918 static void prune_unused_types_walk (dw_die_ref);
3919 static void prune_unused_types_walk_attribs (dw_die_ref);
3920 static void prune_unused_types_prune (dw_die_ref);
3921 static void prune_unused_types (void);
3922 static int maybe_emit_file (struct dwarf_file_data *fd);
3923 static inline const char *AT_vms_delta1 (dw_attr_node *);
3924 static inline const char *AT_vms_delta2 (dw_attr_node *);
3925 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3926 const char *, const char *);
3927 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3928 static void gen_remaining_tmpl_value_param_die_attribute (void);
3929 static bool generic_type_p (tree);
3930 static void schedule_generic_params_dies_gen (tree t);
3931 static void gen_scheduled_generic_parms_dies (void);
3932 static void resolve_variable_values (void);
3933
3934 static const char *comp_dir_string (void);
3935
3936 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3937
3938 /* enum for tracking thread-local variables whose address is really an offset
3939 relative to the TLS pointer, which will need link-time relocation, but will
3940 not need relocation by the DWARF consumer. */
3941
3942 enum dtprel_bool
3943 {
3944 dtprel_false = 0,
3945 dtprel_true = 1
3946 };
3947
3948 /* Return the operator to use for an address of a variable. For dtprel_true, we
3949 use DW_OP_const*. For regular variables, which need both link-time
3950 relocation and consumer-level relocation (e.g., to account for shared objects
3951 loaded at a random address), we use DW_OP_addr*. */
3952
3953 static inline enum dwarf_location_atom
3954 dw_addr_op (enum dtprel_bool dtprel)
3955 {
3956 if (dtprel == dtprel_true)
3957 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3958 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3959 else
3960 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3961 }
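
/* As an illustration of the logic above: with dwarf_split_debug_info clear
   and DWARF2_ADDR_SIZE == 8, dw_addr_op (dtprel_true) yields DW_OP_const8u
   and dw_addr_op (dtprel_false) yields DW_OP_addr; with split debug info
   the indexed DW_OP_constx / DW_OP_addrx forms are used instead.  */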
3962
3963 /* Return a pointer to a newly allocated address location description. If
3964 dwarf_split_debug_info is true, then record the address with the appropriate
3965 relocation. */
3966 static inline dw_loc_descr_ref
3967 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3968 {
3969 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3970
3971 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3972 ref->dw_loc_oprnd1.v.val_addr = addr;
3973 ref->dtprel = dtprel;
3974 if (dwarf_split_debug_info)
3975 ref->dw_loc_oprnd1.val_entry
3976 = add_addr_table_entry (addr,
3977 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3978 else
3979 ref->dw_loc_oprnd1.val_entry = NULL;
3980
3981 return ref;
3982 }
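
/* Minimal usage sketch (the decl and its RTL are hypothetical):

     rtx sym = XEXP (DECL_RTL (decl), 0);
     dw_loc_descr_ref loc = new_addr_loc_descr (sym, dtprel_false);

   which produces a DW_OP_addr (or DW_OP_addrx when splitting debug info)
   descriptor whose first operand is the given address rtx.  */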
3983
3984 /* Section names used to hold DWARF debugging information. */
3985
3986 #ifndef DEBUG_INFO_SECTION
3987 #define DEBUG_INFO_SECTION ".debug_info"
3988 #endif
3989 #ifndef DEBUG_DWO_INFO_SECTION
3990 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3991 #endif
3992 #ifndef DEBUG_LTO_INFO_SECTION
3993 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3994 #endif
3995 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3996 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3997 #endif
3998 #ifndef DEBUG_ABBREV_SECTION
3999 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
4000 #endif
4001 #ifndef DEBUG_LTO_ABBREV_SECTION
4002 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
4003 #endif
4004 #ifndef DEBUG_DWO_ABBREV_SECTION
4005 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
4006 #endif
4007 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
4008 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
4009 #endif
4010 #ifndef DEBUG_ARANGES_SECTION
4011 #define DEBUG_ARANGES_SECTION ".debug_aranges"
4012 #endif
4013 #ifndef DEBUG_ADDR_SECTION
4014 #define DEBUG_ADDR_SECTION ".debug_addr"
4015 #endif
4016 #ifndef DEBUG_MACINFO_SECTION
4017 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4018 #endif
4019 #ifndef DEBUG_LTO_MACINFO_SECTION
4020 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4021 #endif
4022 #ifndef DEBUG_DWO_MACINFO_SECTION
4023 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4024 #endif
4025 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4026 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4027 #endif
4028 #ifndef DEBUG_MACRO_SECTION
4029 #define DEBUG_MACRO_SECTION ".debug_macro"
4030 #endif
4031 #ifndef DEBUG_LTO_MACRO_SECTION
4032 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4033 #endif
4034 #ifndef DEBUG_DWO_MACRO_SECTION
4035 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4036 #endif
4037 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4038 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4039 #endif
4040 #ifndef DEBUG_LINE_SECTION
4041 #define DEBUG_LINE_SECTION ".debug_line"
4042 #endif
4043 #ifndef DEBUG_LTO_LINE_SECTION
4044 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4045 #endif
4046 #ifndef DEBUG_DWO_LINE_SECTION
4047 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4048 #endif
4049 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4050 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4051 #endif
4052 #ifndef DEBUG_LOC_SECTION
4053 #define DEBUG_LOC_SECTION ".debug_loc"
4054 #endif
4055 #ifndef DEBUG_DWO_LOC_SECTION
4056 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4057 #endif
4058 #ifndef DEBUG_LOCLISTS_SECTION
4059 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4060 #endif
4061 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4062 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4063 #endif
4064 #ifndef DEBUG_PUBNAMES_SECTION
4065 #define DEBUG_PUBNAMES_SECTION \
4066 ((debug_generate_pub_sections == 2) \
4067 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4068 #endif
4069 #ifndef DEBUG_PUBTYPES_SECTION
4070 #define DEBUG_PUBTYPES_SECTION \
4071 ((debug_generate_pub_sections == 2) \
4072 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4073 #endif
4074 #ifndef DEBUG_STR_OFFSETS_SECTION
4075 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4076 #endif
4077 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4078 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4079 #endif
4080 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4081 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4082 #endif
4083 #ifndef DEBUG_STR_SECTION
4084 #define DEBUG_STR_SECTION ".debug_str"
4085 #endif
4086 #ifndef DEBUG_LTO_STR_SECTION
4087 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4088 #endif
4089 #ifndef DEBUG_STR_DWO_SECTION
4090 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4091 #endif
4092 #ifndef DEBUG_LTO_STR_DWO_SECTION
4093 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4094 #endif
4095 #ifndef DEBUG_RANGES_SECTION
4096 #define DEBUG_RANGES_SECTION ".debug_ranges"
4097 #endif
4098 #ifndef DEBUG_RNGLISTS_SECTION
4099 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4100 #endif
4101 #ifndef DEBUG_LINE_STR_SECTION
4102 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4103 #endif
4104 #ifndef DEBUG_LTO_LINE_STR_SECTION
4105 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4106 #endif
4107
4108 /* Standard ELF section names for compiled code and data. */
4109 #ifndef TEXT_SECTION_NAME
4110 #define TEXT_SECTION_NAME ".text"
4111 #endif
4112
4113 /* Section flags for .debug_str section. */
4114 #define DEBUG_STR_SECTION_FLAGS \
4115 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4116 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4117 : SECTION_DEBUG)
4118
4119 /* Section flags for .debug_str.dwo section. */
4120 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4121
4122 /* Attribute used to refer to the macro section. */
4123 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4124 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
4125
4126 /* Labels we insert at the beginning of sections so we can reference
4127    them instead of the section names themselves.  */
4128
4129 #ifndef TEXT_SECTION_LABEL
4130 #define TEXT_SECTION_LABEL "Ltext"
4131 #endif
4132 #ifndef COLD_TEXT_SECTION_LABEL
4133 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4134 #endif
4135 #ifndef DEBUG_LINE_SECTION_LABEL
4136 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4137 #endif
4138 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4139 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4140 #endif
4141 #ifndef DEBUG_INFO_SECTION_LABEL
4142 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4143 #endif
4144 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4145 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4146 #endif
4147 #ifndef DEBUG_ABBREV_SECTION_LABEL
4148 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4149 #endif
4150 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4151 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4152 #endif
4153 #ifndef DEBUG_ADDR_SECTION_LABEL
4154 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4155 #endif
4156 #ifndef DEBUG_LOC_SECTION_LABEL
4157 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4158 #endif
4159 #ifndef DEBUG_RANGES_SECTION_LABEL
4160 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4161 #endif
4162 #ifndef DEBUG_MACINFO_SECTION_LABEL
4163 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4164 #endif
4165 #ifndef DEBUG_MACRO_SECTION_LABEL
4166 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4167 #endif
4168 #define SKELETON_COMP_DIE_ABBREV 1
4169 #define SKELETON_TYPE_DIE_ABBREV 2
4170
4171 /* Definitions of defaults for formats and names of various special
4172 (artificial) labels which may be generated within this file (when the -g
4173    option is used and DWARF2_DEBUGGING_INFO is in effect).
4174 If necessary, these may be overridden from within the tm.h file, but
4175 typically, overriding these defaults is unnecessary. */
4176
4177 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4178 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4179 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4180 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4181 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4182 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4183 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4184 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4185 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4186 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4187 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4188 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4189 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4190 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4191 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4192
4193 #ifndef TEXT_END_LABEL
4194 #define TEXT_END_LABEL "Letext"
4195 #endif
4196 #ifndef COLD_END_LABEL
4197 #define COLD_END_LABEL "Letext_cold"
4198 #endif
4199 #ifndef BLOCK_BEGIN_LABEL
4200 #define BLOCK_BEGIN_LABEL "LBB"
4201 #endif
4202 #ifndef BLOCK_INLINE_ENTRY_LABEL
4203 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4204 #endif
4205 #ifndef BLOCK_END_LABEL
4206 #define BLOCK_END_LABEL "LBE"
4207 #endif
4208 #ifndef LINE_CODE_LABEL
4209 #define LINE_CODE_LABEL "LM"
4210 #endif
4211
4212 \f
4213 /* Return the root of the DIE's built for the current compilation unit. */
4214 static dw_die_ref
4215 comp_unit_die (void)
4216 {
4217 if (!single_comp_unit_die)
4218 single_comp_unit_die = gen_compile_unit_die (NULL);
4219 return single_comp_unit_die;
4220 }
4221
4222 /* We allow a language front-end to designate a function that is to be
4223 called to "demangle" any name before it is put into a DIE. */
4224
4225 static const char *(*demangle_name_func) (const char *);
4226
4227 void
4228 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4229 {
4230 demangle_name_func = func;
4231 }
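
/* A language front end registers its demangling hook along these lines
   (my_demangle is a hypothetical callback that returns a demangled form of
   its argument, or the argument itself):

     dwarf2out_set_demangle_name_func (my_demangle);

   This is only a usage sketch, not a call made in this file.  */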
4232
4233 /* Test if rtl node points to a pseudo register. */
4234
4235 static inline int
4236 is_pseudo_reg (const_rtx rtl)
4237 {
4238 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4239 || (GET_CODE (rtl) == SUBREG
4240 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4241 }
4242
4243 /* Return a reference to a type, with its const and volatile qualifiers
4244 removed. */
4245
4246 static inline tree
4247 type_main_variant (tree type)
4248 {
4249 type = TYPE_MAIN_VARIANT (type);
4250
4251 /* ??? There really should be only one main variant among any group of
4252 variants of a given type (and all of the MAIN_VARIANT values for all
4253 members of the group should point to that one type) but sometimes the C
4254 front-end messes this up for array types, so we work around that bug
4255 here. */
4256 if (TREE_CODE (type) == ARRAY_TYPE)
4257 while (type != TYPE_MAIN_VARIANT (type))
4258 type = TYPE_MAIN_VARIANT (type);
4259
4260 return type;
4261 }
4262
4263 /* Return nonzero if the given type node represents a tagged type. */
4264
4265 static inline int
4266 is_tagged_type (const_tree type)
4267 {
4268 enum tree_code code = TREE_CODE (type);
4269
4270 return (code == RECORD_TYPE || code == UNION_TYPE
4271 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4272 }
4273
4274 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4275
4276 static void
4277 get_ref_die_offset_label (char *label, dw_die_ref ref)
4278 {
4279 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4280 }
4281
4282 /* Return die_offset of a DIE reference to a base type. */
4283
4284 static unsigned long int
4285 get_base_type_offset (dw_die_ref ref)
4286 {
4287 if (ref->die_offset)
4288 return ref->die_offset;
4289 if (comp_unit_die ()->die_abbrev)
4290 {
4291 calc_base_type_die_sizes ();
4292 gcc_assert (ref->die_offset);
4293 }
4294 return ref->die_offset;
4295 }
4296
4297 /* Return die_offset of a DIE reference other than base type. */
4298
4299 static unsigned long int
4300 get_ref_die_offset (dw_die_ref ref)
4301 {
4302 gcc_assert (ref->die_offset);
4303 return ref->die_offset;
4304 }
4305
4306 /* Convert a DIE tag into its string name. */
4307
4308 static const char *
4309 dwarf_tag_name (unsigned int tag)
4310 {
4311 const char *name = get_DW_TAG_name (tag);
4312
4313 if (name != NULL)
4314 return name;
4315
4316 return "DW_TAG_<unknown>";
4317 }
4318
4319 /* Convert a DWARF attribute code into its string name. */
4320
4321 static const char *
4322 dwarf_attr_name (unsigned int attr)
4323 {
4324 const char *name;
4325
4326 switch (attr)
4327 {
4328 #if VMS_DEBUGGING_INFO
4329 case DW_AT_HP_prologue:
4330 return "DW_AT_HP_prologue";
4331 #else
4332 case DW_AT_MIPS_loop_unroll_factor:
4333 return "DW_AT_MIPS_loop_unroll_factor";
4334 #endif
4335
4336 #if VMS_DEBUGGING_INFO
4337 case DW_AT_HP_epilogue:
4338 return "DW_AT_HP_epilogue";
4339 #else
4340 case DW_AT_MIPS_stride:
4341 return "DW_AT_MIPS_stride";
4342 #endif
4343 }
4344
4345 name = get_DW_AT_name (attr);
4346
4347 if (name != NULL)
4348 return name;
4349
4350 return "DW_AT_<unknown>";
4351 }
4352
4353 /* Convert a DWARF value form code into its string name. */
4354
4355 static const char *
4356 dwarf_form_name (unsigned int form)
4357 {
4358 const char *name = get_DW_FORM_name (form);
4359
4360 if (name != NULL)
4361 return name;
4362
4363 return "DW_FORM_<unknown>";
4364 }
4365 \f
4366 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4367 instance of an inlined instance of a decl which is local to an inline
4368 function, so we have to trace all of the way back through the origin chain
4369 to find out what sort of node actually served as the original seed for the
4370 given block. */
4371
4372 static tree
4373 decl_ultimate_origin (const_tree decl)
4374 {
4375 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4376 return NULL_TREE;
4377
4378 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4379 we're trying to output the abstract instance of this function. */
4380 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4381 return NULL_TREE;
4382
4383 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4384 most distant ancestor, this should never happen. */
4385 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4386
4387 return DECL_ABSTRACT_ORIGIN (decl);
4388 }
4389
4390 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4391 of a virtual function may refer to a base class, so we check the 'this'
4392 parameter. */
4393
4394 static tree
4395 decl_class_context (tree decl)
4396 {
4397 tree context = NULL_TREE;
4398
4399 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4400 context = DECL_CONTEXT (decl);
4401 else
4402 context = TYPE_MAIN_VARIANT
4403 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4404
4405 if (context && !TYPE_P (context))
4406 context = NULL_TREE;
4407
4408 return context;
4409 }
4410 \f
4411 /* Add an attribute/value pair to a DIE. */
4412
4413 static inline void
4414 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4415 {
4416 /* Maybe this should be an assert? */
4417 if (die == NULL)
4418 return;
4419
4420 if (flag_checking)
4421 {
4422 /* Check we do not add duplicate attrs. Can't use get_AT here
4423 because that recurses to the specification/abstract origin DIE. */
4424 dw_attr_node *a;
4425 unsigned ix;
4426 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4427 gcc_assert (a->dw_attr != attr->dw_attr);
4428 }
4429
4430 vec_safe_reserve (die->die_attr, 1);
4431 vec_safe_push (die->die_attr, *attr);
4432 }
4433
4434 static inline enum dw_val_class
4435 AT_class (dw_attr_node *a)
4436 {
4437 return a->dw_attr_val.val_class;
4438 }
4439
4440 /* Return the index for any attribute that will be referenced with a
4441 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4442 indices are stored in dw_attr_val.v.val_str for reference counting
4443 pruning. */
4444
4445 static inline unsigned int
4446 AT_index (dw_attr_node *a)
4447 {
4448 if (AT_class (a) == dw_val_class_str)
4449 return a->dw_attr_val.v.val_str->index;
4450 else if (a->dw_attr_val.val_entry != NULL)
4451 return a->dw_attr_val.val_entry->index;
4452 return NOT_INDEXED;
4453 }
4454
4455 /* Add a flag value attribute to a DIE. */
4456
4457 static inline void
4458 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4459 {
4460 dw_attr_node attr;
4461
4462 attr.dw_attr = attr_kind;
4463 attr.dw_attr_val.val_class = dw_val_class_flag;
4464 attr.dw_attr_val.val_entry = NULL;
4465 attr.dw_attr_val.v.val_flag = flag;
4466 add_dwarf_attr (die, &attr);
4467 }
4468
4469 static inline unsigned
4470 AT_flag (dw_attr_node *a)
4471 {
4472 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4473 return a->dw_attr_val.v.val_flag;
4474 }
4475
4476 /* Add a signed integer attribute value to a DIE. */
4477
4478 static inline void
4479 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4480 {
4481 dw_attr_node attr;
4482
4483 attr.dw_attr = attr_kind;
4484 attr.dw_attr_val.val_class = dw_val_class_const;
4485 attr.dw_attr_val.val_entry = NULL;
4486 attr.dw_attr_val.v.val_int = int_val;
4487 add_dwarf_attr (die, &attr);
4488 }
4489
4490 static inline HOST_WIDE_INT
4491 AT_int (dw_attr_node *a)
4492 {
4493 gcc_assert (a && (AT_class (a) == dw_val_class_const
4494 || AT_class (a) == dw_val_class_const_implicit));
4495 return a->dw_attr_val.v.val_int;
4496 }
4497
4498 /* Add an unsigned integer attribute value to a DIE. */
4499
4500 static inline void
4501 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4502 unsigned HOST_WIDE_INT unsigned_val)
4503 {
4504 dw_attr_node attr;
4505
4506 attr.dw_attr = attr_kind;
4507 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4508 attr.dw_attr_val.val_entry = NULL;
4509 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4510 add_dwarf_attr (die, &attr);
4511 }
4512
4513 static inline unsigned HOST_WIDE_INT
4514 AT_unsigned (dw_attr_node *a)
4515 {
4516 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4517 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4518 return a->dw_attr_val.v.val_unsigned;
4519 }
4520
4521 /* Add an unsigned wide integer attribute value to a DIE. */
4522
4523 static inline void
4524 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4525 const wide_int& w)
4526 {
4527 dw_attr_node attr;
4528
4529 attr.dw_attr = attr_kind;
4530 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4531 attr.dw_attr_val.val_entry = NULL;
4532 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4533 *attr.dw_attr_val.v.val_wide = w;
4534 add_dwarf_attr (die, &attr);
4535 }
4536
4537 /* Add an unsigned double integer attribute value to a DIE. */
4538
4539 static inline void
4540 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4541 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4542 {
4543 dw_attr_node attr;
4544
4545 attr.dw_attr = attr_kind;
4546 attr.dw_attr_val.val_class = dw_val_class_const_double;
4547 attr.dw_attr_val.val_entry = NULL;
4548 attr.dw_attr_val.v.val_double.high = high;
4549 attr.dw_attr_val.v.val_double.low = low;
4550 add_dwarf_attr (die, &attr);
4551 }
4552
4553 /* Add a byte-vector attribute value (e.g. a floating point constant) to a DIE.  */
4554
4555 static inline void
4556 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4557 unsigned int length, unsigned int elt_size, unsigned char *array)
4558 {
4559 dw_attr_node attr;
4560
4561 attr.dw_attr = attr_kind;
4562 attr.dw_attr_val.val_class = dw_val_class_vec;
4563 attr.dw_attr_val.val_entry = NULL;
4564 attr.dw_attr_val.v.val_vec.length = length;
4565 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4566 attr.dw_attr_val.v.val_vec.array = array;
4567 add_dwarf_attr (die, &attr);
4568 }
4569
4570 /* Add an 8-byte data attribute value to a DIE. */
4571
4572 static inline void
4573 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4574 unsigned char data8[8])
4575 {
4576 dw_attr_node attr;
4577
4578 attr.dw_attr = attr_kind;
4579 attr.dw_attr_val.val_class = dw_val_class_data8;
4580 attr.dw_attr_val.val_entry = NULL;
4581 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4582 add_dwarf_attr (die, &attr);
4583 }
4584
4585 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4586 dwarf_split_debug_info, address attributes in dies destined for the
4587 final executable have force_direct set to avoid using indexed
4588 references. */
4589
4590 static inline void
4591 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4592 bool force_direct)
4593 {
4594 dw_attr_node attr;
4595 char * lbl_id;
4596
4597 lbl_id = xstrdup (lbl_low);
4598 attr.dw_attr = DW_AT_low_pc;
4599 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4600 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4601 if (dwarf_split_debug_info && !force_direct)
4602 attr.dw_attr_val.val_entry
4603 = add_addr_table_entry (lbl_id, ate_kind_label);
4604 else
4605 attr.dw_attr_val.val_entry = NULL;
4606 add_dwarf_attr (die, &attr);
4607
4608 attr.dw_attr = DW_AT_high_pc;
4609 if (dwarf_version < 4)
4610 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4611 else
4612 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4613 lbl_id = xstrdup (lbl_high);
4614 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4615 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4616 && dwarf_split_debug_info && !force_direct)
4617 attr.dw_attr_val.val_entry
4618 = add_addr_table_entry (lbl_id, ate_kind_label);
4619 else
4620 attr.dw_attr_val.val_entry = NULL;
4621 add_dwarf_attr (die, &attr);
4622 }
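
/* Usage sketch (the DIE and label names are illustrative only):

     add_AT_low_high_pc (subr_die, "LFB42", "LFE42", false);

   attaches DW_AT_low_pc/DW_AT_high_pc for the range bounded by the two
   labels; for DWARF 4 and later the high-pc value is classed as
   dw_val_class_high_pc so that it can be emitted relative to the low pc.  */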
4623
4624 /* Hash and equality functions for debug_str_hash. */
4625
4626 hashval_t
4627 indirect_string_hasher::hash (indirect_string_node *x)
4628 {
4629 return htab_hash_string (x->str);
4630 }
4631
4632 bool
4633 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4634 {
4635 return strcmp (x1->str, x2) == 0;
4636 }
4637
4638 /* Add STR to the given string hash table. */
4639
4640 static struct indirect_string_node *
4641 find_AT_string_in_table (const char *str,
4642 hash_table<indirect_string_hasher> *table,
4643 enum insert_option insert = INSERT)
4644 {
4645 struct indirect_string_node *node;
4646
4647 indirect_string_node **slot
4648 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4649 if (*slot == NULL)
4650 {
4651 node = ggc_cleared_alloc<indirect_string_node> ();
4652 node->str = ggc_strdup (str);
4653 *slot = node;
4654 }
4655 else
4656 node = *slot;
4657
4658 node->refcount++;
4659 return node;
4660 }
4661
4662 /* Add STR to the indirect string hash table. */
4663
4664 static struct indirect_string_node *
4665 find_AT_string (const char *str, enum insert_option insert = INSERT)
4666 {
4667 if (! debug_str_hash)
4668 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4669
4670 return find_AT_string_in_table (str, debug_str_hash, insert);
4671 }
4672
4673 /* Add a string attribute value to a DIE. */
4674
4675 static inline void
4676 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4677 {
4678 dw_attr_node attr;
4679 struct indirect_string_node *node;
4680
4681 node = find_AT_string (str);
4682
4683 attr.dw_attr = attr_kind;
4684 attr.dw_attr_val.val_class = dw_val_class_str;
4685 attr.dw_attr_val.val_entry = NULL;
4686 attr.dw_attr_val.v.val_str = node;
4687 add_dwarf_attr (die, &attr);
4688 }
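
/* For instance, a name attribute typically reaches this point via a call of
   the form

     add_AT_string (die, DW_AT_name, "foo");

   (values illustrative only); the string is interned in debug_str_hash and
   its final form (inline vs. a .debug_str reference) is decided later by
   AT_string_form.  */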
4689
4690 static inline const char *
4691 AT_string (dw_attr_node *a)
4692 {
4693 gcc_assert (a && AT_class (a) == dw_val_class_str);
4694 return a->dw_attr_val.v.val_str->str;
4695 }
4696
4697 /* Call this function directly to bypass AT_string_form's logic to put
4698 the string inline in the die. */
4699
4700 static void
4701 set_indirect_string (struct indirect_string_node *node)
4702 {
4703 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4704 /* Already indirect is a no op. */
4705 if (node->form == DW_FORM_strp
4706 || node->form == DW_FORM_line_strp
4707 || node->form == dwarf_FORM (DW_FORM_strx))
4708 {
4709 gcc_assert (node->label);
4710 return;
4711 }
4712 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4713 ++dw2_string_counter;
4714 node->label = xstrdup (label);
4715
4716 if (!dwarf_split_debug_info)
4717 {
4718 node->form = DW_FORM_strp;
4719 node->index = NOT_INDEXED;
4720 }
4721 else
4722 {
4723 node->form = dwarf_FORM (DW_FORM_strx);
4724 node->index = NO_INDEX_ASSIGNED;
4725 }
4726 }
4727
4728 /* A helper function for dwarf2out_finish, called to reset indirect
4729 string decisions done for early LTO dwarf output before fat object
4730 dwarf output. */
4731
4732 int
4733 reset_indirect_string (indirect_string_node **h, void *)
4734 {
4735 struct indirect_string_node *node = *h;
4736 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4737 {
4738 free (node->label);
4739 node->label = NULL;
4740 node->form = (dwarf_form) 0;
4741 node->index = 0;
4742 }
4743 return 1;
4744 }
4745
4746 /* Add a string representing a file or filepath attribute value to a DIE. */
4747
4748 static inline void
4749 add_filepath_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
4750 const char *str)
4751 {
4752 if (! asm_outputs_debug_line_str ())
4753 add_AT_string (die, attr_kind, str);
4754 else
4755 {
4756 dw_attr_node attr;
4757 struct indirect_string_node *node;
4758
4759 if (!debug_line_str_hash)
4760 debug_line_str_hash
4761 = hash_table<indirect_string_hasher>::create_ggc (10);
4762
4763 node = find_AT_string_in_table (str, debug_line_str_hash);
4764 set_indirect_string (node);
4765 node->form = DW_FORM_line_strp;
4766
4767 attr.dw_attr = attr_kind;
4768 attr.dw_attr_val.val_class = dw_val_class_str;
4769 attr.dw_attr_val.val_entry = NULL;
4770 attr.dw_attr_val.v.val_str = node;
4771 add_dwarf_attr (die, &attr);
4772 }
4773 }
4774
4775 /* Find out whether a string should be output inline in DIE
4776 or out-of-line in .debug_str section. */
4777
4778 static enum dwarf_form
4779 find_string_form (struct indirect_string_node *node)
4780 {
4781 unsigned int len;
4782
4783 if (node->form)
4784 return node->form;
4785
4786 len = strlen (node->str) + 1;
4787
4788 /* If the string is shorter or equal to the size of the reference, it is
4789 always better to put it inline. */
4790 if (len <= (unsigned) dwarf_offset_size || node->refcount == 0)
4791 return node->form = DW_FORM_string;
4792
4793   /* If we cannot expect the linker to merge strings in the .debug_str
4794      section, only put the string into .debug_str if it pays off even
4795      within this single module.  */
4796 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4797 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4798 && (len - dwarf_offset_size) * node->refcount <= len))
4799 return node->form = DW_FORM_string;
4800
4801 set_indirect_string (node);
4802
4803 return node->form;
4804 }
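
/* Worked example of the trade-off above, with illustrative numbers and
   assuming the target supports indirect strings at all: for
   dwarf_offset_size == 4 and a string of length 10 (including the
   terminating NUL) referenced twice, keeping it inline costs 2 * 10 = 20
   bytes while .debug_str costs 2 * 4 + 10 = 18 bytes; since
   (len - dwarf_offset_size) * refcount = 12 > len = 10, the indirect form
   is chosen even when the linker cannot merge strings.  */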
4805
4806 /* Find out whether the string referenced from the attribute should be
4807 output inline in DIE or out-of-line in .debug_str section. */
4808
4809 static enum dwarf_form
4810 AT_string_form (dw_attr_node *a)
4811 {
4812 gcc_assert (a && AT_class (a) == dw_val_class_str);
4813 return find_string_form (a->dw_attr_val.v.val_str);
4814 }
4815
4816 /* Add a DIE reference attribute value to a DIE. */
4817
4818 static inline void
4819 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4820 {
4821 dw_attr_node attr;
4822 gcc_checking_assert (targ_die != NULL);
4823
4824 /* With LTO we can end up trying to reference something we didn't create
4825 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4826 if (targ_die == NULL)
4827 return;
4828
4829 attr.dw_attr = attr_kind;
4830 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4831 attr.dw_attr_val.val_entry = NULL;
4832 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4833 attr.dw_attr_val.v.val_die_ref.external = 0;
4834 add_dwarf_attr (die, &attr);
4835 }
4836
4837 /* Change DIE reference REF to point to NEW_DIE instead. */
4838
4839 static inline void
4840 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4841 {
4842 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4843 ref->dw_attr_val.v.val_die_ref.die = new_die;
4844 ref->dw_attr_val.v.val_die_ref.external = 0;
4845 }
4846
4847 /* Add an AT_specification attribute to a DIE, and also make the back
4848 pointer from the specification to the definition. */
4849
4850 static inline void
4851 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4852 {
4853 add_AT_die_ref (die, DW_AT_specification, targ_die);
4854 gcc_assert (!targ_die->die_definition);
4855 targ_die->die_definition = die;
4856 }
4857
4858 static inline dw_die_ref
4859 AT_ref (dw_attr_node *a)
4860 {
4861 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4862 return a->dw_attr_val.v.val_die_ref.die;
4863 }
4864
4865 static inline int
4866 AT_ref_external (dw_attr_node *a)
4867 {
4868 if (a && AT_class (a) == dw_val_class_die_ref)
4869 return a->dw_attr_val.v.val_die_ref.external;
4870
4871 return 0;
4872 }
4873
4874 static inline void
4875 set_AT_ref_external (dw_attr_node *a, int i)
4876 {
4877 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4878 a->dw_attr_val.v.val_die_ref.external = i;
4879 }
4880
4881 /* Add a location description attribute value to a DIE. */
4882
4883 static inline void
4884 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4885 {
4886 dw_attr_node attr;
4887
4888 attr.dw_attr = attr_kind;
4889 attr.dw_attr_val.val_class = dw_val_class_loc;
4890 attr.dw_attr_val.val_entry = NULL;
4891 attr.dw_attr_val.v.val_loc = loc;
4892 add_dwarf_attr (die, &attr);
4893 }
4894
4895 static inline dw_loc_descr_ref
4896 AT_loc (dw_attr_node *a)
4897 {
4898 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4899 return a->dw_attr_val.v.val_loc;
4900 }
4901
4902 static inline void
4903 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4904 {
4905 dw_attr_node attr;
4906
4907 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4908 return;
4909
4910 attr.dw_attr = attr_kind;
4911 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4912 attr.dw_attr_val.val_entry = NULL;
4913 attr.dw_attr_val.v.val_loc_list = loc_list;
4914 add_dwarf_attr (die, &attr);
4915 have_location_lists = true;
4916 }
4917
4918 static inline dw_loc_list_ref
4919 AT_loc_list (dw_attr_node *a)
4920 {
4921 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4922 return a->dw_attr_val.v.val_loc_list;
4923 }
4924
4925 /* Add a view list attribute to DIE. It must have a DW_AT_location
4926 attribute, because the view list complements the location list. */
4927
4928 static inline void
4929 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4930 {
4931 dw_attr_node attr;
4932
4933 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4934 return;
4935
4936 attr.dw_attr = attr_kind;
4937 attr.dw_attr_val.val_class = dw_val_class_view_list;
4938 attr.dw_attr_val.val_entry = NULL;
4939 attr.dw_attr_val.v.val_view_list = die;
4940 add_dwarf_attr (die, &attr);
4941 gcc_checking_assert (get_AT (die, DW_AT_location));
4942 gcc_assert (have_location_lists);
4943 }
4944
4945 /* Return a pointer to the location list referenced by the attribute.
4946 If the named attribute is a view list, look up the corresponding
4947 DW_AT_location attribute and return its location list. */
4948
4949 static inline dw_loc_list_ref *
4950 AT_loc_list_ptr (dw_attr_node *a)
4951 {
4952 gcc_assert (a);
4953 switch (AT_class (a))
4954 {
4955 case dw_val_class_loc_list:
4956 return &a->dw_attr_val.v.val_loc_list;
4957 case dw_val_class_view_list:
4958 {
4959 dw_attr_node *l;
4960 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4961 if (!l)
4962 return NULL;
4963 gcc_checking_assert (l + 1 == a);
4964 return AT_loc_list_ptr (l);
4965 }
4966 default:
4967 gcc_unreachable ();
4968 }
4969 }
4970
4971 /* Return the location attribute value associated with a view list
4972 attribute value. */
4973
4974 static inline dw_val_node *
4975 view_list_to_loc_list_val_node (dw_val_node *val)
4976 {
4977 gcc_assert (val->val_class == dw_val_class_view_list);
4978 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4979 if (!loc)
4980 return NULL;
4981 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4982 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4983 return &loc->dw_attr_val;
4984 }
4985
4986 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4987 {
4988 static hashval_t hash (addr_table_entry *);
4989 static bool equal (addr_table_entry *, addr_table_entry *);
4990 };
4991
4992 /* Table of entries into the .debug_addr section. */
4993
4994 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4995
4996 /* Hash an address_table_entry. */
4997
4998 hashval_t
4999 addr_hasher::hash (addr_table_entry *a)
5000 {
5001 inchash::hash hstate;
5002 switch (a->kind)
5003 {
5004 case ate_kind_rtx:
5005 hstate.add_int (0);
5006 break;
5007 case ate_kind_rtx_dtprel:
5008 hstate.add_int (1);
5009 break;
5010 case ate_kind_label:
5011 return htab_hash_string (a->addr.label);
5012 default:
5013 gcc_unreachable ();
5014 }
5015 inchash::add_rtx (a->addr.rtl, hstate);
5016 return hstate.end ();
5017 }
5018
5019 /* Determine equality for two address_table_entries. */
5020
5021 bool
5022 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
5023 {
5024 if (a1->kind != a2->kind)
5025 return 0;
5026 switch (a1->kind)
5027 {
5028 case ate_kind_rtx:
5029 case ate_kind_rtx_dtprel:
5030 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
5031 case ate_kind_label:
5032 return strcmp (a1->addr.label, a2->addr.label) == 0;
5033 default:
5034 gcc_unreachable ();
5035 }
5036 }
5037
5038 /* Initialize an addr_table_entry. */
5039
5040 void
5041 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5042 {
5043 e->kind = kind;
5044 switch (kind)
5045 {
5046 case ate_kind_rtx:
5047 case ate_kind_rtx_dtprel:
5048 e->addr.rtl = (rtx) addr;
5049 break;
5050 case ate_kind_label:
5051 e->addr.label = (char *) addr;
5052 break;
5053 }
5054 e->refcount = 0;
5055 e->index = NO_INDEX_ASSIGNED;
5056 }
5057
5058 /* Add an entry for ADDR, of the given KIND, to the address table.  Defer
5059    setting an index until output time.  */
5060
5061 static addr_table_entry *
5062 add_addr_table_entry (void *addr, enum ate_kind kind)
5063 {
5064 addr_table_entry *node;
5065 addr_table_entry finder;
5066
5067 gcc_assert (dwarf_split_debug_info);
5068 if (! addr_index_table)
5069 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5070 init_addr_table_entry (&finder, kind, addr);
5071 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5072
5073 if (*slot == HTAB_EMPTY_ENTRY)
5074 {
5075 node = ggc_cleared_alloc<addr_table_entry> ();
5076 init_addr_table_entry (node, kind, addr);
5077 *slot = node;
5078 }
5079 else
5080 node = *slot;
5081
5082 node->refcount++;
5083 return node;
5084 }
5085
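/* Illustrative sketch (not part of the build; SOME_RTX is a
   hypothetical value): a typical lifetime of an entry under
   -gsplit-dwarf is

     addr_table_entry *e = add_addr_table_entry (some_rtx, ate_kind_rtx);
     ...
     remove_addr_table_entry (e);

   Entries are reference counted; indices into .debug_addr are only
   assigned at output time via index_addr_table_entry below.  */
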
5086 /* Remove an entry from the addr table by decrementing its refcount.
5087 Strictly, decrementing the refcount would be enough, but the
5088 assertion that the entry is actually in the table has found
5089 bugs. */
5090
5091 static void
5092 remove_addr_table_entry (addr_table_entry *entry)
5093 {
5094 gcc_assert (dwarf_split_debug_info && addr_index_table);
5095 /* After an index is assigned, the table is frozen. */
5096 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5097 entry->refcount--;
5098 }
5099
5100 /* Given a location list, remove all addresses it refers to from the
5101 address_table. */
5102
5103 static void
5104 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5105 {
5106 for (; descr; descr = descr->dw_loc_next)
5107 if (descr->dw_loc_oprnd1.val_entry != NULL)
5108 {
5109 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5110 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5111 }
5112 }
5113
5114 /* A helper function for dwarf2out_finish called through
5115 htab_traverse. Assign an addr_table_entry its index. All entries
5116 must be collected into the table when this function is called,
5117 because the indexing code relies on htab_traverse to traverse nodes
5118 in the same order for each run. */
5119
5120 int
5121 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5122 {
5123 addr_table_entry *node = *h;
5124
5125 /* Don't index unreferenced nodes. */
5126 if (node->refcount == 0)
5127 return 1;
5128
5129 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5130 node->index = *index;
5131 *index += 1;
5132
5133 return 1;
5134 }
5135
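/* For illustration only (a hedged sketch): the intent is that
   finish-time code walks the frozen table in a stable order, along
   the lines of

     unsigned int index = 0;
     addr_index_table->traverse_noresize
       <unsigned int *, index_addr_table_entry> (&index);

   so that every entry still referenced gets a consecutive index.  */
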
5136 /* Add an address constant attribute value to a DIE. When using
5137 dwarf_split_debug_info, address attributes in dies destined for the
5138 final executable should be direct references--setting the parameter
5139 force_direct ensures this behavior. */
5140
5141 static inline void
5142 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5143 bool force_direct)
5144 {
5145 dw_attr_node attr;
5146
5147 attr.dw_attr = attr_kind;
5148 attr.dw_attr_val.val_class = dw_val_class_addr;
5149 attr.dw_attr_val.v.val_addr = addr;
5150 if (dwarf_split_debug_info && !force_direct)
5151 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5152 else
5153 attr.dw_attr_val.val_entry = NULL;
5154 add_dwarf_attr (die, &attr);
5155 }
5156
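/* A minimal usage sketch (the DIE, attribute and RTX arguments are
   hypothetical):

     add_AT_addr (die, attr_kind, sym_rtx, false);

   routes the address through the .debug_addr table when
   -gsplit-dwarf is in effect, whereas passing FORCE_DIRECT as true
   keeps a directly relocated address, as wanted for attributes that
   end up in the final executable.  */
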
5157 /* Get the RTX from an address DIE attribute. */
5158
5159 static inline rtx
5160 AT_addr (dw_attr_node *a)
5161 {
5162 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5163 return a->dw_attr_val.v.val_addr;
5164 }
5165
5166 /* Add a file attribute value to a DIE. */
5167
5168 static inline void
5169 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5170 struct dwarf_file_data *fd)
5171 {
5172 dw_attr_node attr;
5173
5174 attr.dw_attr = attr_kind;
5175 attr.dw_attr_val.val_class = dw_val_class_file;
5176 attr.dw_attr_val.val_entry = NULL;
5177 attr.dw_attr_val.v.val_file = fd;
5178 add_dwarf_attr (die, &attr);
5179 }
5180
5181 /* Get the dwarf_file_data from a file DIE attribute. */
5182
5183 static inline struct dwarf_file_data *
5184 AT_file (dw_attr_node *a)
5185 {
5186 gcc_assert (a && (AT_class (a) == dw_val_class_file
5187 || AT_class (a) == dw_val_class_file_implicit));
5188 return a->dw_attr_val.v.val_file;
5189 }
5190
5191 /* Add a vms delta attribute value to a DIE. */
5192
5193 static inline void
5194 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5195 const char *lbl1, const char *lbl2)
5196 {
5197 dw_attr_node attr;
5198
5199 attr.dw_attr = attr_kind;
5200 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5201 attr.dw_attr_val.val_entry = NULL;
5202 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5203 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5204 add_dwarf_attr (die, &attr);
5205 }
5206
5207 /* Add a symbolic view identifier attribute value to a DIE. */
5208
5209 static inline void
5210 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5211 const char *view_label)
5212 {
5213 dw_attr_node attr;
5214
5215 attr.dw_attr = attr_kind;
5216 attr.dw_attr_val.val_class = dw_val_class_symview;
5217 attr.dw_attr_val.val_entry = NULL;
5218 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5219 add_dwarf_attr (die, &attr);
5220 }
5221
5222 /* Add a label identifier attribute value to a DIE. */
5223
5224 static inline void
5225 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5226 const char *lbl_id)
5227 {
5228 dw_attr_node attr;
5229
5230 attr.dw_attr = attr_kind;
5231 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5232 attr.dw_attr_val.val_entry = NULL;
5233 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5234 if (dwarf_split_debug_info)
5235 attr.dw_attr_val.val_entry
5236 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5237 ate_kind_label);
5238 add_dwarf_attr (die, &attr);
5239 }
5240
5241 /* Add a section offset attribute value to a DIE, an offset into the
5242 debug_line section. */
5243
5244 static inline void
5245 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5246 const char *label)
5247 {
5248 dw_attr_node attr;
5249
5250 attr.dw_attr = attr_kind;
5251 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5252 attr.dw_attr_val.val_entry = NULL;
5253 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5254 add_dwarf_attr (die, &attr);
5255 }
5256
5257 /* Add a section offset attribute value to a DIE, an offset into the
5258 debug_macinfo section. */
5259
5260 static inline void
5261 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5262 const char *label)
5263 {
5264 dw_attr_node attr;
5265
5266 attr.dw_attr = attr_kind;
5267 attr.dw_attr_val.val_class = dw_val_class_macptr;
5268 attr.dw_attr_val.val_entry = NULL;
5269 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5270 add_dwarf_attr (die, &attr);
5271 }
5272
5273 /* Add a range_list attribute value to a DIE. When using
5274 dwarf_split_debug_info, address attributes in dies destined for the
5275 final executable should be direct references--setting the parameter
5276 force_direct ensures this behavior. */
5277
5278 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5279 #define RELOCATED_OFFSET (NULL)
5280
5281 static void
5282 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5283 long unsigned int offset, bool force_direct)
5284 {
5285 dw_attr_node attr;
5286
5287 attr.dw_attr = attr_kind;
5288 attr.dw_attr_val.val_class = dw_val_class_range_list;
5289 /* For the range_list attribute, use val_entry to store whether the
5290 offset should follow split-debug-info or normal semantics. This
5291 value is read in output_range_list_offset. */
5292 if (dwarf_split_debug_info && !force_direct)
5293 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5294 else
5295 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5296 attr.dw_attr_val.v.val_offset = offset;
5297 add_dwarf_attr (die, &attr);
5298 }
5299
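/* For example (the offset value is hypothetical),

     add_AT_range_list (die, DW_AT_ranges, 0x40, false);

   stores the offset together with UNRELOCATED_OFFSET under
   -gsplit-dwarf and with RELOCATED_OFFSET otherwise;
   output_range_list_offset later keys off that marker.  */
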
5300 /* Return the start label of a delta attribute. */
5301
5302 static inline const char *
5303 AT_vms_delta1 (dw_attr_node *a)
5304 {
5305 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5306 return a->dw_attr_val.v.val_vms_delta.lbl1;
5307 }
5308
5309 /* Return the end label of a delta attribute. */
5310
5311 static inline const char *
5312 AT_vms_delta2 (dw_attr_node *a)
5313 {
5314 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5315 return a->dw_attr_val.v.val_vms_delta.lbl2;
5316 }
5317
5318 static inline const char *
5319 AT_lbl (dw_attr_node *a)
5320 {
5321 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5322 || AT_class (a) == dw_val_class_lineptr
5323 || AT_class (a) == dw_val_class_macptr
5324 || AT_class (a) == dw_val_class_loclistsptr
5325 || AT_class (a) == dw_val_class_high_pc));
5326 return a->dw_attr_val.v.val_lbl_id;
5327 }
5328
5329 /* Get the attribute of type attr_kind. */
5330
5331 static dw_attr_node *
5332 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5333 {
5334 dw_attr_node *a;
5335 unsigned ix;
5336 dw_die_ref spec = NULL;
5337
5338 if (! die)
5339 return NULL;
5340
5341 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5342 if (a->dw_attr == attr_kind)
5343 return a;
5344 else if (a->dw_attr == DW_AT_specification
5345 || a->dw_attr == DW_AT_abstract_origin)
5346 spec = AT_ref (a);
5347
5348 if (spec)
5349 return get_AT (spec, attr_kind);
5350
5351 return NULL;
5352 }
5353
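/* Note that the lookup above follows DW_AT_specification and
   DW_AT_abstract_origin. As an illustrative example (the DIEs are
   hypothetical), if MEMBER_DEFN_DIE carries only a
   DW_AT_specification pointing at the in-class declaration, then

     get_AT (member_defn_die, DW_AT_name)

   returns the DW_AT_name attribute found on the declaration DIE.  */
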
5354 /* Returns the parent of the declaration of DIE. */
5355
5356 static dw_die_ref
5357 get_die_parent (dw_die_ref die)
5358 {
5359 dw_die_ref t;
5360
5361 if (!die)
5362 return NULL;
5363
5364 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5365 || (t = get_AT_ref (die, DW_AT_specification)))
5366 die = t;
5367
5368 return die->die_parent;
5369 }
5370
5371 /* Return the "low pc" attribute value, typically associated with a subprogram
5372 DIE. Return null if the "low pc" attribute is either not present, or if it
5373 cannot be represented as an assembler label identifier. */
5374
5375 static inline const char *
5376 get_AT_low_pc (dw_die_ref die)
5377 {
5378 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5379
5380 return a ? AT_lbl (a) : NULL;
5381 }
5382
5383 /* Return the value of the string attribute designated by ATTR_KIND, or
5384 NULL if it is not present. */
5385
5386 static inline const char *
5387 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5388 {
5389 dw_attr_node *a = get_AT (die, attr_kind);
5390
5391 return a ? AT_string (a) : NULL;
5392 }
5393
5394 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5395 if it is not present. */
5396
5397 static inline int
5398 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5399 {
5400 dw_attr_node *a = get_AT (die, attr_kind);
5401
5402 return a ? AT_flag (a) : 0;
5403 }
5404
5405 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5406 if it is not present. */
5407
5408 static inline unsigned
5409 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5410 {
5411 dw_attr_node *a = get_AT (die, attr_kind);
5412
5413 return a ? AT_unsigned (a) : 0;
5414 }
5415
5416 static inline dw_die_ref
5417 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5418 {
5419 dw_attr_node *a = get_AT (die, attr_kind);
5420
5421 return a ? AT_ref (a) : NULL;
5422 }
5423
5424 static inline struct dwarf_file_data *
5425 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5426 {
5427 dw_attr_node *a = get_AT (die, attr_kind);
5428
5429 return a ? AT_file (a) : NULL;
5430 }
5431
5432 /* Return TRUE if the language is C. */
5433
5434 static inline bool
5435 is_c (void)
5436 {
5437 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5438
5439 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5440 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5443 }
5444
5445 /* Return TRUE if the language is C++. */
5446
5447 static inline bool
5448 is_cxx (void)
5449 {
5450 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5451
5452 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5453 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5454 }
5455
5456 /* Return TRUE if DECL was created by the C++ frontend. */
5457
5458 static bool
5459 is_cxx (const_tree decl)
5460 {
5461 if (in_lto_p)
5462 {
5463 const_tree context = get_ultimate_context (decl);
5464 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5465 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5466 }
5467 return is_cxx ();
5468 }
5469
5470 /* Return TRUE if the language is Fortran. */
5471
5472 static inline bool
5473 is_fortran (void)
5474 {
5475 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5476
5477 return (lang == DW_LANG_Fortran77
5478 || lang == DW_LANG_Fortran90
5479 || lang == DW_LANG_Fortran95
5480 || lang == DW_LANG_Fortran03
5481 || lang == DW_LANG_Fortran08);
5482 }
5483
5484 static inline bool
5485 is_fortran (const_tree decl)
5486 {
5487 if (in_lto_p)
5488 {
5489 const_tree context = get_ultimate_context (decl);
5490 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5491 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5492 "GNU Fortran", 11) == 0
5493 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5494 "GNU F77") == 0);
5495 }
5496 return is_fortran ();
5497 }
5498
5499 /* Return TRUE if the language is Ada. */
5500
5501 static inline bool
5502 is_ada (void)
5503 {
5504 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5505
5506 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5507 }
5508
5509 /* Return TRUE if the language is D. */
5510
5511 static inline bool
5512 is_dlang (void)
5513 {
5514 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5515
5516 return lang == DW_LANG_D;
5517 }
5518
5519 /* Remove the specified attribute if present. Return TRUE if removal
5520 was successful. */
5521
5522 static bool
5523 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5524 {
5525 dw_attr_node *a;
5526 unsigned ix;
5527
5528 if (! die)
5529 return false;
5530
5531 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5532 if (a->dw_attr == attr_kind)
5533 {
5534 if (AT_class (a) == dw_val_class_str)
5535 if (a->dw_attr_val.v.val_str->refcount)
5536 a->dw_attr_val.v.val_str->refcount--;
5537
5538 /* vec::ordered_remove should help reduce the number of abbrevs
5539 that are needed. */
5540 die->die_attr->ordered_remove (ix);
5541 return true;
5542 }
5543 return false;
5544 }
5545
5546 /* Remove CHILD from its parent. PREV must have the property that
5547 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5548
5549 static void
5550 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5551 {
5552 gcc_assert (child->die_parent == prev->die_parent);
5553 gcc_assert (prev->die_sib == child);
5554 if (prev == child)
5555 {
5556 gcc_assert (child->die_parent->die_child == child);
5557 prev = NULL;
5558 }
5559 else
5560 prev->die_sib = child->die_sib;
5561 if (child->die_parent->die_child == child)
5562 child->die_parent->die_child = prev;
5563 child->die_sib = NULL;
5564 }
5565
5566 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5567 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5568
5569 static void
5570 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5571 {
5572 dw_die_ref parent = old_child->die_parent;
5573
5574 gcc_assert (parent == prev->die_parent);
5575 gcc_assert (prev->die_sib == old_child);
5576
5577 new_child->die_parent = parent;
5578 if (prev == old_child)
5579 {
5580 gcc_assert (parent->die_child == old_child);
5581 new_child->die_sib = new_child;
5582 }
5583 else
5584 {
5585 prev->die_sib = new_child;
5586 new_child->die_sib = old_child->die_sib;
5587 }
5588 if (old_child->die_parent->die_child == old_child)
5589 old_child->die_parent->die_child = new_child;
5590 old_child->die_sib = NULL;
5591 }
5592
5593 /* Move all children from OLD_PARENT to NEW_PARENT. */
5594
5595 static void
5596 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5597 {
5598 dw_die_ref c;
5599 new_parent->die_child = old_parent->die_child;
5600 old_parent->die_child = NULL;
5601 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5602 }
5603
5604 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5605 matches TAG. */
5606
5607 static void
5608 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5609 {
5610 dw_die_ref c;
5611
5612 c = die->die_child;
5613 if (c) do {
5614 dw_die_ref prev = c;
5615 c = c->die_sib;
5616 while (c->die_tag == tag)
5617 {
5618 remove_child_with_prev (c, prev);
5619 c->die_parent = NULL;
5620 /* Might have removed every child. */
5621 if (die->die_child == NULL)
5622 return;
5623 c = prev->die_sib;
5624 }
5625 } while (c != die->die_child);
5626 }
5627
5628 /* Add a CHILD_DIE as the last child of DIE. */
5629
5630 static void
5631 add_child_die (dw_die_ref die, dw_die_ref child_die)
5632 {
5633 /* FIXME this should probably be an assert. */
5634 if (! die || ! child_die)
5635 return;
5636 gcc_assert (die != child_die);
5637
5638 child_die->die_parent = die;
5639 if (die->die_child)
5640 {
5641 child_die->die_sib = die->die_child->die_sib;
5642 die->die_child->die_sib = child_die;
5643 }
5644 else
5645 child_die->die_sib = child_die;
5646 die->die_child = child_die;
5647 }
5648
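/* To illustrate the representation used above: DIE->DIE_CHILD points
   at the most recently added (last) child and the children form a
   circular list through DIE_SIB, so after

     add_child_die (parent, a);
     add_child_die (parent, b);

   (with hypothetical DIEs A and B) we have parent->die_child == b,
   b->die_sib == a and a->die_sib == b, and FOR_EACH_CHILD starts its
   walk at die_child->die_sib, i.e. with the first child A.  */
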
5649 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5650
5651 static void
5652 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5653 dw_die_ref after_die)
5654 {
5655 gcc_assert (die
5656 && child_die
5657 && after_die
5658 && die->die_child
5659 && die != child_die);
5660
5661 child_die->die_parent = die;
5662 child_die->die_sib = after_die->die_sib;
5663 after_die->die_sib = child_die;
5664 if (die->die_child == after_die)
5665 die->die_child = child_die;
5666 }
5667
5668 /* Unassociate CHILD from its parent, and make its parent be
5669 NEW_PARENT. */
5670
5671 static void
5672 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5673 {
5674 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5675 if (p->die_sib == child)
5676 {
5677 remove_child_with_prev (child, p);
5678 break;
5679 }
5680 add_child_die (new_parent, child);
5681 }
5682
5683 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5684 is the specification, to the end of PARENT's list of children.
5685 This is done by removing and re-adding it. */
5686
5687 static void
5688 splice_child_die (dw_die_ref parent, dw_die_ref child)
5689 {
5690 /* We want the declaration DIE from inside the class, not the
5691 specification DIE at toplevel. */
5692 if (child->die_parent != parent)
5693 {
5694 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5695
5696 if (tmp)
5697 child = tmp;
5698 }
5699
5700 gcc_assert (child->die_parent == parent
5701 || (child->die_parent
5702 == get_AT_ref (parent, DW_AT_specification)));
5703
5704 reparent_child (child, parent);
5705 }
5706
5707 /* Create and return a new die with TAG_VALUE as tag. */
5708
5709 static inline dw_die_ref
5710 new_die_raw (enum dwarf_tag tag_value)
5711 {
5712 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5713 die->die_tag = tag_value;
5714 return die;
5715 }
5716
5717 /* Create and return a new die with a parent of PARENT_DIE. If
5718 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5719 associated tree T must be supplied to determine parenthood
5720 later. */
5721
5722 static inline dw_die_ref
5723 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5724 {
5725 dw_die_ref die = new_die_raw (tag_value);
5726
5727 if (parent_die != NULL)
5728 add_child_die (parent_die, die);
5729 else
5730 {
5731 limbo_die_node *limbo_node;
5732
5733 /* No DIEs created after early dwarf should end up in limbo,
5734 because the limbo list should not persist past LTO
5735 streaming. */
5736 if (tag_value != DW_TAG_compile_unit
5737 /* These are allowed because they're generated while
5738 breaking out COMDAT units late. */
5739 && tag_value != DW_TAG_type_unit
5740 && tag_value != DW_TAG_skeleton_unit
5741 && !early_dwarf
5742 /* Allow nested functions to live in limbo because they will
5743 only temporarily live there, as decls_for_scope will fix
5744 them up. */
5745 && (TREE_CODE (t) != FUNCTION_DECL
5746 || !decl_function_context (t))
5747 /* Same as nested functions above but for types. Types that
5748 are local to a function will be fixed in
5749 decls_for_scope. */
5750 && (!RECORD_OR_UNION_TYPE_P (t)
5751 || !TYPE_CONTEXT (t)
5752 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5753 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5754 especially in the ltrans stage, but once we implement LTO
5755 dwarf streaming, we should remove this exception. */
5756 && !in_lto_p)
5757 {
5758 fprintf (stderr, "symbol ended up in limbo too late:");
5759 debug_generic_stmt (t);
5760 gcc_unreachable ();
5761 }
5762
5763 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5764 limbo_node->die = die;
5765 limbo_node->created_for = t;
5766 limbo_node->next = limbo_die_list;
5767 limbo_die_list = limbo_node;
5768 }
5769
5770 return die;
5771 }
5772
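/* For example (DECL is hypothetical),

     dw_die_ref die = new_die (DW_TAG_variable, NULL, decl);

   parks the new DIE on the limbo list until its parent is known,
   whereas passing an explicit PARENT_DIE links it in immediately via
   add_child_die.  */
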
5773 /* Return the DIE associated with the given type specifier. */
5774
5775 static inline dw_die_ref
5776 lookup_type_die (tree type)
5777 {
5778 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5779 if (die && die->removed)
5780 {
5781 TYPE_SYMTAB_DIE (type) = NULL;
5782 return NULL;
5783 }
5784 return die;
5785 }
5786
5787 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5788 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5789 anonymous type instead of the one of the naming typedef. */
5790
5791 static inline dw_die_ref
5792 strip_naming_typedef (tree type, dw_die_ref type_die)
5793 {
5794 if (type
5795 && TREE_CODE (type) == RECORD_TYPE
5796 && type_die
5797 && type_die->die_tag == DW_TAG_typedef
5798 && is_naming_typedef_decl (TYPE_NAME (type)))
5799 type_die = get_AT_ref (type_die, DW_AT_type);
5800 return type_die;
5801 }
5802
5803 /* Like lookup_type_die, but if type is an anonymous type named by a
5804 typedef[1], return the DIE of the anonymous type instead of the one
5805 of the naming typedef. This is because in gen_typedef_die, we did
5806 equate the anonymous struct named by the typedef with the DIE of
5807 the naming typedef. So by default, lookup_type_die on an anonymous
5808 struct yields the DIE of the naming typedef.
5809
5810 [1]: Read the comment of is_naming_typedef_decl to learn about what
5811 a naming typedef is. */
5812
5813 static inline dw_die_ref
5814 lookup_type_die_strip_naming_typedef (tree type)
5815 {
5816 dw_die_ref die = lookup_type_die (type);
5817 return strip_naming_typedef (type, die);
5818 }
5819
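/* As an illustration of the naming-typedef case described above, given

     typedef struct { int i; } S;

   the anonymous struct is equated with the DW_TAG_typedef DIE for S,
   so lookup_type_die on that struct type yields the typedef DIE and
   strip_naming_typedef follows its DW_AT_type to reach the underlying
   DW_TAG_structure_type DIE.  */
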
5820 /* Equate a DIE to a given type specifier. */
5821
5822 static inline void
5823 equate_type_number_to_die (tree type, dw_die_ref type_die)
5824 {
5825 TYPE_SYMTAB_DIE (type) = type_die;
5826 }
5827
5828 static dw_die_ref maybe_create_die_with_external_ref (tree);
5829 struct GTY(()) sym_off_pair
5830 {
5831 const char * GTY((skip)) sym;
5832 unsigned HOST_WIDE_INT off;
5833 };
5834 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5835
5836 /* Returns a hash value for X (which really is a die_struct). */
5837
5838 inline hashval_t
5839 decl_die_hasher::hash (die_node *x)
5840 {
5841 return (hashval_t) x->decl_id;
5842 }
5843
5844 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5845
5846 inline bool
5847 decl_die_hasher::equal (die_node *x, tree y)
5848 {
5849 return (x->decl_id == DECL_UID (y));
5850 }
5851
5852 /* Return the DIE associated with a given declaration. */
5853
5854 static inline dw_die_ref
5855 lookup_decl_die (tree decl)
5856 {
5857 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5858 NO_INSERT);
5859 if (!die)
5860 {
5861 if (in_lto_p)
5862 return maybe_create_die_with_external_ref (decl);
5863 return NULL;
5864 }
5865 if ((*die)->removed)
5866 {
5867 decl_die_table->clear_slot (die);
5868 return NULL;
5869 }
5870 return *die;
5871 }
5872
5873
5874 /* Return the DIE associated with BLOCK. */
5875
5876 static inline dw_die_ref
5877 lookup_block_die (tree block)
5878 {
5879 dw_die_ref die = BLOCK_DIE (block);
5880 if (!die && in_lto_p)
5881 return maybe_create_die_with_external_ref (block);
5882 return die;
5883 }
5884
5885 /* Associate DIE with BLOCK. */
5886
5887 static inline void
5888 equate_block_to_die (tree block, dw_die_ref die)
5889 {
5890 BLOCK_DIE (block) = die;
5891 }
5892 #undef BLOCK_DIE
5893
5894
5895 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5896 style reference. Return true if we found one referring to a DIE for
5897 DECL, otherwise return false. */
5898
5899 static bool
5900 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5901 unsigned HOST_WIDE_INT *off)
5902 {
5903 dw_die_ref die;
5904
5905 if (in_lto_p)
5906 {
5907 /* During WPA stage and incremental linking we use a hash-map
5908 to store the decl <-> label + offset map. */
5909 if (!external_die_map)
5910 return false;
5911 sym_off_pair *desc = external_die_map->get (decl);
5912 if (!desc)
5913 return false;
5914 *sym = desc->sym;
5915 *off = desc->off;
5916 return true;
5917 }
5918
5919 if (TREE_CODE (decl) == BLOCK)
5920 die = lookup_block_die (decl);
5921 else
5922 die = lookup_decl_die (decl);
5923 if (!die)
5924 return false;
5925
5926 /* Similar to get_ref_die_offset_label, but using the "correct"
5927 label. */
5928 *off = die->die_offset;
5929 while (die->die_parent)
5930 die = die->die_parent;
5931 /* For the containing CU DIE we compute a die_symbol in
5932 compute_comp_unit_symbol. */
5933 gcc_assert (die->die_tag == DW_TAG_compile_unit
5934 && die->die_id.die_symbol != NULL);
5935 *sym = die->die_id.die_symbol;
5936 return true;
5937 }
5938
5939 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5940
5941 static void
5942 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5943 const char *symbol, HOST_WIDE_INT offset)
5944 {
5945 /* Create a fake DIE that contains the reference. Don't use
5946 new_die because we don't want to end up in the limbo list. */
5947 /* ??? We probably want to share these, thus put a ref to the DIE
5948 we create here to the external_die_map entry. */
5949 dw_die_ref ref = new_die_raw (die->die_tag);
5950 ref->die_id.die_symbol = symbol;
5951 ref->die_offset = offset;
5952 ref->with_offset = 1;
5953 add_AT_die_ref (die, attr_kind, ref);
5954 }
5955
5956 /* Create a DIE for DECL if required and add a reference to a DIE
5957 at SYMBOL + OFFSET which contains attributes dumped early. */
5958
5959 static void
5960 dwarf2out_register_external_die (tree decl, const char *sym,
5961 unsigned HOST_WIDE_INT off)
5962 {
5963 if (debug_info_level == DINFO_LEVEL_NONE)
5964 return;
5965
5966 if (!external_die_map)
5967 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5968 gcc_checking_assert (!external_die_map->get (decl));
5969 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5970 external_die_map->put (decl, p);
5971 }
5972
5973 /* If we have a registered external DIE for DECL return a new DIE for
5974 the concrete instance with an appropriate abstract origin. */
5975
5976 static dw_die_ref
5977 maybe_create_die_with_external_ref (tree decl)
5978 {
5979 if (!external_die_map)
5980 return NULL;
5981 sym_off_pair *desc = external_die_map->get (decl);
5982 if (!desc)
5983 return NULL;
5984
5985 const char *sym = desc->sym;
5986 unsigned HOST_WIDE_INT off = desc->off;
5987 external_die_map->remove (decl);
5988
5989 in_lto_p = false;
5990 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5991 ? lookup_block_die (decl) : lookup_decl_die (decl));
5992 gcc_assert (!die);
5993 in_lto_p = true;
5994
5995 tree ctx;
5996 dw_die_ref parent = NULL;
5997 /* Need to look up a DIE for the decl's context - the containing
5998 function or translation unit. */
5999 if (TREE_CODE (decl) == BLOCK)
6000 {
6001 ctx = BLOCK_SUPERCONTEXT (decl);
6002 /* ??? We do not output DIEs for all scopes thus skip as
6003 many DIEs as needed. */
6004 while (TREE_CODE (ctx) == BLOCK
6005 && !lookup_block_die (ctx))
6006 ctx = BLOCK_SUPERCONTEXT (ctx);
6007 }
6008 else
6009 ctx = DECL_CONTEXT (decl);
6010 /* Peel types in the context stack. */
6011 while (ctx && TYPE_P (ctx))
6012 ctx = TYPE_CONTEXT (ctx);
6013 /* Likewise namespaces in case we do not want to emit DIEs for them. */
6014 if (debug_info_level <= DINFO_LEVEL_TERSE)
6015 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
6016 ctx = DECL_CONTEXT (ctx);
6017 if (ctx)
6018 {
6019 if (TREE_CODE (ctx) == BLOCK)
6020 parent = lookup_block_die (ctx);
6021 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
6022 /* Keep the 1:1 association during WPA. */
6023 && !flag_wpa
6024 && flag_incremental_link != INCREMENTAL_LINK_LTO)
6025 /* Otherwise all late annotations go to the main CU which
6026 imports the original CUs. */
6027 parent = comp_unit_die ();
6028 else if (TREE_CODE (ctx) == FUNCTION_DECL
6029 && TREE_CODE (decl) != FUNCTION_DECL
6030 && TREE_CODE (decl) != PARM_DECL
6031 && TREE_CODE (decl) != RESULT_DECL
6032 && TREE_CODE (decl) != BLOCK)
6033 /* Leave function local entities parent determination to when
6034 we process scope vars. */
6035 ;
6036 else
6037 parent = lookup_decl_die (ctx);
6038 }
6039 else
6040 /* In some cases the FEs fail to set DECL_CONTEXT properly.
6041 Handle this case gracefully by globalizing stuff. */
6042 parent = comp_unit_die ();
6043 /* Create a DIE "stub". */
6044 switch (TREE_CODE (decl))
6045 {
6046 case TRANSLATION_UNIT_DECL:
6047 {
6048 die = comp_unit_die ();
6049 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6050 to create a DIE for the original CUs. */
6051 return die;
6052 }
6053 case NAMESPACE_DECL:
6054 if (is_fortran (decl))
6055 die = new_die (DW_TAG_module, parent, decl);
6056 else
6057 die = new_die (DW_TAG_namespace, parent, decl);
6058 break;
6059 case FUNCTION_DECL:
6060 die = new_die (DW_TAG_subprogram, parent, decl);
6061 break;
6062 case VAR_DECL:
6063 die = new_die (DW_TAG_variable, parent, decl);
6064 break;
6065 case RESULT_DECL:
6066 die = new_die (DW_TAG_variable, parent, decl);
6067 break;
6068 case PARM_DECL:
6069 die = new_die (DW_TAG_formal_parameter, parent, decl);
6070 break;
6071 case CONST_DECL:
6072 die = new_die (DW_TAG_constant, parent, decl);
6073 break;
6074 case LABEL_DECL:
6075 die = new_die (DW_TAG_label, parent, decl);
6076 break;
6077 case BLOCK:
6078 die = new_die (DW_TAG_lexical_block, parent, decl);
6079 break;
6080 default:
6081 gcc_unreachable ();
6082 }
6083 if (TREE_CODE (decl) == BLOCK)
6084 equate_block_to_die (decl, die);
6085 else
6086 equate_decl_number_to_die (decl, die);
6087
6088 add_desc_attribute (die, decl);
6089
6090 /* Add a reference to the DIE providing early debug at $sym + off. */
6091 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6092
6093 return die;
6094 }
6095
6096 /* Returns a hash value for X (which really is a var_loc_list). */
6097
6098 inline hashval_t
6099 decl_loc_hasher::hash (var_loc_list *x)
6100 {
6101 return (hashval_t) x->decl_id;
6102 }
6103
6104 /* Return nonzero if decl_id of var_loc_list X is the same as
6105 UID of decl *Y. */
6106
6107 inline bool
6108 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6109 {
6110 return (x->decl_id == DECL_UID (y));
6111 }
6112
6113 /* Return the var_loc list associated with a given declaration. */
6114
6115 static inline var_loc_list *
6116 lookup_decl_loc (const_tree decl)
6117 {
6118 if (!decl_loc_table)
6119 return NULL;
6120 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6121 }
6122
6123 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6124
6125 inline hashval_t
6126 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6127 {
6128 return (hashval_t) x->decl_id;
6129 }
6130
6131 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6132 UID of decl *Y. */
6133
6134 inline bool
6135 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6136 {
6137 return (x->decl_id == DECL_UID (y));
6138 }
6139
6140 /* Equate a DIE to a particular declaration. */
6141
6142 static void
6143 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6144 {
6145 unsigned int decl_id = DECL_UID (decl);
6146
6147 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6148 decl_die->decl_id = decl_id;
6149 }
6150
6151 /* Return how many bits the PIECE EXPR_LIST covers. */
6152
6153 static HOST_WIDE_INT
6154 decl_piece_bitsize (rtx piece)
6155 {
6156 int ret = (int) GET_MODE (piece);
6157 if (ret)
6158 return ret;
6159 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6160 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6161 return INTVAL (XEXP (XEXP (piece, 0), 0));
6162 }
6163
6164 /* Return a pointer to the location note stored in the PIECE EXPR_LIST. */
6165
6166 static rtx *
6167 decl_piece_varloc_ptr (rtx piece)
6168 {
6169 if ((int) GET_MODE (piece))
6170 return &XEXP (piece, 0);
6171 else
6172 return &XEXP (XEXP (piece, 0), 1);
6173 }
6174
6175 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6176 NEXT is the chain of following piece nodes. */
6177
6178 static rtx_expr_list *
6179 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6180 {
6181 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6182 return alloc_EXPR_LIST (bitsize, loc_note, next);
6183 else
6184 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6185 GEN_INT (bitsize),
6186 loc_note), next);
6187 }
6188
6189 /* Return rtx that should be stored into loc field for
6190 LOC_NOTE and BITPOS/BITSIZE. */
6191
6192 static rtx
6193 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6194 HOST_WIDE_INT bitsize)
6195 {
6196 if (bitsize != -1)
6197 {
6198 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6199 if (bitpos != 0)
6200 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6201 }
6202 return loc_note;
6203 }
6204
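/* A worked example of the encoding used by the helpers above (the
   note is hypothetical): construct_piece_list (note, 8, 16) first
   wraps NOTE in a 16-bit piece and then prepends an 8-bit padding
   piece with a NULL location, giving

     (8-bit pad, NULL_RTX) -> (16-bit piece, NOTE)

   Bit sizes up to MAX_MACHINE_MODE are smuggled into the EXPR_LIST's
   mode field; larger sizes use a CONCAT of a CONST_INT and the note,
   which decl_piece_bitsize decodes again.  */
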
6205 /* This function either modifies location piece list *DEST in
6206 place (if SRC and INNER are NULL), or copies location piece list
6207 *SRC to *DEST while modifying it. Location BITPOS is modified
6208 to contain LOC_NOTE; any pieces overlapping it are removed (or
6209 not copied), and if needed some padding around it is added.
6210 When modifying in place, DEST should point to the EXPR_LIST where
6211 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6212 to the start of the whole list and INNER points to the EXPR_LIST
6213 where earlier pieces cover PIECE_BITPOS bits. */
6214
6215 static void
6216 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6217 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6218 HOST_WIDE_INT bitsize, rtx loc_note)
6219 {
6220 HOST_WIDE_INT diff;
6221 bool copy = inner != NULL;
6222
6223 if (copy)
6224 {
6225 /* First copy all nodes preceding the current bitpos. */
6226 while (src != inner)
6227 {
6228 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6229 decl_piece_bitsize (*src), NULL_RTX);
6230 dest = &XEXP (*dest, 1);
6231 src = &XEXP (*src, 1);
6232 }
6233 }
6234 /* Add padding if needed. */
6235 if (bitpos != piece_bitpos)
6236 {
6237 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6238 copy ? NULL_RTX : *dest);
6239 dest = &XEXP (*dest, 1);
6240 }
6241 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6242 {
6243 gcc_assert (!copy);
6244 /* A piece with the correct bitpos and bitsize already exists;
6245 just update the location for it and return. */
6246 *decl_piece_varloc_ptr (*dest) = loc_note;
6247 return;
6248 }
6249 /* Add the piece that changed. */
6250 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6251 dest = &XEXP (*dest, 1);
6252 /* Skip over pieces that overlap it. */
6253 diff = bitpos - piece_bitpos + bitsize;
6254 if (!copy)
6255 src = dest;
6256 while (diff > 0 && *src)
6257 {
6258 rtx piece = *src;
6259 diff -= decl_piece_bitsize (piece);
6260 if (copy)
6261 src = &XEXP (piece, 1);
6262 else
6263 {
6264 *src = XEXP (piece, 1);
6265 free_EXPR_LIST_node (piece);
6266 }
6267 }
6268 /* Add padding if needed. */
6269 if (diff < 0 && *src)
6270 {
6271 if (!copy)
6272 dest = src;
6273 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6274 dest = &XEXP (*dest, 1);
6275 }
6276 if (!copy)
6277 return;
6278 /* Finally copy all nodes following it. */
6279 while (*src)
6280 {
6281 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6282 decl_piece_bitsize (*src), NULL_RTX);
6283 dest = &XEXP (*dest, 1);
6284 src = &XEXP (*src, 1);
6285 }
6286 }
6287
6288 /* Add a variable location node to the linked list for DECL. */
6289
6290 static struct var_loc_node *
6291 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6292 {
6293 unsigned int decl_id;
6294 var_loc_list *temp;
6295 struct var_loc_node *loc = NULL;
6296 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6297
6298 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6299 {
6300 tree realdecl = DECL_DEBUG_EXPR (decl);
6301 if (handled_component_p (realdecl)
6302 || (TREE_CODE (realdecl) == MEM_REF
6303 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6304 {
6305 bool reverse;
6306 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6307 &bitsize, &reverse);
6308 if (!innerdecl
6309 || !DECL_P (innerdecl)
6310 || DECL_IGNORED_P (innerdecl)
6311 || TREE_STATIC (innerdecl)
6312 || bitsize == 0
6313 || bitpos + bitsize > 256)
6314 return NULL;
6315 decl = innerdecl;
6316 }
6317 }
6318
6319 decl_id = DECL_UID (decl);
6320 var_loc_list **slot
6321 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6322 if (*slot == NULL)
6323 {
6324 temp = ggc_cleared_alloc<var_loc_list> ();
6325 temp->decl_id = decl_id;
6326 *slot = temp;
6327 }
6328 else
6329 temp = *slot;
6330
6331 /* For PARM_DECLs try to keep around the original incoming value,
6332 even if that means we'll emit a zero-range .debug_loc entry. */
6333 if (temp->last
6334 && temp->first == temp->last
6335 && TREE_CODE (decl) == PARM_DECL
6336 && NOTE_P (temp->first->loc)
6337 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6338 && DECL_INCOMING_RTL (decl)
6339 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6340 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6341 == GET_CODE (DECL_INCOMING_RTL (decl))
6342 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6343 && (bitsize != -1
6344 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6345 NOTE_VAR_LOCATION_LOC (loc_note))
6346 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6347 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6348 {
6349 loc = ggc_cleared_alloc<var_loc_node> ();
6350 temp->first->next = loc;
6351 temp->last = loc;
6352 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6353 }
6354 else if (temp->last)
6355 {
6356 struct var_loc_node *last = temp->last, *unused = NULL;
6357 rtx *piece_loc = NULL, last_loc_note;
6358 HOST_WIDE_INT piece_bitpos = 0;
6359 if (last->next)
6360 {
6361 last = last->next;
6362 gcc_assert (last->next == NULL);
6363 }
6364 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6365 {
6366 piece_loc = &last->loc;
6367 do
6368 {
6369 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6370 if (piece_bitpos + cur_bitsize > bitpos)
6371 break;
6372 piece_bitpos += cur_bitsize;
6373 piece_loc = &XEXP (*piece_loc, 1);
6374 }
6375 while (*piece_loc);
6376 }
6377 /* TEMP->LAST here points either to the last-but-one or to the
6378 last element in the chained list; LAST points to the
6379 last element. */
6380 if (label && strcmp (last->label, label) == 0 && last->view == view)
6381 {
6382 /* For SRA-optimized variables, if there weren't any real
6383 insns since the last note, just modify the last node. */
6384 if (piece_loc != NULL)
6385 {
6386 adjust_piece_list (piece_loc, NULL, NULL,
6387 bitpos, piece_bitpos, bitsize, loc_note);
6388 return NULL;
6389 }
6390 /* If the last note doesn't cover any instructions, remove it. */
6391 if (temp->last != last)
6392 {
6393 temp->last->next = NULL;
6394 unused = last;
6395 last = temp->last;
6396 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6397 }
6398 else
6399 {
6400 gcc_assert (temp->first == temp->last
6401 || (temp->first->next == temp->last
6402 && TREE_CODE (decl) == PARM_DECL));
6403 memset (temp->last, '\0', sizeof (*temp->last));
6404 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6405 return temp->last;
6406 }
6407 }
6408 if (bitsize == -1 && NOTE_P (last->loc))
6409 last_loc_note = last->loc;
6410 else if (piece_loc != NULL
6411 && *piece_loc != NULL_RTX
6412 && piece_bitpos == bitpos
6413 && decl_piece_bitsize (*piece_loc) == bitsize)
6414 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6415 else
6416 last_loc_note = NULL_RTX;
6417 /* If the current location is the same as the end of the list,
6418 and either both or neither of the locations is uninitialized,
6419 we have nothing to do. */
6420 if (last_loc_note == NULL_RTX
6421 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6422 NOTE_VAR_LOCATION_LOC (loc_note)))
6423 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6424 != NOTE_VAR_LOCATION_STATUS (loc_note))
6425 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6426 == VAR_INIT_STATUS_UNINITIALIZED)
6427 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6428 == VAR_INIT_STATUS_UNINITIALIZED))))
6429 {
6430 /* Add LOC to the end of list and update LAST. If the last
6431 element of the list has been removed above, reuse its
6432 memory for the new node, otherwise allocate a new one. */
6433 if (unused)
6434 {
6435 loc = unused;
6436 memset (loc, '\0', sizeof (*loc));
6437 }
6438 else
6439 loc = ggc_cleared_alloc<var_loc_node> ();
6440 if (bitsize == -1 || piece_loc == NULL)
6441 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6442 else
6443 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6444 bitpos, piece_bitpos, bitsize, loc_note);
6445 last->next = loc;
6446 /* Ensure TEMP->LAST will point either to the new last but one
6447 element of the chain, or to the last element in it. */
6448 if (last != temp->last)
6449 temp->last = last;
6450 }
6451 else if (unused)
6452 ggc_free (unused);
6453 }
6454 else
6455 {
6456 loc = ggc_cleared_alloc<var_loc_node> ();
6457 temp->first = loc;
6458 temp->last = loc;
6459 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6460 }
6461 return loc;
6462 }
6463 \f
6464 /* Keep track of the number of spaces used to indent the
6465 output of the debugging routines that print the structure of
6466 the DIE internal representation. */
6467 static int print_indent;
6468
6469 /* Indent the line the number of spaces given by print_indent. */
6470
6471 static inline void
6472 print_spaces (FILE *outfile)
6473 {
6474 fprintf (outfile, "%*s", print_indent, "");
6475 }
6476
6477 /* Print a type signature in hex. */
6478
6479 static inline void
6480 print_signature (FILE *outfile, char *sig)
6481 {
6482 int i;
6483
6484 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6485 fprintf (outfile, "%02x", sig[i] & 0xff);
6486 }
6487
6488 static inline void
6489 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6490 {
6491 if (discr_value->pos)
6492 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6493 else
6494 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6495 }
6496
6497 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6498
6499 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6500 RECURSE, output location descriptor operations. */
6501
6502 static void
6503 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6504 {
6505 switch (val->val_class)
6506 {
6507 case dw_val_class_addr:
6508 fprintf (outfile, "address");
6509 break;
6510 case dw_val_class_offset:
6511 fprintf (outfile, "offset");
6512 break;
6513 case dw_val_class_loc:
6514 fprintf (outfile, "location descriptor");
6515 if (val->v.val_loc == NULL)
6516 fprintf (outfile, " -> <null>\n");
6517 else if (recurse)
6518 {
6519 fprintf (outfile, ":\n");
6520 print_indent += 4;
6521 print_loc_descr (val->v.val_loc, outfile);
6522 print_indent -= 4;
6523 }
6524 else
6525 {
6526 if (flag_dump_noaddr || flag_dump_unnumbered)
6527 fprintf (outfile, " #\n");
6528 else
6529 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6530 }
6531 break;
6532 case dw_val_class_loc_list:
6533 fprintf (outfile, "location list -> label:%s",
6534 val->v.val_loc_list->ll_symbol);
6535 break;
6536 case dw_val_class_view_list:
6537 val = view_list_to_loc_list_val_node (val);
6538 fprintf (outfile, "location list with views -> labels:%s and %s",
6539 val->v.val_loc_list->ll_symbol,
6540 val->v.val_loc_list->vl_symbol);
6541 break;
6542 case dw_val_class_range_list:
6543 fprintf (outfile, "range list");
6544 break;
6545 case dw_val_class_const:
6546 case dw_val_class_const_implicit:
6547 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6548 break;
6549 case dw_val_class_unsigned_const:
6550 case dw_val_class_unsigned_const_implicit:
6551 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6552 break;
6553 case dw_val_class_const_double:
6554 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6555 HOST_WIDE_INT_PRINT_UNSIGNED")",
6556 val->v.val_double.high,
6557 val->v.val_double.low);
6558 break;
6559 case dw_val_class_wide_int:
6560 {
6561 int i = val->v.val_wide->get_len ();
6562 fprintf (outfile, "constant (");
6563 gcc_assert (i > 0);
6564 if (val->v.val_wide->elt (i - 1) == 0)
6565 fprintf (outfile, "0x");
6566 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6567 val->v.val_wide->elt (--i));
6568 while (--i >= 0)
6569 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6570 val->v.val_wide->elt (i));
6571 fprintf (outfile, ")");
6572 break;
6573 }
6574 case dw_val_class_vec:
6575 fprintf (outfile, "floating-point or vector constant");
6576 break;
6577 case dw_val_class_flag:
6578 fprintf (outfile, "%u", val->v.val_flag);
6579 break;
6580 case dw_val_class_die_ref:
6581 if (val->v.val_die_ref.die != NULL)
6582 {
6583 dw_die_ref die = val->v.val_die_ref.die;
6584
6585 if (die->comdat_type_p)
6586 {
6587 fprintf (outfile, "die -> signature: ");
6588 print_signature (outfile,
6589 die->die_id.die_type_node->signature);
6590 }
6591 else if (die->die_id.die_symbol)
6592 {
6593 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6594 if (die->with_offset)
6595 fprintf (outfile, " + %ld", die->die_offset);
6596 }
6597 else
6598 fprintf (outfile, "die -> %ld", die->die_offset);
6599 if (flag_dump_noaddr || flag_dump_unnumbered)
6600 fprintf (outfile, " #");
6601 else
6602 fprintf (outfile, " (%p)", (void *) die);
6603 }
6604 else
6605 fprintf (outfile, "die -> <null>");
6606 break;
6607 case dw_val_class_vms_delta:
6608 fprintf (outfile, "delta: @slotcount(%s-%s)",
6609 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6610 break;
6611 case dw_val_class_symview:
6612 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6613 break;
6614 case dw_val_class_lbl_id:
6615 case dw_val_class_lineptr:
6616 case dw_val_class_macptr:
6617 case dw_val_class_loclistsptr:
6618 case dw_val_class_high_pc:
6619 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6620 break;
6621 case dw_val_class_str:
6622 if (val->v.val_str->str != NULL)
6623 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6624 else
6625 fprintf (outfile, "<null>");
6626 break;
6627 case dw_val_class_file:
6628 case dw_val_class_file_implicit:
6629 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6630 val->v.val_file->emitted_number);
6631 break;
6632 case dw_val_class_data8:
6633 {
6634 int i;
6635
6636 for (i = 0; i < 8; i++)
6637 fprintf (outfile, "%02x", val->v.val_data8[i]);
6638 break;
6639 }
6640 case dw_val_class_discr_value:
6641 print_discr_value (outfile, &val->v.val_discr_value);
6642 break;
6643 case dw_val_class_discr_list:
6644 for (dw_discr_list_ref node = val->v.val_discr_list;
6645 node != NULL;
6646 node = node->dw_discr_next)
6647 {
6648 if (node->dw_discr_range)
6649 {
6650 print_discr_value (outfile, &node->dw_discr_lower_bound);
6651 fprintf (outfile, " .. ");
6652 print_discr_value (outfile, &node->dw_discr_upper_bound);
6653 }
6654 else
6655 print_discr_value (outfile, &node->dw_discr_lower_bound);
6656
6657 if (node->dw_discr_next != NULL)
6658 fprintf (outfile, " | ");
6659 }
6660 default:
6661 break;
6662 }
6663 }
6664
6665 /* Likewise, for a DIE attribute. */
6666
6667 static void
6668 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6669 {
6670 print_dw_val (&a->dw_attr_val, recurse, outfile);
6671 }
6672
6673
6674 /* Print the list of operands in the LOC location description to OUTFILE. This
6675 routine is a debugging aid only. */
6676
6677 static void
6678 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6679 {
6680 dw_loc_descr_ref l = loc;
6681
6682 if (loc == NULL)
6683 {
6684 print_spaces (outfile);
6685 fprintf (outfile, "<null>\n");
6686 return;
6687 }
6688
6689 for (l = loc; l != NULL; l = l->dw_loc_next)
6690 {
6691 print_spaces (outfile);
6692 if (flag_dump_noaddr || flag_dump_unnumbered)
6693 fprintf (outfile, "#");
6694 else
6695 fprintf (outfile, "(%p)", (void *) l);
6696 fprintf (outfile, " %s",
6697 dwarf_stack_op_name (l->dw_loc_opc));
6698 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6699 {
6700 fprintf (outfile, " ");
6701 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6702 }
6703 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6704 {
6705 fprintf (outfile, ", ");
6706 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6707 }
6708 fprintf (outfile, "\n");
6709 }
6710 }
6711
6712 /* Print the information associated with a given DIE, and its children.
6713 This routine is a debugging aid only. */
6714
6715 static void
6716 print_die (dw_die_ref die, FILE *outfile)
6717 {
6718 dw_attr_node *a;
6719 dw_die_ref c;
6720 unsigned ix;
6721
6722 print_spaces (outfile);
6723 fprintf (outfile, "DIE %4ld: %s ",
6724 die->die_offset, dwarf_tag_name (die->die_tag));
6725 if (flag_dump_noaddr || flag_dump_unnumbered)
6726 fprintf (outfile, "#\n");
6727 else
6728 fprintf (outfile, "(%p)\n", (void*) die);
6729 print_spaces (outfile);
6730 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6731 fprintf (outfile, " offset: %ld", die->die_offset);
6732 fprintf (outfile, " mark: %d\n", die->die_mark);
6733
6734 if (die->comdat_type_p)
6735 {
6736 print_spaces (outfile);
6737 fprintf (outfile, " signature: ");
6738 print_signature (outfile, die->die_id.die_type_node->signature);
6739 fprintf (outfile, "\n");
6740 }
6741
6742 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6743 {
6744 print_spaces (outfile);
6745 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6746
6747 print_attribute (a, true, outfile);
6748 fprintf (outfile, "\n");
6749 }
6750
6751 if (die->die_child != NULL)
6752 {
6753 print_indent += 4;
6754 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6755 print_indent -= 4;
6756 }
6757 if (print_indent == 0)
6758 fprintf (outfile, "\n");
6759 }
6760
6761 /* Print the list of operations in the LOC location description. */
6762
6763 DEBUG_FUNCTION void
6764 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6765 {
6766 print_loc_descr (loc, stderr);
6767 }
6768
6769 /* Print the information collected for a given DIE. */
6770
6771 DEBUG_FUNCTION void
6772 debug_dwarf_die (dw_die_ref die)
6773 {
6774 print_die (die, stderr);
6775 }
6776
6777 DEBUG_FUNCTION void
6778 debug (die_struct &ref)
6779 {
6780 print_die (&ref, stderr);
6781 }
6782
6783 DEBUG_FUNCTION void
6784 debug (die_struct *ptr)
6785 {
6786 if (ptr)
6787 debug (*ptr);
6788 else
6789 fprintf (stderr, "<nil>\n");
6790 }
6791
6792
6793 /* Print all DWARF information collected for the compilation unit.
6794 This routine is a debugging aid only. */
6795
6796 DEBUG_FUNCTION void
6797 debug_dwarf (void)
6798 {
6799 print_indent = 0;
6800 print_die (comp_unit_die (), stderr);
6801 }
6802
6803 /* Verify the DIE tree structure. */
6804
6805 DEBUG_FUNCTION void
6806 verify_die (dw_die_ref die)
6807 {
6808 gcc_assert (!die->die_mark);
6809 if (die->die_parent == NULL
6810 && die->die_sib == NULL)
6811 return;
6812 /* Verify the die_sib list is cyclic. */
6813 dw_die_ref x = die;
6814 do
6815 {
6816 x->die_mark = 1;
6817 x = x->die_sib;
6818 }
6819 while (x && !x->die_mark);
6820 gcc_assert (x == die);
6821 x = die;
6822 do
6823 {
6824 /* Verify all dies have the same parent. */
6825 gcc_assert (x->die_parent == die->die_parent);
6826 if (x->die_child)
6827 {
6828 /* Verify the child has the proper parent and recurse. */
6829 gcc_assert (x->die_child->die_parent == x);
6830 verify_die (x->die_child);
6831 }
6832 x->die_mark = 0;
6833 x = x->die_sib;
6834 }
6835 while (x && x->die_mark);
6836 }
6837
6838 /* Sanity checks on DIEs. */
6839
6840 static void
6841 check_die (dw_die_ref die)
6842 {
6843 unsigned ix;
6844 dw_attr_node *a;
6845 bool inline_found = false;
6846 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6847 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6848 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6849 {
6850 switch (a->dw_attr)
6851 {
6852 case DW_AT_inline:
6853 if (a->dw_attr_val.v.val_unsigned)
6854 inline_found = true;
6855 break;
6856 case DW_AT_location:
6857 ++n_location;
6858 break;
6859 case DW_AT_low_pc:
6860 ++n_low_pc;
6861 break;
6862 case DW_AT_high_pc:
6863 ++n_high_pc;
6864 break;
6865 case DW_AT_artificial:
6866 ++n_artificial;
6867 break;
6868 case DW_AT_decl_column:
6869 ++n_decl_column;
6870 break;
6871 case DW_AT_decl_line:
6872 ++n_decl_line;
6873 break;
6874 case DW_AT_decl_file:
6875 ++n_decl_file;
6876 break;
6877 default:
6878 break;
6879 }
6880 }
6881 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6882 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6883 {
6884 fprintf (stderr, "Duplicate attributes in DIE:\n");
6885 debug_dwarf_die (die);
6886 gcc_unreachable ();
6887 }
6888 if (inline_found)
6889 {
6890 /* A debugging information entry that is a member of an abstract
6891 instance tree [that has DW_AT_inline] should not contain any
6892 attributes which describe aspects of the subroutine which vary
6893 between distinct inlined expansions or distinct out-of-line
6894 expansions. */
6895 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6896 gcc_assert (a->dw_attr != DW_AT_low_pc
6897 && a->dw_attr != DW_AT_high_pc
6898 && a->dw_attr != DW_AT_location
6899 && a->dw_attr != DW_AT_frame_base
6900 && a->dw_attr != DW_AT_call_all_calls
6901 && a->dw_attr != DW_AT_GNU_all_call_sites);
6902 }
6903 }
6904 \f
6905 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6906 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6907 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6908
6909 /* Calculate the checksum of a location expression. */
6910
6911 static inline void
6912 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6913 {
6914 int tem;
6915 inchash::hash hstate;
6916 hashval_t hash;
6917
6918 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6919 CHECKSUM (tem);
6920 hash_loc_operands (loc, hstate);
6921 hash = hstate.end();
6922 CHECKSUM (hash);
6923 }
6924
6925 /* Calculate the checksum of an attribute. */
6926
6927 static void
6928 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6929 {
6930 dw_loc_descr_ref loc;
6931 rtx r;
6932
6933 CHECKSUM (at->dw_attr);
6934
6935 /* We don't care that this was compiled with a different compiler
6936 snapshot; if the output is the same, that's what matters. */
6937 if (at->dw_attr == DW_AT_producer)
6938 return;
6939
6940 switch (AT_class (at))
6941 {
6942 case dw_val_class_const:
6943 case dw_val_class_const_implicit:
6944 CHECKSUM (at->dw_attr_val.v.val_int);
6945 break;
6946 case dw_val_class_unsigned_const:
6947 case dw_val_class_unsigned_const_implicit:
6948 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6949 break;
6950 case dw_val_class_const_double:
6951 CHECKSUM (at->dw_attr_val.v.val_double);
6952 break;
6953 case dw_val_class_wide_int:
6954 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6955 get_full_len (*at->dw_attr_val.v.val_wide)
6956 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6957 break;
6958 case dw_val_class_vec:
6959 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6960 (at->dw_attr_val.v.val_vec.length
6961 * at->dw_attr_val.v.val_vec.elt_size));
6962 break;
6963 case dw_val_class_flag:
6964 CHECKSUM (at->dw_attr_val.v.val_flag);
6965 break;
6966 case dw_val_class_str:
6967 CHECKSUM_STRING (AT_string (at));
6968 break;
6969
6970 case dw_val_class_addr:
6971 r = AT_addr (at);
6972 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6973 CHECKSUM_STRING (XSTR (r, 0));
6974 break;
6975
6976 case dw_val_class_offset:
6977 CHECKSUM (at->dw_attr_val.v.val_offset);
6978 break;
6979
6980 case dw_val_class_loc:
6981 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6982 loc_checksum (loc, ctx);
6983 break;
6984
6985 case dw_val_class_die_ref:
6986 die_checksum (AT_ref (at), ctx, mark);
6987 break;
6988
6989 case dw_val_class_fde_ref:
6990 case dw_val_class_vms_delta:
6991 case dw_val_class_symview:
6992 case dw_val_class_lbl_id:
6993 case dw_val_class_lineptr:
6994 case dw_val_class_macptr:
6995 case dw_val_class_loclistsptr:
6996 case dw_val_class_high_pc:
6997 break;
6998
6999 case dw_val_class_file:
7000 case dw_val_class_file_implicit:
7001 CHECKSUM_STRING (AT_file (at)->filename);
7002 break;
7003
7004 case dw_val_class_data8:
7005 CHECKSUM (at->dw_attr_val.v.val_data8);
7006 break;
7007
7008 default:
7009 break;
7010 }
7011 }
7012
7013 /* Calculate the checksum of a DIE. */
7014
7015 static void
7016 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7017 {
7018 dw_die_ref c;
7019 dw_attr_node *a;
7020 unsigned ix;
7021
7022 /* To avoid infinite recursion. */
7023 if (die->die_mark)
7024 {
7025 CHECKSUM (die->die_mark);
7026 return;
7027 }
7028 die->die_mark = ++(*mark);
7029
7030 CHECKSUM (die->die_tag);
7031
7032 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7033 attr_checksum (a, ctx, mark);
7034
7035 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
7036 }
7037
7038 #undef CHECKSUM
7039 #undef CHECKSUM_BLOCK
7040 #undef CHECKSUM_STRING
7041
7042 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
7043 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
7044 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7045 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7046 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7047 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
7048 #define CHECKSUM_ATTR(FOO) \
7049 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
7050
7051 /* Calculate the checksum of a number in signed LEB128 format. */
7052
7053 static void
7054 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7055 {
7056 unsigned char byte;
7057 bool more;
7058
7059 while (1)
7060 {
7061 byte = (value & 0x7f);
7062 value >>= 7;
7063 more = !((value == 0 && (byte & 0x40) == 0)
7064 || (value == -1 && (byte & 0x40) != 0));
7065 if (more)
7066 byte |= 0x80;
7067 CHECKSUM (byte);
7068 if (!more)
7069 break;
7070 }
7071 }
7072
7073 /* Calculate the checksum of a number in unsigned LEB128 format. */
7074
7075 static void
7076 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7077 {
7078 while (1)
7079 {
7080 unsigned char byte = (value & 0x7f);
7081 value >>= 7;
7082 if (value != 0)
7083 /* More bytes to follow. */
7084 byte |= 0x80;
7085 CHECKSUM (byte);
7086 if (value == 0)
7087 break;
7088 }
7089 }
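
/* As a worked example of the encodings above (these are the standard DWARF
   LEB128 byte sequences), the bytes fed into the MD5 context are:

     checksum_uleb128 (2)       -> 0x02
     checksum_uleb128 (624485)  -> 0xe5 0x8e 0x26
     checksum_sleb128 (2)       -> 0x02
     checksum_sleb128 (-2)      -> 0x7e

   so the checksum depends only on the value, not on the width of
   HOST_WIDE_INT on the host.  */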
7090
7091 /* Checksum the context of the DIE. This adds the names of any
7092 surrounding namespaces or structures to the checksum. */
7093
7094 static void
7095 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7096 {
7097 const char *name;
7098 dw_die_ref spec;
7099 int tag = die->die_tag;
7100
7101 if (tag != DW_TAG_namespace
7102 && tag != DW_TAG_structure_type
7103 && tag != DW_TAG_class_type)
7104 return;
7105
7106 name = get_AT_string (die, DW_AT_name);
7107
7108 spec = get_AT_ref (die, DW_AT_specification);
7109 if (spec != NULL)
7110 die = spec;
7111
7112 if (die->die_parent != NULL)
7113 checksum_die_context (die->die_parent, ctx);
7114
7115 CHECKSUM_ULEB128 ('C');
7116 CHECKSUM_ULEB128 (tag);
7117 if (name != NULL)
7118 CHECKSUM_STRING (name);
7119 }
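
/* As a hypothetical example: for a type T declared inside namespace N and
   struct S (i.e. N::S::T), the recursion above appends, outermost first,

     'C' DW_TAG_namespace "N"   'C' DW_TAG_structure_type "S"

   to the checksum, so the signature of T reflects its fully qualified
   context rather than the in-memory addresses of the context DIEs.  */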
7120
7121 /* Calculate the checksum of a location expression. */
7122
7123 static inline void
7124 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7125 {
7126 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7127 were emitted as a DW_FORM_sdata instead of a location expression. */
7128 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7129 {
7130 CHECKSUM_ULEB128 (DW_FORM_sdata);
7131 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7132 return;
7133 }
7134
7135 /* Otherwise, just checksum the raw location expression. */
7136 while (loc != NULL)
7137 {
7138 inchash::hash hstate;
7139 hashval_t hash;
7140
7141 CHECKSUM_ULEB128 (loc->dtprel);
7142 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7143 hash_loc_operands (loc, hstate);
7144 hash = hstate.end ();
7145 CHECKSUM (hash);
7146 loc = loc->dw_loc_next;
7147 }
7148 }
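
/* The DW_OP_plus_uconst special case above follows the DWARF 4 type
   signature rules: a data member location may be emitted either as a bare
   constant or as the expression DW_OP_plus_uconst <offset>.  For example, a
   member at byte offset 8 contributes DW_FORM_sdata followed by sleb128(8)
   to the hash in both cases, so both encodings yield the same signature.  */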
7149
7150 /* Calculate the checksum of an attribute. */
7151
7152 static void
7153 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7154 struct md5_ctx *ctx, int *mark)
7155 {
7156 dw_loc_descr_ref loc;
7157 rtx r;
7158
7159 if (AT_class (at) == dw_val_class_die_ref)
7160 {
7161 dw_die_ref target_die = AT_ref (at);
7162
7163 /* For pointer and reference types, we checksum only the (qualified)
7164 name of the target type (if there is a name). For friend entries,
7165 we checksum only the (qualified) name of the target type or function.
7166 This allows the checksum to remain the same whether the target type
7167 is complete or not. */
7168 if ((at->dw_attr == DW_AT_type
7169 && (tag == DW_TAG_pointer_type
7170 || tag == DW_TAG_reference_type
7171 || tag == DW_TAG_rvalue_reference_type
7172 || tag == DW_TAG_ptr_to_member_type))
7173 || (at->dw_attr == DW_AT_friend
7174 && tag == DW_TAG_friend))
7175 {
7176 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7177
7178 if (name_attr != NULL)
7179 {
7180 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7181
7182 if (decl == NULL)
7183 decl = target_die;
7184 CHECKSUM_ULEB128 ('N');
7185 CHECKSUM_ULEB128 (at->dw_attr);
7186 if (decl->die_parent != NULL)
7187 checksum_die_context (decl->die_parent, ctx);
7188 CHECKSUM_ULEB128 ('E');
7189 CHECKSUM_STRING (AT_string (name_attr));
7190 return;
7191 }
7192 }
7193
7194 /* For all other references to another DIE, we check to see if the
7195 target DIE has already been visited. If it has, we emit a
7196 backward reference; if not, we descend recursively. */
7197 if (target_die->die_mark > 0)
7198 {
7199 CHECKSUM_ULEB128 ('R');
7200 CHECKSUM_ULEB128 (at->dw_attr);
7201 CHECKSUM_ULEB128 (target_die->die_mark);
7202 }
7203 else
7204 {
7205 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7206
7207 if (decl == NULL)
7208 decl = target_die;
7209 target_die->die_mark = ++(*mark);
7210 CHECKSUM_ULEB128 ('T');
7211 CHECKSUM_ULEB128 (at->dw_attr);
7212 if (decl->die_parent != NULL)
7213 checksum_die_context (decl->die_parent, ctx);
7214 die_checksum_ordered (target_die, ctx, mark);
7215 }
7216 return;
7217 }
7218
7219 CHECKSUM_ULEB128 ('A');
7220 CHECKSUM_ULEB128 (at->dw_attr);
7221
7222 switch (AT_class (at))
7223 {
7224 case dw_val_class_const:
7225 case dw_val_class_const_implicit:
7226 CHECKSUM_ULEB128 (DW_FORM_sdata);
7227 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7228 break;
7229
7230 case dw_val_class_unsigned_const:
7231 case dw_val_class_unsigned_const_implicit:
7232 CHECKSUM_ULEB128 (DW_FORM_sdata);
7233 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7234 break;
7235
7236 case dw_val_class_const_double:
7237 CHECKSUM_ULEB128 (DW_FORM_block);
7238 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7239 CHECKSUM (at->dw_attr_val.v.val_double);
7240 break;
7241
7242 case dw_val_class_wide_int:
7243 CHECKSUM_ULEB128 (DW_FORM_block);
7244 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7245 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7246 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7247 get_full_len (*at->dw_attr_val.v.val_wide)
7248 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7249 break;
7250
7251 case dw_val_class_vec:
7252 CHECKSUM_ULEB128 (DW_FORM_block);
7253 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7254 * at->dw_attr_val.v.val_vec.elt_size);
7255 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7256 (at->dw_attr_val.v.val_vec.length
7257 * at->dw_attr_val.v.val_vec.elt_size));
7258 break;
7259
7260 case dw_val_class_flag:
7261 CHECKSUM_ULEB128 (DW_FORM_flag);
7262 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7263 break;
7264
7265 case dw_val_class_str:
7266 CHECKSUM_ULEB128 (DW_FORM_string);
7267 CHECKSUM_STRING (AT_string (at));
7268 break;
7269
7270 case dw_val_class_addr:
7271 r = AT_addr (at);
7272 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7273 CHECKSUM_ULEB128 (DW_FORM_string);
7274 CHECKSUM_STRING (XSTR (r, 0));
7275 break;
7276
7277 case dw_val_class_offset:
7278 CHECKSUM_ULEB128 (DW_FORM_sdata);
7279 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7280 break;
7281
7282 case dw_val_class_loc:
7283 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7284 loc_checksum_ordered (loc, ctx);
7285 break;
7286
7287 case dw_val_class_fde_ref:
7288 case dw_val_class_symview:
7289 case dw_val_class_lbl_id:
7290 case dw_val_class_lineptr:
7291 case dw_val_class_macptr:
7292 case dw_val_class_loclistsptr:
7293 case dw_val_class_high_pc:
7294 break;
7295
7296 case dw_val_class_file:
7297 case dw_val_class_file_implicit:
7298 CHECKSUM_ULEB128 (DW_FORM_string);
7299 CHECKSUM_STRING (AT_file (at)->filename);
7300 break;
7301
7302 case dw_val_class_data8:
7303 CHECKSUM (at->dw_attr_val.v.val_data8);
7304 break;
7305
7306 default:
7307 break;
7308 }
7309 }
7310
7311 struct checksum_attributes
7312 {
7313 dw_attr_node *at_name;
7314 dw_attr_node *at_type;
7315 dw_attr_node *at_friend;
7316 dw_attr_node *at_accessibility;
7317 dw_attr_node *at_address_class;
7318 dw_attr_node *at_alignment;
7319 dw_attr_node *at_allocated;
7320 dw_attr_node *at_artificial;
7321 dw_attr_node *at_associated;
7322 dw_attr_node *at_binary_scale;
7323 dw_attr_node *at_bit_offset;
7324 dw_attr_node *at_bit_size;
7325 dw_attr_node *at_bit_stride;
7326 dw_attr_node *at_byte_size;
7327 dw_attr_node *at_byte_stride;
7328 dw_attr_node *at_const_value;
7329 dw_attr_node *at_containing_type;
7330 dw_attr_node *at_count;
7331 dw_attr_node *at_data_location;
7332 dw_attr_node *at_data_member_location;
7333 dw_attr_node *at_decimal_scale;
7334 dw_attr_node *at_decimal_sign;
7335 dw_attr_node *at_default_value;
7336 dw_attr_node *at_digit_count;
7337 dw_attr_node *at_discr;
7338 dw_attr_node *at_discr_list;
7339 dw_attr_node *at_discr_value;
7340 dw_attr_node *at_encoding;
7341 dw_attr_node *at_endianity;
7342 dw_attr_node *at_explicit;
7343 dw_attr_node *at_is_optional;
7344 dw_attr_node *at_location;
7345 dw_attr_node *at_lower_bound;
7346 dw_attr_node *at_mutable;
7347 dw_attr_node *at_ordering;
7348 dw_attr_node *at_picture_string;
7349 dw_attr_node *at_prototyped;
7350 dw_attr_node *at_small;
7351 dw_attr_node *at_segment;
7352 dw_attr_node *at_string_length;
7353 dw_attr_node *at_string_length_bit_size;
7354 dw_attr_node *at_string_length_byte_size;
7355 dw_attr_node *at_threads_scaled;
7356 dw_attr_node *at_upper_bound;
7357 dw_attr_node *at_use_location;
7358 dw_attr_node *at_use_UTF8;
7359 dw_attr_node *at_variable_parameter;
7360 dw_attr_node *at_virtuality;
7361 dw_attr_node *at_visibility;
7362 dw_attr_node *at_vtable_elem_location;
7363 };
7364
7365 /* Collect the attributes that we will want to use for the checksum. */
7366
7367 static void
7368 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7369 {
7370 dw_attr_node *a;
7371 unsigned ix;
7372
7373 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7374 {
7375 switch (a->dw_attr)
7376 {
7377 case DW_AT_name:
7378 attrs->at_name = a;
7379 break;
7380 case DW_AT_type:
7381 attrs->at_type = a;
7382 break;
7383 case DW_AT_friend:
7384 attrs->at_friend = a;
7385 break;
7386 case DW_AT_accessibility:
7387 attrs->at_accessibility = a;
7388 break;
7389 case DW_AT_address_class:
7390 attrs->at_address_class = a;
7391 break;
7392 case DW_AT_alignment:
7393 attrs->at_alignment = a;
7394 break;
7395 case DW_AT_allocated:
7396 attrs->at_allocated = a;
7397 break;
7398 case DW_AT_artificial:
7399 attrs->at_artificial = a;
7400 break;
7401 case DW_AT_associated:
7402 attrs->at_associated = a;
7403 break;
7404 case DW_AT_binary_scale:
7405 attrs->at_binary_scale = a;
7406 break;
7407 case DW_AT_bit_offset:
7408 attrs->at_bit_offset = a;
7409 break;
7410 case DW_AT_bit_size:
7411 attrs->at_bit_size = a;
7412 break;
7413 case DW_AT_bit_stride:
7414 attrs->at_bit_stride = a;
7415 break;
7416 case DW_AT_byte_size:
7417 attrs->at_byte_size = a;
7418 break;
7419 case DW_AT_byte_stride:
7420 attrs->at_byte_stride = a;
7421 break;
7422 case DW_AT_const_value:
7423 attrs->at_const_value = a;
7424 break;
7425 case DW_AT_containing_type:
7426 attrs->at_containing_type = a;
7427 break;
7428 case DW_AT_count:
7429 attrs->at_count = a;
7430 break;
7431 case DW_AT_data_location:
7432 attrs->at_data_location = a;
7433 break;
7434 case DW_AT_data_member_location:
7435 attrs->at_data_member_location = a;
7436 break;
7437 case DW_AT_decimal_scale:
7438 attrs->at_decimal_scale = a;
7439 break;
7440 case DW_AT_decimal_sign:
7441 attrs->at_decimal_sign = a;
7442 break;
7443 case DW_AT_default_value:
7444 attrs->at_default_value = a;
7445 break;
7446 case DW_AT_digit_count:
7447 attrs->at_digit_count = a;
7448 break;
7449 case DW_AT_discr:
7450 attrs->at_discr = a;
7451 break;
7452 case DW_AT_discr_list:
7453 attrs->at_discr_list = a;
7454 break;
7455 case DW_AT_discr_value:
7456 attrs->at_discr_value = a;
7457 break;
7458 case DW_AT_encoding:
7459 attrs->at_encoding = a;
7460 break;
7461 case DW_AT_endianity:
7462 attrs->at_endianity = a;
7463 break;
7464 case DW_AT_explicit:
7465 attrs->at_explicit = a;
7466 break;
7467 case DW_AT_is_optional:
7468 attrs->at_is_optional = a;
7469 break;
7470 case DW_AT_location:
7471 attrs->at_location = a;
7472 break;
7473 case DW_AT_lower_bound:
7474 attrs->at_lower_bound = a;
7475 break;
7476 case DW_AT_mutable:
7477 attrs->at_mutable = a;
7478 break;
7479 case DW_AT_ordering:
7480 attrs->at_ordering = a;
7481 break;
7482 case DW_AT_picture_string:
7483 attrs->at_picture_string = a;
7484 break;
7485 case DW_AT_prototyped:
7486 attrs->at_prototyped = a;
7487 break;
7488 case DW_AT_small:
7489 attrs->at_small = a;
7490 break;
7491 case DW_AT_segment:
7492 attrs->at_segment = a;
7493 break;
7494 case DW_AT_string_length:
7495 attrs->at_string_length = a;
7496 break;
7497 case DW_AT_string_length_bit_size:
7498 attrs->at_string_length_bit_size = a;
7499 break;
7500 case DW_AT_string_length_byte_size:
7501 attrs->at_string_length_byte_size = a;
7502 break;
7503 case DW_AT_threads_scaled:
7504 attrs->at_threads_scaled = a;
7505 break;
7506 case DW_AT_upper_bound:
7507 attrs->at_upper_bound = a;
7508 break;
7509 case DW_AT_use_location:
7510 attrs->at_use_location = a;
7511 break;
7512 case DW_AT_use_UTF8:
7513 attrs->at_use_UTF8 = a;
7514 break;
7515 case DW_AT_variable_parameter:
7516 attrs->at_variable_parameter = a;
7517 break;
7518 case DW_AT_virtuality:
7519 attrs->at_virtuality = a;
7520 break;
7521 case DW_AT_visibility:
7522 attrs->at_visibility = a;
7523 break;
7524 case DW_AT_vtable_elem_location:
7525 attrs->at_vtable_elem_location = a;
7526 break;
7527 default:
7528 break;
7529 }
7530 }
7531 }
7532
7533 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7534
7535 static void
7536 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7537 {
7538 dw_die_ref c;
7539 dw_die_ref decl;
7540 struct checksum_attributes attrs;
7541
7542 CHECKSUM_ULEB128 ('D');
7543 CHECKSUM_ULEB128 (die->die_tag);
7544
7545 memset (&attrs, 0, sizeof (attrs));
7546
7547 decl = get_AT_ref (die, DW_AT_specification);
7548 if (decl != NULL)
7549 collect_checksum_attributes (&attrs, decl);
7550 collect_checksum_attributes (&attrs, die);
7551
7552 CHECKSUM_ATTR (attrs.at_name);
7553 CHECKSUM_ATTR (attrs.at_accessibility);
7554 CHECKSUM_ATTR (attrs.at_address_class);
7555 CHECKSUM_ATTR (attrs.at_allocated);
7556 CHECKSUM_ATTR (attrs.at_artificial);
7557 CHECKSUM_ATTR (attrs.at_associated);
7558 CHECKSUM_ATTR (attrs.at_binary_scale);
7559 CHECKSUM_ATTR (attrs.at_bit_offset);
7560 CHECKSUM_ATTR (attrs.at_bit_size);
7561 CHECKSUM_ATTR (attrs.at_bit_stride);
7562 CHECKSUM_ATTR (attrs.at_byte_size);
7563 CHECKSUM_ATTR (attrs.at_byte_stride);
7564 CHECKSUM_ATTR (attrs.at_const_value);
7565 CHECKSUM_ATTR (attrs.at_containing_type);
7566 CHECKSUM_ATTR (attrs.at_count);
7567 CHECKSUM_ATTR (attrs.at_data_location);
7568 CHECKSUM_ATTR (attrs.at_data_member_location);
7569 CHECKSUM_ATTR (attrs.at_decimal_scale);
7570 CHECKSUM_ATTR (attrs.at_decimal_sign);
7571 CHECKSUM_ATTR (attrs.at_default_value);
7572 CHECKSUM_ATTR (attrs.at_digit_count);
7573 CHECKSUM_ATTR (attrs.at_discr);
7574 CHECKSUM_ATTR (attrs.at_discr_list);
7575 CHECKSUM_ATTR (attrs.at_discr_value);
7576 CHECKSUM_ATTR (attrs.at_encoding);
7577 CHECKSUM_ATTR (attrs.at_endianity);
7578 CHECKSUM_ATTR (attrs.at_explicit);
7579 CHECKSUM_ATTR (attrs.at_is_optional);
7580 CHECKSUM_ATTR (attrs.at_location);
7581 CHECKSUM_ATTR (attrs.at_lower_bound);
7582 CHECKSUM_ATTR (attrs.at_mutable);
7583 CHECKSUM_ATTR (attrs.at_ordering);
7584 CHECKSUM_ATTR (attrs.at_picture_string);
7585 CHECKSUM_ATTR (attrs.at_prototyped);
7586 CHECKSUM_ATTR (attrs.at_small);
7587 CHECKSUM_ATTR (attrs.at_segment);
7588 CHECKSUM_ATTR (attrs.at_string_length);
7589 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7590 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7591 CHECKSUM_ATTR (attrs.at_threads_scaled);
7592 CHECKSUM_ATTR (attrs.at_upper_bound);
7593 CHECKSUM_ATTR (attrs.at_use_location);
7594 CHECKSUM_ATTR (attrs.at_use_UTF8);
7595 CHECKSUM_ATTR (attrs.at_variable_parameter);
7596 CHECKSUM_ATTR (attrs.at_virtuality);
7597 CHECKSUM_ATTR (attrs.at_visibility);
7598 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7599 CHECKSUM_ATTR (attrs.at_type);
7600 CHECKSUM_ATTR (attrs.at_friend);
7601 CHECKSUM_ATTR (attrs.at_alignment);
7602
7603 /* Checksum the child DIEs. */
7604 c = die->die_child;
7605 if (c) do {
7606 dw_attr_node *name_attr;
7607
7608 c = c->die_sib;
7609 name_attr = get_AT (c, DW_AT_name);
7610 if (is_template_instantiation (c))
7611 {
7612 /* Ignore instantiations of member type and function templates. */
7613 }
7614 else if (name_attr != NULL
7615 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7616 {
7617 /* Use a shallow checksum for named nested types and member
7618 functions. */
7619 CHECKSUM_ULEB128 ('S');
7620 CHECKSUM_ULEB128 (c->die_tag);
7621 CHECKSUM_STRING (AT_string (name_attr));
7622 }
7623 else
7624 {
7625 /* Use a deep checksum for other children. */
7626 /* Mark this DIE so it gets processed when unmarking. */
7627 if (c->die_mark == 0)
7628 c->die_mark = -1;
7629 die_checksum_ordered (c, ctx, mark);
7630 }
7631 } while (c != die->die_child);
7632
7633 CHECKSUM_ULEB128 (0);
7634 }
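
/* To make the child loop above concrete, consider a hypothetical

     struct S { int i; void f (); };

   The named member function "f" (a DW_TAG_subprogram) contributes only the
   shallow triple 'S' DW_TAG_subprogram "f"; the data member "i" is neither
   a type nor a subprogram, so it is checksummed in full by the recursive
   call; and the final CHECKSUM_ULEB128 (0) terminates the child list.  */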
7635
7636 /* Add a type name and tag to a hash. */
7637 static void
7638 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7639 {
7640 CHECKSUM_ULEB128 (tag);
7641 CHECKSUM_STRING (name);
7642 }
7643
7644 #undef CHECKSUM
7645 #undef CHECKSUM_STRING
7646 #undef CHECKSUM_ATTR
7647 #undef CHECKSUM_SLEB128
7648 #undef CHECKSUM_ULEB128
7649
7650 /* Generate the type signature for DIE. This is computed by generating an
7651 MD5 checksum over the DIE's tag, its relevant attributes, and its
7652 children. Attributes that are references to other DIEs are processed
7653 by recursion, using the MARK field to prevent infinite recursion.
7654 If the DIE is nested inside a namespace or another type, we also
7655 need to include that context in the signature. The lower 64 bits
7656 of the resulting MD5 checksum comprise the signature. */
7657
7658 static void
7659 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7660 {
7661 int mark;
7662 const char *name;
7663 unsigned char checksum[16];
7664 struct md5_ctx ctx;
7665 dw_die_ref decl;
7666 dw_die_ref parent;
7667
7668 name = get_AT_string (die, DW_AT_name);
7669 decl = get_AT_ref (die, DW_AT_specification);
7670 parent = get_die_parent (die);
7671
7672 /* First, compute a signature for just the type name (and its surrounding
7673 context, if any). This is stored in the type unit DIE for link-time
7674 ODR (one-definition rule) checking. */
7675
7676 if (is_cxx () && name != NULL)
7677 {
7678 md5_init_ctx (&ctx);
7679
7680 /* Checksum the names of surrounding namespaces and structures. */
7681 if (parent != NULL)
7682 checksum_die_context (parent, &ctx);
7683
7684 /* Checksum the current DIE. */
7685 die_odr_checksum (die->die_tag, name, &ctx);
7686 md5_finish_ctx (&ctx, checksum);
7687
7688 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7689 }
7690
7691 /* Next, compute the complete type signature. */
7692
7693 md5_init_ctx (&ctx);
7694 mark = 1;
7695 die->die_mark = mark;
7696
7697 /* Checksum the names of surrounding namespaces and structures. */
7698 if (parent != NULL)
7699 checksum_die_context (parent, &ctx);
7700
7701 /* Checksum the DIE and its children. */
7702 die_checksum_ordered (die, &ctx, &mark);
7703 unmark_all_dies (die);
7704 md5_finish_ctx (&ctx, checksum);
7705
7706 /* Store the signature in the type node and link the type DIE and the
7707 type node together. */
7708 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7709 DWARF_TYPE_SIGNATURE_SIZE);
7710 die->comdat_type_p = true;
7711 die->die_id.die_type_node = type_node;
7712 type_node->type_die = die;
7713
7714 /* If the DIE is a specification, link its declaration to the type node
7715 as well. */
7716 if (decl != NULL)
7717 {
7718 decl->comdat_type_p = true;
7719 decl->die_id.die_type_node = type_node;
7720 }
7721 }
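
/* A note on the layout assumed above: with the usual DWARF_TYPE_SIGNATURE_SIZE
   of 8, if md5_finish_ctx produced bytes b0..b15, then b8..b15 are stored in
   type_node->signature and become the 8-byte signature that the type unit
   header and DW_AT_signature (DW_FORM_ref_sig8) references use to name this
   type from other units.  */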
7722
7723 /* Do the location expressions look the same? */
7724 static inline int
7725 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7726 {
7727 return loc1->dw_loc_opc == loc2->dw_loc_opc
7728 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7729 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7730 }
7731
7732 /* Do the values look the same? */
7733 static int
7734 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7735 {
7736 dw_loc_descr_ref loc1, loc2;
7737 rtx r1, r2;
7738
7739 if (v1->val_class != v2->val_class)
7740 return 0;
7741
7742 switch (v1->val_class)
7743 {
7744 case dw_val_class_const:
7745 case dw_val_class_const_implicit:
7746 return v1->v.val_int == v2->v.val_int;
7747 case dw_val_class_unsigned_const:
7748 case dw_val_class_unsigned_const_implicit:
7749 return v1->v.val_unsigned == v2->v.val_unsigned;
7750 case dw_val_class_const_double:
7751 return v1->v.val_double.high == v2->v.val_double.high
7752 && v1->v.val_double.low == v2->v.val_double.low;
7753 case dw_val_class_wide_int:
7754 return *v1->v.val_wide == *v2->v.val_wide;
7755 case dw_val_class_vec:
7756 if (v1->v.val_vec.length != v2->v.val_vec.length
7757 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7758 return 0;
7759 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7760 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7761 return 0;
7762 return 1;
7763 case dw_val_class_flag:
7764 return v1->v.val_flag == v2->v.val_flag;
7765 case dw_val_class_str:
7766 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7767
7768 case dw_val_class_addr:
7769 r1 = v1->v.val_addr;
7770 r2 = v2->v.val_addr;
7771 if (GET_CODE (r1) != GET_CODE (r2))
7772 return 0;
7773 return rtx_equal_p (r1, r2);
7774
7775 case dw_val_class_offset:
7776 return v1->v.val_offset == v2->v.val_offset;
7777
7778 case dw_val_class_loc:
7779 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7780 loc1 && loc2;
7781 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7782 if (!same_loc_p (loc1, loc2, mark))
7783 return 0;
7784 return !loc1 && !loc2;
7785
7786 case dw_val_class_die_ref:
7787 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7788
7789 case dw_val_class_symview:
7790 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7791
7792 case dw_val_class_fde_ref:
7793 case dw_val_class_vms_delta:
7794 case dw_val_class_lbl_id:
7795 case dw_val_class_lineptr:
7796 case dw_val_class_macptr:
7797 case dw_val_class_loclistsptr:
7798 case dw_val_class_high_pc:
7799 return 1;
7800
7801 case dw_val_class_file:
7802 case dw_val_class_file_implicit:
7803 return v1->v.val_file == v2->v.val_file;
7804
7805 case dw_val_class_data8:
7806 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7807
7808 default:
7809 return 1;
7810 }
7811 }
7812
7813 /* Do the attributes look the same? */
7814
7815 static int
7816 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7817 {
7818 if (at1->dw_attr != at2->dw_attr)
7819 return 0;
7820
7821 /* We don't care that this was compiled with a different compiler
7822 snapshot; if the output is the same, that's what matters. */
7823 if (at1->dw_attr == DW_AT_producer)
7824 return 1;
7825
7826 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7827 }
7828
7829 /* Do the DIEs look the same? */
7830
7831 static int
7832 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7833 {
7834 dw_die_ref c1, c2;
7835 dw_attr_node *a1;
7836 unsigned ix;
7837
7838 /* To avoid infinite recursion. */
7839 if (die1->die_mark)
7840 return die1->die_mark == die2->die_mark;
7841 die1->die_mark = die2->die_mark = ++(*mark);
7842
7843 if (die1->die_tag != die2->die_tag)
7844 return 0;
7845
7846 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7847 return 0;
7848
7849 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7850 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7851 return 0;
7852
7853 c1 = die1->die_child;
7854 c2 = die2->die_child;
7855 if (! c1)
7856 {
7857 if (c2)
7858 return 0;
7859 }
7860 else
7861 for (;;)
7862 {
7863 if (!same_die_p (c1, c2, mark))
7864 return 0;
7865 c1 = c1->die_sib;
7866 c2 = c2->die_sib;
7867 if (c1 == die1->die_child)
7868 {
7869 if (c2 == die2->die_child)
7870 break;
7871 else
7872 return 0;
7873 }
7874 }
7875
7876 return 1;
7877 }
7878
7879 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7880 children, and set die_symbol. */
7881
7882 static void
7883 compute_comp_unit_symbol (dw_die_ref unit_die)
7884 {
7885 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7886 const char *base = die_name ? lbasename (die_name) : "anonymous";
7887 char *name = XALLOCAVEC (char, strlen (base) + 64);
7888 char *p;
7889 int i, mark;
7890 unsigned char checksum[16];
7891 struct md5_ctx ctx;
7892
7893 /* Compute the checksum of the DIE, then append part of it as hex digits to
7894 the name of the unit (its base filename). */
7895
7896 md5_init_ctx (&ctx);
7897 mark = 0;
7898 die_checksum (unit_die, &ctx, &mark);
7899 unmark_all_dies (unit_die);
7900 md5_finish_ctx (&ctx, checksum);
7901
7902 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7903 not start with a letter but with anything valid for filenames and
7904 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7905 character is not a letter. */
7906 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7907 clean_symbol_name (name);
7908
7909 p = name + strlen (name);
7910 for (i = 0; i < 4; i++)
7911 {
7912 sprintf (p, "%.2x", checksum[i]);
7913 p += 2;
7914 }
7915
7916 unit_die->die_id.die_symbol = xstrdup (name);
7917 }
7918
7919 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7920
7921 static int
7922 is_type_die (dw_die_ref die)
7923 {
7924 switch (die->die_tag)
7925 {
7926 case DW_TAG_array_type:
7927 case DW_TAG_class_type:
7928 case DW_TAG_interface_type:
7929 case DW_TAG_enumeration_type:
7930 case DW_TAG_pointer_type:
7931 case DW_TAG_reference_type:
7932 case DW_TAG_rvalue_reference_type:
7933 case DW_TAG_string_type:
7934 case DW_TAG_structure_type:
7935 case DW_TAG_subroutine_type:
7936 case DW_TAG_union_type:
7937 case DW_TAG_ptr_to_member_type:
7938 case DW_TAG_set_type:
7939 case DW_TAG_subrange_type:
7940 case DW_TAG_base_type:
7941 case DW_TAG_const_type:
7942 case DW_TAG_file_type:
7943 case DW_TAG_packed_type:
7944 case DW_TAG_volatile_type:
7945 case DW_TAG_typedef:
7946 return 1;
7947 default:
7948 return 0;
7949 }
7950 }
7951
7952 /* Returns true iff C is a compile-unit DIE. */
7953
7954 static inline bool
7955 is_cu_die (dw_die_ref c)
7956 {
7957 return c && (c->die_tag == DW_TAG_compile_unit
7958 || c->die_tag == DW_TAG_skeleton_unit);
7959 }
7960
7961 /* Returns true iff C is a unit DIE of some sort. */
7962
7963 static inline bool
7964 is_unit_die (dw_die_ref c)
7965 {
7966 return c && (c->die_tag == DW_TAG_compile_unit
7967 || c->die_tag == DW_TAG_partial_unit
7968 || c->die_tag == DW_TAG_type_unit
7969 || c->die_tag == DW_TAG_skeleton_unit);
7970 }
7971
7972 /* Returns true iff C is a namespace DIE. */
7973
7974 static inline bool
7975 is_namespace_die (dw_die_ref c)
7976 {
7977 return c && c->die_tag == DW_TAG_namespace;
7978 }
7979
7980 /* Return non-zero if this DIE is a template parameter. */
7981
7982 static inline bool
7983 is_template_parameter (dw_die_ref die)
7984 {
7985 switch (die->die_tag)
7986 {
7987 case DW_TAG_template_type_param:
7988 case DW_TAG_template_value_param:
7989 case DW_TAG_GNU_template_template_param:
7990 case DW_TAG_GNU_template_parameter_pack:
7991 return true;
7992 default:
7993 return false;
7994 }
7995 }
7996
7997 /* Return non-zero if this DIE represents a template instantiation. */
7998
7999 static inline bool
8000 is_template_instantiation (dw_die_ref die)
8001 {
8002 dw_die_ref c;
8003
8004 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
8005 return false;
8006 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
8007 return false;
8008 }
8009
8010 static char *
8011 gen_internal_sym (const char *prefix)
8012 {
8013 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
8014
8015 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
8016 return xstrdup (buf);
8017 }
8018
8019 /* Return non-zero if this DIE is a declaration. */
8020
8021 static int
8022 is_declaration_die (dw_die_ref die)
8023 {
8024 dw_attr_node *a;
8025 unsigned ix;
8026
8027 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8028 if (a->dw_attr == DW_AT_declaration)
8029 return 1;
8030
8031 return 0;
8032 }
8033
8034 /* Return non-zero if this DIE is nested inside a subprogram. */
8035
8036 static int
8037 is_nested_in_subprogram (dw_die_ref die)
8038 {
8039 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8040
8041 if (decl == NULL)
8042 decl = die;
8043 return local_scope_p (decl);
8044 }
8045
8046 /* Return non-zero if this DIE contains a defining declaration of a
8047 subprogram. */
8048
8049 static int
8050 contains_subprogram_definition (dw_die_ref die)
8051 {
8052 dw_die_ref c;
8053
8054 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8055 return 1;
8056 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8057 return 0;
8058 }
8059
8060 /* Return non-zero if this is a type DIE that should be moved to a
8061 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8062 unit type. */
8063
8064 static int
8065 should_move_die_to_comdat (dw_die_ref die)
8066 {
8067 switch (die->die_tag)
8068 {
8069 case DW_TAG_class_type:
8070 case DW_TAG_structure_type:
8071 case DW_TAG_enumeration_type:
8072 case DW_TAG_union_type:
8073 /* Don't move declarations, inlined instances, types nested in a
8074 subprogram, or types that contain subprogram definitions. */
8075 if (is_declaration_die (die)
8076 || get_AT (die, DW_AT_abstract_origin)
8077 || is_nested_in_subprogram (die)
8078 || contains_subprogram_definition (die))
8079 return 0;
8080 return 1;
8081 case DW_TAG_array_type:
8082 case DW_TAG_interface_type:
8083 case DW_TAG_pointer_type:
8084 case DW_TAG_reference_type:
8085 case DW_TAG_rvalue_reference_type:
8086 case DW_TAG_string_type:
8087 case DW_TAG_subroutine_type:
8088 case DW_TAG_ptr_to_member_type:
8089 case DW_TAG_set_type:
8090 case DW_TAG_subrange_type:
8091 case DW_TAG_base_type:
8092 case DW_TAG_const_type:
8093 case DW_TAG_file_type:
8094 case DW_TAG_packed_type:
8095 case DW_TAG_volatile_type:
8096 case DW_TAG_typedef:
8097 default:
8098 return 0;
8099 }
8100 }
8101
8102 /* Make a clone of DIE. */
8103
8104 static dw_die_ref
8105 clone_die (dw_die_ref die)
8106 {
8107 dw_die_ref clone = new_die_raw (die->die_tag);
8108 dw_attr_node *a;
8109 unsigned ix;
8110
8111 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8112 add_dwarf_attr (clone, a);
8113
8114 return clone;
8115 }
8116
8117 /* Make a clone of the tree rooted at DIE. */
8118
8119 static dw_die_ref
8120 clone_tree (dw_die_ref die)
8121 {
8122 dw_die_ref c;
8123 dw_die_ref clone = clone_die (die);
8124
8125 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8126
8127 return clone;
8128 }
8129
8130 /* Make a clone of DIE as a declaration. */
8131
8132 static dw_die_ref
8133 clone_as_declaration (dw_die_ref die)
8134 {
8135 dw_die_ref clone;
8136 dw_die_ref decl;
8137 dw_attr_node *a;
8138 unsigned ix;
8139
8140 /* If the DIE is already a declaration, just clone it. */
8141 if (is_declaration_die (die))
8142 return clone_die (die);
8143
8144 /* If the DIE is a specification, just clone its declaration DIE. */
8145 decl = get_AT_ref (die, DW_AT_specification);
8146 if (decl != NULL)
8147 {
8148 clone = clone_die (decl);
8149 if (die->comdat_type_p)
8150 add_AT_die_ref (clone, DW_AT_signature, die);
8151 return clone;
8152 }
8153
8154 clone = new_die_raw (die->die_tag);
8155
8156 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8157 {
8158 /* We don't want to copy over all attributes.
8159 For example we don't want DW_AT_byte_size because otherwise we will no
8160 longer have a declaration and GDB will treat it as a definition. */
8161
8162 switch (a->dw_attr)
8163 {
8164 case DW_AT_abstract_origin:
8165 case DW_AT_artificial:
8166 case DW_AT_containing_type:
8167 case DW_AT_external:
8168 case DW_AT_name:
8169 case DW_AT_type:
8170 case DW_AT_virtuality:
8171 case DW_AT_linkage_name:
8172 case DW_AT_MIPS_linkage_name:
8173 add_dwarf_attr (clone, a);
8174 break;
8175 case DW_AT_byte_size:
8176 case DW_AT_alignment:
8177 default:
8178 break;
8179 }
8180 }
8181
8182 if (die->comdat_type_p)
8183 add_AT_die_ref (clone, DW_AT_signature, die);
8184
8185 add_AT_flag (clone, DW_AT_declaration, 1);
8186 return clone;
8187 }
8188
8189
8190 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8191
8192 struct decl_table_entry
8193 {
8194 dw_die_ref orig;
8195 dw_die_ref copy;
8196 };
8197
8198 /* Helpers to manipulate hash table of copied declarations. */
8199
8200 /* Hashtable helpers. */
8201
8202 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8203 {
8204 typedef die_struct *compare_type;
8205 static inline hashval_t hash (const decl_table_entry *);
8206 static inline bool equal (const decl_table_entry *, const die_struct *);
8207 };
8208
8209 inline hashval_t
8210 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8211 {
8212 return htab_hash_pointer (entry->orig);
8213 }
8214
8215 inline bool
8216 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8217 const die_struct *entry2)
8218 {
8219 return entry1->orig == entry2;
8220 }
8221
8222 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8223
8224 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8225 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8226 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8227 to check if the ancestor has already been copied into UNIT. */
8228
8229 static dw_die_ref
8230 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8231 decl_hash_type *decl_table)
8232 {
8233 dw_die_ref parent = die->die_parent;
8234 dw_die_ref new_parent = unit;
8235 dw_die_ref copy;
8236 decl_table_entry **slot = NULL;
8237 struct decl_table_entry *entry = NULL;
8238
8239 /* If DIE refers to a stub, unfold that so we get the appropriate
8240 DIE registered as orig in decl_table. */
8241 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8242 die = c;
8243
8244 if (decl_table)
8245 {
8246 /* Check if the entry has already been copied to UNIT. */
8247 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8248 INSERT);
8249 if (*slot != HTAB_EMPTY_ENTRY)
8250 {
8251 entry = *slot;
8252 return entry->copy;
8253 }
8254
8255 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8256 entry = XCNEW (struct decl_table_entry);
8257 entry->orig = die;
8258 entry->copy = NULL;
8259 *slot = entry;
8260 }
8261
8262 if (parent != NULL)
8263 {
8264 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8265 if (spec != NULL)
8266 parent = spec;
8267 if (!is_unit_die (parent))
8268 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8269 }
8270
8271 copy = clone_as_declaration (die);
8272 add_child_die (new_parent, copy);
8273
8274 if (decl_table)
8275 {
8276 /* Record the pointer to the copy. */
8277 entry->copy = copy;
8278 }
8279
8280 return copy;
8281 }
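
/* For instance, when a DIE whose parent chain is CU -> namespace N -> class C
   is moved into a type unit, the recursion above adds declaration-only clones
   of N and then C under UNIT, and (when DECL_TABLE is provided) each ancestor
   is copied at most once even if several moved DIEs share that context.  The
   names N and C here are purely illustrative.  */
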
8282 /* Copy the declaration context to the new type unit DIE. This includes
8283 any surrounding namespace or type declarations. If the DIE has an
8284 AT_specification attribute, it also includes attributes and children
8285 attached to the specification, and returns a pointer to the original
8286 parent of the declaration DIE. Returns NULL otherwise. */
8287
8288 static dw_die_ref
8289 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8290 {
8291 dw_die_ref decl;
8292 dw_die_ref new_decl;
8293 dw_die_ref orig_parent = NULL;
8294
8295 decl = get_AT_ref (die, DW_AT_specification);
8296 if (decl == NULL)
8297 decl = die;
8298 else
8299 {
8300 unsigned ix;
8301 dw_die_ref c;
8302 dw_attr_node *a;
8303
8304 /* The original DIE will be changed to a declaration, and must
8305 be moved to be a child of the original declaration DIE. */
8306 orig_parent = decl->die_parent;
8307
8308 /* Copy the type node pointer from the new DIE to the original
8309 declaration DIE so we can forward references later. */
8310 decl->comdat_type_p = true;
8311 decl->die_id.die_type_node = die->die_id.die_type_node;
8312
8313 remove_AT (die, DW_AT_specification);
8314
8315 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8316 {
8317 if (a->dw_attr != DW_AT_name
8318 && a->dw_attr != DW_AT_declaration
8319 && a->dw_attr != DW_AT_external)
8320 add_dwarf_attr (die, a);
8321 }
8322
8323 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8324 }
8325
8326 if (decl->die_parent != NULL
8327 && !is_unit_die (decl->die_parent))
8328 {
8329 new_decl = copy_ancestor_tree (unit, decl, NULL);
8330 if (new_decl != NULL)
8331 {
8332 remove_AT (new_decl, DW_AT_signature);
8333 add_AT_specification (die, new_decl);
8334 }
8335 }
8336
8337 return orig_parent;
8338 }
8339
8340 /* Generate the skeleton ancestor tree for the given NODE, then clone
8341 the DIE and add the clone into the tree. */
8342
8343 static void
8344 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8345 {
8346 if (node->new_die != NULL)
8347 return;
8348
8349 node->new_die = clone_as_declaration (node->old_die);
8350
8351 if (node->parent != NULL)
8352 {
8353 generate_skeleton_ancestor_tree (node->parent);
8354 add_child_die (node->parent->new_die, node->new_die);
8355 }
8356 }
8357
8358 /* Generate a skeleton tree of DIEs containing any declarations that are
8359 found in the original tree. We traverse the tree looking for declaration
8360 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8361
8362 static void
8363 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8364 {
8365 skeleton_chain_node node;
8366 dw_die_ref c;
8367 dw_die_ref first;
8368 dw_die_ref prev = NULL;
8369 dw_die_ref next = NULL;
8370
8371 node.parent = parent;
8372
8373 first = c = parent->old_die->die_child;
8374 if (c)
8375 next = c->die_sib;
8376 if (c) do {
8377 if (prev == NULL || prev->die_sib == c)
8378 prev = c;
8379 c = next;
8380 next = (c == first ? NULL : c->die_sib);
8381 node.old_die = c;
8382 node.new_die = NULL;
8383 if (is_declaration_die (c))
8384 {
8385 if (is_template_instantiation (c))
8386 {
8387 /* Instantiated templates do not need to be cloned into the
8388 type unit. Just move the DIE and its children back to
8389 the skeleton tree (in the main CU). */
8390 remove_child_with_prev (c, prev);
8391 add_child_die (parent->new_die, c);
8392 c = prev;
8393 }
8394 else if (c->comdat_type_p)
8395 {
8396 /* This is the skeleton of a type already broken out by an earlier
8397 break_out_comdat_types call. Clone the existing DIE, but keep the
8398 children under the original (which is in the main CU). */
8399 dw_die_ref clone = clone_die (c);
8400
8401 replace_child (c, clone, prev);
8402 generate_skeleton_ancestor_tree (parent);
8403 add_child_die (parent->new_die, c);
8404 c = clone;
8405 continue;
8406 }
8407 else
8408 {
8409 /* Clone the existing DIE, move the original to the skeleton
8410 tree (which is in the main CU), and put the clone, with
8411 all the original's children, where the original came from
8412 (which is about to be moved to the type unit). */
8413 dw_die_ref clone = clone_die (c);
8414 move_all_children (c, clone);
8415
8416 /* If the original has a DW_AT_object_pointer attribute,
8417 it would now point to a child DIE just moved to the
8418 cloned tree, so we need to remove that attribute from
8419 the original. */
8420 remove_AT (c, DW_AT_object_pointer);
8421
8422 replace_child (c, clone, prev);
8423 generate_skeleton_ancestor_tree (parent);
8424 add_child_die (parent->new_die, c);
8425 node.old_die = clone;
8426 node.new_die = c;
8427 c = clone;
8428 }
8429 }
8430 generate_skeleton_bottom_up (&node);
8431 } while (next != NULL);
8432 }
8433
8434 /* Wrapper function for generate_skeleton_bottom_up. */
8435
8436 static dw_die_ref
8437 generate_skeleton (dw_die_ref die)
8438 {
8439 skeleton_chain_node node;
8440
8441 node.old_die = die;
8442 node.new_die = NULL;
8443 node.parent = NULL;
8444
8445 /* If this type definition is nested inside another type,
8446 and is not an instantiation of a template, always leave
8447 at least a declaration in its place. */
8448 if (die->die_parent != NULL
8449 && is_type_die (die->die_parent)
8450 && !is_template_instantiation (die))
8451 node.new_die = clone_as_declaration (die);
8452
8453 generate_skeleton_bottom_up (&node);
8454 return node.new_die;
8455 }
8456
8457 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8458 declaration. The original DIE is moved to a new compile unit so that
8459 existing references to it follow it to the new location. If any of the
8460 original DIE's descendants is a declaration, we need to replace the
8461 original DIE with a skeleton tree and move the declarations back into the
8462 skeleton tree. */
8463
8464 static dw_die_ref
8465 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8466 dw_die_ref prev)
8467 {
8468 dw_die_ref skeleton, orig_parent;
8469
8470 /* Copy the declaration context to the type unit DIE. If the returned
8471 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8472 that DIE. */
8473 orig_parent = copy_declaration_context (unit, child);
8474
8475 skeleton = generate_skeleton (child);
8476 if (skeleton == NULL)
8477 remove_child_with_prev (child, prev);
8478 else
8479 {
8480 skeleton->comdat_type_p = true;
8481 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8482
8483 /* If the original DIE was a specification, we need to put
8484 the skeleton under the parent DIE of the declaration.
8485 This leaves the original declaration in the tree, but
8486 it will be pruned later since there are no longer any
8487 references to it. */
8488 if (orig_parent != NULL)
8489 {
8490 remove_child_with_prev (child, prev);
8491 add_child_die (orig_parent, skeleton);
8492 }
8493 else
8494 replace_child (child, skeleton, prev);
8495 }
8496
8497 return skeleton;
8498 }
8499
8500 static void
8501 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8502 comdat_type_node *type_node,
8503 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8504
8505 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8506 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8507 DWARF procedure references in the DW_AT_location attribute. */
8508
8509 static dw_die_ref
8510 copy_dwarf_procedure (dw_die_ref die,
8511 comdat_type_node *type_node,
8512 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8513 {
8514 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8515
8516 /* DWARF procedures are not supposed to have children... */
8517 gcc_assert (die->die_child == NULL);
8518
8519 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8520 gcc_assert (vec_safe_length (die->die_attr) == 1
8521 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8522
8523 /* Do not copy DWARF procedures more than once. */
8524 bool existed;
8525 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8526 if (existed)
8527 return die_copy;
8528
8529 die_copy = clone_die (die);
8530 add_child_die (type_node->root_die, die_copy);
8531 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8532 return die_copy;
8533 }
8534
8535 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8536 procedures in DIE's attributes. */
8537
8538 static void
8539 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8540 comdat_type_node *type_node,
8541 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8542 {
8543 dw_attr_node *a;
8544 unsigned i;
8545
8546 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8547 {
8548 dw_loc_descr_ref loc;
8549
8550 if (a->dw_attr_val.val_class != dw_val_class_loc)
8551 continue;
8552
8553 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8554 {
8555 switch (loc->dw_loc_opc)
8556 {
8557 case DW_OP_call2:
8558 case DW_OP_call4:
8559 case DW_OP_call_ref:
8560 gcc_assert (loc->dw_loc_oprnd1.val_class
8561 == dw_val_class_die_ref);
8562 loc->dw_loc_oprnd1.v.val_die_ref.die
8563 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8564 type_node,
8565 copied_dwarf_procs);
8566
8567 default:
8568 break;
8569 }
8570 }
8571 }
8572 }
8573
8574 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8575 rewrite references to point to the copies.
8576
8577 References are looked for in DIE's attributes and recursively in all its
8578 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8579 mapping from old DWARF procedures to their copy. It is used not to copy
8580 twice the same DWARF procedure under TYPE_NODE. */
8581
8582 static void
8583 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8584 comdat_type_node *type_node,
8585 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8586 {
8587 dw_die_ref c;
8588
8589 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8590 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8591 type_node,
8592 copied_dwarf_procs));
8593 }
8594
8595 /* Traverse the DIE and set up additional .debug_types or .debug_info
8596 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8597 section. */
8598
8599 static void
8600 break_out_comdat_types (dw_die_ref die)
8601 {
8602 dw_die_ref c;
8603 dw_die_ref first;
8604 dw_die_ref prev = NULL;
8605 dw_die_ref next = NULL;
8606 dw_die_ref unit = NULL;
8607
8608 first = c = die->die_child;
8609 if (c)
8610 next = c->die_sib;
8611 if (c) do {
8612 if (prev == NULL || prev->die_sib == c)
8613 prev = c;
8614 c = next;
8615 next = (c == first ? NULL : c->die_sib);
8616 if (should_move_die_to_comdat (c))
8617 {
8618 dw_die_ref replacement;
8619 comdat_type_node *type_node;
8620
8621 /* Break out nested types into their own type units. */
8622 break_out_comdat_types (c);
8623
8624 /* Create a new type unit DIE as the root for the new tree. */
8625 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8626 add_AT_unsigned (unit, DW_AT_language,
8627 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8628
8629 /* Add the new unit's type DIE into the comdat type list. */
8630 type_node = ggc_cleared_alloc<comdat_type_node> ();
8631 type_node->root_die = unit;
8632 type_node->next = comdat_type_list;
8633 comdat_type_list = type_node;
8634
8635 /* Generate the type signature. */
8636 generate_type_signature (c, type_node);
8637
8638 /* Copy the declaration context, attributes, and children of the
8639 declaration into the new type unit DIE, then remove this DIE
8640 from the main CU (or replace it with a skeleton if necessary). */
8641 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8642 type_node->skeleton_die = replacement;
8643
8644 /* Add the DIE to the new compunit. */
8645 add_child_die (unit, c);
8646
8647 /* Types can reference DWARF procedures for type size or data location
8648 expressions. Calls in DWARF expressions cannot target procedures
8649 that are not in the same section. So we must copy DWARF procedures
8650 along with this type and then rewrite references to them. */
8651 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8652 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8653
8654 if (replacement != NULL)
8655 c = replacement;
8656 }
8657 else if (c->die_tag == DW_TAG_namespace
8658 || c->die_tag == DW_TAG_class_type
8659 || c->die_tag == DW_TAG_structure_type
8660 || c->die_tag == DW_TAG_union_type)
8661 {
8662 /* Look for nested types that can be broken out. */
8663 break_out_comdat_types (c);
8664 }
8665 } while (next != NULL);
8666 }
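
/* As a hypothetical walk-through of the loop above: for a CU-level

     struct Outer { struct Inner { ... }; ... };

   the recursive break_out_comdat_types call first gives Inner its own
   DW_TAG_type_unit; Outer then gets its own unit, a signature, and a skeleton
   (or is simply removed) in the main CU; and any DWARF procedures referenced
   from its size or data location expressions are copied into the same unit so
   that DW_OP_call* operations never cross section boundaries.  */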
8667
8668 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8669 Enter all the cloned children into the hash table decl_table. */
8670
8671 static dw_die_ref
8672 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8673 {
8674 dw_die_ref c;
8675 dw_die_ref clone;
8676 struct decl_table_entry *entry;
8677 decl_table_entry **slot;
8678
8679 if (die->die_tag == DW_TAG_subprogram)
8680 clone = clone_as_declaration (die);
8681 else
8682 clone = clone_die (die);
8683
8684 slot = decl_table->find_slot_with_hash (die,
8685 htab_hash_pointer (die), INSERT);
8686
8687 /* Assert that DIE isn't in the hash table yet. If it were already
8688 there, its ancestors would necessarily be there as well, and therefore
8689 clone_tree_partial wouldn't have been called. */
8690 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8691
8692 entry = XCNEW (struct decl_table_entry);
8693 entry->orig = die;
8694 entry->copy = clone;
8695 *slot = entry;
8696
8697 if (die->die_tag != DW_TAG_subprogram)
8698 FOR_EACH_CHILD (die, c,
8699 add_child_die (clone, clone_tree_partial (c, decl_table)));
8700
8701 return clone;
8702 }
8703
8704 /* Walk the DIE and its children, looking for references to incomplete
8705 or trivial types that are unmarked (i.e., that are not in the current
8706 type_unit). */
8707
8708 static void
8709 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8710 {
8711 dw_die_ref c;
8712 dw_attr_node *a;
8713 unsigned ix;
8714
8715 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8716 {
8717 if (AT_class (a) == dw_val_class_die_ref)
8718 {
8719 dw_die_ref targ = AT_ref (a);
8720 decl_table_entry **slot;
8721 struct decl_table_entry *entry;
8722
8723 if (targ->die_mark != 0 || targ->comdat_type_p)
8724 continue;
8725
8726 slot = decl_table->find_slot_with_hash (targ,
8727 htab_hash_pointer (targ),
8728 INSERT);
8729
8730 if (*slot != HTAB_EMPTY_ENTRY)
8731 {
8732 /* TARG has already been copied, so we just need to
8733 modify the reference to point to the copy. */
8734 entry = *slot;
8735 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8736 }
8737 else
8738 {
8739 dw_die_ref parent = unit;
8740 dw_die_ref copy = clone_die (targ);
8741
8742 /* Record in DECL_TABLE that TARG has been copied.
8743 Need to do this now, before the recursive call,
8744 because DECL_TABLE may be expanded and SLOT
8745 would no longer be a valid pointer. */
8746 entry = XCNEW (struct decl_table_entry);
8747 entry->orig = targ;
8748 entry->copy = copy;
8749 *slot = entry;
8750
8751 /* If TARG is not a declaration DIE, we need to copy its
8752 children. */
8753 if (!is_declaration_die (targ))
8754 {
8755 FOR_EACH_CHILD (
8756 targ, c,
8757 add_child_die (copy,
8758 clone_tree_partial (c, decl_table)));
8759 }
8760
8761 /* Make sure the cloned tree is marked as part of the
8762 type unit. */
8763 mark_dies (copy);
8764
8765 /* If TARG has surrounding context, copy its ancestor tree
8766 into the new type unit. */
8767 if (targ->die_parent != NULL
8768 && !is_unit_die (targ->die_parent))
8769 parent = copy_ancestor_tree (unit, targ->die_parent,
8770 decl_table);
8771
8772 add_child_die (parent, copy);
8773 a->dw_attr_val.v.val_die_ref.die = copy;
8774
8775 /* Make sure the newly-copied DIE is walked. If it was
8776 installed in a previously-added context, it won't
8777 get visited otherwise. */
8778 if (parent != unit)
8779 {
8780 /* Find the highest point of the newly-added tree,
8781 mark each node along the way, and walk from there. */
8782 parent->die_mark = 1;
8783 while (parent->die_parent
8784 && parent->die_parent->die_mark == 0)
8785 {
8786 parent = parent->die_parent;
8787 parent->die_mark = 1;
8788 }
8789 copy_decls_walk (unit, parent, decl_table);
8790 }
8791 }
8792 }
8793 }
8794
8795 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8796 }
8797
8798 /* Collect the skeleton DIEs in DIE that were already created by
8799 break_out_comdat_types and record them in DECL_TABLE. */
8800
8801 static void
8802 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8803 {
8804 dw_die_ref c;
8805
8806 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8807 {
8808 dw_die_ref targ = AT_ref (a);
8809 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8810 decl_table_entry **slot
8811 = decl_table->find_slot_with_hash (targ,
8812 htab_hash_pointer (targ),
8813 INSERT);
8814 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8815 /* Record in DECL_TABLE that TARG has already been copied
8816 by remove_child_or_replace_with_skeleton. */
8817 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8818 entry->orig = targ;
8819 entry->copy = die;
8820 *slot = entry;
8821 }
8822 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8823 }
8824
8825 /* Copy declarations for "unworthy" types into the new comdat section.
8826 Incomplete types, modified types, and certain other types aren't broken
8827 out into comdat sections of their own, so they don't have a signature,
8828 and we need to copy the declaration into the same section so that we
8829 don't have an external reference. */
8830
8831 static void
8832 copy_decls_for_unworthy_types (dw_die_ref unit)
8833 {
8834 mark_dies (unit);
8835 decl_hash_type decl_table (10);
8836 collect_skeleton_dies (unit, &decl_table);
8837 copy_decls_walk (unit, unit, &decl_table);
8838 unmark_dies (unit);
8839 }
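
/* As an illustration: if the type unit for a struct S refers to "const int *",
   none of the pointer, const, or base type DIEs gets its own signature (see
   should_move_die_to_comdat), so copy_decls_walk clones those small DIEs
   directly into S's unit rather than leaving an unresolved external
   reference.  */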
8840
8841 /* Traverse the DIE and add a sibling attribute if it may have the
8842 effect of speeding up access to siblings. To save some space,
8843 avoid generating sibling attributes for DIEs without children. */
8844
8845 static void
8846 add_sibling_attributes (dw_die_ref die)
8847 {
8848 dw_die_ref c;
8849
8850 if (! die->die_child)
8851 return;
8852
8853 if (die->die_parent && die != die->die_parent->die_child)
8854 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8855
8856 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8857 }
8858
8859 /* Output all location lists for the DIE and its children. */
8860
8861 static void
8862 output_location_lists (dw_die_ref die)
8863 {
8864 dw_die_ref c;
8865 dw_attr_node *a;
8866 unsigned ix;
8867
8868 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8869 if (AT_class (a) == dw_val_class_loc_list)
8870 output_loc_list (AT_loc_list (a));
8871
8872 FOR_EACH_CHILD (die, c, output_location_lists (c));
8873 }
8874
8875 /* During assign_location_list_indexes and output_loclists_offsets this is
8876 the current index; afterwards it is the number of assigned indexes (i.e.
8877 how large the .debug_loclists* offset table should be). */
8878 static unsigned int loc_list_idx;
8879
8880 /* Output all location list offsets for the DIE and its children. */
8881
8882 static void
8883 output_loclists_offsets (dw_die_ref die)
8884 {
8885 dw_die_ref c;
8886 dw_attr_node *a;
8887 unsigned ix;
8888
8889 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8890 if (AT_class (a) == dw_val_class_loc_list)
8891 {
8892 dw_loc_list_ref l = AT_loc_list (a);
8893 if (l->offset_emitted)
8894 continue;
8895 dw2_asm_output_delta (dwarf_offset_size, l->ll_symbol,
8896 loc_section_label, NULL);
8897 gcc_assert (l->hash == loc_list_idx);
8898 loc_list_idx++;
8899 l->offset_emitted = true;
8900 }
8901
8902 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8903 }
8904
8905 /* Recursively set indexes of location lists. */
8906
8907 static void
8908 assign_location_list_indexes (dw_die_ref die)
8909 {
8910 dw_die_ref c;
8911 dw_attr_node *a;
8912 unsigned ix;
8913
8914 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8915 if (AT_class (a) == dw_val_class_loc_list)
8916 {
8917 dw_loc_list_ref list = AT_loc_list (a);
8918 if (!list->num_assigned)
8919 {
8920 list->num_assigned = true;
8921 list->hash = loc_list_idx++;
8922 }
8923 }
8924
8925 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8926 }
8927
8928 /* We want to limit the number of external references, because they are
8929 larger than local references: a relocation takes multiple words, and
8930 even a sig8 reference is always eight bytes, whereas a local reference
8931 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8932 So if we encounter multiple external references to the same type DIE, we
8933 make a local typedef stub for it and redirect all references there.
8934
8935 This is the element of the hash table for keeping track of these
8936 references. */
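/* Rough worked example of the size tradeoff (illustrative figures only):
ten DW_FORM_ref_sig8 references to the same type cost 10 * 8 = 80 bytes in
.debug_info. Redirected to a local stub they become ten local references of
dwarf_offset_size (typically 4) bytes each, plus one stub DIE of roughly an
abbrev-code byte and an 8-byte signature, so about 49 bytes, and only the
stub itself still refers outside the CU. */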
8937
8938 struct external_ref
8939 {
8940 dw_die_ref type;
8941 dw_die_ref stub;
8942 unsigned n_refs;
8943 };
8944
8945 /* Hashtable helpers. */
8946
8947 struct external_ref_hasher : free_ptr_hash <external_ref>
8948 {
8949 static inline hashval_t hash (const external_ref *);
8950 static inline bool equal (const external_ref *, const external_ref *);
8951 };
8952
8953 inline hashval_t
8954 external_ref_hasher::hash (const external_ref *r)
8955 {
8956 dw_die_ref die = r->type;
8957 hashval_t h = 0;
8958
8959 /* We can't use the address of the DIE for hashing, because
8960 that will make the order of the stub DIEs non-deterministic. */
8961 if (! die->comdat_type_p)
8962 /* We have a symbol; use it to compute a hash. */
8963 h = htab_hash_string (die->die_id.die_symbol);
8964 else
8965 {
8966 /* We have a type signature; use a subset of the bits as the hash.
8967 The 8-byte signature is at least as large as hashval_t. */
8968 comdat_type_node *type_node = die->die_id.die_type_node;
8969 memcpy (&h, type_node->signature, sizeof (h));
8970 }
8971 return h;
8972 }
8973
8974 inline bool
8975 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8976 {
8977 return r1->type == r2->type;
8978 }
8979
8980 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8981
8982 /* Return a pointer to the external_ref for references to DIE. */
8983
8984 static struct external_ref *
8985 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8986 {
8987 struct external_ref ref, *ref_p;
8988 external_ref **slot;
8989
8990 ref.type = die;
8991 slot = map->find_slot (&ref, INSERT);
8992 if (*slot != HTAB_EMPTY_ENTRY)
8993 return *slot;
8994
8995 ref_p = XCNEW (struct external_ref);
8996 ref_p->type = die;
8997 *slot = ref_p;
8998 return ref_p;
8999 }
9000
9001 /* Subroutine of optimize_external_refs, below.
9002
9003 If we see a type skeleton, record it as our stub. If we see external
9004 references, remember how many we've seen. */
9005
9006 static void
9007 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
9008 {
9009 dw_die_ref c;
9010 dw_attr_node *a;
9011 unsigned ix;
9012 struct external_ref *ref_p;
9013
9014 if (is_type_die (die)
9015 && (c = get_AT_ref (die, DW_AT_signature)))
9016 {
9017 /* This is a local skeleton; use it for local references. */
9018 ref_p = lookup_external_ref (map, c);
9019 ref_p->stub = die;
9020 }
9021
9022 /* Scan the DIE references, and remember any that refer to DIEs from
9023 other CUs (i.e. those which are not marked). */
9024 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9025 if (AT_class (a) == dw_val_class_die_ref
9026 && (c = AT_ref (a))->die_mark == 0
9027 && is_type_die (c))
9028 {
9029 ref_p = lookup_external_ref (map, c);
9030 ref_p->n_refs++;
9031 }
9032
9033 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
9034 }
9035
9036 /* htab_traverse callback function for optimize_external_refs, below. SLOT
9037 points to an external_ref, DATA is the CU we're processing. If we don't
9038 already have a local stub, and we have multiple refs, build a stub. */
9039
9040 int
9041 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
9042 {
9043 struct external_ref *ref_p = *slot;
9044
9045 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9046 {
9047 /* We have multiple references to this type, so build a small stub.
9048 Both of these forms are a bit dodgy from the perspective of the
9049 DWARF standard, since technically they should have names. */
9050 dw_die_ref cu = data;
9051 dw_die_ref type = ref_p->type;
9052 dw_die_ref stub = NULL;
9053
9054 if (type->comdat_type_p)
9055 {
9056 /* If we refer to this type via sig8, use AT_signature. */
9057 stub = new_die (type->die_tag, cu, NULL_TREE);
9058 add_AT_die_ref (stub, DW_AT_signature, type);
9059 }
9060 else
9061 {
9062 /* Otherwise, use a typedef with no name. */
9063 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9064 add_AT_die_ref (stub, DW_AT_type, type);
9065 }
9066
9067 stub->die_mark++;
9068 ref_p->stub = stub;
9069 }
9070 return 1;
9071 }
9072
9073 /* DIE is a unit; look through all the DIE references to see if there are
9074 any external references to types, and if so, create local stubs for
9075 them which will be applied in build_abbrev_table. This is useful because
9076 references to local DIEs are smaller. */
9077
9078 static external_ref_hash_type *
9079 optimize_external_refs (dw_die_ref die)
9080 {
9081 external_ref_hash_type *map = new external_ref_hash_type (10);
9082 optimize_external_refs_1 (die, map);
9083 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9084 return map;
9085 }
9086
9087 /* The following variables are temporaries that are computed only during the
9088 build_abbrev_table call and used and released during the following
9089 optimize_abbrev_table call. */
9090
9091 /* First abbrev_id that can be optimized based on usage. */
9092 static unsigned int abbrev_opt_start;
9093
9094 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9095 abbrev_id smaller than this, because they must be already sized
9096 during build_abbrev_table). */
9097 static unsigned int abbrev_opt_base_type_end;
9098
9099 /* Vector of usage counts during build_abbrev_table. Indexed by
9100 abbrev_id - abbrev_opt_start. */
9101 static vec<unsigned int> abbrev_usage_count;
9102
9103 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9104 static vec<dw_die_ref> sorted_abbrev_dies;
9105
9106 /* The format of each DIE (and its attribute value pairs) is encoded in an
9107 abbreviation table. This routine builds the abbreviation table and assigns
9108 a unique abbreviation id for each abbreviation entry. The children of each
9109 die are visited recursively. */
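/* Illustrative example (added commentary): two DW_TAG_variable DIEs that
both carry DW_AT_name/DW_FORM_strp, DW_AT_decl_file/DW_FORM_data1 and
DW_AT_type with a local reference share one abbreviation entry, while a
third variable whose DW_AT_decl_file needs DW_FORM_data2 gets a new entry,
because the matching loop below requires every attribute/form pair to agree
exactly (it compares value_format results). */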
9110
9111 static void
9112 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9113 {
9114 unsigned int abbrev_id = 0;
9115 dw_die_ref c;
9116 dw_attr_node *a;
9117 unsigned ix;
9118 dw_die_ref abbrev;
9119
9120 /* Scan the DIE references, and replace any that refer to
9121 DIEs from other CUs (i.e. those which are not marked) with
9122 the local stubs we built in optimize_external_refs. */
9123 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9124 if (AT_class (a) == dw_val_class_die_ref
9125 && (c = AT_ref (a))->die_mark == 0)
9126 {
9127 struct external_ref *ref_p;
9128 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9129
9130 if (is_type_die (c)
9131 && (ref_p = lookup_external_ref (extern_map, c))
9132 && ref_p->stub && ref_p->stub != die)
9133 {
9134 gcc_assert (a->dw_attr != DW_AT_signature);
9135 change_AT_die_ref (a, ref_p->stub);
9136 }
9137 else
9138 /* We aren't changing this reference, so mark it external. */
9139 set_AT_ref_external (a, 1);
9140 }
9141
9142 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9143 {
9144 dw_attr_node *die_a, *abbrev_a;
9145 unsigned ix;
9146 bool ok = true;
9147
9148 if (abbrev_id == 0)
9149 continue;
9150 if (abbrev->die_tag != die->die_tag)
9151 continue;
9152 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9153 continue;
9154
9155 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9156 continue;
9157
9158 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9159 {
9160 abbrev_a = &(*abbrev->die_attr)[ix];
9161 if ((abbrev_a->dw_attr != die_a->dw_attr)
9162 || (value_format (abbrev_a) != value_format (die_a)))
9163 {
9164 ok = false;
9165 break;
9166 }
9167 }
9168 if (ok)
9169 break;
9170 }
9171
9172 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9173 {
9174 vec_safe_push (abbrev_die_table, die);
9175 if (abbrev_opt_start)
9176 abbrev_usage_count.safe_push (0);
9177 }
9178 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9179 {
9180 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9181 sorted_abbrev_dies.safe_push (die);
9182 }
9183
9184 die->die_abbrev = abbrev_id;
9185 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9186 }
9187
9188 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9189 by die_abbrev's usage count, from the most commonly used
9190 abbreviation to the least. */
9191
9192 static int
9193 die_abbrev_cmp (const void *p1, const void *p2)
9194 {
9195 dw_die_ref die1 = *(const dw_die_ref *) p1;
9196 dw_die_ref die2 = *(const dw_die_ref *) p2;
9197
9198 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9199 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9200
9201 if (die1->die_abbrev >= abbrev_opt_base_type_end
9202 && die2->die_abbrev >= abbrev_opt_base_type_end)
9203 {
9204 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9205 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9206 return -1;
9207 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9208 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9209 return 1;
9210 }
9211
9212 /* Stabilize the sort. */
9213 if (die1->die_abbrev < die2->die_abbrev)
9214 return -1;
9215 if (die1->die_abbrev > die2->die_abbrev)
9216 return 1;
9217
9218 return 0;
9219 }
9220
9221 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9222 of DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9223 into dw_val_class_const_implicit or
9224 dw_val_class_unsigned_const_implicit. */
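/* Worked example (illustrative figures): if 100 DIEs share an abbreviation
and all store DW_AT_decl_file as DW_FORM_data1 with the same value, switching
that attribute to DW_FORM_implicit_const moves the value into .debug_abbrev
as a single sleb128 (one byte here) and drops 100 bytes from .debug_info.
For unsigned constants the check below skips the conversion whenever the
.debug_info savings would not outweigh the .debug_abbrev growth. */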
9225
9226 static void
9227 optimize_implicit_const (unsigned int first_id, unsigned int end,
9228 vec<bool> &implicit_consts)
9229 {
9230 /* It never makes sense if there is just one DIE using the abbreviation. */
9231 if (end < first_id + 2)
9232 return;
9233
9234 dw_attr_node *a;
9235 unsigned ix, i;
9236 dw_die_ref die = sorted_abbrev_dies[first_id];
9237 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9238 if (implicit_consts[ix])
9239 {
9240 enum dw_val_class new_class = dw_val_class_none;
9241 switch (AT_class (a))
9242 {
9243 case dw_val_class_unsigned_const:
9244 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9245 continue;
9246
9247 /* The .debug_abbrev section will grow by
9248 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9249 in all the DIEs using that abbreviation. */
9250 if (constant_size (AT_unsigned (a)) * (end - first_id)
9251 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9252 continue;
9253
9254 new_class = dw_val_class_unsigned_const_implicit;
9255 break;
9256
9257 case dw_val_class_const:
9258 new_class = dw_val_class_const_implicit;
9259 break;
9260
9261 case dw_val_class_file:
9262 new_class = dw_val_class_file_implicit;
9263 break;
9264
9265 default:
9266 continue;
9267 }
9268 for (i = first_id; i < end; i++)
9269 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9270 = new_class;
9271 }
9272 }
9273
9274 /* Attempt to optimize the abbreviation table for abbreviations with id
9275 abbrev_opt_start and above. */
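/* Why the reordering helps (illustrative): abbreviation codes are emitted as
uleb128, so codes 1..127 take one byte and larger codes take two or more.
If an abbreviation used by thousands of DIEs ended up with code 300 while a
rarely used one got code 5, swapping them saves a byte per frequent DIE;
sorting sorted_abbrev_dies by usage count and reassigning codes below
achieves exactly that. */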
9276
9277 static void
9278 optimize_abbrev_table (void)
9279 {
9280 if (abbrev_opt_start
9281 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9282 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9283 {
9284 auto_vec<bool, 32> implicit_consts;
9285 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9286
9287 unsigned int abbrev_id = abbrev_opt_start - 1;
9288 unsigned int first_id = ~0U;
9289 unsigned int last_abbrev_id = 0;
9290 unsigned int i;
9291 dw_die_ref die;
9292 if (abbrev_opt_base_type_end > abbrev_opt_start)
9293 abbrev_id = abbrev_opt_base_type_end - 1;
9294 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9295 most commonly used abbreviations come first. */
9296 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9297 {
9298 dw_attr_node *a;
9299 unsigned ix;
9300
9301 /* If calc_base_type_die_sizes has been called, the CU and
9302 base types after it can't be optimized, because we've already
9303 calculated their DIE offsets. We've sorted them first. */
9304 if (die->die_abbrev < abbrev_opt_base_type_end)
9305 continue;
9306 if (die->die_abbrev != last_abbrev_id)
9307 {
9308 last_abbrev_id = die->die_abbrev;
9309 if (dwarf_version >= 5 && first_id != ~0U)
9310 optimize_implicit_const (first_id, i, implicit_consts);
9311 abbrev_id++;
9312 (*abbrev_die_table)[abbrev_id] = die;
9313 if (dwarf_version >= 5)
9314 {
9315 first_id = i;
9316 implicit_consts.truncate (0);
9317
9318 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9319 switch (AT_class (a))
9320 {
9321 case dw_val_class_const:
9322 case dw_val_class_unsigned_const:
9323 case dw_val_class_file:
9324 implicit_consts.safe_push (true);
9325 break;
9326 default:
9327 implicit_consts.safe_push (false);
9328 break;
9329 }
9330 }
9331 }
9332 else if (dwarf_version >= 5)
9333 {
9334 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9335 if (!implicit_consts[ix])
9336 continue;
9337 else
9338 {
9339 dw_attr_node *other_a
9340 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9341 if (!dw_val_equal_p (&a->dw_attr_val,
9342 &other_a->dw_attr_val))
9343 implicit_consts[ix] = false;
9344 }
9345 }
9346 die->die_abbrev = abbrev_id;
9347 }
9348 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9349 if (dwarf_version >= 5 && first_id != ~0U)
9350 optimize_implicit_const (first_id, i, implicit_consts);
9351 }
9352
9353 abbrev_opt_start = 0;
9354 abbrev_opt_base_type_end = 0;
9355 abbrev_usage_count.release ();
9356 sorted_abbrev_dies.release ();
9357 }
9358 \f
9359 /* Return the power-of-two number of bytes necessary to represent VALUE. */
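/* For example (assuming floor_log2 (0) == -1, as in GCC's hwint.h):
constant_size (0x12) == 1, constant_size (0x1234) == 2,
constant_size (0x12345678) == 4, and any value needing more than 32 bits
yields 8; zero is also reported as one byte. */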
9360
9361 static int
9362 constant_size (unsigned HOST_WIDE_INT value)
9363 {
9364 int log;
9365
9366 if (value == 0)
9367 log = 0;
9368 else
9369 log = floor_log2 (value);
9370
9371 log = log / 8;
9372 log = 1 << (floor_log2 (log) + 1);
9373
9374 return log;
9375 }
9376
9377 /* Return the size of a DIE as it is represented in the
9378 .debug_info section. */
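/* Small worked example (illustrative): a DIE with abbrev code 5 carrying
DW_AT_name as DW_FORM_strp, DW_AT_decl_file as a one-byte file number and
DW_AT_type as a local reference occupies
size_of_uleb128 (5) + dwarf_offset_size + 1 + dwarf_offset_size
= 1 + 4 + 1 + 4 = 10 bytes when dwarf_offset_size is 4; the cases below
simply accumulate the size implied by value_format for each attribute. */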
9379
9380 static unsigned long
9381 size_of_die (dw_die_ref die)
9382 {
9383 unsigned long size = 0;
9384 dw_attr_node *a;
9385 unsigned ix;
9386 enum dwarf_form form;
9387
9388 size += size_of_uleb128 (die->die_abbrev);
9389 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9390 {
9391 switch (AT_class (a))
9392 {
9393 case dw_val_class_addr:
9394 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9395 {
9396 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9397 size += size_of_uleb128 (AT_index (a));
9398 }
9399 else
9400 size += DWARF2_ADDR_SIZE;
9401 break;
9402 case dw_val_class_offset:
9403 size += dwarf_offset_size;
9404 break;
9405 case dw_val_class_loc:
9406 {
9407 unsigned long lsize = size_of_locs (AT_loc (a));
9408
9409 /* Block length. */
9410 if (dwarf_version >= 4)
9411 size += size_of_uleb128 (lsize);
9412 else
9413 size += constant_size (lsize);
9414 size += lsize;
9415 }
9416 break;
9417 case dw_val_class_loc_list:
9418 if (dwarf_split_debug_info && dwarf_version >= 5)
9419 {
9420 gcc_assert (AT_loc_list (a)->num_assigned);
9421 size += size_of_uleb128 (AT_loc_list (a)->hash);
9422 }
9423 else
9424 size += dwarf_offset_size;
9425 break;
9426 case dw_val_class_view_list:
9427 size += dwarf_offset_size;
9428 break;
9429 case dw_val_class_range_list:
9430 if (value_format (a) == DW_FORM_rnglistx)
9431 {
9432 gcc_assert (rnglist_idx);
9433 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9434 size += size_of_uleb128 (r->idx);
9435 }
9436 else
9437 size += dwarf_offset_size;
9438 break;
9439 case dw_val_class_const:
9440 size += size_of_sleb128 (AT_int (a));
9441 break;
9442 case dw_val_class_unsigned_const:
9443 {
9444 int csize = constant_size (AT_unsigned (a));
9445 if (dwarf_version == 3
9446 && a->dw_attr == DW_AT_data_member_location
9447 && csize >= 4)
9448 size += size_of_uleb128 (AT_unsigned (a));
9449 else
9450 size += csize;
9451 }
9452 break;
9453 case dw_val_class_symview:
9454 if (symview_upper_bound <= 0xff)
9455 size += 1;
9456 else if (symview_upper_bound <= 0xffff)
9457 size += 2;
9458 else if (symview_upper_bound <= 0xffffffff)
9459 size += 4;
9460 else
9461 size += 8;
9462 break;
9463 case dw_val_class_const_implicit:
9464 case dw_val_class_unsigned_const_implicit:
9465 case dw_val_class_file_implicit:
9466 /* These occupy no size in the DIE, just an extra sleb128 in
9467 .debug_abbrev. */
9468 break;
9469 case dw_val_class_const_double:
9470 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9471 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9472 size++; /* block */
9473 break;
9474 case dw_val_class_wide_int:
9475 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9476 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9477 if (get_full_len (*a->dw_attr_val.v.val_wide)
9478 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9479 size++; /* block */
9480 break;
9481 case dw_val_class_vec:
9482 size += constant_size (a->dw_attr_val.v.val_vec.length
9483 * a->dw_attr_val.v.val_vec.elt_size)
9484 + a->dw_attr_val.v.val_vec.length
9485 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9486 break;
9487 case dw_val_class_flag:
9488 if (dwarf_version >= 4)
9489 /* Currently all add_AT_flag calls pass in 1 as last argument,
9490 so DW_FORM_flag_present can be used. If that ever changes,
9491 we'll need to use DW_FORM_flag and have some optimization
9492 in build_abbrev_table that will change those to
9493 DW_FORM_flag_present if it is set to 1 in all DIEs using
9494 the same abbrev entry. */
9495 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9496 else
9497 size += 1;
9498 break;
9499 case dw_val_class_die_ref:
9500 if (AT_ref_external (a))
9501 {
9502 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9503 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9504 is sized by target address length, whereas in DWARF3
9505 it's always sized as an offset. */
9506 if (AT_ref (a)->comdat_type_p)
9507 size += DWARF_TYPE_SIGNATURE_SIZE;
9508 else if (dwarf_version == 2)
9509 size += DWARF2_ADDR_SIZE;
9510 else
9511 size += dwarf_offset_size;
9512 }
9513 else
9514 size += dwarf_offset_size;
9515 break;
9516 case dw_val_class_fde_ref:
9517 size += dwarf_offset_size;
9518 break;
9519 case dw_val_class_lbl_id:
9520 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9521 {
9522 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9523 size += size_of_uleb128 (AT_index (a));
9524 }
9525 else
9526 size += DWARF2_ADDR_SIZE;
9527 break;
9528 case dw_val_class_lineptr:
9529 case dw_val_class_macptr:
9530 case dw_val_class_loclistsptr:
9531 size += dwarf_offset_size;
9532 break;
9533 case dw_val_class_str:
9534 form = AT_string_form (a);
9535 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9536 size += dwarf_offset_size;
9537 else if (form == dwarf_FORM (DW_FORM_strx))
9538 size += size_of_uleb128 (AT_index (a));
9539 else
9540 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9541 break;
9542 case dw_val_class_file:
9543 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9544 break;
9545 case dw_val_class_data8:
9546 size += 8;
9547 break;
9548 case dw_val_class_vms_delta:
9549 size += dwarf_offset_size;
9550 break;
9551 case dw_val_class_high_pc:
9552 size += DWARF2_ADDR_SIZE;
9553 break;
9554 case dw_val_class_discr_value:
9555 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9556 break;
9557 case dw_val_class_discr_list:
9558 {
9559 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9560
9561 /* This is a block, so we have the block length and then its
9562 data. */
9563 size += constant_size (block_size) + block_size;
9564 }
9565 break;
9566 default:
9567 gcc_unreachable ();
9568 }
9569 }
9570
9571 return size;
9572 }
9573
9574 /* Size the debugging information associated with a given DIE. Visits the
9575 DIE's children recursively. Updates the global variable next_die_offset, on
9576 each time through. Uses the current value of next_die_offset to update the
9577 die_offset field in each DIE. */
9578
9579 static void
9580 calc_die_sizes (dw_die_ref die)
9581 {
9582 dw_die_ref c;
9583
9584 gcc_assert (die->die_offset == 0
9585 || (unsigned long int) die->die_offset == next_die_offset);
9586 die->die_offset = next_die_offset;
9587 next_die_offset += size_of_die (die);
9588
9589 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9590
9591 if (die->die_child != NULL)
9592 /* Count the null byte used to terminate sibling lists. */
9593 next_die_offset += 1;
9594 }
9595
9596 /* Size just the base type children at the start of the CU.
9597 This is needed because build_abbrev_table needs to size locs
9598 and sizing of type based stack ops needs to know die_offset
9599 values for the base types. */
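/* Added note: typed DWARF 5 stack operations such as DW_OP_regval_type
encode the referenced DW_TAG_base_type as a uleb128 CU-relative DIE offset,
so such location expressions cannot be sized until the base type DIEs placed
right after the CU DIE have known offsets; this routine assigns those
offsets ahead of the main calc_die_sizes pass. */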
9600
9601 static void
9602 calc_base_type_die_sizes (void)
9603 {
9604 unsigned long die_offset = (dwarf_split_debug_info
9605 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9606 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9607 unsigned int i;
9608 dw_die_ref base_type;
9609 #if ENABLE_ASSERT_CHECKING
9610 dw_die_ref prev = comp_unit_die ()->die_child;
9611 #endif
9612
9613 die_offset += size_of_die (comp_unit_die ());
9614 for (i = 0; base_types.iterate (i, &base_type); i++)
9615 {
9616 #if ENABLE_ASSERT_CHECKING
9617 gcc_assert (base_type->die_offset == 0
9618 && prev->die_sib == base_type
9619 && base_type->die_child == NULL
9620 && base_type->die_abbrev);
9621 prev = base_type;
9622 #endif
9623 if (abbrev_opt_start
9624 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9625 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9626 base_type->die_offset = die_offset;
9627 die_offset += size_of_die (base_type);
9628 }
9629 }
9630
9631 /* Set the marks for a die and its children. We do this so
9632 that we know whether or not a reference needs to use FORM_ref_addr; only
9633 DIEs in the same CU will be marked. We used to clear out the offset
9634 and use that as the flag, but ran into ordering problems. */
9635
9636 static void
9637 mark_dies (dw_die_ref die)
9638 {
9639 dw_die_ref c;
9640
9641 gcc_assert (!die->die_mark);
9642
9643 die->die_mark = 1;
9644 FOR_EACH_CHILD (die, c, mark_dies (c));
9645 }
9646
9647 /* Clear the marks for a die and its children. */
9648
9649 static void
9650 unmark_dies (dw_die_ref die)
9651 {
9652 dw_die_ref c;
9653
9654 if (! use_debug_types)
9655 gcc_assert (die->die_mark);
9656
9657 die->die_mark = 0;
9658 FOR_EACH_CHILD (die, c, unmark_dies (c));
9659 }
9660
9661 /* Clear the marks for a die, its children and referred dies. */
9662
9663 static void
9664 unmark_all_dies (dw_die_ref die)
9665 {
9666 dw_die_ref c;
9667 dw_attr_node *a;
9668 unsigned ix;
9669
9670 if (!die->die_mark)
9671 return;
9672 die->die_mark = 0;
9673
9674 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9675
9676 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9677 if (AT_class (a) == dw_val_class_die_ref)
9678 unmark_all_dies (AT_ref (a));
9679 }
9680
9681 /* Calculate if the entry should appear in the final output file. It may be
9682 from a pruned type. */
9683
9684 static bool
9685 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9686 {
9687 /* By limiting gnu pubnames to definitions only, gold can generate a
9688 gdb index without entries for declarations, which don't include
9689 enough information to be useful. */
9690 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9691 return false;
9692
9693 if (table == pubname_table)
9694 {
9695 /* Enumerator names are part of the pubname table, but the
9696 parent DW_TAG_enumeration_type die may have been pruned.
9697 Don't output them if that is the case. */
9698 if (p->die->die_tag == DW_TAG_enumerator &&
9699 (p->die->die_parent == NULL
9700 || !p->die->die_parent->die_perennial_p))
9701 return false;
9702
9703 /* Everything else in the pubname table is included. */
9704 return true;
9705 }
9706
9707 /* The pubtypes table shouldn't include types that have been
9708 pruned. */
9709 return (p->die->die_offset != 0
9710 || !flag_eliminate_unused_debug_types);
9711 }
9712
9713 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9714 generated for the compilation unit. */
9715
9716 static unsigned long
9717 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9718 {
9719 unsigned long size;
9720 unsigned i;
9721 pubname_entry *p;
9722 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9723
9724 size = DWARF_PUBNAMES_HEADER_SIZE;
9725 FOR_EACH_VEC_ELT (*names, i, p)
9726 if (include_pubname_in_output (names, p))
9727 size += strlen (p->name) + dwarf_offset_size + 1 + space_for_flags;
9728
9729 size += dwarf_offset_size;
9730 return size;
9731 }
9732
9733 /* Return the size of the information in the .debug_aranges section. */
9734
9735 static unsigned long
9736 size_of_aranges (void)
9737 {
9738 unsigned long size;
9739
9740 size = DWARF_ARANGES_HEADER_SIZE;
9741
9742 /* Count the address/length pair for this compilation unit. */
9743 if (text_section_used)
9744 size += 2 * DWARF2_ADDR_SIZE;
9745 if (cold_text_section_used)
9746 size += 2 * DWARF2_ADDR_SIZE;
9747 if (have_multiple_function_sections)
9748 {
9749 unsigned fde_idx;
9750 dw_fde_ref fde;
9751
9752 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9753 {
9754 if (DECL_IGNORED_P (fde->decl))
9755 continue;
9756 if (!fde->in_std_section)
9757 size += 2 * DWARF2_ADDR_SIZE;
9758 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9759 size += 2 * DWARF2_ADDR_SIZE;
9760 }
9761 }
9762
9763 /* Count the two zero words used to terminate the address range table. */
9764 size += 2 * DWARF2_ADDR_SIZE;
9765 return size;
9766 }
9767 \f
9768 /* Select the encoding of an attribute value. */
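/* A few illustrative mappings (added commentary): an unsigned constant of
300 comes back as DW_FORM_data2 because constant_size rounds it up to two
bytes, a flag attribute (always 1 in GCC) becomes DW_FORM_flag_present for
DWARF 4 and later, and a location expression is DW_FORM_exprloc for DWARF 4+
but one of the DW_FORM_block* forms for older versions; the choices here
must stay in sync with the size computations in size_of_die above. */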
9769
9770 static enum dwarf_form
9771 value_format (dw_attr_node *a)
9772 {
9773 switch (AT_class (a))
9774 {
9775 case dw_val_class_addr:
9776 /* Only very few attributes allow DW_FORM_addr. */
9777 switch (a->dw_attr)
9778 {
9779 case DW_AT_low_pc:
9780 case DW_AT_high_pc:
9781 case DW_AT_entry_pc:
9782 case DW_AT_trampoline:
9783 return (AT_index (a) == NOT_INDEXED
9784 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9785 default:
9786 break;
9787 }
9788 switch (DWARF2_ADDR_SIZE)
9789 {
9790 case 1:
9791 return DW_FORM_data1;
9792 case 2:
9793 return DW_FORM_data2;
9794 case 4:
9795 return DW_FORM_data4;
9796 case 8:
9797 return DW_FORM_data8;
9798 default:
9799 gcc_unreachable ();
9800 }
9801 case dw_val_class_loc_list:
9802 if (dwarf_split_debug_info
9803 && dwarf_version >= 5
9804 && AT_loc_list (a)->num_assigned)
9805 return DW_FORM_loclistx;
9806 /* FALLTHRU */
9807 case dw_val_class_view_list:
9808 case dw_val_class_range_list:
9809 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9810 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9811 care about sizes of .debug* sections in shared libraries and
9812 executables and don't take into account relocations that affect just
9813 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9814 table in the .debug_rnglists section. */
9815 if (dwarf_split_debug_info
9816 && dwarf_version >= 5
9817 && AT_class (a) == dw_val_class_range_list
9818 && rnglist_idx
9819 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9820 return DW_FORM_rnglistx;
9821 if (dwarf_version >= 4)
9822 return DW_FORM_sec_offset;
9823 /* FALLTHRU */
9824 case dw_val_class_vms_delta:
9825 case dw_val_class_offset:
9826 switch (dwarf_offset_size)
9827 {
9828 case 4:
9829 return DW_FORM_data4;
9830 case 8:
9831 return DW_FORM_data8;
9832 default:
9833 gcc_unreachable ();
9834 }
9835 case dw_val_class_loc:
9836 if (dwarf_version >= 4)
9837 return DW_FORM_exprloc;
9838 switch (constant_size (size_of_locs (AT_loc (a))))
9839 {
9840 case 1:
9841 return DW_FORM_block1;
9842 case 2:
9843 return DW_FORM_block2;
9844 case 4:
9845 return DW_FORM_block4;
9846 default:
9847 gcc_unreachable ();
9848 }
9849 case dw_val_class_const:
9850 return DW_FORM_sdata;
9851 case dw_val_class_unsigned_const:
9852 switch (constant_size (AT_unsigned (a)))
9853 {
9854 case 1:
9855 return DW_FORM_data1;
9856 case 2:
9857 return DW_FORM_data2;
9858 case 4:
9859 /* In DWARF3 DW_AT_data_member_location with
9860 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9861 constant, so we need to use DW_FORM_udata if we need
9862 a large constant. */
9863 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9864 return DW_FORM_udata;
9865 return DW_FORM_data4;
9866 case 8:
9867 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9868 return DW_FORM_udata;
9869 return DW_FORM_data8;
9870 default:
9871 gcc_unreachable ();
9872 }
9873 case dw_val_class_const_implicit:
9874 case dw_val_class_unsigned_const_implicit:
9875 case dw_val_class_file_implicit:
9876 return DW_FORM_implicit_const;
9877 case dw_val_class_const_double:
9878 switch (HOST_BITS_PER_WIDE_INT)
9879 {
9880 case 8:
9881 return DW_FORM_data2;
9882 case 16:
9883 return DW_FORM_data4;
9884 case 32:
9885 return DW_FORM_data8;
9886 case 64:
9887 if (dwarf_version >= 5)
9888 return DW_FORM_data16;
9889 /* FALLTHRU */
9890 default:
9891 return DW_FORM_block1;
9892 }
9893 case dw_val_class_wide_int:
9894 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9895 {
9896 case 8:
9897 return DW_FORM_data1;
9898 case 16:
9899 return DW_FORM_data2;
9900 case 32:
9901 return DW_FORM_data4;
9902 case 64:
9903 return DW_FORM_data8;
9904 case 128:
9905 if (dwarf_version >= 5)
9906 return DW_FORM_data16;
9907 /* FALLTHRU */
9908 default:
9909 return DW_FORM_block1;
9910 }
9911 case dw_val_class_symview:
9912 /* ??? We might use uleb128, but then we'd have to compute
9913 .debug_info offsets in the assembler. */
9914 if (symview_upper_bound <= 0xff)
9915 return DW_FORM_data1;
9916 else if (symview_upper_bound <= 0xffff)
9917 return DW_FORM_data2;
9918 else if (symview_upper_bound <= 0xffffffff)
9919 return DW_FORM_data4;
9920 else
9921 return DW_FORM_data8;
9922 case dw_val_class_vec:
9923 switch (constant_size (a->dw_attr_val.v.val_vec.length
9924 * a->dw_attr_val.v.val_vec.elt_size))
9925 {
9926 case 1:
9927 return DW_FORM_block1;
9928 case 2:
9929 return DW_FORM_block2;
9930 case 4:
9931 return DW_FORM_block4;
9932 default:
9933 gcc_unreachable ();
9934 }
9935 case dw_val_class_flag:
9936 if (dwarf_version >= 4)
9937 {
9938 /* Currently all add_AT_flag calls pass in 1 as last argument,
9939 so DW_FORM_flag_present can be used. If that ever changes,
9940 we'll need to use DW_FORM_flag and have some optimization
9941 in build_abbrev_table that will change those to
9942 DW_FORM_flag_present if it is set to 1 in all DIEs using
9943 the same abbrev entry. */
9944 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9945 return DW_FORM_flag_present;
9946 }
9947 return DW_FORM_flag;
9948 case dw_val_class_die_ref:
9949 if (AT_ref_external (a))
9950 {
9951 if (AT_ref (a)->comdat_type_p)
9952 return DW_FORM_ref_sig8;
9953 else
9954 return DW_FORM_ref_addr;
9955 }
9956 else
9957 return DW_FORM_ref;
9958 case dw_val_class_fde_ref:
9959 return DW_FORM_data;
9960 case dw_val_class_lbl_id:
9961 return (AT_index (a) == NOT_INDEXED
9962 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9963 case dw_val_class_lineptr:
9964 case dw_val_class_macptr:
9965 case dw_val_class_loclistsptr:
9966 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9967 case dw_val_class_str:
9968 return AT_string_form (a);
9969 case dw_val_class_file:
9970 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9971 {
9972 case 1:
9973 return DW_FORM_data1;
9974 case 2:
9975 return DW_FORM_data2;
9976 case 4:
9977 return DW_FORM_data4;
9978 default:
9979 gcc_unreachable ();
9980 }
9981
9982 case dw_val_class_data8:
9983 return DW_FORM_data8;
9984
9985 case dw_val_class_high_pc:
9986 switch (DWARF2_ADDR_SIZE)
9987 {
9988 case 1:
9989 return DW_FORM_data1;
9990 case 2:
9991 return DW_FORM_data2;
9992 case 4:
9993 return DW_FORM_data4;
9994 case 8:
9995 return DW_FORM_data8;
9996 default:
9997 gcc_unreachable ();
9998 }
9999
10000 case dw_val_class_discr_value:
10001 return (a->dw_attr_val.v.val_discr_value.pos
10002 ? DW_FORM_udata
10003 : DW_FORM_sdata);
10004 case dw_val_class_discr_list:
10005 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
10006 {
10007 case 1:
10008 return DW_FORM_block1;
10009 case 2:
10010 return DW_FORM_block2;
10011 case 4:
10012 return DW_FORM_block4;
10013 default:
10014 gcc_unreachable ();
10015 }
10016
10017 default:
10018 gcc_unreachable ();
10019 }
10020 }
10021
10022 /* Output the encoding of an attribute value. */
10023
10024 static void
10025 output_value_format (dw_attr_node *a)
10026 {
10027 enum dwarf_form form = value_format (a);
10028
10029 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
10030 }
10031
10032 /* Given a die and id, produce the appropriate abbreviations. */
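/* For orientation (illustrative sketch; exact labels and the comment
character depend on the target and on .uleb128 support), a variable
abbreviation typically comes out as:

.uleb128 0x3   # (abbrev code)
.uleb128 0x34  # (TAG: DW_TAG_variable)
.byte 0        # DW_children_no
.uleb128 0x3   # (DW_AT_name)
.uleb128 0xe   # (DW_FORM_strp)
.uleb128 0x49  # (DW_AT_type)
.uleb128 0x13  # (DW_FORM_ref4)
.byte 0
.byte 0

The two trailing zero bytes are the attribute/form terminator emitted at the
end of this function. */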
10033
10034 static void
10035 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
10036 {
10037 unsigned ix;
10038 dw_attr_node *a_attr;
10039
10040 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
10041 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
10042 dwarf_tag_name (abbrev->die_tag));
10043
10044 if (abbrev->die_child != NULL)
10045 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10046 else
10047 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10048
10049 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10050 {
10051 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10052 dwarf_attr_name (a_attr->dw_attr));
10053 output_value_format (a_attr);
10054 if (value_format (a_attr) == DW_FORM_implicit_const)
10055 {
10056 if (AT_class (a_attr) == dw_val_class_file_implicit)
10057 {
10058 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10059 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10060 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10061 }
10062 else
10063 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10064 }
10065 }
10066
10067 dw2_asm_output_data (1, 0, NULL);
10068 dw2_asm_output_data (1, 0, NULL);
10069 }
10070
10071
10072 /* Output the .debug_abbrev section which defines the DIE abbreviation
10073 table. */
10074
10075 static void
10076 output_abbrev_section (void)
10077 {
10078 unsigned int abbrev_id;
10079 dw_die_ref abbrev;
10080
10081 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10082 if (abbrev_id != 0)
10083 output_die_abbrevs (abbrev_id, abbrev);
10084
10085 /* Terminate the table. */
10086 dw2_asm_output_data (1, 0, NULL);
10087 }
10088
10089 /* Return a new location list, given the begin and end range, and the
10090 expression. */
10091
10092 static inline dw_loc_list_ref
10093 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10094 const char *end, var_loc_view vend,
10095 const char *section)
10096 {
10097 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10098
10099 retlist->begin = begin;
10100 retlist->begin_entry = NULL;
10101 retlist->end = end;
10102 retlist->expr = expr;
10103 retlist->section = section;
10104 retlist->vbegin = vbegin;
10105 retlist->vend = vend;
10106
10107 return retlist;
10108 }
10109
10110 /* Return true iff there's any nonzero view number in the loc list.
10111
10112 ??? When views are not enabled, we'll often extend a single range
10113 to the entire function, so that we emit a single location
10114 expression rather than a location list. With views, even with a
10115 single range, we'll output a list if start or end have a nonzero
10116 view. If we change this, we may want to stop splitting a single
10117 range in dw_loc_list just because of a nonzero view, even if it
10118 straddles across hot/cold partitions. */
10119
10120 static bool
10121 loc_list_has_views (dw_loc_list_ref list)
10122 {
10123 if (!debug_variable_location_views)
10124 return false;
10125
10126 for (dw_loc_list_ref loc = list;
10127 loc != NULL; loc = loc->dw_loc_next)
10128 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10129 return true;
10130
10131 return false;
10132 }
10133
10134 /* Generate a new internal symbol for this location list node, if it
10135 hasn't got one yet. */
10136
10137 static inline void
10138 gen_llsym (dw_loc_list_ref list)
10139 {
10140 gcc_assert (!list->ll_symbol);
10141 list->ll_symbol = gen_internal_sym ("LLST");
10142
10143 if (!loc_list_has_views (list))
10144 return;
10145
10146 if (dwarf2out_locviews_in_attribute ())
10147 {
10148 /* Use the same label_num for the view list. */
10149 label_num--;
10150 list->vl_symbol = gen_internal_sym ("LVUS");
10151 }
10152 else
10153 list->vl_symbol = list->ll_symbol;
10154 }
10155
10156 /* Generate a symbol for the list, but only if we really want to emit
10157 it as a list. */
10158
10159 static inline void
10160 maybe_gen_llsym (dw_loc_list_ref list)
10161 {
10162 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10163 return;
10164
10165 gen_llsym (list);
10166 }
10167
10168 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10169 NULL, don't consider size of the location expression. If we're not
10170 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10171 representation in *SIZEP. */
10172
10173 static bool
10174 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10175 {
10176 /* Don't output an entry that starts and ends at the same address. */
10177 if (strcmp (curr->begin, curr->end) == 0
10178 && curr->vbegin == curr->vend && !curr->force)
10179 return true;
10180
10181 if (!sizep)
10182 return false;
10183
10184 unsigned long size = size_of_locs (curr->expr);
10185
10186 /* If the expression is too large, drop it on the floor. We could
10187 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10188 in the expression, but >= 64KB expressions for a single value
10189 in a single range are unlikely to be very useful. */
10190 if (dwarf_version < 5 && size > 0xffff)
10191 return true;
10192
10193 *sizep = size;
10194
10195 return false;
10196 }
10197
10198 /* Output a view pair loclist entry for CURR, if it requires one. */
10199
10200 static void
10201 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10202 {
10203 if (!dwarf2out_locviews_in_loclist ())
10204 return;
10205
10206 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10207 return;
10208
10209 #ifdef DW_LLE_view_pair
10210 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10211
10212 if (dwarf2out_as_locview_support)
10213 {
10214 if (ZERO_VIEW_P (curr->vbegin))
10215 dw2_asm_output_data_uleb128 (0, "Location view begin");
10216 else
10217 {
10218 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10219 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10220 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10221 }
10222
10223 if (ZERO_VIEW_P (curr->vend))
10224 dw2_asm_output_data_uleb128 (0, "Location view end");
10225 else
10226 {
10227 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10228 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10229 dw2_asm_output_symname_uleb128 (label, "Location view end");
10230 }
10231 }
10232 else
10233 {
10234 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10235 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10236 }
10237 #endif /* DW_LLE_view_pair */
10238
10239 return;
10240 }
10241
10242 /* Output the location list given to us. */
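/* Illustrative sketch (assumed label names; DWARF 5, single .text section,
assembler with .uleb128 support, no location views): one list typically
renders as

.LLST0:
.byte 0x4                # DW_LLE_offset_pair
.uleb128 .LVL1-.Ltext0   # Location list begin address
.uleb128 .LVL2-.Ltext0   # Location list end address
.uleb128 0x1             # Location expression size
.byte 0x51               # DW_OP_reg1
.byte 0                  # DW_LLE_end_of_list

The other branches below differ only in which DW_LLE_* entry kinds are used
for split DWARF, multiple code sections, or assemblers without leb128
support. */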
10243
10244 static void
10245 output_loc_list (dw_loc_list_ref list_head)
10246 {
10247 int vcount = 0, lcount = 0;
10248
10249 if (list_head->emitted)
10250 return;
10251 list_head->emitted = true;
10252
10253 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10254 {
10255 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10256
10257 for (dw_loc_list_ref curr = list_head; curr != NULL;
10258 curr = curr->dw_loc_next)
10259 {
10260 unsigned long size;
10261
10262 if (skip_loc_list_entry (curr, &size))
10263 continue;
10264
10265 vcount++;
10266
10267 /* ?? dwarf_split_debug_info? */
10268 if (dwarf2out_as_locview_support)
10269 {
10270 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10271
10272 if (!ZERO_VIEW_P (curr->vbegin))
10273 {
10274 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10275 dw2_asm_output_symname_uleb128 (label,
10276 "View list begin (%s)",
10277 list_head->vl_symbol);
10278 }
10279 else
10280 dw2_asm_output_data_uleb128 (0,
10281 "View list begin (%s)",
10282 list_head->vl_symbol);
10283
10284 if (!ZERO_VIEW_P (curr->vend))
10285 {
10286 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10287 dw2_asm_output_symname_uleb128 (label,
10288 "View list end (%s)",
10289 list_head->vl_symbol);
10290 }
10291 else
10292 dw2_asm_output_data_uleb128 (0,
10293 "View list end (%s)",
10294 list_head->vl_symbol);
10295 }
10296 else
10297 {
10298 dw2_asm_output_data_uleb128 (curr->vbegin,
10299 "View list begin (%s)",
10300 list_head->vl_symbol);
10301 dw2_asm_output_data_uleb128 (curr->vend,
10302 "View list end (%s)",
10303 list_head->vl_symbol);
10304 }
10305 }
10306 }
10307
10308 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10309
10310 const char *last_section = NULL;
10311 const char *base_label = NULL;
10312
10313 /* Walk the location list, and output each range + expression. */
10314 for (dw_loc_list_ref curr = list_head; curr != NULL;
10315 curr = curr->dw_loc_next)
10316 {
10317 unsigned long size;
10318
10319 /* Skip this entry? If we skip it here, we must skip it in the
10320 view list above as well. */
10321 if (skip_loc_list_entry (curr, &size))
10322 continue;
10323
10324 lcount++;
10325
10326 if (dwarf_version >= 5)
10327 {
10328 if (dwarf_split_debug_info)
10329 {
10330 dwarf2out_maybe_output_loclist_view_pair (curr);
10331 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10332 uleb128 index into .debug_addr and uleb128 length. */
10333 dw2_asm_output_data (1, DW_LLE_startx_length,
10334 "DW_LLE_startx_length (%s)",
10335 list_head->ll_symbol);
10336 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10337 "Location list range start index "
10338 "(%s)", curr->begin);
10339 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10340 For that case we probably need to emit DW_LLE_startx_endx,
10341 but we'd need 2 .debug_addr entries rather than just one. */
10342 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10343 "Location list length (%s)",
10344 list_head->ll_symbol);
10345 }
10346 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10347 {
10348 dwarf2out_maybe_output_loclist_view_pair (curr);
10349 /* If all code is in .text section, the base address is
10350 already provided by the CU attributes. Use
10351 DW_LLE_offset_pair where both addresses are uleb128 encoded
10352 offsets against that base. */
10353 dw2_asm_output_data (1, DW_LLE_offset_pair,
10354 "DW_LLE_offset_pair (%s)",
10355 list_head->ll_symbol);
10356 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10357 "Location list begin address (%s)",
10358 list_head->ll_symbol);
10359 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10360 "Location list end address (%s)",
10361 list_head->ll_symbol);
10362 }
10363 else if (HAVE_AS_LEB128)
10364 {
10365 /* Otherwise, find out how many consecutive entries could share
10366 the same base entry. If just one, emit DW_LLE_start_length,
10367 otherwise emit DW_LLE_base_address for the base address
10368 followed by a series of DW_LLE_offset_pair. */
10369 if (last_section == NULL || curr->section != last_section)
10370 {
10371 dw_loc_list_ref curr2;
10372 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10373 curr2 = curr2->dw_loc_next)
10374 {
10375 if (strcmp (curr2->begin, curr2->end) == 0
10376 && !curr2->force)
10377 continue;
10378 break;
10379 }
10380 if (curr2 == NULL || curr->section != curr2->section)
10381 last_section = NULL;
10382 else
10383 {
10384 last_section = curr->section;
10385 base_label = curr->begin;
10386 dw2_asm_output_data (1, DW_LLE_base_address,
10387 "DW_LLE_base_address (%s)",
10388 list_head->ll_symbol);
10389 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10390 "Base address (%s)",
10391 list_head->ll_symbol);
10392 }
10393 }
10394 /* Only one entry with the same base address. Use
10395 DW_LLE_start_length with absolute address and uleb128
10396 length. */
10397 if (last_section == NULL)
10398 {
10399 dwarf2out_maybe_output_loclist_view_pair (curr);
10400 dw2_asm_output_data (1, DW_LLE_start_length,
10401 "DW_LLE_start_length (%s)",
10402 list_head->ll_symbol);
10403 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10404 "Location list begin address (%s)",
10405 list_head->ll_symbol);
10406 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10407 "Location list length "
10408 "(%s)", list_head->ll_symbol);
10409 }
10410 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10411 DW_LLE_base_address. */
10412 else
10413 {
10414 dwarf2out_maybe_output_loclist_view_pair (curr);
10415 dw2_asm_output_data (1, DW_LLE_offset_pair,
10416 "DW_LLE_offset_pair (%s)",
10417 list_head->ll_symbol);
10418 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10419 "Location list begin address "
10420 "(%s)", list_head->ll_symbol);
10421 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10422 "Location list end address "
10423 "(%s)", list_head->ll_symbol);
10424 }
10425 }
10426 /* The assembler does not support the .uleb128 directive. Emit
10427 DW_LLE_start_end with a pair of absolute addresses. */
10428 else
10429 {
10430 dwarf2out_maybe_output_loclist_view_pair (curr);
10431 dw2_asm_output_data (1, DW_LLE_start_end,
10432 "DW_LLE_start_end (%s)",
10433 list_head->ll_symbol);
10434 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10435 "Location list begin address (%s)",
10436 list_head->ll_symbol);
10437 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10438 "Location list end address (%s)",
10439 list_head->ll_symbol);
10440 }
10441 }
10442 else if (dwarf_split_debug_info)
10443 {
10444 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10445 and 4 byte length. */
10446 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10447 "Location list start/length entry (%s)",
10448 list_head->ll_symbol);
10449 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10450 "Location list range start index (%s)",
10451 curr->begin);
10452 /* The length field is 4 bytes. If we ever need to support
10453 an 8-byte length, we can add a new DW_LLE code or fall back
10454 to DW_LLE_GNU_start_end_entry. */
10455 dw2_asm_output_delta (4, curr->end, curr->begin,
10456 "Location list range length (%s)",
10457 list_head->ll_symbol);
10458 }
10459 else if (!have_multiple_function_sections)
10460 {
10461 /* Pair of relative addresses against start of text section. */
10462 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10463 "Location list begin address (%s)",
10464 list_head->ll_symbol);
10465 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10466 "Location list end address (%s)",
10467 list_head->ll_symbol);
10468 }
10469 else
10470 {
10471 /* Pair of absolute addresses. */
10472 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10473 "Location list begin address (%s)",
10474 list_head->ll_symbol);
10475 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10476 "Location list end address (%s)",
10477 list_head->ll_symbol);
10478 }
10479
10480 /* Output the block length for this list of location operations. */
10481 if (dwarf_version >= 5)
10482 dw2_asm_output_data_uleb128 (size, "Location expression size");
10483 else
10484 {
10485 gcc_assert (size <= 0xffff);
10486 dw2_asm_output_data (2, size, "Location expression size");
10487 }
10488
10489 output_loc_sequence (curr->expr, -1);
10490 }
10491
10492 /* And finally list termination. */
10493 if (dwarf_version >= 5)
10494 dw2_asm_output_data (1, DW_LLE_end_of_list,
10495 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10496 else if (dwarf_split_debug_info)
10497 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10498 "Location list terminator (%s)",
10499 list_head->ll_symbol);
10500 else
10501 {
10502 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10503 "Location list terminator begin (%s)",
10504 list_head->ll_symbol);
10505 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10506 "Location list terminator end (%s)",
10507 list_head->ll_symbol);
10508 }
10509
10510 gcc_assert (!list_head->vl_symbol
10511 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10512 }
10513
10514 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10515 section. Emit a relocated reference if val_entry is NULL, otherwise,
10516 emit an indirect reference. */
10517
10518 static void
10519 output_range_list_offset (dw_attr_node *a)
10520 {
10521 const char *name = dwarf_attr_name (a->dw_attr);
10522
10523 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10524 {
10525 if (dwarf_version >= 5)
10526 {
10527 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10528 dw2_asm_output_offset (dwarf_offset_size, r->label,
10529 debug_ranges_section, "%s", name);
10530 }
10531 else
10532 {
10533 char *p = strchr (ranges_section_label, '\0');
10534 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10535 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10536 dw2_asm_output_offset (dwarf_offset_size, ranges_section_label,
10537 debug_ranges_section, "%s", name);
10538 *p = '\0';
10539 }
10540 }
10541 else if (dwarf_version >= 5)
10542 {
10543 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10544 gcc_assert (rnglist_idx);
10545 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10546 }
10547 else
10548 dw2_asm_output_data (dwarf_offset_size,
10549 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10550 "%s (offset from %s)", name, ranges_section_label);
10551 }
10552
10553 /* Output the offset into the debug_loc section. */
10554
10555 static void
10556 output_loc_list_offset (dw_attr_node *a)
10557 {
10558 char *sym = AT_loc_list (a)->ll_symbol;
10559
10560 gcc_assert (sym);
10561 if (!dwarf_split_debug_info)
10562 dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section,
10563 "%s", dwarf_attr_name (a->dw_attr));
10564 else if (dwarf_version >= 5)
10565 {
10566 gcc_assert (AT_loc_list (a)->num_assigned);
10567 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10568 dwarf_attr_name (a->dw_attr),
10569 sym);
10570 }
10571 else
10572 dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label,
10573 "%s", dwarf_attr_name (a->dw_attr));
10574 }
10575
10576 /* Output the view list offset into the debug_loc section. */
10577
10578 static void
10579 output_view_list_offset (dw_attr_node *a)
10580 {
10581 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10582
10583 gcc_assert (sym);
10584 if (dwarf_split_debug_info)
10585 dw2_asm_output_delta (dwarf_offset_size, sym, loc_section_label,
10586 "%s", dwarf_attr_name (a->dw_attr));
10587 else
10588 dw2_asm_output_offset (dwarf_offset_size, sym, debug_loc_section,
10589 "%s", dwarf_attr_name (a->dw_attr));
10590 }
10591
10592 /* Output an attribute's index or value appropriately. */
10593
10594 static void
10595 output_attr_index_or_value (dw_attr_node *a)
10596 {
10597 const char *name = dwarf_attr_name (a->dw_attr);
10598
10599 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10600 {
10601 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10602 return;
10603 }
10604 switch (AT_class (a))
10605 {
10606 case dw_val_class_addr:
10607 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10608 break;
10609 case dw_val_class_high_pc:
10610 case dw_val_class_lbl_id:
10611 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10612 break;
10613 default:
10614 gcc_unreachable ();
10615 }
10616 }
10617
10618 /* Output a type signature. */
10619
10620 static inline void
10621 output_signature (const char *sig, const char *name)
10622 {
10623 int i;
10624
10625 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10626 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10627 }
10628
10629 /* Output a discriminant value. */
10630
10631 static inline void
10632 output_discr_value (dw_discr_value *discr_value, const char *name)
10633 {
10634 if (discr_value->pos)
10635 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10636 else
10637 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10638 }
10639
10640 /* Output the DIE and its attributes. Called recursively to generate
10641 the definitions of each child DIE. */
10642
10643 static void
10644 output_die (dw_die_ref die)
10645 {
10646 dw_attr_node *a;
10647 dw_die_ref c;
10648 unsigned long size;
10649 unsigned ix;
10650
10651 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10652 (unsigned long)die->die_offset,
10653 dwarf_tag_name (die->die_tag));
10654
10655 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10656 {
10657 const char *name = dwarf_attr_name (a->dw_attr);
10658
10659 switch (AT_class (a))
10660 {
10661 case dw_val_class_addr:
10662 output_attr_index_or_value (a);
10663 break;
10664
10665 case dw_val_class_offset:
10666 dw2_asm_output_data (dwarf_offset_size, a->dw_attr_val.v.val_offset,
10667 "%s", name);
10668 break;
10669
10670 case dw_val_class_range_list:
10671 output_range_list_offset (a);
10672 break;
10673
10674 case dw_val_class_loc:
10675 size = size_of_locs (AT_loc (a));
10676
10677 /* Output the block length for this list of location operations. */
10678 if (dwarf_version >= 4)
10679 dw2_asm_output_data_uleb128 (size, "%s", name);
10680 else
10681 dw2_asm_output_data (constant_size (size), size, "%s", name);
10682
10683 output_loc_sequence (AT_loc (a), -1);
10684 break;
10685
10686 case dw_val_class_const:
10687 /* ??? It would be slightly more efficient to use a scheme like the one
10688 used for unsigned constants below, but gdb 4.x does not sign
10689 extend. Gdb 5.x does sign extend. */
10690 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10691 break;
10692
10693 case dw_val_class_unsigned_const:
10694 {
10695 int csize = constant_size (AT_unsigned (a));
10696 if (dwarf_version == 3
10697 && a->dw_attr == DW_AT_data_member_location
10698 && csize >= 4)
10699 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10700 else
10701 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10702 }
10703 break;
10704
10705 case dw_val_class_symview:
10706 {
10707 int vsize;
10708 if (symview_upper_bound <= 0xff)
10709 vsize = 1;
10710 else if (symview_upper_bound <= 0xffff)
10711 vsize = 2;
10712 else if (symview_upper_bound <= 0xffffffff)
10713 vsize = 4;
10714 else
10715 vsize = 8;
10716 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10717 "%s", name);
10718 }
10719 break;
10720
10721 case dw_val_class_const_implicit:
10722 if (flag_debug_asm)
10723 fprintf (asm_out_file, "\t\t\t%s %s ("
10724 HOST_WIDE_INT_PRINT_DEC ")\n",
10725 ASM_COMMENT_START, name, AT_int (a));
10726 break;
10727
10728 case dw_val_class_unsigned_const_implicit:
10729 if (flag_debug_asm)
10730 fprintf (asm_out_file, "\t\t\t%s %s ("
10731 HOST_WIDE_INT_PRINT_HEX ")\n",
10732 ASM_COMMENT_START, name, AT_unsigned (a));
10733 break;
10734
10735 case dw_val_class_const_double:
10736 {
10737 unsigned HOST_WIDE_INT first, second;
10738
10739 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10740 dw2_asm_output_data (1,
10741 HOST_BITS_PER_DOUBLE_INT
10742 / HOST_BITS_PER_CHAR,
10743 NULL);
10744
10745 if (WORDS_BIG_ENDIAN)
10746 {
10747 first = a->dw_attr_val.v.val_double.high;
10748 second = a->dw_attr_val.v.val_double.low;
10749 }
10750 else
10751 {
10752 first = a->dw_attr_val.v.val_double.low;
10753 second = a->dw_attr_val.v.val_double.high;
10754 }
10755
10756 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10757 first, "%s", name);
10758 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10759 second, NULL);
10760 }
10761 break;
10762
10763 case dw_val_class_wide_int:
10764 {
10765 int i;
10766 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10767 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10768 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10769 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10770 * l, NULL);
10771
10772 if (WORDS_BIG_ENDIAN)
10773 for (i = len - 1; i >= 0; --i)
10774 {
10775 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10776 "%s", name);
10777 name = "";
10778 }
10779 else
10780 for (i = 0; i < len; ++i)
10781 {
10782 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10783 "%s", name);
10784 name = "";
10785 }
10786 }
10787 break;
10788
10789 case dw_val_class_vec:
10790 {
10791 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10792 unsigned int len = a->dw_attr_val.v.val_vec.length;
10793 unsigned int i;
10794 unsigned char *p;
10795
10796 dw2_asm_output_data (constant_size (len * elt_size),
10797 len * elt_size, "%s", name);
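	  /* Elements wider than a host wide int are split into two
	     half-sized words each, because extract_int below reads at
	     most sizeof (HOST_WIDE_INT) bytes at a time.  */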
10798 if (elt_size > sizeof (HOST_WIDE_INT))
10799 {
10800 elt_size /= 2;
10801 len *= 2;
10802 }
10803 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10804 i < len;
10805 i++, p += elt_size)
10806 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10807 "fp or vector constant word %u", i);
10808 break;
10809 }
10810
10811 case dw_val_class_flag:
10812 if (dwarf_version >= 4)
10813 {
10814 /* Currently all add_AT_flag calls pass in 1 as last argument,
10815 so DW_FORM_flag_present can be used. If that ever changes,
10816 we'll need to use DW_FORM_flag and have some optimization
10817 in build_abbrev_table that will change those to
10818 DW_FORM_flag_present if it is set to 1 in all DIEs using
10819 the same abbrev entry. */
10820 gcc_assert (AT_flag (a) == 1);
10821 if (flag_debug_asm)
10822 fprintf (asm_out_file, "\t\t\t%s %s\n",
10823 ASM_COMMENT_START, name);
10824 break;
10825 }
10826 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10827 break;
10828
10829 case dw_val_class_loc_list:
10830 output_loc_list_offset (a);
10831 break;
10832
10833 case dw_val_class_view_list:
10834 output_view_list_offset (a);
10835 break;
10836
10837 case dw_val_class_die_ref:
10838 if (AT_ref_external (a))
10839 {
10840 if (AT_ref (a)->comdat_type_p)
10841 {
10842 comdat_type_node *type_node
10843 = AT_ref (a)->die_id.die_type_node;
10844
10845 gcc_assert (type_node);
10846 output_signature (type_node->signature, name);
10847 }
10848 else
10849 {
10850 const char *sym = AT_ref (a)->die_id.die_symbol;
10851 int size;
10852
10853 gcc_assert (sym);
10854 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10855 length, whereas in DWARF3 it's always sized as an
10856 offset. */
10857 if (dwarf_version == 2)
10858 size = DWARF2_ADDR_SIZE;
10859 else
10860 size = dwarf_offset_size;
10861 		  /* ??? We cannot unconditionally output die_offset whenever
10862 		     it is non-zero - others might create references to those
10863 		     DIEs via symbols.
10864 		     And we do not clear the DIE offset after outputting it
10865 		     (and the label refers to the actual DIE, not to the DWARF
10866 		     CU unit header, which is what label + offset would have to
10867 		     be relative to for that to be the correct thing to do).
10868 		     ??? This is the reason for the with_offset flag.  */
10869 if (AT_ref (a)->with_offset)
10870 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10871 debug_info_section, "%s", name);
10872 else
10873 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10874 name);
10875 }
10876 }
10877 else
10878 {
10879 gcc_assert (AT_ref (a)->die_offset);
10880 dw2_asm_output_data (dwarf_offset_size, AT_ref (a)->die_offset,
10881 "%s", name);
10882 }
10883 break;
10884
10885 case dw_val_class_fde_ref:
10886 {
10887 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10888
10889 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10890 a->dw_attr_val.v.val_fde_index * 2);
10891 dw2_asm_output_offset (dwarf_offset_size, l1, debug_frame_section,
10892 "%s", name);
10893 }
10894 break;
10895
10896 case dw_val_class_vms_delta:
10897 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10898 dw2_asm_output_vms_delta (dwarf_offset_size,
10899 AT_vms_delta2 (a), AT_vms_delta1 (a),
10900 "%s", name);
10901 #else
10902 dw2_asm_output_delta (dwarf_offset_size,
10903 AT_vms_delta2 (a), AT_vms_delta1 (a),
10904 "%s", name);
10905 #endif
10906 break;
10907
10908 case dw_val_class_lbl_id:
10909 output_attr_index_or_value (a);
10910 break;
10911
10912 case dw_val_class_lineptr:
10913 dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
10914 debug_line_section, "%s", name);
10915 break;
10916
10917 case dw_val_class_macptr:
10918 dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
10919 debug_macinfo_section, "%s", name);
10920 break;
10921
10922 case dw_val_class_loclistsptr:
10923 dw2_asm_output_offset (dwarf_offset_size, AT_lbl (a),
10924 debug_loc_section, "%s", name);
10925 break;
10926
10927 case dw_val_class_str:
10928 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10929 dw2_asm_output_offset (dwarf_offset_size,
10930 a->dw_attr_val.v.val_str->label,
10931 debug_str_section,
10932 "%s: \"%s\"", name, AT_string (a));
10933 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10934 dw2_asm_output_offset (dwarf_offset_size,
10935 a->dw_attr_val.v.val_str->label,
10936 debug_line_str_section,
10937 "%s: \"%s\"", name, AT_string (a));
10938 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10939 dw2_asm_output_data_uleb128 (AT_index (a),
10940 "%s: \"%s\"", name, AT_string (a));
10941 else
10942 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10943 break;
10944
10945 case dw_val_class_file:
10946 {
10947 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10948
10949 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10950 a->dw_attr_val.v.val_file->filename);
10951 break;
10952 }
10953
10954 case dw_val_class_file_implicit:
10955 if (flag_debug_asm)
10956 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10957 ASM_COMMENT_START, name,
10958 maybe_emit_file (a->dw_attr_val.v.val_file),
10959 a->dw_attr_val.v.val_file->filename);
10960 break;
10961
10962 case dw_val_class_data8:
10963 {
10964 int i;
10965
10966 for (i = 0; i < 8; i++)
10967 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10968 i == 0 ? "%s" : NULL, name);
10969 break;
10970 }
10971
10972 case dw_val_class_high_pc:
10973 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10974 get_AT_low_pc (die), "DW_AT_high_pc");
10975 break;
10976
10977 case dw_val_class_discr_value:
10978 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10979 break;
10980
10981 case dw_val_class_discr_list:
10982 {
10983 dw_discr_list_ref list = AT_discr_list (a);
10984 const int size = size_of_discr_list (list);
10985
10986 /* This is a block, so output its length first. */
10987 dw2_asm_output_data (constant_size (size), size,
10988 "%s: block size", name);
10989
10990 for (; list != NULL; list = list->dw_discr_next)
10991 {
10992 /* One byte for the discriminant value descriptor, and then as
10993 many LEB128 numbers as required. */
10994 if (list->dw_discr_range)
10995 dw2_asm_output_data (1, DW_DSC_range,
10996 "%s: DW_DSC_range", name);
10997 else
10998 dw2_asm_output_data (1, DW_DSC_label,
10999 "%s: DW_DSC_label", name);
11000
11001 output_discr_value (&list->dw_discr_lower_bound, name);
11002 if (list->dw_discr_range)
11003 output_discr_value (&list->dw_discr_upper_bound, name);
11004 }
11005 break;
11006 }
11007
11008 default:
11009 gcc_unreachable ();
11010 }
11011 }
11012
11013 FOR_EACH_CHILD (die, c, output_die (c));
11014
11015 /* Add null byte to terminate sibling list. */
11016 if (die->die_child != NULL)
11017 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
11018 (unsigned long) die->die_offset);
11019 }
11020
11021 /* Output the dwarf version number. */
11022
11023 static void
11024 output_dwarf_version ()
11025 {
11026 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
11027 views in loclist. That will change eventually. */
11028 if (dwarf_version == 6)
11029 {
11030 static bool once;
11031 if (!once)
11032 {
11033 warning (0, "%<-gdwarf-6%> is output as version 5 with "
11034 "incompatibilities");
11035 once = true;
11036 }
11037 dw2_asm_output_data (2, 5, "DWARF version number");
11038 }
11039 else
11040 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
11041 }
11042
11043 /* Output the compilation unit that appears at the beginning of the
11044 .debug_info section, and precedes the DIE descriptions. */
11045
11046 static void
11047 output_compilation_unit_header (enum dwarf_unit_type ut)
11048 {
11049 if (!XCOFF_DEBUGGING_INFO)
11050 {
11051 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11052 dw2_asm_output_data (4, 0xffffffff,
11053 "Initial length escape value indicating 64-bit DWARF extension");
11054 dw2_asm_output_data (dwarf_offset_size,
11055 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11056 "Length of Compilation Unit Info");
11057 }
11058
11059 output_dwarf_version ();
11060 if (dwarf_version >= 5)
11061 {
11062 const char *name;
11063 switch (ut)
11064 {
11065 case DW_UT_compile: name = "DW_UT_compile"; break;
11066 case DW_UT_type: name = "DW_UT_type"; break;
11067 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11068 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11069 default: gcc_unreachable ();
11070 }
11071 dw2_asm_output_data (1, ut, "%s", name);
11072 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11073 }
11074 dw2_asm_output_offset (dwarf_offset_size, abbrev_section_label,
11075 debug_abbrev_section,
11076 "Offset Into Abbrev. Section");
11077 if (dwarf_version < 5)
11078 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11079 }
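/* For reference, with 32-bit DWARF the header emitted above is laid out as
     DWARF 2-4: unit_length (4), version (2), debug_abbrev_offset (4),
		address_size (1)
     DWARF 5:   unit_length (4), version (2), unit_type (1),
		address_size (1), debug_abbrev_offset (4)
   which is why the pointer-size byte is emitted in a different position
   depending on the version.  */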
11080
11081 /* Output the compilation unit DIE and its children. */
11082
11083 static void
11084 output_comp_unit (dw_die_ref die, int output_if_empty,
11085 const unsigned char *dwo_id)
11086 {
11087 const char *secname, *oldsym;
11088 char *tmp;
11089
11090 /* Unless we are outputting main CU, we may throw away empty ones. */
11091 if (!output_if_empty && die->die_child == NULL)
11092 return;
11093
11094 /* Even if there are no children of this DIE, we must output the information
11095 about the compilation unit. Otherwise, on an empty translation unit, we
11096 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11097 will then complain when examining the file. First mark all the DIEs in
11098 this CU so we know which get local refs. */
11099 mark_dies (die);
11100
11101 external_ref_hash_type *extern_map = optimize_external_refs (die);
11102
11103   /* For now, optimize only the main CU; to optimize the rest we'd need
11104      to see all of them earlier.  Leave the rest for post-linking
11105      tools like DWZ.  */
11106 if (die == comp_unit_die ())
11107 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11108
11109 build_abbrev_table (die, extern_map);
11110
11111 optimize_abbrev_table ();
11112
11113 delete extern_map;
11114
11115 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11116 next_die_offset = (dwo_id
11117 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11118 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11119 calc_die_sizes (die);
11120
11121 oldsym = die->die_id.die_symbol;
11122 if (oldsym && die->comdat_type_p)
11123 {
11124 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11125
11126 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11127 secname = tmp;
11128 die->die_id.die_symbol = NULL;
11129 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11130 }
11131 else
11132 {
11133 switch_to_section (debug_info_section);
11134 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11135 info_section_emitted = true;
11136 }
11137
11138 /* For LTO cross unit DIE refs we want a symbol on the start of the
11139 debuginfo section, not on the CU DIE. */
11140 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11141 {
11142 /* ??? No way to get visibility assembled without a decl. */
11143 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11144 get_identifier (oldsym), char_type_node);
11145 TREE_PUBLIC (decl) = true;
11146 TREE_STATIC (decl) = true;
11147 DECL_ARTIFICIAL (decl) = true;
11148 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11149 DECL_VISIBILITY_SPECIFIED (decl) = true;
11150 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11151 #ifdef ASM_WEAKEN_LABEL
11152 /* We prefer a .weak because that handles duplicates from duplicate
11153 archive members in a graceful way. */
11154 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11155 #else
11156 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11157 #endif
11158 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11159 }
11160
11161 /* Output debugging information. */
11162 output_compilation_unit_header (dwo_id
11163 ? DW_UT_split_compile : DW_UT_compile);
11164 if (dwarf_version >= 5)
11165 {
11166 if (dwo_id != NULL)
11167 for (int i = 0; i < 8; i++)
11168 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11169 }
11170 output_die (die);
11171
11172 /* Leave the marks on the main CU, so we can check them in
11173 output_pubnames. */
11174 if (oldsym)
11175 {
11176 unmark_dies (die);
11177 die->die_id.die_symbol = oldsym;
11178 }
11179 }
11180
11181 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11182 and .debug_pubtypes. This is configured per-target, but can be
11183 overridden by the -gpubnames or -gno-pubnames options. */
11184
11185 static inline bool
11186 want_pubnames (void)
11187 {
11188 if (debug_info_level <= DINFO_LEVEL_TERSE
11189 /* Names and types go to the early debug part only. */
11190 || in_lto_p)
11191 return false;
11192 if (debug_generate_pub_sections != -1)
11193 return debug_generate_pub_sections;
11194 return targetm.want_debug_pub_sections;
11195 }
11196
11197 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11198
11199 static void
11200 add_AT_pubnames (dw_die_ref die)
11201 {
11202 if (want_pubnames ())
11203 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11204 }
11205
11206 /* Add a string attribute value to a skeleton DIE. */
11207
11208 static inline void
11209 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11210 const char *str)
11211 {
11212 dw_attr_node attr;
11213 struct indirect_string_node *node;
11214
11215 if (! skeleton_debug_str_hash)
11216 skeleton_debug_str_hash
11217 = hash_table<indirect_string_hasher>::create_ggc (10);
11218
11219 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11220 find_string_form (node);
11221 if (node->form == dwarf_FORM (DW_FORM_strx))
11222 node->form = DW_FORM_strp;
11223
11224 attr.dw_attr = attr_kind;
11225 attr.dw_attr_val.val_class = dw_val_class_str;
11226 attr.dw_attr_val.val_entry = NULL;
11227 attr.dw_attr_val.v.val_str = node;
11228 add_dwarf_attr (die, &attr);
11229 }
11230
11231 /* Helper function to generate top-level dies for skeleton debug_info and
11232 debug_types. */
11233
11234 static void
11235 add_top_level_skeleton_die_attrs (dw_die_ref die)
11236 {
11237 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11238 const char *comp_dir = comp_dir_string ();
11239
11240 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11241 if (comp_dir != NULL)
11242 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11243 add_AT_pubnames (die);
11244 if (addr_index_table != NULL && addr_index_table->size () > 0)
11245 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11246 }
11247
11248 /* Output skeleton debug sections that point to the dwo file. */
11249
11250 static void
11251 output_skeleton_debug_sections (dw_die_ref comp_unit,
11252 const unsigned char *dwo_id)
11253 {
11254 /* These attributes will be found in the full debug_info section. */
11255 remove_AT (comp_unit, DW_AT_producer);
11256 remove_AT (comp_unit, DW_AT_language);
11257
11258 switch_to_section (debug_skeleton_info_section);
11259 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11260
11261   /* Produce the skeleton compilation-unit header.  This one differs enough
11262      from a normal CU header that it's better not to call
11263      output_compilation_unit_header.  */
11264 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11265 dw2_asm_output_data (4, 0xffffffff,
11266 "Initial length escape value indicating 64-bit "
11267 "DWARF extension");
11268
11269 dw2_asm_output_data (dwarf_offset_size,
11270 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11271 - DWARF_INITIAL_LENGTH_SIZE
11272 + size_of_die (comp_unit),
11273 "Length of Compilation Unit Info");
11274 output_dwarf_version ();
11275 if (dwarf_version >= 5)
11276 {
11277 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11278 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11279 }
11280 dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_abbrev_section_label,
11281 debug_skeleton_abbrev_section,
11282 "Offset Into Abbrev. Section");
11283 if (dwarf_version < 5)
11284 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11285 else
11286 for (int i = 0; i < 8; i++)
11287 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11288
11289 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11290 output_die (comp_unit);
11291
11292 /* Build the skeleton debug_abbrev section. */
11293 switch_to_section (debug_skeleton_abbrev_section);
11294 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11295
11296 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11297
11298 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11299 }
11300
11301 /* Output a comdat type unit DIE and its children. */
11302
11303 static void
11304 output_comdat_type_unit (comdat_type_node *node,
11305 bool early_lto_debug ATTRIBUTE_UNUSED)
11306 {
11307 const char *secname;
11308 char *tmp;
11309 int i;
11310 #if defined (OBJECT_FORMAT_ELF)
11311 tree comdat_key;
11312 #endif
11313
11314 /* First mark all the DIEs in this CU so we know which get local refs. */
11315 mark_dies (node->root_die);
11316
11317 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11318
11319 build_abbrev_table (node->root_die, extern_map);
11320
11321 delete extern_map;
11322 extern_map = NULL;
11323
11324 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11325 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11326 calc_die_sizes (node->root_die);
11327
11328 #if defined (OBJECT_FORMAT_ELF)
11329 if (dwarf_version >= 5)
11330 {
11331 if (!dwarf_split_debug_info)
11332 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11333 else
11334 secname = (early_lto_debug
11335 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11336 }
11337 else if (!dwarf_split_debug_info)
11338 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11339 else
11340 secname = (early_lto_debug
11341 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11342
11343 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11344 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11345 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11346 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11347 comdat_key = get_identifier (tmp);
11348 targetm.asm_out.named_section (secname,
11349 SECTION_DEBUG | SECTION_LINKONCE,
11350 comdat_key);
11351 #else
11352 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11353 sprintf (tmp, (dwarf_version >= 5
11354 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11355 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11356 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11357 secname = tmp;
11358 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11359 #endif
11360
11361 /* Output debugging information. */
11362 output_compilation_unit_header (dwarf_split_debug_info
11363 ? DW_UT_split_type : DW_UT_type);
11364 output_signature (node->signature, "Type Signature");
11365 dw2_asm_output_data (dwarf_offset_size, node->type_die->die_offset,
11366 "Offset to Type DIE");
11367 output_die (node->root_die);
11368
11369 unmark_dies (node->root_die);
11370 }
11371
11372 /* Return the DWARF2/3 pubname associated with a decl. */
11373
11374 static const char *
11375 dwarf2_name (tree decl, int scope)
11376 {
11377 if (DECL_NAMELESS (decl))
11378 return NULL;
11379 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11380 }
11381
11382 /* Add a new entry to .debug_pubnames if appropriate. */
11383
11384 static void
11385 add_pubname_string (const char *str, dw_die_ref die)
11386 {
11387 pubname_entry e;
11388
11389 e.die = die;
11390 e.name = xstrdup (str);
11391 vec_safe_push (pubname_table, e);
11392 }
11393
11394 static void
11395 add_pubname (tree decl, dw_die_ref die)
11396 {
11397 if (!want_pubnames ())
11398 return;
11399
11400 /* Don't add items to the table when we expect that the consumer will have
11401 just read the enclosing die. For example, if the consumer is looking at a
11402 class_member, it will either be inside the class already, or will have just
11403 looked up the class to find the member. Either way, searching the class is
11404 faster than searching the index. */
11405 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11406 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11407 {
11408 const char *name = dwarf2_name (decl, 1);
11409
11410 if (name)
11411 add_pubname_string (name, die);
11412 }
11413 }
11414
11415 /* Add an enumerator to the pubnames section. */
11416
11417 static void
11418 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11419 {
11420 pubname_entry e;
11421
11422 gcc_assert (scope_name);
11423 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11424 e.die = die;
11425 vec_safe_push (pubname_table, e);
11426 }
11427
11428 /* Add a new entry to .debug_pubtypes if appropriate. */
11429
11430 static void
11431 add_pubtype (tree decl, dw_die_ref die)
11432 {
11433 pubname_entry e;
11434
11435 if (!want_pubnames ())
11436 return;
11437
11438 if ((TREE_PUBLIC (decl)
11439 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11440 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11441 {
11442 tree scope = NULL;
11443 const char *scope_name = "";
11444 const char *sep = is_cxx () ? "::" : ".";
11445 const char *name;
11446
11447 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11448 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11449 {
11450 scope_name = lang_hooks.dwarf_name (scope, 1);
11451 if (scope_name != NULL && scope_name[0] != '\0')
11452 scope_name = concat (scope_name, sep, NULL);
11453 else
11454 scope_name = "";
11455 }
11456
11457 if (TYPE_P (decl))
11458 name = type_tag (decl);
11459 else
11460 name = lang_hooks.dwarf_name (decl, 1);
11461
11462 /* If we don't have a name for the type, there's no point in adding
11463 it to the table. */
11464 if (name != NULL && name[0] != '\0')
11465 {
11466 e.die = die;
11467 e.name = concat (scope_name, name, NULL);
11468 vec_safe_push (pubtype_table, e);
11469 }
11470
11471 /* Although it might be more consistent to add the pubinfo for the
11472 enumerators as their dies are created, they should only be added if the
11473 enum type meets the criteria above. So rather than re-check the parent
11474 enum type whenever an enumerator die is created, just output them all
11475 here. This isn't protected by the name conditional because anonymous
11476 enums don't have names. */
11477 if (die->die_tag == DW_TAG_enumeration_type)
11478 {
11479 dw_die_ref c;
11480
11481 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11482 }
11483 }
11484 }
11485
11486 /* Output a single entry in the pubnames table. */
11487
11488 static void
11489 output_pubname (dw_offset die_offset, pubname_entry *entry)
11490 {
11491 dw_die_ref die = entry->die;
11492 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11493
11494 dw2_asm_output_data (dwarf_offset_size, die_offset, "DIE offset");
11495
11496 if (debug_generate_pub_sections == 2)
11497 {
11498 /* This logic follows gdb's method for determining the value of the flag
11499 byte. */
11500 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11501 switch (die->die_tag)
11502 {
11503 case DW_TAG_typedef:
11504 case DW_TAG_base_type:
11505 case DW_TAG_subrange_type:
11506 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11507 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11508 break;
11509 case DW_TAG_enumerator:
11510 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11511 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11512 if (!is_cxx ())
11513 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11514 break;
11515 case DW_TAG_subprogram:
11516 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11517 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11518 if (!is_ada ())
11519 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11520 break;
11521 case DW_TAG_constant:
11522 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11523 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11524 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11525 break;
11526 case DW_TAG_variable:
11527 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11528 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11529 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11530 break;
11531 case DW_TAG_namespace:
11532 case DW_TAG_imported_declaration:
11533 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11534 break;
11535 case DW_TAG_class_type:
11536 case DW_TAG_interface_type:
11537 case DW_TAG_structure_type:
11538 case DW_TAG_union_type:
11539 case DW_TAG_enumeration_type:
11540 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11541 if (!is_cxx ())
11542 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11543 break;
11544 default:
11545 /* An unusual tag. Leave the flag-byte empty. */
11546 break;
11547 }
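      /* Emit only the high-order byte of the gdb_index flags word, i.e.
	 the symbol kind and the static bit; the CU-index bits in the low
	 part of the word are shifted out here.  */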
11548 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11549 "GDB-index flags");
11550 }
11551
11552 dw2_asm_output_nstring (entry->name, -1, "external name");
11553 }
11554
11555
11556 /* Output the public names table used to speed up access to externally
11557 visible names; or the public types table used to find type definitions. */
11558
11559 static void
11560 output_pubnames (vec<pubname_entry, va_gc> *names)
11561 {
11562 unsigned i;
11563 unsigned long pubnames_length = size_of_pubnames (names);
11564 pubname_entry *pub;
11565
11566 if (!XCOFF_DEBUGGING_INFO)
11567 {
11568 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11569 dw2_asm_output_data (4, 0xffffffff,
11570 "Initial length escape value indicating 64-bit DWARF extension");
11571 dw2_asm_output_data (dwarf_offset_size, pubnames_length,
11572 "Pub Info Length");
11573 }
11574
11575 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11576 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11577
11578 if (dwarf_split_debug_info)
11579 dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
11580 debug_skeleton_info_section,
11581 "Offset of Compilation Unit Info");
11582 else
11583 dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
11584 debug_info_section,
11585 "Offset of Compilation Unit Info");
11586 dw2_asm_output_data (dwarf_offset_size, next_die_offset,
11587 "Compilation Unit Length");
11588
11589 FOR_EACH_VEC_ELT (*names, i, pub)
11590 {
11591 if (include_pubname_in_output (names, pub))
11592 {
11593 dw_offset die_offset = pub->die->die_offset;
11594
11595 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11596 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11597 gcc_assert (pub->die->die_mark);
11598
11599 /* If we're putting types in their own .debug_types sections,
11600 the .debug_pubtypes table will still point to the compile
11601 unit (not the type unit), so we want to use the offset of
11602 the skeleton DIE (if there is one). */
11603 if (pub->die->comdat_type_p && names == pubtype_table)
11604 {
11605 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11606
11607 if (type_node != NULL)
11608 die_offset = (type_node->skeleton_die != NULL
11609 ? type_node->skeleton_die->die_offset
11610 : comp_unit_die ()->die_offset);
11611 }
11612
11613 output_pubname (die_offset, pub);
11614 }
11615 }
11616
11617 dw2_asm_output_data (dwarf_offset_size, 0, NULL);
11618 }
11619
11620 /* Output public names and types tables if necessary. */
11621
11622 static void
11623 output_pubtables (void)
11624 {
11625 if (!want_pubnames () || !info_section_emitted)
11626 return;
11627
11628 switch_to_section (debug_pubnames_section);
11629 output_pubnames (pubname_table);
11630 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11631 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11632 simply won't look for the section. */
11633 switch_to_section (debug_pubtypes_section);
11634 output_pubnames (pubtype_table);
11635 }
11636
11637
11638 /* Output the information that goes into the .debug_aranges table.
11639 Namely, define the beginning and ending address range of the
11640 text section generated for this compilation unit. */
11641
11642 static void
11643 output_aranges (void)
11644 {
11645 unsigned i;
11646 unsigned long aranges_length = size_of_aranges ();
11647
11648 if (!XCOFF_DEBUGGING_INFO)
11649 {
11650 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11651 dw2_asm_output_data (4, 0xffffffff,
11652 "Initial length escape value indicating 64-bit DWARF extension");
11653 dw2_asm_output_data (dwarf_offset_size, aranges_length,
11654 "Length of Address Ranges Info");
11655 }
11656
11657 /* Version number for aranges is still 2, even up to DWARF5. */
11658 dw2_asm_output_data (2, 2, "DWARF aranges version");
11659 if (dwarf_split_debug_info)
11660 dw2_asm_output_offset (dwarf_offset_size, debug_skeleton_info_section_label,
11661 debug_skeleton_info_section,
11662 "Offset of Compilation Unit Info");
11663 else
11664 dw2_asm_output_offset (dwarf_offset_size, debug_info_section_label,
11665 debug_info_section,
11666 "Offset of Compilation Unit Info");
11667 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11668 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11669
11670 /* We need to align to twice the pointer size here. */
11671 if (DWARF_ARANGES_PAD_SIZE)
11672 {
11673       /* Pad using 2-byte words so that the padding is correct for any
11674          pointer size.  */
11675 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11676 2 * DWARF2_ADDR_SIZE);
11677 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11678 dw2_asm_output_data (2, 0, NULL);
11679 }
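  /* For example, with 32-bit DWARF and 8-byte addresses the 12-byte
     header emitted above (length, version, CU offset, address size,
     segment size) is followed by 4 bytes of padding so that the first
     address/length pair starts at a multiple of 2 * DWARF2_ADDR_SIZE.  */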
11680
11681   /* These entries must not be output if the sections were not used;
11682      in that case the length will be 0 and the address may end up as 0
11683      if the section is discarded by ld --gc-sections, leaving an
11684      invalid (0, 0) entry that can be confused with the
11685      terminator.  */
11686 if (text_section_used)
11687 {
11688 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11689 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11690 text_section_label, "Length");
11691 }
11692 if (cold_text_section_used)
11693 {
11694 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11695 "Address");
11696 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11697 cold_text_section_label, "Length");
11698 }
11699
11700 if (have_multiple_function_sections)
11701 {
11702 unsigned fde_idx;
11703 dw_fde_ref fde;
11704
11705 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11706 {
11707 if (DECL_IGNORED_P (fde->decl))
11708 continue;
11709 if (!fde->in_std_section)
11710 {
11711 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11712 "Address");
11713 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11714 fde->dw_fde_begin, "Length");
11715 }
11716 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11717 {
11718 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11719 "Address");
11720 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11721 fde->dw_fde_second_begin, "Length");
11722 }
11723 }
11724 }
11725
11726 /* Output the terminator words. */
11727 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11728 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11729 }
11730
11731 /* Add a new entry to .debug_ranges. Return its index into
11732 ranges_table vector. */
11733
11734 static unsigned int
11735 add_ranges_num (int num, bool maybe_new_sec)
11736 {
11737 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11738 vec_safe_push (ranges_table, r);
11739 return vec_safe_length (ranges_table) - 1;
11740 }
11741
11742 /* Add a new entry to .debug_ranges corresponding to a block, or a
11743    range terminator if BLOCK is NULL.  MAYBE_NEW_SEC is true if
11744    this entry might be in a different section from the previous range.  */
11745
11746 static unsigned int
11747 add_ranges (const_tree block, bool maybe_new_sec)
11748 {
11749 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11750 }
11751
11752 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11753    chain, or a middle entry of a chain that will be directly referred to.  */
11754
11755 static void
11756 note_rnglist_head (unsigned int offset)
11757 {
11758 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11759 return;
11760 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11761 }
11762
11763 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11764 When using dwarf_split_debug_info, address attributes in dies destined
11765 for the final executable should be direct references--setting the
11766 parameter force_direct ensures this behavior. */
11767
11768 static void
11769 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11770 bool *added, bool force_direct)
11771 {
11772 unsigned int in_use = vec_safe_length (ranges_by_label);
11773 unsigned int offset;
11774 dw_ranges_by_label rbl = { begin, end };
11775 vec_safe_push (ranges_by_label, rbl);
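  /* A negative block number encodes an index into ranges_by_label;
     output_ranges and output_rnglists recover the index as -num - 1.  */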
11776 offset = add_ranges_num (-(int)in_use - 1, true);
11777 if (!*added)
11778 {
11779 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11780 *added = true;
11781 note_rnglist_head (offset);
11782 }
11783 }
11784
11785 /* Emit .debug_ranges section. */
11786
11787 static void
11788 output_ranges (void)
11789 {
11790 unsigned i;
11791 static const char *const start_fmt = "Offset %#x";
11792 const char *fmt = start_fmt;
11793 dw_ranges *r;
11794
11795 switch_to_section (debug_ranges_section);
11796 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11797 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11798 {
11799 int block_num = r->num;
11800
11801 if (block_num > 0)
11802 {
11803 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11804 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11805
11806 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11807 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11808
11809 /* If all code is in the text section, then the compilation
11810 unit base address defaults to DW_AT_low_pc, which is the
11811 base of the text section. */
11812 if (!have_multiple_function_sections)
11813 {
11814 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11815 text_section_label,
11816 fmt, i * 2 * DWARF2_ADDR_SIZE);
11817 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11818 text_section_label, NULL);
11819 }
11820
11821 /* Otherwise, the compilation unit base address is zero,
11822 which allows us to use absolute addresses, and not worry
11823 about whether the target supports cross-section
11824 arithmetic. */
11825 else
11826 {
11827 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11828 fmt, i * 2 * DWARF2_ADDR_SIZE);
11829 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11830 }
11831
11832 fmt = NULL;
11833 }
11834
11835 /* Negative block_num stands for an index into ranges_by_label. */
11836 else if (block_num < 0)
11837 {
11838 int lab_idx = - block_num - 1;
11839
11840 if (!have_multiple_function_sections)
11841 {
11842 gcc_unreachable ();
11843 #if 0
11844 /* If we ever use add_ranges_by_labels () for a single
11845 function section, all we have to do is to take out
11846 the #if 0 above. */
11847 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11848 (*ranges_by_label)[lab_idx].begin,
11849 text_section_label,
11850 fmt, i * 2 * DWARF2_ADDR_SIZE);
11851 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11852 (*ranges_by_label)[lab_idx].end,
11853 text_section_label, NULL);
11854 #endif
11855 }
11856 else
11857 {
11858 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11859 (*ranges_by_label)[lab_idx].begin,
11860 fmt, i * 2 * DWARF2_ADDR_SIZE);
11861 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11862 (*ranges_by_label)[lab_idx].end,
11863 NULL);
11864 }
11865 }
11866 else
11867 {
11868 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11869 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11870 fmt = start_fmt;
11871 }
11872 }
11873 }
11874
11875 /* Non-zero if .debug_line_str should be used for .debug_line section
11876 strings or strings that are likely shareable with those. */
11877 #define DWARF5_USE_DEBUG_LINE_STR \
11878 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11879 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11880 /* FIXME: there is no .debug_line_str.dwo section, \
11881 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11882 && !dwarf_split_debug_info)
11883
11884
11885 /* Returns TRUE if we are outputting DWARF5 and either the assembler
11886    supports DWARF5 .debug_line tables using .debug_line_str, or we
11887    generate the table ourselves - except for split-dwarf, which
11888    doesn't have a .debug_line_str section.  */
11889 static bool
11890 asm_outputs_debug_line_str (void)
11891 {
11892 if (dwarf_version >= 5
11893 && ! output_asm_line_debug_info ()
11894 && DWARF5_USE_DEBUG_LINE_STR)
11895 return true;
11896 else
11897 {
11898 #if defined(HAVE_AS_GDWARF_5_DEBUG_FLAG) && defined(HAVE_AS_WORKING_DWARF_N_FLAG)
11899 return !dwarf_split_debug_info && dwarf_version >= 5;
11900 #else
11901 return false;
11902 #endif
11903 }
11904 }
11905
11906
11907 /* Assign .debug_rnglists indexes. */
11908
11909 static void
11910 index_rnglists (void)
11911 {
11912 unsigned i;
11913 dw_ranges *r;
11914
11915 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11916 if (r->label)
11917 r->idx = rnglist_idx++;
11918 }
11919
11920 /* Emit .debug_rnglists section. */
11921
11922 static void
11923 output_rnglists (unsigned generation)
11924 {
11925 unsigned i;
11926 dw_ranges *r;
11927 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11928 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11929 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11930
11931 switch_to_section (debug_ranges_section);
11932 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11933 /* There are up to 4 unique ranges labels per generation.
11934 See also init_sections_and_labels. */
11935 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11936 2 + generation * 4);
11937 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11938 3 + generation * 4);
11939 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
11940 dw2_asm_output_data (4, 0xffffffff,
11941 "Initial length escape value indicating "
11942 "64-bit DWARF extension");
11943 dw2_asm_output_delta (dwarf_offset_size, l2, l1,
11944 "Length of Range Lists");
11945 ASM_OUTPUT_LABEL (asm_out_file, l1);
11946 output_dwarf_version ();
11947 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11948 dw2_asm_output_data (1, 0, "Segment Size");
11949 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11950 about relocation sizes and primarily care about the size of .debug*
11951 sections in linked shared libraries and executables, then
11952 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11953 into it are usually larger than just DW_FORM_sec_offset offsets
11954 into the .debug_rnglists section. */
11955 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11956 "Offset Entry Count");
11957 if (dwarf_split_debug_info)
11958 {
11959 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11960 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11961 if (r->label)
11962 dw2_asm_output_delta (dwarf_offset_size, r->label,
11963 ranges_base_label, NULL);
11964 }
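  /* The loop below picks a DW_RLE_* encoding per entry: with .uleb128
     label-difference support, DW_RLE_offset_pair relative to the text
     section or to a previously emitted DW_RLE_base_address, or
     DW_RLE_start_length when no base address is in effect; without that
     support, DW_RLE_start_end with absolute begin/end addresses.  Each
     range list is terminated by DW_RLE_end_of_list.  */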
11965
11966 const char *lab = "";
11967 unsigned int len = vec_safe_length (ranges_table);
11968 const char *base = NULL;
11969 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11970 {
11971 int block_num = r->num;
11972
11973 if (r->label)
11974 {
11975 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11976 lab = r->label;
11977 }
11978 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11979 base = NULL;
11980 if (block_num > 0)
11981 {
11982 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11983 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11984
11985 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11986 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11987
11988 if (HAVE_AS_LEB128)
11989 {
11990 /* If all code is in the text section, then the compilation
11991 unit base address defaults to DW_AT_low_pc, which is the
11992 base of the text section. */
11993 if (!have_multiple_function_sections)
11994 {
11995 dw2_asm_output_data (1, DW_RLE_offset_pair,
11996 "DW_RLE_offset_pair (%s)", lab);
11997 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11998 "Range begin address (%s)", lab);
11999 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
12000 "Range end address (%s)", lab);
12001 continue;
12002 }
12003 if (base == NULL)
12004 {
12005 dw_ranges *r2 = NULL;
12006 if (i < len - 1)
12007 r2 = &(*ranges_table)[i + 1];
12008 if (r2
12009 && r2->num != 0
12010 && r2->label == NULL
12011 && !r2->maybe_new_sec)
12012 {
12013 dw2_asm_output_data (1, DW_RLE_base_address,
12014 "DW_RLE_base_address (%s)", lab);
12015 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12016 "Base address (%s)", lab);
12017 strcpy (basebuf, blabel);
12018 base = basebuf;
12019 }
12020 }
12021 if (base)
12022 {
12023 dw2_asm_output_data (1, DW_RLE_offset_pair,
12024 "DW_RLE_offset_pair (%s)", lab);
12025 dw2_asm_output_delta_uleb128 (blabel, base,
12026 "Range begin address (%s)", lab);
12027 dw2_asm_output_delta_uleb128 (elabel, base,
12028 "Range end address (%s)", lab);
12029 continue;
12030 }
12031 dw2_asm_output_data (1, DW_RLE_start_length,
12032 "DW_RLE_start_length (%s)", lab);
12033 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12034 "Range begin address (%s)", lab);
12035 dw2_asm_output_delta_uleb128 (elabel, blabel,
12036 "Range length (%s)", lab);
12037 }
12038 else
12039 {
12040 dw2_asm_output_data (1, DW_RLE_start_end,
12041 "DW_RLE_start_end (%s)", lab);
12042 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12043 "Range begin address (%s)", lab);
12044 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12045 "Range end address (%s)", lab);
12046 }
12047 }
12048
12049 /* Negative block_num stands for an index into ranges_by_label. */
12050 else if (block_num < 0)
12051 {
12052 int lab_idx = - block_num - 1;
12053 const char *blabel = (*ranges_by_label)[lab_idx].begin;
12054 const char *elabel = (*ranges_by_label)[lab_idx].end;
12055
12056 if (!have_multiple_function_sections)
12057 gcc_unreachable ();
12058 if (HAVE_AS_LEB128)
12059 {
12060 dw2_asm_output_data (1, DW_RLE_start_length,
12061 "DW_RLE_start_length (%s)", lab);
12062 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12063 "Range begin address (%s)", lab);
12064 dw2_asm_output_delta_uleb128 (elabel, blabel,
12065 "Range length (%s)", lab);
12066 }
12067 else
12068 {
12069 dw2_asm_output_data (1, DW_RLE_start_end,
12070 "DW_RLE_start_end (%s)", lab);
12071 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12072 "Range begin address (%s)", lab);
12073 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12074 "Range end address (%s)", lab);
12075 }
12076 }
12077 else
12078 dw2_asm_output_data (1, DW_RLE_end_of_list,
12079 "DW_RLE_end_of_list (%s)", lab);
12080 }
12081 ASM_OUTPUT_LABEL (asm_out_file, l2);
12082 }
12083
12084 /* Data structure containing information about input files. */
12085 struct file_info
12086 {
12087 const char *path; /* Complete file name. */
12088 const char *fname; /* File name part. */
12089 int length; /* Length of entire string. */
12090 struct dwarf_file_data * file_idx; /* Index in input file table. */
12091 int dir_idx; /* Index in directory table. */
12092 };
12093
12094 /* Data structure containing information about directories with source
12095 files. */
12096 struct dir_info
12097 {
12098 const char *path; /* Path including directory name. */
12099 int length; /* Path length. */
12100 int prefix; /* Index of directory entry which is a prefix. */
12101 int count; /* Number of files in this directory. */
12102 int dir_idx; /* Index of directory used as base. */
12103 };
12104
12105 /* Callback function for file_info comparison. We sort by looking at
12106 the directories in the path. */
12107
12108 static int
12109 file_info_cmp (const void *p1, const void *p2)
12110 {
12111 const struct file_info *const s1 = (const struct file_info *) p1;
12112 const struct file_info *const s2 = (const struct file_info *) p2;
12113 const unsigned char *cp1;
12114 const unsigned char *cp2;
12115
12116   /* Take care of file names without directories.  We need to return
12117      consistent values to qsort, since some implementations get confused
12118      if we return the same value when identical operands are passed in
12119      opposite orders.  So if neither has a directory, return 0; otherwise
12120      return 1 or -1 depending on which one has the directory.  We want the
12121      one with the directory to sort after the one without, so all files with
12122      no directory come first (normally only the compilation unit file).  */
12123 if ((s1->path == s1->fname || s2->path == s2->fname))
12124 return (s2->path == s2->fname) - (s1->path == s1->fname);
12125
12126 cp1 = (const unsigned char *) s1->path;
12127 cp2 = (const unsigned char *) s2->path;
12128
12129 while (1)
12130 {
12131 ++cp1;
12132 ++cp2;
12133 /* Reached the end of the first path? If so, handle like above,
12134 but now we want longer directory prefixes before shorter ones. */
12135 if ((cp1 == (const unsigned char *) s1->fname)
12136 || (cp2 == (const unsigned char *) s2->fname))
12137 return ((cp1 == (const unsigned char *) s1->fname)
12138 - (cp2 == (const unsigned char *) s2->fname));
12139
12140 /* Character of current path component the same? */
12141 else if (*cp1 != *cp2)
12142 return *cp1 - *cp2;
12143 }
12144 }
12145
12146 struct file_name_acquire_data
12147 {
12148 struct file_info *files;
12149 int used_files;
12150 int max_files;
12151 };
12152
12153 /* Traversal function for the hash table. */
12154
12155 int
12156 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12157 {
12158 struct dwarf_file_data *d = *slot;
12159 struct file_info *fi;
12160 const char *f;
12161
12162 gcc_assert (fnad->max_files >= d->emitted_number);
12163
12164 if (! d->emitted_number)
12165 return 1;
12166
12167 gcc_assert (fnad->max_files != fnad->used_files);
12168
12169 fi = fnad->files + fnad->used_files++;
12170
12171 f = remap_debug_filename (d->filename);
12172
12173 /* Skip all leading "./". */
12174 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12175 f += 2;
12176
12177 /* Create a new array entry. */
12178 fi->path = f;
12179 fi->length = strlen (f);
12180 fi->file_idx = d;
12181
12182 /* Search for the file name part. */
12183 f = strrchr (f, DIR_SEPARATOR);
12184 #if defined (DIR_SEPARATOR_2)
12185 {
12186 const char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12187
12188 if (g != NULL)
12189 {
12190 if (f == NULL || f < g)
12191 f = g;
12192 }
12193 }
12194 #endif
12195
12196 fi->fname = f == NULL ? fi->path : f + 1;
12197 return 1;
12198 }
12199
12200 /* Helper function for output_file_names.  Emit a FORM-encoded
12201    string STR, with assembly comment start ENTRY_KIND and
12202    index IDX.  */
12203
12204 static void
12205 output_line_string (enum dwarf_form form, const char *str,
12206 const char *entry_kind, unsigned int idx)
12207 {
12208 switch (form)
12209 {
12210 case DW_FORM_string:
12211 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12212 break;
12213 case DW_FORM_line_strp:
12214 if (!debug_line_str_hash)
12215 debug_line_str_hash
12216 = hash_table<indirect_string_hasher>::create_ggc (10);
12217
12218 struct indirect_string_node *node;
12219 node = find_AT_string_in_table (str, debug_line_str_hash);
12220 set_indirect_string (node);
12221 node->form = form;
12222 dw2_asm_output_offset (dwarf_offset_size, node->label,
12223 debug_line_str_section, "%s: %#x: \"%s\"",
12224 entry_kind, 0, node->str);
12225 break;
12226 default:
12227 gcc_unreachable ();
12228 }
12229 }
12230
12231 /* Output the directory table and the file name table. We try to minimize
12232 the total amount of memory needed. A heuristic is used to avoid large
12233 slowdowns with many input files. */
12234
12235 static void
12236 output_file_names (void)
12237 {
12238 struct file_name_acquire_data fnad;
12239 int numfiles;
12240 struct file_info *files;
12241 struct dir_info *dirs;
12242 int *saved;
12243 int *savehere;
12244 int *backmap;
12245 int ndirs;
12246 int idx_offset;
12247 int i;
12248
12249 if (!last_emitted_file)
12250 {
12251 if (dwarf_version >= 5)
12252 {
12253 dw2_asm_output_data (1, 0, "Directory entry format count");
12254 dw2_asm_output_data_uleb128 (0, "Directories count");
12255 dw2_asm_output_data (1, 0, "File name entry format count");
12256 dw2_asm_output_data_uleb128 (0, "File names count");
12257 }
12258 else
12259 {
12260 dw2_asm_output_data (1, 0, "End directory table");
12261 dw2_asm_output_data (1, 0, "End file name table");
12262 }
12263 return;
12264 }
12265
12266 numfiles = last_emitted_file->emitted_number;
12267
12268 /* Allocate the various arrays we need. */
12269 files = XALLOCAVEC (struct file_info, numfiles);
12270 dirs = XALLOCAVEC (struct dir_info, numfiles);
12271
12272 fnad.files = files;
12273 fnad.used_files = 0;
12274 fnad.max_files = numfiles;
12275 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12276 gcc_assert (fnad.used_files == fnad.max_files);
12277
12278 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12279
12280 /* Find all the different directories used. */
12281 dirs[0].path = files[0].path;
12282 dirs[0].length = files[0].fname - files[0].path;
12283 dirs[0].prefix = -1;
12284 dirs[0].count = 1;
12285 dirs[0].dir_idx = 0;
12286 files[0].dir_idx = 0;
12287 ndirs = 1;
12288
12289 for (i = 1; i < numfiles; i++)
12290 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12291 && memcmp (dirs[ndirs - 1].path, files[i].path,
12292 dirs[ndirs - 1].length) == 0)
12293 {
12294 /* Same directory as last entry. */
12295 files[i].dir_idx = ndirs - 1;
12296 ++dirs[ndirs - 1].count;
12297 }
12298 else
12299 {
12300 int j;
12301
12302 /* This is a new directory. */
12303 dirs[ndirs].path = files[i].path;
12304 dirs[ndirs].length = files[i].fname - files[i].path;
12305 dirs[ndirs].count = 1;
12306 dirs[ndirs].dir_idx = ndirs;
12307 files[i].dir_idx = ndirs;
12308
12309 /* Search for a prefix. */
12310 dirs[ndirs].prefix = -1;
12311 for (j = 0; j < ndirs; j++)
12312 if (dirs[j].length < dirs[ndirs].length
12313 && dirs[j].length > 1
12314 && (dirs[ndirs].prefix == -1
12315 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12316 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12317 dirs[ndirs].prefix = j;
12318
12319 ++ndirs;
12320 }
12321
12322   /* Now to the actual work.  We have to find a subset of the directories
12323      which allows expressing the file names using references to the
12324      directory table with the fewest characters.  We do not do an exhaustive
12325      search over every combination of every single possible prefix.
12326      Instead we use a heuristic which provides nearly optimal results
12327      in most cases and is never far off.  */
12328 saved = XALLOCAVEC (int, ndirs);
12329 savehere = XALLOCAVEC (int, ndirs);
12330
12331 memset (saved, '\0', ndirs * sizeof (saved[0]));
12332 for (i = 0; i < ndirs; i++)
12333 {
12334 int j;
12335 int total;
12336
12337 /* We can always save some space for the current directory. But this
12338 does not mean it will be enough to justify adding the directory. */
12339 savehere[i] = dirs[i].length;
12340 total = (savehere[i] - saved[i]) * dirs[i].count;
12341
12342 for (j = i + 1; j < ndirs; j++)
12343 {
12344 savehere[j] = 0;
12345 if (saved[j] < dirs[i].length)
12346 {
12347 /* Determine whether the dirs[i] path is a prefix of the
12348 dirs[j] path. */
12349 int k;
12350
12351 k = dirs[j].prefix;
12352 while (k != -1 && k != (int) i)
12353 k = dirs[k].prefix;
12354
12355 if (k == (int) i)
12356 {
12357 /* Yes it is. We can possibly save some memory by
12358 writing the filenames in dirs[j] relative to
12359 dirs[i]. */
12360 savehere[j] = dirs[i].length;
12361 total += (savehere[j] - saved[j]) * dirs[j].count;
12362 }
12363 }
12364 }
12365
12366 /* Check whether we can save enough to justify adding the dirs[i]
12367 directory. */
12368 if (total > dirs[i].length + 1)
12369 {
12370 /* It's worthwhile adding. */
12371 for (j = i; j < ndirs; j++)
12372 if (savehere[j] > 0)
12373 {
12374 /* Remember how much we saved for this directory so far. */
12375 saved[j] = savehere[j];
12376
12377 /* Remember the prefix directory. */
12378 dirs[j].dir_idx = i;
12379 }
12380 }
12381 }
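  /* As a rough illustration: with a directory table containing
     "/usr/include/" (one file) and "/usr/include/sys/" (two files), the
     pass for "/usr/include/" saves 13 characters for its own file plus
     13 for each of the two sys/ files (39 > 13 + 1), so all three files
     use "/usr/include/" as their base and the sys/ file names keep only
     the "sys/" part.  The later pass for "/usr/include/sys/" would save
     just 4 more characters per file (8 < 17 + 1), so it is not chosen
     as a base.  */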
12382
12383 /* Emit the directory name table. */
12384 idx_offset = dirs[0].length > 0 ? 1 : 0;
12385 enum dwarf_form str_form = DW_FORM_string;
12386 enum dwarf_form idx_form = DW_FORM_udata;
12387 if (dwarf_version >= 5)
12388 {
12389 const char *comp_dir = comp_dir_string ();
12390 if (comp_dir == NULL)
12391 comp_dir = "";
12392 dw2_asm_output_data (1, 1, "Directory entry format count");
12393 if (DWARF5_USE_DEBUG_LINE_STR)
12394 str_form = DW_FORM_line_strp;
12395 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12396 dw2_asm_output_data_uleb128 (str_form, "%s",
12397 get_DW_FORM_name (str_form));
12398 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12399 if (str_form == DW_FORM_string)
12400 {
12401 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12402 for (i = 1 - idx_offset; i < ndirs; i++)
12403 dw2_asm_output_nstring (dirs[i].path,
12404 dirs[i].length
12405 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12406 "Directory Entry: %#x", i + idx_offset);
12407 }
12408 else
12409 {
12410 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12411 for (i = 1 - idx_offset; i < ndirs; i++)
12412 {
12413 const char *str
12414 = ggc_alloc_string (dirs[i].path,
12415 dirs[i].length
12416 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12417 output_line_string (str_form, str, "Directory Entry",
12418 (unsigned) i + idx_offset);
12419 }
12420 }
12421 }
12422 else
12423 {
12424 for (i = 1 - idx_offset; i < ndirs; i++)
12425 dw2_asm_output_nstring (dirs[i].path,
12426 dirs[i].length
12427 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12428 "Directory Entry: %#x", i + idx_offset);
12429
12430 dw2_asm_output_data (1, 0, "End directory table");
12431 }
12432
12433 /* We have to emit them in the order of emitted_number since that's
12434 used in the debug info generation. To do this efficiently we
12435 generate a back-mapping of the indices first. */
12436 backmap = XALLOCAVEC (int, numfiles);
12437 for (i = 0; i < numfiles; i++)
12438 backmap[files[i].file_idx->emitted_number - 1] = i;
12439
12440 if (dwarf_version >= 5)
12441 {
12442 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12443 if (filename0 == NULL)
12444 filename0 = "";
12445       /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12446	 DW_FORM_data2.  Choose one based on the number of directories
12447	 and how much space they would occupy in each encoding.
12448	 If we have at most 256 directories, all indexes fit into
12449	 a single byte, so DW_FORM_data1 is most compact (with at most
12450	 128 directories, DW_FORM_udata would be just as compact, but
12451	 no shorter and slower to decode).  */
12452 if (ndirs + idx_offset <= 256)
12453 idx_form = DW_FORM_data1;
12454       /* If there are more than 65536 directories, we have to use
12455	 DW_FORM_udata; DW_FORM_data2 can't refer to them.
12456	 Otherwise, compute how much space the indexes would occupy if
12457	 they all used DW_FORM_udata - sum - and compare that to the size
12458	 of the DW_FORM_data2 encoding, picking the more efficient one.  */
12459 else if (ndirs + idx_offset <= 65536)
12460 {
12461 unsigned HOST_WIDE_INT sum = 1;
12462 for (i = 0; i < numfiles; i++)
12463 {
12464 int file_idx = backmap[i];
12465 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12466 sum += size_of_uleb128 (dir_idx);
12467 }
12468 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12469 idx_form = DW_FORM_data2;
12470 }
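      /* For instance, with 300 directories DW_FORM_data1 is ruled out;
	 uleb128 directory indexes then cost 1 byte for indexes 0-127 and
	 2 bytes for 128-299, so DW_FORM_udata is kept only if enough
	 files reference the low-numbered directories, otherwise the
	 fixed 2-byte DW_FORM_data2 is chosen.  */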
12471 #ifdef VMS_DEBUGGING_INFO
12472 dw2_asm_output_data (1, 4, "File name entry format count");
12473 #else
12474 dw2_asm_output_data (1, 2, "File name entry format count");
12475 #endif
12476 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12477 dw2_asm_output_data_uleb128 (str_form, "%s",
12478 get_DW_FORM_name (str_form));
12479 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12480 "DW_LNCT_directory_index");
12481 dw2_asm_output_data_uleb128 (idx_form, "%s",
12482 get_DW_FORM_name (idx_form));
12483 #ifdef VMS_DEBUGGING_INFO
12484 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12485 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12486 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12487 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12488 #endif
12489 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12490
12491 output_line_string (str_form, filename0, "File Entry", 0);
12492
12493 /* Include directory index. */
12494 if (idx_form != DW_FORM_udata)
12495 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12496 0, NULL);
12497 else
12498 dw2_asm_output_data_uleb128 (0, NULL);
12499
12500 #ifdef VMS_DEBUGGING_INFO
12501 dw2_asm_output_data_uleb128 (0, NULL);
12502 dw2_asm_output_data_uleb128 (0, NULL);
12503 #endif
12504 }
12505
12506 /* Now write all the file names. */
12507 for (i = 0; i < numfiles; i++)
12508 {
12509 int file_idx = backmap[i];
12510 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12511
12512 #ifdef VMS_DEBUGGING_INFO
12513 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12514
12515 /* Setting these fields can lead to debugger miscomparisons,
12516 but VMS Debug requires them to be set correctly. */
12517
12518 int ver;
12519 long long cdt;
12520 long siz;
12521 int maxfilelen = (strlen (files[file_idx].path)
12522 + dirs[dir_idx].length
12523 + MAX_VMS_VERSION_LEN + 1);
12524 char *filebuf = XALLOCAVEC (char, maxfilelen);
12525
12526 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12527 snprintf (filebuf, maxfilelen, "%s;%d",
12528 files[file_idx].path + dirs[dir_idx].length, ver);
12529
12530 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12531
12532 /* Include directory index. */
12533 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12534 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12535 dir_idx + idx_offset, NULL);
12536 else
12537 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12538
12539 /* Modification time. */
12540 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12541 &cdt, 0, 0, 0) == 0)
12542 ? cdt : 0, NULL);
12543
12544 /* File length in bytes. */
12545 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12546 0, &siz, 0, 0) == 0)
12547 ? siz : 0, NULL);
12548 #else
12549 output_line_string (str_form,
12550 files[file_idx].path + dirs[dir_idx].length,
12551 "File Entry", (unsigned) i + 1);
12552
12553 /* Include directory index. */
12554 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12555 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12556 dir_idx + idx_offset, NULL);
12557 else
12558 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12559
12560 if (dwarf_version >= 5)
12561 continue;
12562
12563 /* Modification time. */
12564 dw2_asm_output_data_uleb128 (0, NULL);
12565
12566 /* File length in bytes. */
12567 dw2_asm_output_data_uleb128 (0, NULL);
12568 #endif /* VMS_DEBUGGING_INFO */
12569 }
12570
12571 if (dwarf_version < 5)
12572 dw2_asm_output_data (1, 0, "End file name table");
12573 }
12574
12575
12576 /* Output one line number table into the .debug_line section. */
12577
12578 static void
12579 output_one_line_info_table (dw_line_info_table *table)
12580 {
12581 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12582 unsigned int current_line = 1;
12583 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12584 dw_line_info_entry *ent, *prev_addr;
12585 size_t i;
12586 unsigned int view;
12587
12588 view = 0;
12589
12590 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12591 {
12592 switch (ent->opcode)
12593 {
12594 case LI_set_address:
12595 /* ??? Unfortunately, we have little choice here currently, and
12596 must always use the most general form. GCC does not know the
12597 address delta itself, so we can't use DW_LNS_advance_pc. Many
12598 ports do have length attributes which will give an upper bound
12599 on the address range. We could perhaps use length attributes
12600 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12601 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12602
12603 view = 0;
12604
12605 /* This can handle any delta. This takes
12606 4+DWARF2_ADDR_SIZE bytes. */
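/* Layout of the extended opcode emitted below (standard DWARF line
program encoding): a zero byte, a uleb128 length covering the
sub-opcode and its operand (1 + DWARF2_ADDR_SIZE), the
DW_LNE_set_address sub-opcode, and finally the label address.  */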
12607 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12608 debug_variable_location_views
12609 ? ", reset view to 0" : "");
12610 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12611 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12612 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12613
12614 prev_addr = ent;
12615 break;
12616
12617 case LI_adv_address:
12618 {
12619 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12620 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12621 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12622
12623 view++;
12624
12625 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12626 dw2_asm_output_delta (2, line_label, prev_label,
12627 "from %s to %s", prev_label, line_label);
12628
12629 prev_addr = ent;
12630 break;
12631 }
12632
12633 case LI_set_line:
12634 if (ent->val == current_line)
12635 {
12636 /* We still need to start a new row, so output a copy insn. */
12637 dw2_asm_output_data (1, DW_LNS_copy,
12638 "copy line %u", current_line);
12639 }
12640 else
12641 {
12642 int line_offset = ent->val - current_line;
12643 int line_delta = line_offset - DWARF_LINE_BASE;
12644
12645 current_line = ent->val;
12646 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12647 {
12648 /* This can handle deltas from -10 to 234, using the current
12649 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12650 This takes 1 byte. */
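/* Explanatory note: a DWARF special opcode also appends a row to the
line table; its implied address advance is
(line_delta / DWARF_LINE_RANGE) * min_insn_length, which is 0 here
because line_delta < DWARF_LINE_RANGE, so only the line number is
updated.  */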
12651 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12652 "line %u", current_line);
12653 }
12654 else
12655 {
12656 /* This can handle any delta. This takes at least 4 bytes,
12657 depending on the value being encoded. */
12658 dw2_asm_output_data (1, DW_LNS_advance_line,
12659 "advance to line %u", current_line);
12660 dw2_asm_output_data_sleb128 (line_offset, NULL);
12661 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12662 }
12663 }
12664 break;
12665
12666 case LI_set_file:
12667 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12668 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12669 break;
12670
12671 case LI_set_column:
12672 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12673 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12674 break;
12675
12676 case LI_negate_stmt:
12677 current_is_stmt = !current_is_stmt;
12678 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12679 "is_stmt %d", current_is_stmt);
12680 break;
12681
12682 case LI_set_prologue_end:
12683 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12684 "set prologue end");
12685 break;
12686
12687 case LI_set_epilogue_begin:
12688 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12689 "set epilogue begin");
12690 break;
12691
12692 case LI_set_discriminator:
12693 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12694 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12695 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12696 dw2_asm_output_data_uleb128 (ent->val, NULL);
12697 break;
12698 }
12699 }
12700
12701 /* Emit debug info for the address of the end of the table. */
12702 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12703 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12704 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12705 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12706
12707 dw2_asm_output_data (1, 0, "end sequence");
12708 dw2_asm_output_data_uleb128 (1, NULL);
12709 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12710 }
12711
12712 /* Output the source line number correspondence information. This
12713 information goes into the .debug_line section. */
12714
12715 static void
12716 output_line_info (bool prologue_only)
12717 {
12718 static unsigned int generation;
12719 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12720 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12721 bool saw_one = false;
12722 int opc;
12723
12724 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12725 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12726 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12727 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12728
12729 if (!XCOFF_DEBUGGING_INFO)
12730 {
12731 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
12732 dw2_asm_output_data (4, 0xffffffff,
12733 "Initial length escape value indicating 64-bit DWARF extension");
12734 dw2_asm_output_delta (dwarf_offset_size, l2, l1,
12735 "Length of Source Line Info");
12736 }
12737
12738 ASM_OUTPUT_LABEL (asm_out_file, l1);
12739
12740 output_dwarf_version ();
12741 if (dwarf_version >= 5)
12742 {
12743 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12744 dw2_asm_output_data (1, 0, "Segment Size");
12745 }
12746 dw2_asm_output_delta (dwarf_offset_size, p2, p1, "Prolog Length");
12747 ASM_OUTPUT_LABEL (asm_out_file, p1);
12748
12749 /* Define the architecture-dependent minimum instruction length (in bytes).
12750 In this implementation of DWARF, this field is used for information
12751 purposes only. Since GCC generates assembly language, we have no
12752 a priori knowledge of how many instruction bytes are generated for each
12753 source line, and therefore can use only the DW_LNE_set_address and
12754 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12755 this as '1', which is "correct enough" for all architectures,
12756 and don't let the target override. */
12757 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12758
12759 if (dwarf_version >= 4)
12760 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12761 "Maximum Operations Per Instruction");
12762 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12763 "Default is_stmt_start flag");
12764 dw2_asm_output_data (1, DWARF_LINE_BASE,
12765 "Line Base Value (Special Opcodes)");
12766 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12767 "Line Range Value (Special Opcodes)");
12768 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12769 "Special Opcode Base");
12770
12771 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12772 {
12773 int n_op_args;
12774 switch (opc)
12775 {
12776 case DW_LNS_advance_pc:
12777 case DW_LNS_advance_line:
12778 case DW_LNS_set_file:
12779 case DW_LNS_set_column:
12780 case DW_LNS_fixed_advance_pc:
12781 case DW_LNS_set_isa:
12782 n_op_args = 1;
12783 break;
12784 default:
12785 n_op_args = 0;
12786 break;
12787 }
12788
12789 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12790 opc, n_op_args);
12791 }
12792
12793 /* Write out the information about the files we use. */
12794 output_file_names ();
12795 ASM_OUTPUT_LABEL (asm_out_file, p2);
12796 if (prologue_only)
12797 {
12798 /* Output the marker for the end of the line number info. */
12799 ASM_OUTPUT_LABEL (asm_out_file, l2);
12800 return;
12801 }
12802
12803 if (separate_line_info)
12804 {
12805 dw_line_info_table *table;
12806 size_t i;
12807
12808 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12809 if (table->in_use)
12810 {
12811 output_one_line_info_table (table);
12812 saw_one = true;
12813 }
12814 }
12815 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12816 {
12817 output_one_line_info_table (cold_text_section_line_info);
12818 saw_one = true;
12819 }
12820
12821 /* ??? Some Darwin linkers crash on a .debug_line section with no
12822 sequences. Further, merely a DW_LNE_end_sequence entry is not
12823 sufficient -- the address column must also be initialized.
12824 Make sure to output at least one set_address/end_sequence pair,
12825 choosing .text since that section is always present. */
12826 if (text_section_line_info->in_use || !saw_one)
12827 output_one_line_info_table (text_section_line_info);
12828
12829 /* Output the marker for the end of the line number info. */
12830 ASM_OUTPUT_LABEL (asm_out_file, l2);
12831 }
12832 \f
12833 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12834
12835 static inline bool
12836 need_endianity_attribute_p (bool reverse)
12837 {
12838 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12839 }
12840
12841 /* Given a pointer to a tree node for some base type, return a pointer to
12842 a DIE that describes the given type. REVERSE is true if the type is
12843 to be interpreted in the reverse storage order wrt the target order.
12844
12845 This routine must only be called for GCC type nodes that correspond to
12846 Dwarf base (fundamental) types. */
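/* For instance (illustrative): a 32-bit unsigned integer type yields a
DW_TAG_base_type DIE with DW_AT_byte_size 4 and DW_AT_encoding
DW_ATE_unsigned, while a 'double' yields DW_AT_encoding DW_ATE_float.  */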
12847
12848 static dw_die_ref
12849 base_type_die (tree type, bool reverse)
12850 {
12851 dw_die_ref base_type_result;
12852 enum dwarf_type encoding;
12853 bool fpt_used = false;
12854 struct fixed_point_type_info fpt_info;
12855 tree type_bias = NULL_TREE;
12856
12857 /* If this is a subtype that should not be emitted as a subrange type,
12858 use the base type. See subrange_type_for_debug_p. */
12859 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12860 type = TREE_TYPE (type);
12861
12862 switch (TREE_CODE (type))
12863 {
12864 case INTEGER_TYPE:
12865 if ((dwarf_version >= 4 || !dwarf_strict)
12866 && TYPE_NAME (type)
12867 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12868 && DECL_IS_UNDECLARED_BUILTIN (TYPE_NAME (type))
12869 && DECL_NAME (TYPE_NAME (type)))
12870 {
12871 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12872 if (strcmp (name, "char16_t") == 0
12873 || strcmp (name, "char32_t") == 0)
12874 {
12875 encoding = DW_ATE_UTF;
12876 break;
12877 }
12878 }
12879 if ((dwarf_version >= 3 || !dwarf_strict)
12880 && lang_hooks.types.get_fixed_point_type_info)
12881 {
12882 memset (&fpt_info, 0, sizeof (fpt_info));
12883 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12884 {
12885 fpt_used = true;
12886 encoding = ((TYPE_UNSIGNED (type))
12887 ? DW_ATE_unsigned_fixed
12888 : DW_ATE_signed_fixed);
12889 break;
12890 }
12891 }
12892 if (TYPE_STRING_FLAG (type))
12893 {
12894 if (TYPE_UNSIGNED (type))
12895 encoding = DW_ATE_unsigned_char;
12896 else
12897 encoding = DW_ATE_signed_char;
12898 }
12899 else if (TYPE_UNSIGNED (type))
12900 encoding = DW_ATE_unsigned;
12901 else
12902 encoding = DW_ATE_signed;
12903
12904 if (!dwarf_strict
12905 && lang_hooks.types.get_type_bias)
12906 type_bias = lang_hooks.types.get_type_bias (type);
12907 break;
12908
12909 case REAL_TYPE:
12910 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12911 {
12912 if (dwarf_version >= 3 || !dwarf_strict)
12913 encoding = DW_ATE_decimal_float;
12914 else
12915 encoding = DW_ATE_lo_user;
12916 }
12917 else
12918 encoding = DW_ATE_float;
12919 break;
12920
12921 case FIXED_POINT_TYPE:
12922 if (!(dwarf_version >= 3 || !dwarf_strict))
12923 encoding = DW_ATE_lo_user;
12924 else if (TYPE_UNSIGNED (type))
12925 encoding = DW_ATE_unsigned_fixed;
12926 else
12927 encoding = DW_ATE_signed_fixed;
12928 break;
12929
12930 /* Dwarf2 doesn't know anything about complex ints, so use
12931 a user-defined type for them. */
12932 case COMPLEX_TYPE:
12933 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12934 encoding = DW_ATE_complex_float;
12935 else
12936 encoding = DW_ATE_lo_user;
12937 break;
12938
12939 case BOOLEAN_TYPE:
12940 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12941 encoding = DW_ATE_boolean;
12942 break;
12943
12944 default:
12945 /* No other TREE_CODEs are Dwarf fundamental types. */
12946 gcc_unreachable ();
12947 }
12948
12949 base_type_result = new_die_raw (DW_TAG_base_type);
12950
12951 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12952 int_size_in_bytes (type));
12953 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12954
12955 if (need_endianity_attribute_p (reverse))
12956 add_AT_unsigned (base_type_result, DW_AT_endianity,
12957 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12958
12959 add_alignment_attribute (base_type_result, type);
12960
12961 if (fpt_used)
12962 {
12963 switch (fpt_info.scale_factor_kind)
12964 {
12965 case fixed_point_scale_factor_binary:
12966 add_AT_int (base_type_result, DW_AT_binary_scale,
12967 fpt_info.scale_factor.binary);
12968 break;
12969
12970 case fixed_point_scale_factor_decimal:
12971 add_AT_int (base_type_result, DW_AT_decimal_scale,
12972 fpt_info.scale_factor.decimal);
12973 break;
12974
12975 case fixed_point_scale_factor_arbitrary:
12976 /* Arbitrary scale factors cannot be described in standard DWARF. */
12977 if (!dwarf_strict)
12978 {
12979 /* Describe the scale factor as a rational constant. */
12980 const dw_die_ref scale_factor
12981 = new_die (DW_TAG_constant, comp_unit_die (), type);
12982
12983 add_scalar_info (scale_factor, DW_AT_GNU_numerator,
12984 fpt_info.scale_factor.arbitrary.numerator,
12985 dw_scalar_form_constant, NULL);
12986 add_scalar_info (scale_factor, DW_AT_GNU_denominator,
12987 fpt_info.scale_factor.arbitrary.denominator,
12988 dw_scalar_form_constant, NULL);
12989
12990 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12991 }
12992 break;
12993
12994 default:
12995 gcc_unreachable ();
12996 }
12997 }
12998
12999 if (type_bias)
13000 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
13001 dw_scalar_form_constant
13002 | dw_scalar_form_exprloc
13003 | dw_scalar_form_reference,
13004 NULL);
13005
13006 return base_type_result;
13007 }
13008
13009 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
13010 named 'auto' in its type: return true for it, false otherwise. */
13011
13012 static inline bool
13013 is_cxx_auto (tree type)
13014 {
13015 if (is_cxx ())
13016 {
13017 tree name = TYPE_IDENTIFIER (type);
13018 if (name == get_identifier ("auto")
13019 || name == get_identifier ("decltype(auto)"))
13020 return true;
13021 }
13022 return false;
13023 }
13024
13025 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
13026 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
13027
13028 static inline int
13029 is_base_type (tree type)
13030 {
13031 switch (TREE_CODE (type))
13032 {
13033 case INTEGER_TYPE:
13034 case REAL_TYPE:
13035 case FIXED_POINT_TYPE:
13036 case COMPLEX_TYPE:
13037 case BOOLEAN_TYPE:
13038 return 1;
13039
13040 case VOID_TYPE:
13041 case OPAQUE_TYPE:
13042 case ARRAY_TYPE:
13043 case RECORD_TYPE:
13044 case UNION_TYPE:
13045 case QUAL_UNION_TYPE:
13046 case ENUMERAL_TYPE:
13047 case FUNCTION_TYPE:
13048 case METHOD_TYPE:
13049 case POINTER_TYPE:
13050 case REFERENCE_TYPE:
13051 case NULLPTR_TYPE:
13052 case OFFSET_TYPE:
13053 case LANG_TYPE:
13054 case VECTOR_TYPE:
13055 return 0;
13056
13057 default:
13058 if (is_cxx_auto (type))
13059 return 0;
13060 gcc_unreachable ();
13061 }
13062
13063 return 0;
13064 }
13065
13066 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
13067 node, return the size in bits for the type if it is a constant, or else
13068 return the alignment for the type if the type's size is not constant, or
13069 else return BITS_PER_WORD if the type actually turns out to be an
13070 ERROR_MARK node. */
13071
13072 static inline unsigned HOST_WIDE_INT
13073 simple_type_size_in_bits (const_tree type)
13074 {
13075 if (TREE_CODE (type) == ERROR_MARK)
13076 return BITS_PER_WORD;
13077 else if (TYPE_SIZE (type) == NULL_TREE)
13078 return 0;
13079 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13080 return tree_to_uhwi (TYPE_SIZE (type));
13081 else
13082 return TYPE_ALIGN (type);
13083 }
13084
13085 /* Similarly, but return an offset_int instead of UHWI. */
13086
13087 static inline offset_int
13088 offset_int_type_size_in_bits (const_tree type)
13089 {
13090 if (TREE_CODE (type) == ERROR_MARK)
13091 return BITS_PER_WORD;
13092 else if (TYPE_SIZE (type) == NULL_TREE)
13093 return 0;
13094 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13095 return wi::to_offset (TYPE_SIZE (type));
13096 else
13097 return TYPE_ALIGN (type);
13098 }
13099
13100 /* Given a pointer to a tree node for a subrange type, return a pointer
13101 to a DIE that describes the given type. */
13102
13103 static dw_die_ref
13104 subrange_type_die (tree type, tree low, tree high, tree bias,
13105 dw_die_ref context_die)
13106 {
13107 dw_die_ref subrange_die;
13108 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13109
13110 if (context_die == NULL)
13111 context_die = comp_unit_die ();
13112
13113 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13114
13115 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13116 {
13117 /* The size of the subrange type and its base type do not match,
13118 so we need to generate a size attribute for the subrange type. */
13119 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13120 }
13121
13122 add_alignment_attribute (subrange_die, type);
13123
13124 if (low)
13125 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13126 if (high)
13127 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13128 if (bias && !dwarf_strict)
13129 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13130 dw_scalar_form_constant
13131 | dw_scalar_form_exprloc
13132 | dw_scalar_form_reference,
13133 NULL);
13134
13135 return subrange_die;
13136 }
13137
13138 /* Returns the (const and/or volatile) cv_qualifiers associated with
13139 the decl node. This will normally be augmented with the
13140 cv_qualifiers of the underlying type in add_type_attribute. */
13141
13142 static int
13143 decl_quals (const_tree decl)
13144 {
13145 return ((TREE_READONLY (decl)
13146 /* The C++ front-end correctly marks reference-typed
13147 variables as readonly, but from a language (and debug
13148 info) standpoint they are not const-qualified. */
13149 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13150 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13151 | (TREE_THIS_VOLATILE (decl)
13152 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13153 }
13154
13155 /* Determine the TYPE whose qualifiers match the largest strict subset
13156 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13157 qualifiers outside QUAL_MASK. */
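/* Worked example (illustrative): if TYPE_QUALS is const|volatile and a
'const' variant of TYPE exists, that variant's single qualifier
outranks the unqualified variant, so TYPE_QUAL_CONST is returned and
the caller only has to wrap it in a DW_TAG_volatile_type DIE.  */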
13158
13159 static int
13160 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13161 {
13162 tree t;
13163 int best_rank = 0, best_qual = 0, max_rank;
13164
13165 type_quals &= qual_mask;
13166 max_rank = popcount_hwi (type_quals) - 1;
13167
13168 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13169 t = TYPE_NEXT_VARIANT (t))
13170 {
13171 int q = TYPE_QUALS (t) & qual_mask;
13172
13173 if ((q & type_quals) == q && q != type_quals
13174 && check_base_type (t, type))
13175 {
13176 int rank = popcount_hwi (q);
13177
13178 if (rank > best_rank)
13179 {
13180 best_rank = rank;
13181 best_qual = q;
13182 }
13183 }
13184 }
13185
13186 return best_qual;
13187 }
13188
13189 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13190 static const dwarf_qual_info_t dwarf_qual_info[] =
13191 {
13192 { TYPE_QUAL_CONST, DW_TAG_const_type },
13193 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13194 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13195 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13196 };
13197 static const unsigned int dwarf_qual_info_size
13198 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13199
13200 /* If DIE is a qualified DIE of some base DIE with the same parent,
13201 return the base DIE, otherwise return NULL. Set MASK to the
13202 qualifiers added compared to the returned DIE. */
13203
13204 static dw_die_ref
13205 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13206 {
13207 unsigned int i;
13208 for (i = 0; i < dwarf_qual_info_size; i++)
13209 if (die->die_tag == dwarf_qual_info[i].t)
13210 break;
13211 if (i == dwarf_qual_info_size)
13212 return NULL;
13213 if (vec_safe_length (die->die_attr) != 1)
13214 return NULL;
13215 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13216 if (type == NULL || type->die_parent != die->die_parent)
13217 return NULL;
13218 *mask |= dwarf_qual_info[i].q;
13219 if (depth)
13220 {
13221 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13222 if (ret)
13223 return ret;
13224 }
13225 return type;
13226 }
13227
13228 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13229 entry that chains the modifiers specified by CV_QUALS in front of the
13230 given type. REVERSE is true if the type is to be interpreted in the
13231 reverse storage order wrt the target order. */
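/* For example (illustrative): for a 'const volatile int' with no
pre-existing qualified variants, this typically produces a
DW_TAG_volatile_type DIE whose DW_AT_type refers to a
DW_TAG_const_type DIE, which in turn refers to the base DIE for
'int'.  */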
13232
13233 static dw_die_ref
13234 modified_type_die (tree type, int cv_quals, bool reverse,
13235 dw_die_ref context_die)
13236 {
13237 enum tree_code code = TREE_CODE (type);
13238 dw_die_ref mod_type_die;
13239 dw_die_ref sub_die = NULL;
13240 tree item_type = NULL;
13241 tree qualified_type;
13242 tree name, low, high;
13243 dw_die_ref mod_scope;
13244 /* Only these cv-qualifiers are currently handled. */
13245 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13246 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13247 ENCODE_QUAL_ADDR_SPACE(~0U));
13248 const bool reverse_base_type
13249 = need_endianity_attribute_p (reverse) && is_base_type (type);
13250
13251 if (code == ERROR_MARK)
13252 return NULL;
13253
13254 if (lang_hooks.types.get_debug_type)
13255 {
13256 tree debug_type = lang_hooks.types.get_debug_type (type);
13257
13258 if (debug_type != NULL_TREE && debug_type != type)
13259 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13260 }
13261
13262 cv_quals &= cv_qual_mask;
13263
13264 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13265 modifier tag (and not an attribute) that old consumers won't be
13266 able to handle. */
13267 if (dwarf_version < 3)
13268 cv_quals &= ~TYPE_QUAL_RESTRICT;
13269
13270 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13271 if (dwarf_version < 5)
13272 cv_quals &= ~TYPE_QUAL_ATOMIC;
13273
13274 /* See if we already have the appropriately qualified variant of
13275 this type. */
13276 qualified_type = get_qualified_type (type, cv_quals);
13277
13278 if (qualified_type == sizetype)
13279 {
13280 /* Try not to expose the internal sizetype type's name. */
13281 if (TYPE_NAME (qualified_type)
13282 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13283 {
13284 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13285
13286 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13287 && (TYPE_PRECISION (t)
13288 == TYPE_PRECISION (qualified_type))
13289 && (TYPE_UNSIGNED (t)
13290 == TYPE_UNSIGNED (qualified_type)));
13291 qualified_type = t;
13292 }
13293 else if (qualified_type == sizetype
13294 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13295 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13296 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13297 qualified_type = size_type_node;
13298 if (type == sizetype)
13299 type = qualified_type;
13300 }
13301
13302 /* If we do, then we can just use its DIE, if it exists. */
13303 if (qualified_type)
13304 {
13305 mod_type_die = lookup_type_die (qualified_type);
13306
13307 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13308 dealt with specially: the DIE with the attribute, if it exists, is
13309 placed immediately after the regular DIE for the same base type. */
13310 if (mod_type_die
13311 && (!reverse_base_type
13312 || ((mod_type_die = mod_type_die->die_sib) != NULL
13313 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13314 return mod_type_die;
13315 }
13316
13317 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13318
13319 /* Handle C typedef types. */
13320 if (name
13321 && TREE_CODE (name) == TYPE_DECL
13322 && DECL_ORIGINAL_TYPE (name)
13323 && !DECL_ARTIFICIAL (name))
13324 {
13325 tree dtype = TREE_TYPE (name);
13326
13327 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13328 if (qualified_type == dtype && !reverse_base_type)
13329 {
13330 tree origin = decl_ultimate_origin (name);
13331
13332 /* Typedef variants that have an abstract origin don't get their own
13333 type DIE (see gen_typedef_die), so fall back on the ultimate
13334 abstract origin instead. */
13335 if (origin != NULL && origin != name)
13336 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13337 context_die);
13338
13339 /* For a named type, use the typedef. */
13340 gen_type_die (qualified_type, context_die);
13341 return lookup_type_die (qualified_type);
13342 }
13343 else
13344 {
13345 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13346 dquals &= cv_qual_mask;
13347 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13348 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13349 /* cv-unqualified version of named type. Just use
13350 the unnamed type to which it refers. */
13351 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13352 reverse, context_die);
13353 /* Else cv-qualified version of named type; fall through. */
13354 }
13355 }
13356
13357 mod_scope = scope_die_for (type, context_die);
13358
13359 if (cv_quals)
13360 {
13361 int sub_quals = 0, first_quals = 0;
13362 unsigned i;
13363 dw_die_ref first = NULL, last = NULL;
13364
13365 /* Determine a lesser qualified type that most closely matches
13366 this one. Then generate DW_TAG_* entries for the remaining
13367 qualifiers. */
13368 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13369 cv_qual_mask);
13370 if (sub_quals && use_debug_types)
13371 {
13372 bool needed = false;
13373 /* If emitting type units, make sure the order of qualifiers
13374 is canonical. Thus, start from unqualified type if
13375 an earlier qualifier is missing in sub_quals, but some later
13376 one is present there. */
13377 for (i = 0; i < dwarf_qual_info_size; i++)
13378 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13379 needed = true;
13380 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13381 {
13382 sub_quals = 0;
13383 break;
13384 }
13385 }
13386 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13387 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13388 {
13389 /* As not all intermediate qualified DIEs have corresponding
13390 tree types, ensure that qualified DIEs in the same scope
13391 as their DW_AT_type are emitted after their DW_AT_type,
13392 only with other qualified DIEs for the same type possibly
13393 in between them. Determine the range of such qualified
13394 DIEs now (first being the base type, last being corresponding
13395 last qualified DIE for it). */
13396 unsigned int count = 0;
13397 first = qualified_die_p (mod_type_die, &first_quals,
13398 dwarf_qual_info_size);
13399 if (first == NULL)
13400 first = mod_type_die;
13401 gcc_assert ((first_quals & ~sub_quals) == 0);
13402 for (count = 0, last = first;
13403 count < (1U << dwarf_qual_info_size);
13404 count++, last = last->die_sib)
13405 {
13406 int quals = 0;
13407 if (last == mod_scope->die_child)
13408 break;
13409 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13410 != first)
13411 break;
13412 }
13413 }
13414
13415 for (i = 0; i < dwarf_qual_info_size; i++)
13416 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13417 {
13418 dw_die_ref d;
13419 if (first && first != last)
13420 {
13421 for (d = first->die_sib; ; d = d->die_sib)
13422 {
13423 int quals = 0;
13424 qualified_die_p (d, &quals, dwarf_qual_info_size);
13425 if (quals == (first_quals | dwarf_qual_info[i].q))
13426 break;
13427 if (d == last)
13428 {
13429 d = NULL;
13430 break;
13431 }
13432 }
13433 if (d)
13434 {
13435 mod_type_die = d;
13436 continue;
13437 }
13438 }
13439 if (first)
13440 {
13441 d = new_die_raw (dwarf_qual_info[i].t);
13442 add_child_die_after (mod_scope, d, last);
13443 last = d;
13444 }
13445 else
13446 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13447 if (mod_type_die)
13448 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13449 mod_type_die = d;
13450 first_quals |= dwarf_qual_info[i].q;
13451 }
13452 }
13453 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13454 {
13455 dwarf_tag tag = DW_TAG_pointer_type;
13456 if (code == REFERENCE_TYPE)
13457 {
13458 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13459 tag = DW_TAG_rvalue_reference_type;
13460 else
13461 tag = DW_TAG_reference_type;
13462 }
13463 mod_type_die = new_die (tag, mod_scope, type);
13464
13465 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13466 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13467 add_alignment_attribute (mod_type_die, type);
13468 item_type = TREE_TYPE (type);
13469
13470 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13471 if (!ADDR_SPACE_GENERIC_P (as))
13472 {
13473 int action = targetm.addr_space.debug (as);
13474 if (action >= 0)
13475 {
13476 /* Positive values indicate an address_class. */
13477 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13478 }
13479 else
13480 {
13481 /* Negative values indicate an (inverted) segment base reg. */
13482 dw_loc_descr_ref d
13483 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13484 add_AT_loc (mod_type_die, DW_AT_segment, d);
13485 }
13486 }
13487 }
13488 else if (code == INTEGER_TYPE
13489 && TREE_TYPE (type) != NULL_TREE
13490 && subrange_type_for_debug_p (type, &low, &high))
13491 {
13492 tree bias = NULL_TREE;
13493 if (lang_hooks.types.get_type_bias)
13494 bias = lang_hooks.types.get_type_bias (type);
13495 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13496 item_type = TREE_TYPE (type);
13497 }
13498 else if (is_base_type (type))
13499 {
13500 mod_type_die = base_type_die (type, reverse);
13501
13502 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13503 if (reverse_base_type)
13504 {
13505 dw_die_ref after_die
13506 = modified_type_die (type, cv_quals, false, context_die);
13507 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13508 }
13509 else
13510 add_child_die (comp_unit_die (), mod_type_die);
13511
13512 add_pubtype (type, mod_type_die);
13513 }
13514 else
13515 {
13516 gen_type_die (type, context_die);
13517
13518 /* We have to get the type_main_variant here (and pass that to the
13519 `lookup_type_die' routine) because the ..._TYPE node we have
13520 might simply be a *copy* of some original type node (where the
13521 copy was created to help us keep track of typedef names) and
13522 that copy might have a different TYPE_UID from the original
13523 ..._TYPE node. */
13524 if (TREE_CODE (type) == FUNCTION_TYPE
13525 || TREE_CODE (type) == METHOD_TYPE)
13526 {
13527 /* For function/method types, can't just use type_main_variant here,
13528 because that can have different ref-qualifiers for C++,
13529 but try to canonicalize. */
13530 tree main = TYPE_MAIN_VARIANT (type);
13531 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13532 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13533 && check_base_type (t, main)
13534 && check_lang_type (t, type))
13535 return lookup_type_die (t);
13536 return lookup_type_die (type);
13537 }
13538 else if (TREE_CODE (type) != VECTOR_TYPE
13539 && TREE_CODE (type) != ARRAY_TYPE)
13540 return lookup_type_die (type_main_variant (type));
13541 else
13542 /* Vectors have the debugging information in the type,
13543 not the main variant. */
13544 return lookup_type_die (type);
13545 }
13546
13547 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13548 don't output a DW_TAG_typedef, since there isn't one in the
13549 user's program; just attach a DW_AT_name to the type.
13550 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13551 if the base type already has the same name. */
13552 if (name
13553 && ((TREE_CODE (name) != TYPE_DECL
13554 && (qualified_type == TYPE_MAIN_VARIANT (type)
13555 || (cv_quals == TYPE_UNQUALIFIED)))
13556 || (TREE_CODE (name) == TYPE_DECL
13557 && TREE_TYPE (name) == qualified_type
13558 && DECL_NAME (name))))
13559 {
13560 if (TREE_CODE (name) == TYPE_DECL)
13561 /* Could just call add_name_and_src_coords_attributes here,
13562 but since this is a builtin type it doesn't have any
13563 useful source coordinates anyway. */
13564 name = DECL_NAME (name);
13565 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13566 }
13567 /* This probably indicates a bug. */
13568 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13569 {
13570 name = TYPE_IDENTIFIER (type);
13571 add_name_attribute (mod_type_die,
13572 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13573 }
13574
13575 if (qualified_type && !reverse_base_type)
13576 equate_type_number_to_die (qualified_type, mod_type_die);
13577
13578 if (item_type)
13579 /* We must do this after the equate_type_number_to_die call, in case
13580 this is a recursive type. This ensures that the modified_type_die
13581 recursion will terminate even if the type is recursive. Recursive
13582 types are possible in Ada. */
13583 sub_die = modified_type_die (item_type,
13584 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13585 reverse,
13586 context_die);
13587
13588 if (sub_die != NULL)
13589 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13590
13591 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13592 if (TYPE_ARTIFICIAL (type))
13593 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13594
13595 return mod_type_die;
13596 }
13597
13598 /* Generate DIEs for the generic parameters of T.
13599 T must be either a generic type or a generic function.
13600 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
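/* For example (illustrative): for the C++ template
'template<typename T, int N> struct S', this emits a
DW_TAG_template_type_param DIE for T and a
DW_TAG_template_value_param DIE for N as children of S's DIE.  */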
13601
13602 static void
13603 gen_generic_params_dies (tree t)
13604 {
13605 tree parms, args;
13606 int parms_num, i;
13607 dw_die_ref die = NULL;
13608 int non_default;
13609
13610 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13611 return;
13612
13613 if (TYPE_P (t))
13614 die = lookup_type_die (t);
13615 else if (DECL_P (t))
13616 die = lookup_decl_die (t);
13617
13618 gcc_assert (die);
13619
13620 parms = lang_hooks.get_innermost_generic_parms (t);
13621 if (!parms)
13622 /* T has no generic parameter. It means T is neither a generic type
13623 nor a generic function. End of story. */
13624 return;
13625
13626 parms_num = TREE_VEC_LENGTH (parms);
13627 args = lang_hooks.get_innermost_generic_args (t);
13628 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13629 non_default = int_cst_value (TREE_CHAIN (args));
13630 else
13631 non_default = TREE_VEC_LENGTH (args);
13632 for (i = 0; i < parms_num; i++)
13633 {
13634 tree parm, arg, arg_pack_elems;
13635 dw_die_ref parm_die;
13636
13637 parm = TREE_VEC_ELT (parms, i);
13638 arg = TREE_VEC_ELT (args, i);
13639 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13640 gcc_assert (parm && TREE_VALUE (parm) && arg);
13641
13642 if (parm && TREE_VALUE (parm) && arg)
13643 {
13644 /* If PARM represents a template parameter pack,
13645 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13646 by DW_TAG_template_*_parameter DIEs for the argument
13647 pack elements of ARG. Note that ARG would then be
13648 an argument pack. */
13649 if (arg_pack_elems)
13650 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13651 arg_pack_elems,
13652 die);
13653 else
13654 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13655 true /* emit name */, die);
13656 if (i >= non_default)
13657 add_AT_flag (parm_die, DW_AT_default_value, 1);
13658 }
13659 }
13660 }
13661
13662 /* Create and return a DIE for PARM, which should be
13663 the representation of a generic type parameter.
13664 For instance, in the C++ front end, PARM would be a template parameter.
13665 ARG is the argument to PARM.
13666 If EMIT_NAME_P is true, the DIE will have a DW_AT_name attribute set to
13667 the name of PARM.
13668 PARENT_DIE is the parent DIE to which the newly created DIE is added
13669 as a child node. */
13670
13671 static dw_die_ref
13672 generic_parameter_die (tree parm, tree arg,
13673 bool emit_name_p,
13674 dw_die_ref parent_die)
13675 {
13676 dw_die_ref tmpl_die = NULL;
13677 const char *name = NULL;
13678
13679 /* C++20 accepts class literals as template parameters, and var
13680 decls with initializers represent them. The VAR_DECLs would be
13681 rejected, but we can take the DECL_INITIAL constructor and
13682 attempt to expand it. */
13683 if (arg && VAR_P (arg))
13684 arg = DECL_INITIAL (arg);
13685
13686 if (!parm || !DECL_NAME (parm) || !arg)
13687 return NULL;
13688
13689 /* We support non-type generic parameters and arguments,
13690 type generic parameters and arguments, as well as
13691 generic generic parameters (a.k.a. template template parameters in C++)
13692 and arguments. */
13693 if (TREE_CODE (parm) == PARM_DECL)
13694 /* PARM is a nontype generic parameter */
13695 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13696 else if (TREE_CODE (parm) == TYPE_DECL)
13697 /* PARM is a type generic parameter. */
13698 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13699 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13700 /* PARM is a generic generic parameter.
13701 Its DIE is a GNU extension. It shall have a
13702 DW_AT_name attribute to represent the name of the template template
13703 parameter, and a DW_AT_GNU_template_name attribute to represent the
13704 name of the template template argument. */
13705 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13706 parent_die, parm);
13707 else
13708 gcc_unreachable ();
13709
13710 if (tmpl_die)
13711 {
13712 tree tmpl_type;
13713
13714 /* If PARM is a generic parameter pack, it means we are
13715 emitting debug info for a template argument pack element.
13716 In other terms, ARG is a template argument pack element.
13717 In that case, we don't emit any DW_AT_name attribute for
13718 the die. */
13719 if (emit_name_p)
13720 {
13721 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13722 gcc_assert (name);
13723 add_AT_string (tmpl_die, DW_AT_name, name);
13724 }
13725
13726 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13727 {
13728 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13729 TMPL_DIE should have a child DW_AT_type attribute that is set
13730 to the type of the argument to PARM, which is ARG.
13731 If PARM is a type generic parameter, TMPL_DIE should have a
13732 child DW_AT_type that is set to ARG. */
13733 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13734 add_type_attribute (tmpl_die, tmpl_type,
13735 (TREE_THIS_VOLATILE (tmpl_type)
13736 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13737 false, parent_die);
13738 }
13739 else
13740 {
13741 /* So TMPL_DIE is a DIE representing a generic generic parameter,
13742 a.k.a. a template template parameter in C++, and ARG is a
13743 template. */
13744
13745 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13746 to the name of the argument. */
13747 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13748 if (name)
13749 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13750 }
13751
13752 if (TREE_CODE (parm) == PARM_DECL)
13753 /* So PARM is a non-type generic parameter.
13754 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13755 attribute of TMPL_DIE whose value represents the value
13756 of ARG.
13757 We must be careful here:
13758 the value of ARG might reference some function decls.
13759 We might currently be emitting debug info for a generic
13760 type and, since types are emitted before function decls, we
13761 don't know whether the function decls referenced by ARG will
13762 actually be emitted after cgraph computations.
13763 So we must defer the generation of the DW_AT_const_value
13764 until after cgraph is ready. */
13765 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13766 }
13767
13768 return tmpl_die;
13769 }
13770
13771 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13772 PARM_PACK, which must be a template parameter pack. The returned DIE
13773 will be a child DIE of PARENT_DIE. */
13774
13775 static dw_die_ref
13776 template_parameter_pack_die (tree parm_pack,
13777 tree parm_pack_args,
13778 dw_die_ref parent_die)
13779 {
13780 dw_die_ref die;
13781 int j;
13782
13783 gcc_assert (parent_die && parm_pack);
13784
13785 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13786 add_name_and_src_coords_attributes (die, parm_pack);
13787 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13788 generic_parameter_die (parm_pack,
13789 TREE_VEC_ELT (parm_pack_args, j),
13790 false /* Don't emit DW_AT_name */,
13791 die);
13792 return die;
13793 }
13794
13795 /* Return the DBX register number described by a given RTL node. */
13796
13797 static unsigned int
13798 dbx_reg_number (const_rtx rtl)
13799 {
13800 unsigned regno = REGNO (rtl);
13801
13802 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13803
13804 #ifdef LEAF_REG_REMAP
13805 if (crtl->uses_only_leaf_regs)
13806 {
13807 int leaf_reg = LEAF_REG_REMAP (regno);
13808 if (leaf_reg != -1)
13809 regno = (unsigned) leaf_reg;
13810 }
13811 #endif
13812
13813 regno = DBX_REGISTER_NUMBER (regno);
13814 gcc_assert (regno != INVALID_REGNUM);
13815 return regno;
13816 }
13817
13818 /* Optionally add a DW_OP_piece term to a location description expression.
13819 DW_OP_piece is only added if the location description expression
13820 doesn't already end with DW_OP_piece. */
13821
13822 static void
13823 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13824 {
13825 dw_loc_descr_ref loc;
13826
13827 if (*list_head != NULL)
13828 {
13829 /* Find the end of the chain. */
13830 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13831 ;
13832
13833 if (loc->dw_loc_opc != DW_OP_piece)
13834 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13835 }
13836 }
13837
13838 /* Return a location descriptor that designates a machine register or
13839 zero if there is none. */
13840
13841 static dw_loc_descr_ref
13842 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13843 {
13844 rtx regs;
13845
13846 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13847 return 0;
13848
13849 /* We only use "frame base" when we're sure we're talking about the
13850 post-prologue local stack frame. We do this by *not* running
13851 register elimination until this point, and recognizing the special
13852 argument pointer and soft frame pointer rtx's.
13853 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13854 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13855 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13856 {
13857 dw_loc_descr_ref result = NULL;
13858
13859 if (dwarf_version >= 4 || !dwarf_strict)
13860 {
13861 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13862 initialized);
13863 if (result)
13864 add_loc_descr (&result,
13865 new_loc_descr (DW_OP_stack_value, 0, 0));
13866 }
13867 return result;
13868 }
13869
13870 regs = targetm.dwarf_register_span (rtl);
13871
13872 if (REG_NREGS (rtl) > 1 || regs)
13873 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13874 else
13875 {
13876 unsigned int dbx_regnum = dbx_reg_number (rtl);
13877 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13878 return 0;
13879 return one_reg_loc_descriptor (dbx_regnum, initialized);
13880 }
13881 }
13882
13883 /* Return a location descriptor that designates a machine register for
13884 a given hard register number. */
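/* For example (illustrative): register number 3 is encoded as the
single-byte DW_OP_reg3, while register number 40 needs
DW_OP_regx 40 (the opcode plus a uleb128 operand, 2 bytes).  */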
13885
13886 static dw_loc_descr_ref
13887 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13888 {
13889 dw_loc_descr_ref reg_loc_descr;
13890
13891 if (regno <= 31)
13892 reg_loc_descr
13893 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13894 else
13895 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13896
13897 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13898 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13899
13900 return reg_loc_descr;
13901 }
13902
13903 /* Given an RTL of a register, return a location descriptor that
13904 designates a value that spans more than one register. */
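/* For example (illustrative): a 64-bit value held in two consecutive
32-bit registers is described as
DW_OP_regN DW_OP_piece 4 DW_OP_regM DW_OP_piece 4.  */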
13905
13906 static dw_loc_descr_ref
13907 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13908 enum var_init_status initialized)
13909 {
13910 int size, i;
13911 dw_loc_descr_ref loc_result = NULL;
13912
13913 /* Simple, contiguous registers. */
13914 if (regs == NULL_RTX)
13915 {
13916 unsigned reg = REGNO (rtl);
13917 int nregs;
13918
13919 #ifdef LEAF_REG_REMAP
13920 if (crtl->uses_only_leaf_regs)
13921 {
13922 int leaf_reg = LEAF_REG_REMAP (reg);
13923 if (leaf_reg != -1)
13924 reg = (unsigned) leaf_reg;
13925 }
13926 #endif
13927
13928 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13929 nregs = REG_NREGS (rtl);
13930
13931 /* At present we only track constant-sized pieces. */
13932 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13933 return NULL;
13934 size /= nregs;
13935
13936 loc_result = NULL;
13937 while (nregs--)
13938 {
13939 dw_loc_descr_ref t;
13940
13941 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13942 VAR_INIT_STATUS_INITIALIZED);
13943 add_loc_descr (&loc_result, t);
13944 add_loc_descr_op_piece (&loc_result, size);
13945 ++reg;
13946 }
13947 return loc_result;
13948 }
13949
13950 /* Now onto stupid register sets in non-contiguous locations. */
13951
13952 gcc_assert (GET_CODE (regs) == PARALLEL);
13953
13954 /* At present we only track constant-sized pieces. */
13955 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13956 return NULL;
13957 loc_result = NULL;
13958
13959 for (i = 0; i < XVECLEN (regs, 0); ++i)
13960 {
13961 dw_loc_descr_ref t;
13962
13963 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13964 VAR_INIT_STATUS_INITIALIZED);
13965 add_loc_descr (&loc_result, t);
13966 add_loc_descr_op_piece (&loc_result, size);
13967 }
13968
13969 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13970 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13971 return loc_result;
13972 }
13973
13974 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13975
13976 /* Return a location descriptor that designates a constant i,
13977 as a compound operation from constant (i >> shift), constant shift
13978 and DW_OP_shl. */
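/* Worked example (illustrative): i = 0x12000000 with shift = 24 yields
DW_OP_lit18 DW_OP_lit24 DW_OP_shl (3 bytes, since 0x12000000 >> 24 == 0x12),
versus the 5 bytes of DW_OP_const4u 0x12000000.  */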
13979
13980 static dw_loc_descr_ref
13981 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13982 {
13983 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13984 add_loc_descr (&ret, int_loc_descriptor (shift));
13985 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13986 return ret;
13987 }
13988
13989 /* Return a location descriptor that designates constant POLY_I. */
13990
13991 static dw_loc_descr_ref
13992 int_loc_descriptor (poly_int64 poly_i)
13993 {
13994 enum dwarf_location_atom op;
13995
13996 HOST_WIDE_INT i;
13997 if (!poly_i.is_constant (&i))
13998 {
13999 /* Create location descriptions for the non-constant part and
14000 add any constant offset at the end. */
14001 dw_loc_descr_ref ret = NULL;
14002 HOST_WIDE_INT constant = poly_i.coeffs[0];
14003 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
14004 {
14005 HOST_WIDE_INT coeff = poly_i.coeffs[j];
14006 if (coeff != 0)
14007 {
14008 dw_loc_descr_ref start = ret;
14009 unsigned int factor;
14010 int bias;
14011 unsigned int regno = targetm.dwarf_poly_indeterminate_value
14012 (j, &factor, &bias);
14013
14014 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
14015 add COEFF * (REGNO / FACTOR) now and subtract
14016 COEFF * BIAS from the final constant part. */
14017 constant -= coeff * bias;
14018 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
14019 if (coeff % factor == 0)
14020 coeff /= factor;
14021 else
14022 {
14023 int amount = exact_log2 (factor);
14024 gcc_assert (amount >= 0);
14025 add_loc_descr (&ret, int_loc_descriptor (amount));
14026 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
14027 }
14028 if (coeff != 1)
14029 {
14030 add_loc_descr (&ret, int_loc_descriptor (coeff));
14031 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14032 }
14033 if (start)
14034 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
14035 }
14036 }
14037 loc_descr_plus_const (&ret, constant);
14038 return ret;
14039 }
14040
14041 /* Pick the smallest representation of a constant, rather than just
14042 defaulting to the LEB encoding. */
14043 if (i >= 0)
14044 {
14045 int clz = clz_hwi (i);
14046 int ctz = ctz_hwi (i);
14047 if (i <= 31)
14048 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
14049 else if (i <= 0xff)
14050 op = DW_OP_const1u;
14051 else if (i <= 0xffff)
14052 op = DW_OP_const2u;
14053 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14054 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14055 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
14056 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
14057 while DW_OP_const4u is 5 bytes. */
14058 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
14059 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14060 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14061 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
14062 while DW_OP_const4u is 5 bytes. */
14063 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14064
14065 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14066 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14067 <= 4)
14068 {
14069 /* As i >= 2**31, the double cast above will yield a negative number.
14070 Since wrapping is defined in DWARF expressions we can output big
14071 positive integers as small negative ones, regardless of the size
14072 of host wide ints.
14073
14074 Here, since the evaluator will handle 32-bit values and since i >=
14075 2**31, we know it's going to be interpreted as a negative literal:
14076 store it this way if we can do better than 5 bytes this way. */
14077 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14078 }
14079 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14080 op = DW_OP_const4u;
14081
14082 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14083 least 6 bytes: see if we can do better before falling back to it. */
14084 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14085 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14086 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14087 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14088 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14089 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14090 >= HOST_BITS_PER_WIDE_INT)
14091 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14092 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14093 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14094 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14095 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14096 && size_of_uleb128 (i) > 6)
14097 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14098 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14099 else
14100 op = DW_OP_constu;
14101 }
14102 else
14103 {
14104 if (i >= -0x80)
14105 op = DW_OP_const1s;
14106 else if (i >= -0x8000)
14107 op = DW_OP_const2s;
14108 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14109 {
14110 if (size_of_int_loc_descriptor (i) < 5)
14111 {
14112 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14113 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14114 return ret;
14115 }
14116 op = DW_OP_const4s;
14117 }
14118 else
14119 {
14120 if (size_of_int_loc_descriptor (i)
14121 < (unsigned long) 1 + size_of_sleb128 (i))
14122 {
14123 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14124 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14125 return ret;
14126 }
14127 op = DW_OP_consts;
14128 }
14129 }
14130
14131 return new_loc_descr (op, i, 0);
14132 }
14133
14134 /* Likewise, for unsigned constants. */
14135
14136 static dw_loc_descr_ref
14137 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14138 {
14139 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14140 const unsigned HOST_WIDE_INT max_uint
14141 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14142
14143 /* If possible, use the clever signed constants handling. */
14144 if (i <= max_int)
14145 return int_loc_descriptor ((HOST_WIDE_INT) i);
14146
14147 /* Here, we are left with positive numbers that cannot be represented as
14148 HOST_WIDE_INT, i.e.:
14149 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14150
14151 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
14152 bytes, whereas it may be better to output a negative integer: thanks to
14153 integer wrapping, we know that:
14154 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14155 = x - 2 * (max (HOST_WIDE_INT) + 1)
14156 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14157 small negative integers. Let's try that in cases where it will clearly
14158 improve the encoding: there is no gain in turning DW_OP_const4u into
14159 DW_OP_const4s. */
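/* For instance, assuming DWARF2_ADDR_SIZE == 8 and a 64-bit HOST_WIDE_INT,
   i == 0xffffffffffffff00 yields second_shift == -0x100 below, which
   int_loc_descriptor encodes as DW_OP_const2s -256 (3 bytes) instead of the
   9 bytes a DW_OP_const8u would take.  */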
14160 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14161 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14162 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14163 {
14164 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14165
14166 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14167 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14168 const HOST_WIDE_INT second_shift
14169 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14170
14171 /* So we finally have:
14172 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14173 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14174 return int_loc_descriptor (second_shift);
14175 }
14176
14177 /* Last chance: fallback to a simple constant operation. */
14178 return new_loc_descr
14179 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14180 ? DW_OP_const4u
14181 : DW_OP_const8u,
14182 i, 0);
14183 }
14184
14185 /* Generate and return a location description that computes the unsigned
14186 comparison of the two stack top entries (a OP b where b is the top-most
14187 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14188 LE_EXPR, GT_EXPR or GE_EXPR. */
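/* Schematically, the sequence built below is:
     DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
     <signed OP> DW_OP_skip <L2>
     L1: <flipped OP>
     L2: DW_OP_nop  */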
14189
14190 static dw_loc_descr_ref
14191 uint_comparison_loc_list (enum tree_code kind)
14192 {
14193 enum dwarf_location_atom op, flip_op;
14194 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14195
14196 switch (kind)
14197 {
14198 case LT_EXPR:
14199 op = DW_OP_lt;
14200 break;
14201 case LE_EXPR:
14202 op = DW_OP_le;
14203 break;
14204 case GT_EXPR:
14205 op = DW_OP_gt;
14206 break;
14207 case GE_EXPR:
14208 op = DW_OP_ge;
14209 break;
14210 default:
14211 gcc_unreachable ();
14212 }
14213
14214 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14215 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14216
14217 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14218 possible to perform unsigned comparisons: we just have to distinguish
14219 two cases:
14220
14221 1. when a and b have the same sign (as signed integers); then we should
14222 return: a OP(signed) b;
14223
14224 2. when a is a negative signed integer while b is a positive one, then a
14225 is a greater unsigned integer than b; likewise when a and b's roles
14226 are flipped.
14227
14228 So first, compare the sign of the two operands. */
14229 ret = new_loc_descr (DW_OP_over, 0, 0);
14230 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14231 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14232 /* If they have different signs (i.e. they have different sign bits), then
14233 the stack top value now has the sign bit set and is thus smaller than
14234 zero (as a signed integer). */
14235 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14236 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14237 add_loc_descr (&ret, bra_node);
14238
14239 /* We are in case 1. At this point, we know both operands have the same
14240 sign, so it's safe to use the built-in signed comparison. */
14241 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14242 add_loc_descr (&ret, jmp_node);
14243
14244 /* We are in case 2. Here, we know both operands do not have the same sign,
14245 so we have to flip the signed comparison. */
14246 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14247 tmp = new_loc_descr (flip_op, 0, 0);
14248 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14249 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14250 add_loc_descr (&ret, tmp);
14251
14252 /* This dummy operation is necessary to make the two branches join. */
14253 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14254 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14255 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14256 add_loc_descr (&ret, tmp);
14257
14258 return ret;
14259 }
14260
14261 /* Likewise, but takes the location description lists (might be destructive on
14262 them). Return NULL if either is NULL or if concatenation fails. */
14263
14264 static dw_loc_list_ref
14265 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14266 enum tree_code kind)
14267 {
14268 if (left == NULL || right == NULL)
14269 return NULL;
14270
14271 add_loc_list (&left, right);
14272 if (left == NULL)
14273 return NULL;
14274
14275 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14276 return left;
14277 }
14278
14279 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14280 without actually allocating it. */
14281
14282 static unsigned long
14283 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14284 {
14285 return size_of_int_loc_descriptor (i >> shift)
14286 + size_of_int_loc_descriptor (shift)
14287 + 1;
14288 }
14289
14290 /* Return size_of_locs (int_loc_descriptor (i)) without
14291 actually allocating it. */
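/* Note that the case analysis below is meant to mirror int_loc_descriptor
   above; the two have to be kept in sync for the size estimates used when
   choosing between encodings to stay accurate.  */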
14292
14293 static unsigned long
14294 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14295 {
14296 unsigned long s;
14297
14298 if (i >= 0)
14299 {
14300 int clz, ctz;
14301 if (i <= 31)
14302 return 1;
14303 else if (i <= 0xff)
14304 return 2;
14305 else if (i <= 0xffff)
14306 return 3;
14307 clz = clz_hwi (i);
14308 ctz = ctz_hwi (i);
14309 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14310 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14311 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14312 - clz - 5);
14313 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14314 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14315 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14316 - clz - 8);
14317 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14318 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14319 <= 4)
14320 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14321 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14322 return 5;
14323 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14324 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14325 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14326 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14327 - clz - 8);
14328 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14329 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14330 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14331 - clz - 16);
14332 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14333 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14334 && s > 6)
14335 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14336 - clz - 32);
14337 else
14338 return 1 + s;
14339 }
14340 else
14341 {
14342 if (i >= -0x80)
14343 return 2;
14344 else if (i >= -0x8000)
14345 return 3;
14346 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14347 {
14348 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14349 {
14350 s = size_of_int_loc_descriptor (-i) + 1;
14351 if (s < 5)
14352 return s;
14353 }
14354 return 5;
14355 }
14356 else
14357 {
14358 unsigned long r = 1 + size_of_sleb128 (i);
14359 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14360 {
14361 s = size_of_int_loc_descriptor (-i) + 1;
14362 if (s < r)
14363 return s;
14364 }
14365 return r;
14366 }
14367 }
14368 }
14369
14370 /* Return a loc description representing the "address" of an integer value.
14371 This can appear only as a toplevel expression. */
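/* For example, with SIZE == 4 and I == 7 (and DWARF2_ADDR_SIZE >= 4),
   litsize is 1, so the function prefers DW_OP_lit7 DW_OP_stack_value
   (2 bytes) over DW_OP_implicit_value 4 <7> (6 bytes).  */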
14372
14373 static dw_loc_descr_ref
14374 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14375 {
14376 int litsize;
14377 dw_loc_descr_ref loc_result = NULL;
14378
14379 if (!(dwarf_version >= 4 || !dwarf_strict))
14380 return NULL;
14381
14382 litsize = size_of_int_loc_descriptor (i);
14383 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14384 is more compact. For DW_OP_stack_value we need:
14385 litsize + 1 (DW_OP_stack_value)
14386 and for DW_OP_implicit_value:
14387 1 (DW_OP_implicit_value) + 1 (length) + size. */
14388 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14389 {
14390 loc_result = int_loc_descriptor (i);
14391 add_loc_descr (&loc_result,
14392 new_loc_descr (DW_OP_stack_value, 0, 0));
14393 return loc_result;
14394 }
14395
14396 loc_result = new_loc_descr (DW_OP_implicit_value,
14397 size, 0);
14398 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14399 loc_result->dw_loc_oprnd2.v.val_int = i;
14400 return loc_result;
14401 }
14402
14403 /* Return a location descriptor that designates a base+offset location. */
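/* Roughly: accesses relative to the (eliminated) frame or argument pointer
   become DW_OP_fbreg <offset> against DW_AT_frame_base, while other hard
   registers generally end up as a base register operation
   (DW_OP_breg<n> or DW_OP_bregx) via new_reg_loc_descr.  */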
14404
14405 static dw_loc_descr_ref
14406 based_loc_descr (rtx reg, poly_int64 offset,
14407 enum var_init_status initialized)
14408 {
14409 unsigned int regno;
14410 dw_loc_descr_ref result;
14411 dw_fde_ref fde = cfun->fde;
14412
14413 /* We only use "frame base" when we're sure we're talking about the
14414 post-prologue local stack frame. We do this by *not* running
14415 register elimination until this point, and recognizing the special
14416 argument pointer and soft frame pointer rtx's. */
14417 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14418 {
14419 rtx elim = (ira_use_lra_p
14420 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14421 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14422
14423 if (elim != reg)
14424 {
14425 /* Allow hard frame pointer here even if frame pointer
14426 isn't used since hard frame pointer is encoded with
14427 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14428 not hard frame pointer directly. */
14429 elim = strip_offset_and_add (elim, &offset);
14430 gcc_assert (elim == hard_frame_pointer_rtx
14431 || elim == stack_pointer_rtx);
14432
14433 /* If drap register is used to align stack, use frame
14434 pointer + offset to access stack variables. If stack
14435 is aligned without drap, use stack pointer + offset to
14436 access stack variables. */
14437 if (crtl->stack_realign_tried
14438 && reg == frame_pointer_rtx)
14439 {
14440 int base_reg
14441 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14442 ? HARD_FRAME_POINTER_REGNUM
14443 : REGNO (elim));
14444 return new_reg_loc_descr (base_reg, offset);
14445 }
14446
14447 gcc_assert (frame_pointer_fb_offset_valid);
14448 offset += frame_pointer_fb_offset;
14449 HOST_WIDE_INT const_offset;
14450 if (offset.is_constant (&const_offset))
14451 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14452 else
14453 {
14454 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14455 loc_descr_plus_const (&ret, offset);
14456 return ret;
14457 }
14458 }
14459 }
14460
14461 regno = REGNO (reg);
14462 #ifdef LEAF_REG_REMAP
14463 if (crtl->uses_only_leaf_regs)
14464 {
14465 int leaf_reg = LEAF_REG_REMAP (regno);
14466 if (leaf_reg != -1)
14467 regno = (unsigned) leaf_reg;
14468 }
14469 #endif
14470 regno = DWARF_FRAME_REGNUM (regno);
14471
14472 HOST_WIDE_INT const_offset;
14473 if (!optimize && fde
14474 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14475 && offset.is_constant (&const_offset))
14476 {
14477 /* Use cfa+offset to represent the location of arguments passed
14478 on the stack when drap is used to align stack.
14479 Only do this when not optimizing; for optimized code var-tracking
14480 is supposed to track where the arguments live, and the register
14481 used as vdrap or drap in some spot might be used for something
14482 else in another part of the routine. */
14483 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14484 }
14485
14486 result = new_reg_loc_descr (regno, offset);
14487
14488 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14489 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14490
14491 return result;
14492 }
14493
14494 /* Return true if this RTL expression describes a base+offset calculation. */
14495
14496 static inline int
14497 is_based_loc (const_rtx rtl)
14498 {
14499 return (GET_CODE (rtl) == PLUS
14500 && ((REG_P (XEXP (rtl, 0))
14501 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14502 && CONST_INT_P (XEXP (rtl, 1)))));
14503 }
14504
14505 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14506 failed. */
14507
14508 static dw_loc_descr_ref
14509 tls_mem_loc_descriptor (rtx mem)
14510 {
14511 tree base;
14512 dw_loc_descr_ref loc_result;
14513
14514 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14515 return NULL;
14516
14517 base = get_base_address (MEM_EXPR (mem));
14518 if (base == NULL
14519 || !VAR_P (base)
14520 || !DECL_THREAD_LOCAL_P (base))
14521 return NULL;
14522
14523 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14524 if (loc_result == NULL)
14525 return NULL;
14526
14527 if (maybe_ne (MEM_OFFSET (mem), 0))
14528 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14529
14530 return loc_result;
14531 }
14532
14533 /* Output debug info about the reason why we failed to expand an expression
14534 as a dwarf expression. */
14535
14536 static void
14537 expansion_failed (tree expr, rtx rtl, char const *reason)
14538 {
14539 if (dump_file && (dump_flags & TDF_DETAILS))
14540 {
14541 fprintf (dump_file, "Failed to expand as dwarf: ");
14542 if (expr)
14543 print_generic_expr (dump_file, expr, dump_flags);
14544 if (rtl)
14545 {
14546 fprintf (dump_file, "\n");
14547 print_rtl (dump_file, rtl);
14548 }
14549 fprintf (dump_file, "\nReason: %s\n", reason);
14550 }
14551 }
14552
14553 /* Helper function for const_ok_for_output. */
14554
14555 static bool
14556 const_ok_for_output_1 (rtx rtl)
14557 {
14558 if (targetm.const_not_ok_for_debug_p (rtl))
14559 {
14560 if (GET_CODE (rtl) != UNSPEC)
14561 {
14562 expansion_failed (NULL_TREE, rtl,
14563 "Expression rejected for debug by the backend.\n");
14564 return false;
14565 }
14566
14567 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14568 the target hook doesn't explicitly allow it in debug info, assume
14569 we can't express it in the debug info. */
14570 /* Don't complain about TLS UNSPECs, those are just too hard to
14571 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14572 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14573 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14574 if (flag_checking
14575 && (XVECLEN (rtl, 0) == 0
14576 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14577 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14578 inform (current_function_decl
14579 ? DECL_SOURCE_LOCATION (current_function_decl)
14580 : UNKNOWN_LOCATION,
14581 #if NUM_UNSPEC_VALUES > 0
14582 "non-delegitimized UNSPEC %s (%d) found in variable location",
14583 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14584 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14585 #else
14586 "non-delegitimized UNSPEC %d found in variable location",
14587 #endif
14588 XINT (rtl, 1));
14589 expansion_failed (NULL_TREE, rtl,
14590 "UNSPEC hasn't been delegitimized.\n");
14591 return false;
14592 }
14593
14594 if (CONST_POLY_INT_P (rtl))
14595 return false;
14596
14597 /* FIXME: Refer to PR60655. It is possible for simplification
14598 of rtl expressions in var tracking to produce such expressions.
14599 We should really identify / validate expressions
14600 enclosed in CONST that can be handled by assemblers on various
14601 targets and only handle legitimate cases here. */
14602 switch (GET_CODE (rtl))
14603 {
14604 case SYMBOL_REF:
14605 break;
14606 case NOT:
14607 case NEG:
14608 return false;
14609 case PLUS:
14610 {
14611 /* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
14612 operands. */
14613 subrtx_var_iterator::array_type array;
14614 bool first = false;
14615 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14616 if (SYMBOL_REF_P (*iter)
14617 || LABEL_P (*iter)
14618 || GET_CODE (*iter) == UNSPEC)
14619 {
14620 first = true;
14621 break;
14622 }
14623 if (!first)
14624 return true;
14625 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14626 if (SYMBOL_REF_P (*iter)
14627 || LABEL_P (*iter)
14628 || GET_CODE (*iter) == UNSPEC)
14629 return false;
14630 return true;
14631 }
14632 case MINUS:
14633 {
14634 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14635 appear in the second operand of MINUS. */
14636 subrtx_var_iterator::array_type array;
14637 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14638 if (SYMBOL_REF_P (*iter)
14639 || LABEL_P (*iter)
14640 || GET_CODE (*iter) == UNSPEC)
14641 return false;
14642 return true;
14643 }
14644 default:
14645 return true;
14646 }
14647
14648 if (CONSTANT_POOL_ADDRESS_P (rtl))
14649 {
14650 bool marked;
14651 get_pool_constant_mark (rtl, &marked);
14652 /* If all references to this pool constant were optimized away,
14653 it was not output and thus we can't represent it. */
14654 if (!marked)
14655 {
14656 expansion_failed (NULL_TREE, rtl,
14657 "Constant was removed from constant pool.\n");
14658 return false;
14659 }
14660 }
14661
14662 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14663 return false;
14664
14665 /* Avoid references to external symbols in debug info, on several targets
14666 the linker might even refuse to link when linking a shared library,
14667 and in many other cases the relocations for .debug_info/.debug_loc are
14668 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14669 to be defined within the same shared library or executable are fine. */
14670 if (SYMBOL_REF_EXTERNAL_P (rtl))
14671 {
14672 tree decl = SYMBOL_REF_DECL (rtl);
14673
14674 if (decl == NULL || !targetm.binds_local_p (decl))
14675 {
14676 expansion_failed (NULL_TREE, rtl,
14677 "Symbol not defined in current TU.\n");
14678 return false;
14679 }
14680 }
14681
14682 return true;
14683 }
14684
14685 /* Return true if constant RTL can be emitted in DW_OP_addr or
14686 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14687 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14688
14689 static bool
14690 const_ok_for_output (rtx rtl)
14691 {
14692 if (GET_CODE (rtl) == SYMBOL_REF)
14693 return const_ok_for_output_1 (rtl);
14694
14695 if (GET_CODE (rtl) == CONST)
14696 {
14697 subrtx_var_iterator::array_type array;
14698 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14699 if (!const_ok_for_output_1 (*iter))
14700 return false;
14701 return true;
14702 }
14703
14704 return true;
14705 }
14706
14707 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14708 if possible, NULL otherwise. */
14709
14710 static dw_die_ref
14711 base_type_for_mode (machine_mode mode, bool unsignedp)
14712 {
14713 dw_die_ref type_die;
14714 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14715
14716 if (type == NULL)
14717 return NULL;
14718 switch (TREE_CODE (type))
14719 {
14720 case INTEGER_TYPE:
14721 case REAL_TYPE:
14722 break;
14723 default:
14724 return NULL;
14725 }
14726 type_die = lookup_type_die (type);
14727 if (!type_die)
14728 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14729 comp_unit_die ());
14730 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14731 return NULL;
14732 return type_die;
14733 }
14734
14735 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14736 type matching MODE, or, if MODE is narrower than or as wide as
14737 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14738 possible. */
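/* That is: values no wider than DWARF2_ADDR_SIZE just get a DW_OP_convert
   with no base type operand, converting back to the generic (untyped) stack
   value, while wider values are converted to the unsigned base type DIE for
   MODE.  */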
14739
14740 static dw_loc_descr_ref
14741 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14742 {
14743 machine_mode outer_mode = mode;
14744 dw_die_ref type_die;
14745 dw_loc_descr_ref cvt;
14746
14747 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14748 {
14749 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14750 return op;
14751 }
14752 type_die = base_type_for_mode (outer_mode, 1);
14753 if (type_die == NULL)
14754 return NULL;
14755 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14756 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14757 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14758 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14759 add_loc_descr (&op, cvt);
14760 return op;
14761 }
14762
14763 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14764
14765 static dw_loc_descr_ref
14766 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14767 dw_loc_descr_ref op1)
14768 {
14769 dw_loc_descr_ref ret = op0;
14770 add_loc_descr (&ret, op1);
14771 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14772 if (STORE_FLAG_VALUE != 1)
14773 {
14774 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14775 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14776 }
14777 return ret;
14778 }
14779
14780 /* Subroutine of scompare_loc_descriptor for the case in which we're
14781 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14782 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
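/* Both operands are simply DW_OP_convert-ed to the signed base type for
   OP_MODE, so the comparison is carried out on typed stack entries wider
   than the generic type (this needs DWARF 5 typed operations or the
   corresponding GNU extensions).  */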
14783
14784 static dw_loc_descr_ref
14785 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14786 scalar_int_mode op_mode,
14787 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14788 {
14789 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14790 dw_loc_descr_ref cvt;
14791
14792 if (type_die == NULL)
14793 return NULL;
14794 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14795 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14796 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14797 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14798 add_loc_descr (&op0, cvt);
14799 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14800 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14801 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14802 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14803 add_loc_descr (&op1, cvt);
14804 return compare_loc_descriptor (op, op0, op1);
14805 }
14806
14807 /* Subroutine of scompare_loc_descriptor for the case in which we're
14808 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14809 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
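/* For example, comparing two HImode values when DWARF2_ADDR_SIZE == 4 uses
   shift == 16: the operands are shifted left by 16 (DW_OP_lit16 DW_OP_shl;
   a constant operand is folded instead), so that the signed comparison on
   full-width stack entries matches the HImode signed comparison.  */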
14810
14811 static dw_loc_descr_ref
14812 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14813 scalar_int_mode op_mode,
14814 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14815 {
14816 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14817 /* For eq/ne, if the operands are known to be zero-extended,
14818 there is no need to do the fancy shifting up. */
14819 if (op == DW_OP_eq || op == DW_OP_ne)
14820 {
14821 dw_loc_descr_ref last0, last1;
14822 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14823 ;
14824 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14825 ;
14826 /* deref_size zero extends, and for constants we can check
14827 whether they are zero extended or not. */
14828 if (((last0->dw_loc_opc == DW_OP_deref_size
14829 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14830 || (CONST_INT_P (XEXP (rtl, 0))
14831 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14832 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14833 && ((last1->dw_loc_opc == DW_OP_deref_size
14834 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14835 || (CONST_INT_P (XEXP (rtl, 1))
14836 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14837 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14838 return compare_loc_descriptor (op, op0, op1);
14839
14840 /* EQ/NE comparison against constant in narrower type than
14841 DWARF2_ADDR_SIZE can be performed either as
14842 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14843 DW_OP_{eq,ne}
14844 or
14845 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14846 DW_OP_{eq,ne}. Pick whatever is shorter. */
14847 if (CONST_INT_P (XEXP (rtl, 1))
14848 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14849 && (size_of_int_loc_descriptor (shift) + 1
14850 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14851 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14852 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14853 & GET_MODE_MASK (op_mode))))
14854 {
14855 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14856 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14857 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14858 & GET_MODE_MASK (op_mode));
14859 return compare_loc_descriptor (op, op0, op1);
14860 }
14861 }
14862 add_loc_descr (&op0, int_loc_descriptor (shift));
14863 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14864 if (CONST_INT_P (XEXP (rtl, 1)))
14865 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14866 else
14867 {
14868 add_loc_descr (&op1, int_loc_descriptor (shift));
14869 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14870 }
14871 return compare_loc_descriptor (op, op0, op1);
14872 }
14873
14874 /* Return location descriptor for signed comparison OP RTL. */
14875
14876 static dw_loc_descr_ref
14877 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14878 machine_mode mem_mode)
14879 {
14880 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14881 dw_loc_descr_ref op0, op1;
14882
14883 if (op_mode == VOIDmode)
14884 op_mode = GET_MODE (XEXP (rtl, 1));
14885 if (op_mode == VOIDmode)
14886 return NULL;
14887
14888 scalar_int_mode int_op_mode;
14889 if (dwarf_strict
14890 && dwarf_version < 5
14891 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14892 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14893 return NULL;
14894
14895 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14896 VAR_INIT_STATUS_INITIALIZED);
14897 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14898 VAR_INIT_STATUS_INITIALIZED);
14899
14900 if (op0 == NULL || op1 == NULL)
14901 return NULL;
14902
14903 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14904 {
14905 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14906 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14907
14908 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14909 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14910 }
14911 return compare_loc_descriptor (op, op0, op1);
14912 }
14913
14914 /* Return location descriptor for unsigned comparison OP RTL. */
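/* Two tricks are used below: operands narrower than DWARF2_ADDR_SIZE are
   masked to GET_MODE_MASK (op_mode) (unless they are already zero extended,
   e.g. by DW_OP_deref_size), and operands of exactly DWARF2_ADDR_SIZE get a
   bias of 1 << (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1) added with
   DW_OP_plus_uconst, which maps unsigned order onto the signed order the
   comparison operators use.  */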
14915
14916 static dw_loc_descr_ref
14917 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14918 machine_mode mem_mode)
14919 {
14920 dw_loc_descr_ref op0, op1;
14921
14922 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14923 if (test_op_mode == VOIDmode)
14924 test_op_mode = GET_MODE (XEXP (rtl, 1));
14925
14926 scalar_int_mode op_mode;
14927 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14928 return NULL;
14929
14930 if (dwarf_strict
14931 && dwarf_version < 5
14932 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14933 return NULL;
14934
14935 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14936 VAR_INIT_STATUS_INITIALIZED);
14937 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14938 VAR_INIT_STATUS_INITIALIZED);
14939
14940 if (op0 == NULL || op1 == NULL)
14941 return NULL;
14942
14943 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14944 {
14945 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14946 dw_loc_descr_ref last0, last1;
14947 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14948 ;
14949 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14950 ;
14951 if (CONST_INT_P (XEXP (rtl, 0)))
14952 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14953 /* deref_size zero extends, so no need to mask it again. */
14954 else if (last0->dw_loc_opc != DW_OP_deref_size
14955 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14956 {
14957 add_loc_descr (&op0, int_loc_descriptor (mask));
14958 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14959 }
14960 if (CONST_INT_P (XEXP (rtl, 1)))
14961 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14962 /* deref_size zero extends, so no need to mask it again. */
14963 else if (last1->dw_loc_opc != DW_OP_deref_size
14964 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14965 {
14966 add_loc_descr (&op1, int_loc_descriptor (mask));
14967 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14968 }
14969 }
14970 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14971 {
14972 HOST_WIDE_INT bias = 1;
14973 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14974 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14975 if (CONST_INT_P (XEXP (rtl, 1)))
14976 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14977 + INTVAL (XEXP (rtl, 1)));
14978 else
14979 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14980 bias, 0));
14981 }
14982 return compare_loc_descriptor (op, op0, op1);
14983 }
14984
14985 /* Return location descriptor for {U,S}{MIN,MAX}. */
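/* Ignoring the masking / biasing / conversion fix-ups for narrow and wide
   modes, the emitted sequence is:
     <op0> DW_OP_dup <op1> DW_OP_swap DW_OP_over <DW_OP_lt for MIN,
     DW_OP_gt for MAX> DW_OP_bra <L1> DW_OP_swap
     L1: DW_OP_drop
   which leaves the selected operand on top of the stack.  */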
14986
14987 static dw_loc_descr_ref
14988 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14989 machine_mode mem_mode)
14990 {
14991 enum dwarf_location_atom op;
14992 dw_loc_descr_ref op0, op1, ret;
14993 dw_loc_descr_ref bra_node, drop_node;
14994
14995 scalar_int_mode int_mode;
14996 if (dwarf_strict
14997 && dwarf_version < 5
14998 && (!is_a <scalar_int_mode> (mode, &int_mode)
14999 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
15000 return NULL;
15001
15002 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15003 VAR_INIT_STATUS_INITIALIZED);
15004 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15005 VAR_INIT_STATUS_INITIALIZED);
15006
15007 if (op0 == NULL || op1 == NULL)
15008 return NULL;
15009
15010 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
15011 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
15012 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
15013 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
15014 {
15015 /* Checked by the caller. */
15016 int_mode = as_a <scalar_int_mode> (mode);
15017 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
15018 {
15019 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
15020 add_loc_descr (&op0, int_loc_descriptor (mask));
15021 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
15022 add_loc_descr (&op1, int_loc_descriptor (mask));
15023 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
15024 }
15025 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15026 {
15027 HOST_WIDE_INT bias = 1;
15028 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
15029 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
15030 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
15031 }
15032 }
15033 else if (is_a <scalar_int_mode> (mode, &int_mode)
15034 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
15035 {
15036 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
15037 add_loc_descr (&op0, int_loc_descriptor (shift));
15038 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
15039 add_loc_descr (&op1, int_loc_descriptor (shift));
15040 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
15041 }
15042 else if (is_a <scalar_int_mode> (mode, &int_mode)
15043 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15044 {
15045 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
15046 dw_loc_descr_ref cvt;
15047 if (type_die == NULL)
15048 return NULL;
15049 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15050 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15051 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15052 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15053 add_loc_descr (&op0, cvt);
15054 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15055 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15056 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15057 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15058 add_loc_descr (&op1, cvt);
15059 }
15060
15061 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
15062 op = DW_OP_lt;
15063 else
15064 op = DW_OP_gt;
15065 ret = op0;
15066 add_loc_descr (&ret, op1);
15067 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
15068 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15069 add_loc_descr (&ret, bra_node);
15070 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15071 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15072 add_loc_descr (&ret, drop_node);
15073 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15074 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15075 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15076 && is_a <scalar_int_mode> (mode, &int_mode)
15077 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15078 ret = convert_descriptor_to_mode (int_mode, ret);
15079 return ret;
15080 }
15081
15082 /* Helper function for mem_loc_descriptor. Perform the binary operation OP,
15083 but only after converting both arguments to TYPE_DIE; afterwards convert
15084 the result back to unsigned MODE. */
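/* i.e. the result has the shape:
     <op0> DW_OP_convert <TYPE_DIE> <op1> DW_OP_convert <TYPE_DIE> <OP>
   followed by convert_descriptor_to_mode to get back to MODE.  */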
15085
15086 static dw_loc_descr_ref
15087 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15088 scalar_int_mode mode, machine_mode mem_mode)
15089 {
15090 dw_loc_descr_ref cvt, op0, op1;
15091
15092 if (type_die == NULL)
15093 return NULL;
15094 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15095 VAR_INIT_STATUS_INITIALIZED);
15096 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15097 VAR_INIT_STATUS_INITIALIZED);
15098 if (op0 == NULL || op1 == NULL)
15099 return NULL;
15100 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15101 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15102 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15103 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15104 add_loc_descr (&op0, cvt);
15105 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15106 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15107 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15108 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15109 add_loc_descr (&op1, cvt);
15110 add_loc_descr (&op0, op1);
15111 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15112 return convert_descriptor_to_mode (mode, op0);
15113 }
15114
15115 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15116 const0 is DW_OP_lit0 or corresponding typed constant,
15117 const1 is DW_OP_lit1 or corresponding typed constant
15118 and constMSB is constant with just the MSB bit set
15119 for the mode):
15120 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15121 L1: const0 DW_OP_swap
15122 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15123 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15124 L3: DW_OP_drop
15125 L4: DW_OP_nop
15126
15127 CTZ is similar:
15128 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15129 L1: const0 DW_OP_swap
15130 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15131 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15132 L3: DW_OP_drop
15133 L4: DW_OP_nop
15134
15135 FFS is similar:
15136 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15137 L1: const1 DW_OP_swap
15138 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15139 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15140 L3: DW_OP_drop
15141 L4: DW_OP_nop */
15142
15143 static dw_loc_descr_ref
15144 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15145 machine_mode mem_mode)
15146 {
15147 dw_loc_descr_ref op0, ret, tmp;
15148 HOST_WIDE_INT valv;
15149 dw_loc_descr_ref l1jump, l1label;
15150 dw_loc_descr_ref l2jump, l2label;
15151 dw_loc_descr_ref l3jump, l3label;
15152 dw_loc_descr_ref l4jump, l4label;
15153 rtx msb;
15154
15155 if (GET_MODE (XEXP (rtl, 0)) != mode)
15156 return NULL;
15157
15158 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15159 VAR_INIT_STATUS_INITIALIZED);
15160 if (op0 == NULL)
15161 return NULL;
15162 ret = op0;
15163 if (GET_CODE (rtl) == CLZ)
15164 {
15165 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15166 valv = GET_MODE_BITSIZE (mode);
15167 }
15168 else if (GET_CODE (rtl) == FFS)
15169 valv = 0;
15170 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15171 valv = GET_MODE_BITSIZE (mode);
15172 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15173 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15174 add_loc_descr (&ret, l1jump);
15175 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15176 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15177 VAR_INIT_STATUS_INITIALIZED);
15178 if (tmp == NULL)
15179 return NULL;
15180 add_loc_descr (&ret, tmp);
15181 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15182 add_loc_descr (&ret, l4jump);
15183 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15184 ? const1_rtx : const0_rtx,
15185 mode, mem_mode,
15186 VAR_INIT_STATUS_INITIALIZED);
15187 if (l1label == NULL)
15188 return NULL;
15189 add_loc_descr (&ret, l1label);
15190 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15191 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15192 add_loc_descr (&ret, l2label);
15193 if (GET_CODE (rtl) != CLZ)
15194 msb = const1_rtx;
15195 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15196 msb = GEN_INT (HOST_WIDE_INT_1U
15197 << (GET_MODE_BITSIZE (mode) - 1));
15198 else
15199 msb = immed_wide_int_const
15200 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15201 GET_MODE_PRECISION (mode)), mode);
15202 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15203 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15204 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15205 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15206 else
15207 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15208 VAR_INIT_STATUS_INITIALIZED);
15209 if (tmp == NULL)
15210 return NULL;
15211 add_loc_descr (&ret, tmp);
15212 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15213 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15214 add_loc_descr (&ret, l3jump);
15215 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15216 VAR_INIT_STATUS_INITIALIZED);
15217 if (tmp == NULL)
15218 return NULL;
15219 add_loc_descr (&ret, tmp);
15220 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15221 ? DW_OP_shl : DW_OP_shr, 0, 0));
15222 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15223 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15224 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15225 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15226 add_loc_descr (&ret, l2jump);
15227 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15228 add_loc_descr (&ret, l3label);
15229 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15230 add_loc_descr (&ret, l4label);
15231 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15232 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15233 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15234 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15235 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15236 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15237 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15238 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15239 return ret;
15240 }
15241
15242 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15243 const1 is DW_OP_lit1 or corresponding typed constant):
15244 const0 DW_OP_swap
15245 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15246 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15247 L2: DW_OP_drop
15248
15249 PARITY is similar:
15250 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15251 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15252 L2: DW_OP_drop */
15253
15254 static dw_loc_descr_ref
15255 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15256 machine_mode mem_mode)
15257 {
15258 dw_loc_descr_ref op0, ret, tmp;
15259 dw_loc_descr_ref l1jump, l1label;
15260 dw_loc_descr_ref l2jump, l2label;
15261
15262 if (GET_MODE (XEXP (rtl, 0)) != mode)
15263 return NULL;
15264
15265 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15266 VAR_INIT_STATUS_INITIALIZED);
15267 if (op0 == NULL)
15268 return NULL;
15269 ret = op0;
15270 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15271 VAR_INIT_STATUS_INITIALIZED);
15272 if (tmp == NULL)
15273 return NULL;
15274 add_loc_descr (&ret, tmp);
15275 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15276 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15277 add_loc_descr (&ret, l1label);
15278 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15279 add_loc_descr (&ret, l2jump);
15280 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15281 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15282 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15283 VAR_INIT_STATUS_INITIALIZED);
15284 if (tmp == NULL)
15285 return NULL;
15286 add_loc_descr (&ret, tmp);
15287 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15288 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15289 ? DW_OP_plus : DW_OP_xor, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15291 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15292 VAR_INIT_STATUS_INITIALIZED);
15293 add_loc_descr (&ret, tmp);
15294 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15295 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15296 add_loc_descr (&ret, l1jump);
15297 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15298 add_loc_descr (&ret, l2label);
15299 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15300 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15301 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15302 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15303 return ret;
15304 }
15305
15306 /* BSWAP (constS is initial shift count, either 56 or 24):
15307 constS const0
15308 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15309 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15310 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15311 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15312 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
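/* In other words, the value, the remaining shift count and the accumulator
   are kept on the stack; each iteration extracts (value >> (constS - count))
   & 255, ORs it into the accumulator shifted left by the current count, and
   decreases the count by 8.  Once the count reaches zero, the final
   DW_OP_drop DW_OP_swap DW_OP_drop discards the count and the original
   value, leaving the byte-swapped accumulator.  */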
15313
15314 static dw_loc_descr_ref
15315 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15316 machine_mode mem_mode)
15317 {
15318 dw_loc_descr_ref op0, ret, tmp;
15319 dw_loc_descr_ref l1jump, l1label;
15320 dw_loc_descr_ref l2jump, l2label;
15321
15322 if (BITS_PER_UNIT != 8
15323 || (GET_MODE_BITSIZE (mode) != 32
15324 && GET_MODE_BITSIZE (mode) != 64))
15325 return NULL;
15326
15327 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15328 VAR_INIT_STATUS_INITIALIZED);
15329 if (op0 == NULL)
15330 return NULL;
15331
15332 ret = op0;
15333 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15334 mode, mem_mode,
15335 VAR_INIT_STATUS_INITIALIZED);
15336 if (tmp == NULL)
15337 return NULL;
15338 add_loc_descr (&ret, tmp);
15339 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15340 VAR_INIT_STATUS_INITIALIZED);
15341 if (tmp == NULL)
15342 return NULL;
15343 add_loc_descr (&ret, tmp);
15344 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15345 add_loc_descr (&ret, l1label);
15346 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15347 mode, mem_mode,
15348 VAR_INIT_STATUS_INITIALIZED);
15349 add_loc_descr (&ret, tmp);
15350 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15351 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15352 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15353 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15354 VAR_INIT_STATUS_INITIALIZED);
15355 if (tmp == NULL)
15356 return NULL;
15357 add_loc_descr (&ret, tmp);
15358 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15359 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15360 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15361 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15362 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15363 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15364 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15365 VAR_INIT_STATUS_INITIALIZED);
15366 add_loc_descr (&ret, tmp);
15367 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15368 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15369 add_loc_descr (&ret, l2jump);
15370 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15371 VAR_INIT_STATUS_INITIALIZED);
15372 add_loc_descr (&ret, tmp);
15373 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15374 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15375 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15376 add_loc_descr (&ret, l1jump);
15377 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15378 add_loc_descr (&ret, l2label);
15379 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15380 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15381 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15382 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15383 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15384 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15385 return ret;
15386 }
15387
15388 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15389 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15390 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15391 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15392
15393 ROTATERT is similar:
15394 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15395 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15396 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15397
15398 static dw_loc_descr_ref
15399 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15400 machine_mode mem_mode)
15401 {
15402 rtx rtlop1 = XEXP (rtl, 1);
15403 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15404 int i;
15405
15406 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15407 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15408 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15409 VAR_INIT_STATUS_INITIALIZED);
15410 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15411 VAR_INIT_STATUS_INITIALIZED);
15412 if (op0 == NULL || op1 == NULL)
15413 return NULL;
15414 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15415 for (i = 0; i < 2; i++)
15416 {
15417 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15418 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15419 mode, mem_mode,
15420 VAR_INIT_STATUS_INITIALIZED);
15421 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15422 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15423 ? DW_OP_const4u
15424 : HOST_BITS_PER_WIDE_INT == 64
15425 ? DW_OP_const8u : DW_OP_constu,
15426 GET_MODE_MASK (mode), 0);
15427 else
15428 mask[i] = NULL;
15429 if (mask[i] == NULL)
15430 return NULL;
15431 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15432 }
15433 ret = op0;
15434 add_loc_descr (&ret, op1);
15435 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15436 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15437 if (GET_CODE (rtl) == ROTATERT)
15438 {
15439 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15440 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15441 GET_MODE_BITSIZE (mode), 0));
15442 }
15443 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15444 if (mask[0] != NULL)
15445 add_loc_descr (&ret, mask[0]);
15446 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15447 if (mask[1] != NULL)
15448 {
15449 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15450 add_loc_descr (&ret, mask[1]);
15451 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15452 }
15453 if (GET_CODE (rtl) == ROTATE)
15454 {
15455 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15456 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15457 GET_MODE_BITSIZE (mode), 0));
15458 }
15459 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15460 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15461 return ret;
15462 }
15463
15464 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15465 for DEBUG_PARAMETER_REF RTL. */
15466
15467 static dw_loc_descr_ref
15468 parameter_ref_descriptor (rtx rtl)
15469 {
15470 dw_loc_descr_ref ret;
15471 dw_die_ref ref;
15472
15473 if (dwarf_strict)
15474 return NULL;
15475 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15476 /* With LTO during LTRANS we get the late DIE that refers to the early
15477 DIE, thus we add another indirection here. This seems to confuse
15478 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15479 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15480 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15481 if (ref)
15482 {
15483 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15484 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15485 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15486 }
15487 else
15488 {
15489 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15490 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15491 }
15492 return ret;
15493 }
15494
15495 /* The following routine converts the RTL for a variable or parameter
15496 (resident in memory) into an equivalent Dwarf representation of a
15497 mechanism for getting the address of that same variable onto the top of a
15498 hypothetical "address evaluation" stack.
15499
15500 When creating memory location descriptors, we are effectively transforming
15501 the RTL for a memory-resident object into its Dwarf postfix expression
15502 equivalent. This routine recursively descends an RTL tree, turning
15503 it into Dwarf postfix code as it goes.
15504
15505 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15506
15507 MEM_MODE is the mode of the memory reference, needed to handle some
15508 autoincrement addressing modes.
15509
15510 Return 0 if we can't represent the location. */
15511
15512 dw_loc_descr_ref
15513 mem_loc_descriptor (rtx rtl, machine_mode mode,
15514 machine_mode mem_mode,
15515 enum var_init_status initialized)
15516 {
15517 dw_loc_descr_ref mem_loc_result = NULL;
15518 enum dwarf_location_atom op;
15519 dw_loc_descr_ref op0, op1;
15520 rtx inner = NULL_RTX;
15521 poly_int64 offset;
15522
15523 if (mode == VOIDmode)
15524 mode = GET_MODE (rtl);
15525
15526 /* Note that for a dynamically sized array, the location we will generate a
15527 description of here will be the lowest numbered location which is
15528 actually within the array. That's *not* necessarily the same as the
15529 zeroth element of the array. */
15530
15531 rtl = targetm.delegitimize_address (rtl);
15532
15533 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15534 return NULL;
15535
15536 scalar_int_mode int_mode = BImode, inner_mode, op1_mode;
15537 switch (GET_CODE (rtl))
15538 {
15539 case POST_INC:
15540 case POST_DEC:
15541 case POST_MODIFY:
15542 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15543
15544 case SUBREG:
15545 /* The case of a subreg may arise when we have a local (register)
15546 variable or a formal (register) parameter which doesn't quite fill
15547 up an entire register. For now, just assume that it is
15548 legitimate to make the Dwarf info refer to the whole register which
15549 contains the given subreg. */
15550 if (!subreg_lowpart_p (rtl))
15551 break;
15552 inner = SUBREG_REG (rtl);
15553 /* FALLTHRU */
15554 case TRUNCATE:
15555 if (inner == NULL_RTX)
15556 inner = XEXP (rtl, 0);
15557 if (is_a <scalar_int_mode> (mode, &int_mode)
15558 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15559 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15560 #ifdef POINTERS_EXTEND_UNSIGNED
15561 || (int_mode == Pmode && mem_mode != VOIDmode)
15562 #endif
15563 )
15564 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15565 {
15566 mem_loc_result = mem_loc_descriptor (inner,
15567 inner_mode,
15568 mem_mode, initialized);
15569 break;
15570 }
15571 if (dwarf_strict && dwarf_version < 5)
15572 break;
15573 if (is_a <scalar_int_mode> (mode, &int_mode)
15574 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15575 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15576 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15577 {
15578 dw_die_ref type_die;
15579 dw_loc_descr_ref cvt;
15580
15581 mem_loc_result = mem_loc_descriptor (inner,
15582 GET_MODE (inner),
15583 mem_mode, initialized);
15584 if (mem_loc_result == NULL)
15585 break;
15586 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15587 if (type_die == NULL)
15588 {
15589 mem_loc_result = NULL;
15590 break;
15591 }
15592 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15593 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15594 else
15595 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15596 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15597 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15598 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15599 add_loc_descr (&mem_loc_result, cvt);
15600 if (is_a <scalar_int_mode> (mode, &int_mode)
15601 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15602 {
15603 /* Convert it to untyped afterwards. */
15604 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15605 add_loc_descr (&mem_loc_result, cvt);
15606 }
15607 }
15608 break;
15609
15610 case REG:
15611 if (!is_a <scalar_int_mode> (mode, &int_mode)
15612 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15613 && rtl != arg_pointer_rtx
15614 && rtl != frame_pointer_rtx
15615 #ifdef POINTERS_EXTEND_UNSIGNED
15616 && (int_mode != Pmode || mem_mode == VOIDmode)
15617 #endif
15618 ))
15619 {
15620 dw_die_ref type_die;
15621 unsigned int dbx_regnum;
15622
15623 if (dwarf_strict && dwarf_version < 5)
15624 break;
15625 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15626 break;
15627 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15628 if (type_die == NULL)
15629 break;
15630
15631 dbx_regnum = dbx_reg_number (rtl);
15632 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15633 break;
15634 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15635 dbx_regnum, 0);
15636 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15637 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15638 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15639 break;
15640 }
15641 /* Whenever a register number forms a part of the description of the
15642 method for calculating the (dynamic) address of a memory resident
15643 object, DWARF rules require the register number be referred to as
15644 a "base register". This distinction is not based in any way upon
15645 what category of register the hardware believes the given register
15646 belongs to. This is strictly DWARF terminology we're dealing with
15647 here. Note that in cases where the location of a memory-resident
15648 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15649 OP_CONST (0)) the actual DWARF location descriptor that we generate
15650 may just be OP_BASEREG (basereg). This may look deceptively like
15651 the object in question was allocated to a register (rather than in
15652 memory) so DWARF consumers need to be aware of the subtle
15653 distinction between OP_REG and OP_BASEREG. */
15654 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15655 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15656 else if (stack_realign_drap
15657 && crtl->drap_reg
15658 && crtl->args.internal_arg_pointer == rtl
15659 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15660 {
15661 /* If RTL is internal_arg_pointer, which has been optimized
15662 out, use DRAP instead. */
15663 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15664 VAR_INIT_STATUS_INITIALIZED);
15665 }
15666 break;
15667
15668 case SIGN_EXTEND:
15669 case ZERO_EXTEND:
15670 if (!is_a <scalar_int_mode> (mode, &int_mode)
15671 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15672 break;
15673 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15674 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15675 if (op0 == 0)
15676 break;
15677 else if (GET_CODE (rtl) == ZERO_EXTEND
15678 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15679 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15680 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15681 to expand zero extend as two shifts instead of
15682 masking. */
15683 && GET_MODE_SIZE (inner_mode) <= 4)
15684 {
15685 mem_loc_result = op0;
15686 add_loc_descr (&mem_loc_result,
15687 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15688 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15689 }
15690 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15691 {
15692 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15693 shift *= BITS_PER_UNIT;
15694 if (GET_CODE (rtl) == SIGN_EXTEND)
15695 op = DW_OP_shra;
15696 else
15697 op = DW_OP_shr;
15698 mem_loc_result = op0;
15699 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15700 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15701 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15702 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15703 }
15704 else if (!dwarf_strict || dwarf_version >= 5)
15705 {
15706 dw_die_ref type_die1, type_die2;
15707 dw_loc_descr_ref cvt;
15708
15709 type_die1 = base_type_for_mode (inner_mode,
15710 GET_CODE (rtl) == ZERO_EXTEND);
15711 if (type_die1 == NULL)
15712 break;
15713 type_die2 = base_type_for_mode (int_mode, 1);
15714 if (type_die2 == NULL)
15715 break;
15716 mem_loc_result = op0;
15717 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15718 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15719 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15720 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15721 add_loc_descr (&mem_loc_result, cvt);
15722 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15723 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15724 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15725 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15726 add_loc_descr (&mem_loc_result, cvt);
15727 }
15728 break;
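/* Illustrative sketch, not part of the original source: with
DWARF2_ADDR_SIZE == 8, zero-extending a QImode value uses the masking form
above (<op0>; DW_OP_const1u 0xff; DW_OP_and), while sign-extending an
SImode value uses the two-shift form
(<op0>; DW_OP_const1u 32; DW_OP_shl; DW_OP_const1u 32; DW_OP_shra). */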
15729
15730 case MEM:
15731 {
15732 rtx new_rtl = avoid_constant_pool_reference (rtl);
15733 if (new_rtl != rtl)
15734 {
15735 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15736 initialized);
15737 if (mem_loc_result != NULL)
15738 return mem_loc_result;
15739 }
15740 }
15741 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15742 get_address_mode (rtl), mode,
15743 VAR_INIT_STATUS_INITIALIZED);
15744 if (mem_loc_result == NULL)
15745 mem_loc_result = tls_mem_loc_descriptor (rtl);
15746 if (mem_loc_result != NULL)
15747 {
15748 if (!is_a <scalar_int_mode> (mode, &int_mode)
15749 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15750 {
15751 dw_die_ref type_die;
15752 dw_loc_descr_ref deref;
15753 HOST_WIDE_INT size;
15754
15755 if (dwarf_strict && dwarf_version < 5)
15756 return NULL;
15757 if (!GET_MODE_SIZE (mode).is_constant (&size))
15758 return NULL;
15759 type_die
15760 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15761 if (type_die == NULL)
15762 return NULL;
15763 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15764 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15765 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15766 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15767 add_loc_descr (&mem_loc_result, deref);
15768 }
15769 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15770 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15771 else
15772 add_loc_descr (&mem_loc_result,
15773 new_loc_descr (DW_OP_deref_size,
15774 GET_MODE_SIZE (int_mode), 0));
15775 }
15776 break;
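/* Illustrative sketch, not part of the original source: the MEM handling
above computes the address and then dereferences it, e.g. a pointer-sized
load becomes <address ops>; DW_OP_deref, a 2-byte integer load becomes
<address ops>; DW_OP_deref_size 2, and modes wider than the address size
fall back to the typed DW_OP_deref_type. */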
15777
15778 case LO_SUM:
15779 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15780
15781 case LABEL_REF:
15782 /* Some ports can transform a symbol ref into a label ref, because
15783 the symbol ref is too far away and has to be dumped into a constant
15784 pool. */
15785 case CONST:
15786 case SYMBOL_REF:
15787 case UNSPEC:
15788 if (!is_a <scalar_int_mode> (mode, &int_mode)
15789 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15790 #ifdef POINTERS_EXTEND_UNSIGNED
15791 && (int_mode != Pmode || mem_mode == VOIDmode)
15792 #endif
15793 ))
15794 break;
15795
15796 if (GET_CODE (rtl) == UNSPEC)
15797 {
15798 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15799 can't express it in the debug info. This can happen e.g. with some
15800 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15801 approves. */
15802 bool not_ok = false;
15803 subrtx_var_iterator::array_type array;
15804 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15805 if (*iter != rtl && !CONSTANT_P (*iter))
15806 {
15807 not_ok = true;
15808 break;
15809 }
15810
15811 if (not_ok)
15812 break;
15813
15814 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15815 if (!const_ok_for_output_1 (*iter))
15816 {
15817 not_ok = true;
15818 break;
15819 }
15820
15821 if (not_ok)
15822 break;
15823
15824 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15825 goto symref;
15826 }
15827
15828 if (GET_CODE (rtl) == SYMBOL_REF
15829 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15830 {
15831 dw_loc_descr_ref temp;
15832
15833 /* If this is not defined, we have no way to emit the data. */
15834 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15835 break;
15836
15837 temp = new_addr_loc_descr (rtl, dtprel_true);
15838
15839 /* We check for DWARF 5 here because gdb did not implement
15840 DW_OP_form_tls_address until after 7.12. */
15841 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15842 ? DW_OP_form_tls_address
15843 : DW_OP_GNU_push_tls_address),
15844 0, 0);
15845 add_loc_descr (&mem_loc_result, temp);
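/* Illustrative sketch, not part of the original source: the result built
here is conceptually <push the DTP-relative offset of the symbol, emitted
via targetm.asm_out.output_dwarf_dtprel>; DW_OP_form_tls_address
(DW_OP_GNU_push_tls_address before DWARF 5), which the consumer resolves
to an address in the current thread's TLS block. */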
15846
15847 break;
15848 }
15849
15850 if (!const_ok_for_output (rtl))
15851 {
15852 if (GET_CODE (rtl) == CONST)
15853 switch (GET_CODE (XEXP (rtl, 0)))
15854 {
15855 case NOT:
15856 op = DW_OP_not;
15857 goto try_const_unop;
15858 case NEG:
15859 op = DW_OP_neg;
15860 goto try_const_unop;
15861 try_const_unop:
15862 rtx arg;
15863 arg = XEXP (XEXP (rtl, 0), 0);
15864 if (!CONSTANT_P (arg))
15865 arg = gen_rtx_CONST (int_mode, arg);
15866 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15867 initialized);
15868 if (op0)
15869 {
15870 mem_loc_result = op0;
15871 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15872 }
15873 break;
15874 default:
15875 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15876 mem_mode, initialized);
15877 break;
15878 }
15879 break;
15880 }
15881
15882 symref:
15883 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15884 vec_safe_push (used_rtx_array, rtl);
15885 break;
15886
15887 case CONCAT:
15888 case CONCATN:
15889 case VAR_LOCATION:
15890 case DEBUG_IMPLICIT_PTR:
15891 expansion_failed (NULL_TREE, rtl,
15892 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15893 return 0;
15894
15895 case ENTRY_VALUE:
15896 if (dwarf_strict && dwarf_version < 5)
15897 return NULL;
15898 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15899 {
15900 if (!is_a <scalar_int_mode> (mode, &int_mode)
15901 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15902 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15903 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15904 else
15905 {
15906 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15907 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15908 return NULL;
15909 op0 = one_reg_loc_descriptor (dbx_regnum,
15910 VAR_INIT_STATUS_INITIALIZED);
15911 }
15912 }
15913 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15914 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15915 {
15916 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15917 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15918 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15919 return NULL;
15920 }
15921 else
15922 gcc_unreachable ();
15923 if (op0 == NULL)
15924 return NULL;
15925 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15926 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15927 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15928 break;
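/* Illustrative sketch, not part of the original source: for a parameter
whose value on entry to the function lived in DWARF register 5, the result
is DW_OP_entry_value with a one-byte block holding DW_OP_reg5. */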
15929
15930 case DEBUG_PARAMETER_REF:
15931 mem_loc_result = parameter_ref_descriptor (rtl);
15932 break;
15933
15934 case PRE_MODIFY:
15935 /* Extract the PLUS expression nested inside and fall into
15936 PLUS code below. */
15937 rtl = XEXP (rtl, 1);
15938 goto plus;
15939
15940 case PRE_INC:
15941 case PRE_DEC:
15942 /* Turn these into a PLUS expression and fall into the PLUS code
15943 below. */
15944 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15945 gen_int_mode (GET_CODE (rtl) == PRE_INC
15946 ? GET_MODE_UNIT_SIZE (mem_mode)
15947 : -GET_MODE_UNIT_SIZE (mem_mode),
15948 mode));
15949
15950 /* fall through */
15951
15952 case PLUS:
15953 plus:
15954 if (is_based_loc (rtl)
15955 && is_a <scalar_int_mode> (mode, &int_mode)
15956 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15957 || XEXP (rtl, 0) == arg_pointer_rtx
15958 || XEXP (rtl, 0) == frame_pointer_rtx))
15959 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15960 INTVAL (XEXP (rtl, 1)),
15961 VAR_INIT_STATUS_INITIALIZED);
15962 else
15963 {
15964 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15965 VAR_INIT_STATUS_INITIALIZED);
15966 if (mem_loc_result == 0)
15967 break;
15968
15969 if (CONST_INT_P (XEXP (rtl, 1))
15970 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15971 <= DWARF2_ADDR_SIZE))
15972 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15973 else
15974 {
15975 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15976 VAR_INIT_STATUS_INITIALIZED);
15977 if (op1 == 0)
15978 return NULL;
15979 add_loc_descr (&mem_loc_result, op1);
15980 add_loc_descr (&mem_loc_result,
15981 new_loc_descr (DW_OP_plus, 0, 0));
15982 }
15983 }
15984 break;
15985
15986 /* If a pseudo-reg is optimized away, it is possible for it to
15987 be replaced with a MEM containing a multiply or shift. */
15988 case MINUS:
15989 op = DW_OP_minus;
15990 goto do_binop;
15991
15992 case MULT:
15993 op = DW_OP_mul;
15994 goto do_binop;
15995
15996 case DIV:
15997 if ((!dwarf_strict || dwarf_version >= 5)
15998 && is_a <scalar_int_mode> (mode, &int_mode)
15999 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16000 {
16001 mem_loc_result = typed_binop (DW_OP_div, rtl,
16002 base_type_for_mode (mode, 0),
16003 int_mode, mem_mode);
16004 break;
16005 }
16006 op = DW_OP_div;
16007 goto do_binop;
16008
16009 case UMOD:
16010 op = DW_OP_mod;
16011 goto do_binop;
16012
16013 case ASHIFT:
16014 op = DW_OP_shl;
16015 goto do_shift;
16016
16017 case ASHIFTRT:
16018 op = DW_OP_shra;
16019 goto do_shift;
16020
16021 case LSHIFTRT:
16022 op = DW_OP_shr;
16023 goto do_shift;
16024
16025 do_shift:
16026 if (!is_a <scalar_int_mode> (mode, &int_mode))
16027 break;
16028 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
16029 VAR_INIT_STATUS_INITIALIZED);
16030 {
16031 rtx rtlop1 = XEXP (rtl, 1);
16032 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
16033 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
16034 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
16035 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
16036 VAR_INIT_STATUS_INITIALIZED);
16037 }
16038
16039 if (op0 == 0 || op1 == 0)
16040 break;
16041
16042 mem_loc_result = op0;
16043 add_loc_descr (&mem_loc_result, op1);
16044 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16045 break;
16046
16047 case AND:
16048 op = DW_OP_and;
16049 goto do_binop;
16050
16051 case IOR:
16052 op = DW_OP_or;
16053 goto do_binop;
16054
16055 case XOR:
16056 op = DW_OP_xor;
16057 goto do_binop;
16058
16059 do_binop:
16060 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16061 VAR_INIT_STATUS_INITIALIZED);
16062 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16063 VAR_INIT_STATUS_INITIALIZED);
16064
16065 if (op0 == 0 || op1 == 0)
16066 break;
16067
16068 mem_loc_result = op0;
16069 add_loc_descr (&mem_loc_result, op1);
16070 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16071 break;
16072
16073 case MOD:
16074 if ((!dwarf_strict || dwarf_version >= 5)
16075 && is_a <scalar_int_mode> (mode, &int_mode)
16076 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16077 {
16078 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16079 base_type_for_mode (mode, 0),
16080 int_mode, mem_mode);
16081 break;
16082 }
16083
16084 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16085 VAR_INIT_STATUS_INITIALIZED);
16086 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16087 VAR_INIT_STATUS_INITIALIZED);
16088
16089 if (op0 == 0 || op1 == 0)
16090 break;
16091
16092 mem_loc_result = op0;
16093 add_loc_descr (&mem_loc_result, op1);
16094 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16095 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16096 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16097 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16098 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
16099 break;
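/* Illustrative sketch, not part of the original source: the sequence above
computes a % b as a - (a / b) * b. With a and b on the stack:
[a b] -over-> [a b a] -over-> [a b a b] -div-> [a b a/b]
-mul-> [a (a/b)*b] -minus-> [a - (a/b)*b]. */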
16100
16101 case UDIV:
16102 if ((!dwarf_strict || dwarf_version >= 5)
16103 && is_a <scalar_int_mode> (mode, &int_mode))
16104 {
16105 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16106 {
16107 op = DW_OP_div;
16108 goto do_binop;
16109 }
16110 mem_loc_result = typed_binop (DW_OP_div, rtl,
16111 base_type_for_mode (int_mode, 1),
16112 int_mode, mem_mode);
16113 }
16114 break;
16115
16116 case NOT:
16117 op = DW_OP_not;
16118 goto do_unop;
16119
16120 case ABS:
16121 op = DW_OP_abs;
16122 goto do_unop;
16123
16124 case NEG:
16125 op = DW_OP_neg;
16126 goto do_unop;
16127
16128 do_unop:
16129 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16130 VAR_INIT_STATUS_INITIALIZED);
16131
16132 if (op0 == 0)
16133 break;
16134
16135 mem_loc_result = op0;
16136 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16137 break;
16138
16139 case CONST_INT:
16140 if (!is_a <scalar_int_mode> (mode, &int_mode)
16141 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16142 #ifdef POINTERS_EXTEND_UNSIGNED
16143 || (int_mode == Pmode
16144 && mem_mode != VOIDmode
16145 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16146 #endif
16147 )
16148 {
16149 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16150 break;
16151 }
16152 if ((!dwarf_strict || dwarf_version >= 5)
16153 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16154 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16155 {
16156 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16157 scalar_int_mode amode;
16158 if (type_die == NULL)
16159 return NULL;
16160 if (INTVAL (rtl) >= 0
16161 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16162 .exists (&amode))
16163 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16164 /* const DW_OP_convert <XXX> vs.
16165 DW_OP_const_type <XXX, 1, const>. */
16166 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16167 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16168 {
16169 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16170 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16171 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16172 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16173 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16174 add_loc_descr (&mem_loc_result, op0);
16175 return mem_loc_result;
16176 }
16177 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16178 INTVAL (rtl));
16179 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16180 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16181 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16182 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16183 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16184 else
16185 {
16186 mem_loc_result->dw_loc_oprnd2.val_class
16187 = dw_val_class_const_double;
16188 mem_loc_result->dw_loc_oprnd2.v.val_double
16189 = double_int::from_shwi (INTVAL (rtl));
16190 }
16191 }
16192 break;
16193
16194 case CONST_DOUBLE:
16195 if (!dwarf_strict || dwarf_version >= 5)
16196 {
16197 dw_die_ref type_die;
16198
16199 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16200 CONST_DOUBLE rtx could represent either a large integer
16201 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16202 the value is always a floating point constant.
16203
16204 When it is an integer, a CONST_DOUBLE is used whenever
16205 the constant requires 2 HWIs to be adequately represented.
16206 We output CONST_DOUBLEs as blocks. */
16207 if (mode == VOIDmode
16208 || (GET_MODE (rtl) == VOIDmode
16209 && maybe_ne (GET_MODE_BITSIZE (mode),
16210 HOST_BITS_PER_DOUBLE_INT)))
16211 break;
16212 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16213 if (type_die == NULL)
16214 return NULL;
16215 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16216 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16217 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16218 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16219 #if TARGET_SUPPORTS_WIDE_INT == 0
16220 if (!SCALAR_FLOAT_MODE_P (mode))
16221 {
16222 mem_loc_result->dw_loc_oprnd2.val_class
16223 = dw_val_class_const_double;
16224 mem_loc_result->dw_loc_oprnd2.v.val_double
16225 = rtx_to_double_int (rtl);
16226 }
16227 else
16228 #endif
16229 {
16230 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16231 unsigned int length = GET_MODE_SIZE (float_mode);
16232 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16233
16234 insert_float (rtl, array);
16235 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16236 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16237 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16238 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16239 }
16240 }
16241 break;
16242
16243 case CONST_WIDE_INT:
16244 if (!dwarf_strict || dwarf_version >= 5)
16245 {
16246 dw_die_ref type_die;
16247
16248 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16249 if (type_die == NULL)
16250 return NULL;
16251 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16252 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16253 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16254 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16255 mem_loc_result->dw_loc_oprnd2.val_class
16256 = dw_val_class_wide_int;
16257 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16258 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16259 }
16260 break;
16261
16262 case CONST_POLY_INT:
16263 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16264 break;
16265
16266 case EQ:
16267 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16268 break;
16269
16270 case GE:
16271 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16272 break;
16273
16274 case GT:
16275 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16276 break;
16277
16278 case LE:
16279 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16280 break;
16281
16282 case LT:
16283 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16284 break;
16285
16286 case NE:
16287 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16288 break;
16289
16290 case GEU:
16291 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16292 break;
16293
16294 case GTU:
16295 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16296 break;
16297
16298 case LEU:
16299 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16300 break;
16301
16302 case LTU:
16303 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16304 break;
16305
16306 case UMIN:
16307 case UMAX:
16308 if (!SCALAR_INT_MODE_P (mode))
16309 break;
16310 /* FALLTHRU */
16311 case SMIN:
16312 case SMAX:
16313 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16314 break;
16315
16316 case ZERO_EXTRACT:
16317 case SIGN_EXTRACT:
16318 if (CONST_INT_P (XEXP (rtl, 1))
16319 && CONST_INT_P (XEXP (rtl, 2))
16320 && is_a <scalar_int_mode> (mode, &int_mode)
16321 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16322 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16323 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16324 && ((unsigned) INTVAL (XEXP (rtl, 1))
16325 + (unsigned) INTVAL (XEXP (rtl, 2))
16326 <= GET_MODE_BITSIZE (int_mode)))
16327 {
16328 int shift, size;
16329 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16330 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16331 if (op0 == 0)
16332 break;
16333 if (GET_CODE (rtl) == SIGN_EXTRACT)
16334 op = DW_OP_shra;
16335 else
16336 op = DW_OP_shr;
16337 mem_loc_result = op0;
16338 size = INTVAL (XEXP (rtl, 1));
16339 shift = INTVAL (XEXP (rtl, 2));
16340 if (BITS_BIG_ENDIAN)
16341 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16342 if (shift + size != (int) DWARF2_ADDR_SIZE)
16343 {
16344 add_loc_descr (&mem_loc_result,
16345 int_loc_descriptor (DWARF2_ADDR_SIZE
16346 - shift - size));
16347 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16348 }
16349 if (size != (int) DWARF2_ADDR_SIZE)
16350 {
16351 add_loc_descr (&mem_loc_result,
16352 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16353 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16354 }
16355 }
16356 break;
16357
16358 case IF_THEN_ELSE:
16359 {
16360 dw_loc_descr_ref op2, bra_node, drop_node;
16361 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16362 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16363 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16364 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16365 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16366 VAR_INIT_STATUS_INITIALIZED);
16367 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16368 VAR_INIT_STATUS_INITIALIZED);
16369 if (op0 == NULL || op1 == NULL || op2 == NULL)
16370 break;
16371
16372 mem_loc_result = op1;
16373 add_loc_descr (&mem_loc_result, op2);
16374 add_loc_descr (&mem_loc_result, op0);
16375 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16376 add_loc_descr (&mem_loc_result, bra_node);
16377 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16378 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16379 add_loc_descr (&mem_loc_result, drop_node);
16380 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16381 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16382 }
16383 break;
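/* Illustrative sketch, not part of the original source: the sequence above
pushes the "then" value, the "else" value and the condition, in that order.
DW_OP_bra pops the condition and, when it is nonzero, skips the DW_OP_swap,
so DW_OP_drop discards the "else" value; when the condition is zero,
DW_OP_swap runs first and DW_OP_drop discards the "then" value instead. */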
16384
16385 case FLOAT_EXTEND:
16386 case FLOAT_TRUNCATE:
16387 case FLOAT:
16388 case UNSIGNED_FLOAT:
16389 case FIX:
16390 case UNSIGNED_FIX:
16391 if (!dwarf_strict || dwarf_version >= 5)
16392 {
16393 dw_die_ref type_die;
16394 dw_loc_descr_ref cvt;
16395
16396 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16397 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16398 if (op0 == NULL)
16399 break;
16400 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16401 && (GET_CODE (rtl) == FLOAT
16402 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16403 {
16404 type_die = base_type_for_mode (int_mode,
16405 GET_CODE (rtl) == UNSIGNED_FLOAT);
16406 if (type_die == NULL)
16407 break;
16408 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16409 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16410 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16411 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16412 add_loc_descr (&op0, cvt);
16413 }
16414 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16415 if (type_die == NULL)
16416 break;
16417 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16418 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16419 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16420 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16421 add_loc_descr (&op0, cvt);
16422 if (is_a <scalar_int_mode> (mode, &int_mode)
16423 && (GET_CODE (rtl) == FIX
16424 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16425 {
16426 op0 = convert_descriptor_to_mode (int_mode, op0);
16427 if (op0 == NULL)
16428 break;
16429 }
16430 mem_loc_result = op0;
16431 }
16432 break;
16433
16434 case CLZ:
16435 case CTZ:
16436 case FFS:
16437 if (is_a <scalar_int_mode> (mode, &int_mode))
16438 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16439 break;
16440
16441 case POPCOUNT:
16442 case PARITY:
16443 if (is_a <scalar_int_mode> (mode, &int_mode))
16444 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16445 break;
16446
16447 case BSWAP:
16448 if (is_a <scalar_int_mode> (mode, &int_mode))
16449 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16450 break;
16451
16452 case ROTATE:
16453 case ROTATERT:
16454 if (is_a <scalar_int_mode> (mode, &int_mode))
16455 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16456 break;
16457
16458 case COMPARE:
16459 /* In theory, we could implement COMPARE. */
16460 /* DWARF cannot represent the unsigned compare operations
16461 natively. */
16462 case SS_MULT:
16463 case US_MULT:
16464 case SS_DIV:
16465 case US_DIV:
16466 case SS_PLUS:
16467 case US_PLUS:
16468 case SS_MINUS:
16469 case US_MINUS:
16470 case SS_NEG:
16471 case US_NEG:
16472 case SS_ABS:
16473 case SS_ASHIFT:
16474 case US_ASHIFT:
16475 case SS_TRUNCATE:
16476 case US_TRUNCATE:
16477 case UNORDERED:
16478 case ORDERED:
16479 case UNEQ:
16480 case UNGE:
16481 case UNGT:
16482 case UNLE:
16483 case UNLT:
16484 case LTGT:
16485 case FRACT_CONVERT:
16486 case UNSIGNED_FRACT_CONVERT:
16487 case SAT_FRACT:
16488 case UNSIGNED_SAT_FRACT:
16489 case SQRT:
16490 case ASM_OPERANDS:
16491 case VEC_MERGE:
16492 case VEC_SELECT:
16493 case VEC_CONCAT:
16494 case VEC_DUPLICATE:
16495 case VEC_SERIES:
16496 case HIGH:
16497 case FMA:
16498 case STRICT_LOW_PART:
16499 case CONST_VECTOR:
16500 case CONST_FIXED:
16501 case CLRSB:
16502 case CLOBBER:
16503 break;
16504
16505 case CONST_STRING:
16506 resolve_one_addr (&rtl);
16507 goto symref;
16508
16509 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16510 the expression. An UNSPEC rtx represents a raw DWARF operation;
16511 new_loc_descr is called for it to build the operation directly.
16512 Otherwise mem_loc_descriptor is called recursively. */
16513 case PARALLEL:
16514 {
16515 int index = 0;
16516 dw_loc_descr_ref exp_result = NULL;
16517
16518 for (; index < XVECLEN (rtl, 0); index++)
16519 {
16520 rtx elem = XVECEXP (rtl, 0, index);
16521 if (GET_CODE (elem) == UNSPEC)
16522 {
16523 /* Each DWARF operation UNSPEC contains two operands; if
16524 an operand is not used for the operation, const0_rtx is
16525 passed in its place. */
16526 gcc_assert (XVECLEN (elem, 0) == 2);
16527
16528 HOST_WIDE_INT dw_op = XINT (elem, 1);
16529 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16530 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16531 exp_result
16532 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16533 oprnd2);
16534 }
16535 else
16536 exp_result
16537 = mem_loc_descriptor (elem, mode, mem_mode,
16538 VAR_INIT_STATUS_INITIALIZED);
16539
16540 if (!mem_loc_result)
16541 mem_loc_result = exp_result;
16542 else
16543 add_loc_descr (&mem_loc_result, exp_result);
16544 }
16545
16546 break;
16547 }
16548
16549 default:
16550 if (flag_checking)
16551 {
16552 print_rtl (stderr, rtl);
16553 gcc_unreachable ();
16554 }
16555 break;
16556 }
16557
16558 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16559 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16560
16561 return mem_loc_result;
16562 }
16563
16564 /* Return a descriptor that describes the concatenation of two locations.
16565 This typically describes the two parts of a complex variable. */
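/* Illustrative sketch, not part of the original source: for a complex
double whose real and imaginary parts live in two 8-byte registers, the
result is roughly DW_OP_regN; DW_OP_piece 8; DW_OP_regM; DW_OP_piece 8. */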
16566
16567 static dw_loc_descr_ref
16568 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16569 {
16570 /* At present we only track constant-sized pieces. */
16571 unsigned int size0, size1;
16572 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16573 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16574 return 0;
16575
16576 dw_loc_descr_ref cc_loc_result = NULL;
16577 dw_loc_descr_ref x0_ref
16578 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16579 dw_loc_descr_ref x1_ref
16580 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16581
16582 if (x0_ref == 0 || x1_ref == 0)
16583 return 0;
16584
16585 cc_loc_result = x0_ref;
16586 add_loc_descr_op_piece (&cc_loc_result, size0);
16587
16588 add_loc_descr (&cc_loc_result, x1_ref);
16589 add_loc_descr_op_piece (&cc_loc_result, size1);
16590
16591 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16592 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16593
16594 return cc_loc_result;
16595 }
16596
16597 /* Return a descriptor that describes the concatenation of N
16598 locations. */
16599
16600 static dw_loc_descr_ref
16601 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16602 {
16603 unsigned int i;
16604 dw_loc_descr_ref cc_loc_result = NULL;
16605 unsigned int n = XVECLEN (concatn, 0);
16606 unsigned int size;
16607
16608 for (i = 0; i < n; ++i)
16609 {
16610 dw_loc_descr_ref ref;
16611 rtx x = XVECEXP (concatn, 0, i);
16612
16613 /* At present we only track constant-sized pieces. */
16614 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16615 return NULL;
16616
16617 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16618 if (ref == NULL)
16619 return NULL;
16620
16621 add_loc_descr (&cc_loc_result, ref);
16622 add_loc_descr_op_piece (&cc_loc_result, size);
16623 }
16624
16625 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16626 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16627
16628 return cc_loc_result;
16629 }
16630
16631 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16632 for DEBUG_IMPLICIT_PTR RTL. */
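/* Illustrative sketch, not part of the original source: if a pointer was
optimized away but is known to point OFFSET bytes into an object that has
a DIE, the result is DW_OP_implicit_pointer <DIE reference>, OFFSET
(DW_OP_GNU_implicit_pointer before DWARF 5). */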
16633
16634 static dw_loc_descr_ref
16635 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16636 {
16637 dw_loc_descr_ref ret;
16638 dw_die_ref ref;
16639
16640 if (dwarf_strict && dwarf_version < 5)
16641 return NULL;
16642 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16643 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16644 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16645 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16646 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16647 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16648 if (ref)
16649 {
16650 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16651 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16652 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16653 }
16654 else
16655 {
16656 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16657 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16658 }
16659 return ret;
16660 }
16661
16662 /* Output a proper Dwarf location descriptor for a variable or parameter
16663 which is either allocated in a register or in a memory location. For a
16664 register, we just generate an OP_REG and the register number. For a
16665 memory location we provide a Dwarf postfix expression describing how to
16666 generate the (dynamic) address of the object onto the address stack.
16667
16668 MODE is mode of the decl if this loc_descriptor is going to be used in
16669 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16670 allowed, VOIDmode otherwise.
16671
16672 If we don't know how to describe it, return 0. */
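/* Illustrative sketch, not part of the original source: a variable allocated
to DWARF register 3 is described simply as DW_OP_reg3, whereas one stored at
frame-pointer offset -16 gets an address expression such as DW_OP_fbreg -16
(or DW_OP_breg<n> -16, depending on how the frame base is described). */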
16673
16674 static dw_loc_descr_ref
16675 loc_descriptor (rtx rtl, machine_mode mode,
16676 enum var_init_status initialized)
16677 {
16678 dw_loc_descr_ref loc_result = NULL;
16679 scalar_int_mode int_mode;
16680
16681 switch (GET_CODE (rtl))
16682 {
16683 case SUBREG:
16684 /* The case of a subreg may arise when we have a local (register)
16685 variable or a formal (register) parameter which doesn't quite fill
16686 up an entire register. For now, just assume that it is
16687 legitimate to make the Dwarf info refer to the whole register which
16688 contains the given subreg. */
16689 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16690 loc_result = loc_descriptor (SUBREG_REG (rtl),
16691 GET_MODE (SUBREG_REG (rtl)), initialized);
16692 else
16693 goto do_default;
16694 break;
16695
16696 case REG:
16697 loc_result = reg_loc_descriptor (rtl, initialized);
16698 break;
16699
16700 case MEM:
16701 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16702 GET_MODE (rtl), initialized);
16703 if (loc_result == NULL)
16704 loc_result = tls_mem_loc_descriptor (rtl);
16705 if (loc_result == NULL)
16706 {
16707 rtx new_rtl = avoid_constant_pool_reference (rtl);
16708 if (new_rtl != rtl)
16709 loc_result = loc_descriptor (new_rtl, mode, initialized);
16710 }
16711 break;
16712
16713 case CONCAT:
16714 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16715 initialized);
16716 break;
16717
16718 case CONCATN:
16719 loc_result = concatn_loc_descriptor (rtl, initialized);
16720 break;
16721
16722 case VAR_LOCATION:
16723 /* Single part. */
16724 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16725 {
16726 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16727 if (GET_CODE (loc) == EXPR_LIST)
16728 loc = XEXP (loc, 0);
16729 loc_result = loc_descriptor (loc, mode, initialized);
16730 break;
16731 }
16732
16733 rtl = XEXP (rtl, 1);
16734 /* FALLTHRU */
16735
16736 case PARALLEL:
16737 {
16738 rtvec par_elems = XVEC (rtl, 0);
16739 int num_elem = GET_NUM_ELEM (par_elems);
16740 machine_mode mode;
16741 int i, size;
16742
16743 /* Create the first one, so we have something to add to. */
16744 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16745 VOIDmode, initialized);
16746 if (loc_result == NULL)
16747 return NULL;
16748 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16749 /* At present we only track constant-sized pieces. */
16750 if (!GET_MODE_SIZE (mode).is_constant (&size))
16751 return NULL;
16752 add_loc_descr_op_piece (&loc_result, size);
16753 for (i = 1; i < num_elem; i++)
16754 {
16755 dw_loc_descr_ref temp;
16756
16757 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16758 VOIDmode, initialized);
16759 if (temp == NULL)
16760 return NULL;
16761 add_loc_descr (&loc_result, temp);
16762 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16763 /* At present we only track constant-sized pieces. */
16764 if (!GET_MODE_SIZE (mode).is_constant (&size))
16765 return NULL;
16766 add_loc_descr_op_piece (&loc_result, size);
16767 }
16768 }
16769 break;
16770
16771 case CONST_INT:
16772 if (mode != VOIDmode && mode != BLKmode)
16773 {
16774 int_mode = as_a <scalar_int_mode> (mode);
16775 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16776 INTVAL (rtl));
16777 }
16778 break;
16779
16780 case CONST_DOUBLE:
16781 if (mode == VOIDmode)
16782 mode = GET_MODE (rtl);
16783
16784 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16785 {
16786 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16787
16788 /* Note that a CONST_DOUBLE rtx could represent either an integer
16789 or a floating-point constant. A CONST_DOUBLE is used whenever
16790 the constant requires more than one word in order to be
16791 adequately represented. We output CONST_DOUBLEs as blocks. */
16792 scalar_mode smode = as_a <scalar_mode> (mode);
16793 loc_result = new_loc_descr (DW_OP_implicit_value,
16794 GET_MODE_SIZE (smode), 0);
16795 #if TARGET_SUPPORTS_WIDE_INT == 0
16796 if (!SCALAR_FLOAT_MODE_P (smode))
16797 {
16798 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16799 loc_result->dw_loc_oprnd2.v.val_double
16800 = rtx_to_double_int (rtl);
16801 }
16802 else
16803 #endif
16804 {
16805 unsigned int length = GET_MODE_SIZE (smode);
16806 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16807
16808 insert_float (rtl, array);
16809 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16810 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16811 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16812 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16813 }
16814 }
16815 break;
16816
16817 case CONST_WIDE_INT:
16818 if (mode == VOIDmode)
16819 mode = GET_MODE (rtl);
16820
16821 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16822 {
16823 int_mode = as_a <scalar_int_mode> (mode);
16824 loc_result = new_loc_descr (DW_OP_implicit_value,
16825 GET_MODE_SIZE (int_mode), 0);
16826 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16827 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16828 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16829 }
16830 break;
16831
16832 case CONST_VECTOR:
16833 if (mode == VOIDmode)
16834 mode = GET_MODE (rtl);
16835
16836 if (mode != VOIDmode
16837 /* The combination of a length and byte elt_size doesn't extend
16838 naturally to boolean vectors, where several elements are packed
16839 into the same byte. */
16840 && GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL
16841 && (dwarf_version >= 4 || !dwarf_strict))
16842 {
16843 unsigned int length;
16844 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16845 return NULL;
16846
16847 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16848 unsigned char *array
16849 = ggc_vec_alloc<unsigned char> (length * elt_size);
16850 unsigned int i;
16851 unsigned char *p;
16852 machine_mode imode = GET_MODE_INNER (mode);
16853
16854 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16855 switch (GET_MODE_CLASS (mode))
16856 {
16857 case MODE_VECTOR_INT:
16858 for (i = 0, p = array; i < length; i++, p += elt_size)
16859 {
16860 rtx elt = CONST_VECTOR_ELT (rtl, i);
16861 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16862 }
16863 break;
16864
16865 case MODE_VECTOR_FLOAT:
16866 for (i = 0, p = array; i < length; i++, p += elt_size)
16867 {
16868 rtx elt = CONST_VECTOR_ELT (rtl, i);
16869 insert_float (elt, p);
16870 }
16871 break;
16872
16873 default:
16874 gcc_unreachable ();
16875 }
16876
16877 loc_result = new_loc_descr (DW_OP_implicit_value,
16878 length * elt_size, 0);
16879 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16880 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16881 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16882 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16883 }
16884 break;
16885
16886 case CONST:
16887 if (mode == VOIDmode
16888 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16889 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16890 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16891 {
16892 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16893 break;
16894 }
16895 /* FALLTHROUGH */
16896 case SYMBOL_REF:
16897 if (!const_ok_for_output (rtl))
16898 break;
16899 /* FALLTHROUGH */
16900 case LABEL_REF:
16901 if (is_a <scalar_int_mode> (mode, &int_mode)
16902 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16903 && (dwarf_version >= 4 || !dwarf_strict))
16904 {
16905 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16906 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16907 vec_safe_push (used_rtx_array, rtl);
16908 }
16909 break;
16910
16911 case DEBUG_IMPLICIT_PTR:
16912 loc_result = implicit_ptr_descriptor (rtl, 0);
16913 break;
16914
16915 case PLUS:
16916 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16917 && CONST_INT_P (XEXP (rtl, 1)))
16918 {
16919 loc_result
16920 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16921 break;
16922 }
16923 /* FALLTHRU */
16924 do_default:
16925 default:
16926 if ((is_a <scalar_int_mode> (mode, &int_mode)
16927 && GET_MODE (rtl) == int_mode
16928 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16929 && dwarf_version >= 4)
16930 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16931 {
16932 /* Value expression. */
16933 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16934 if (loc_result)
16935 add_loc_descr (&loc_result,
16936 new_loc_descr (DW_OP_stack_value, 0, 0));
16937 }
16938 break;
16939 }
16940
16941 return loc_result;
16942 }
16943
16944 /* We need to figure out what section we should use as the base for the
16945 address ranges where a given location is valid.
16946 1. If this particular DECL has a section associated with it, use that.
16947 2. If this function has a section associated with it, use that.
16948 3. Otherwise, use the text section.
16949 XXX: If you split a variable across multiple sections, we won't notice. */
16950
16951 static const char *
16952 secname_for_decl (const_tree decl)
16953 {
16954 const char *secname;
16955
16956 if (VAR_OR_FUNCTION_DECL_P (decl)
16957 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16958 && DECL_SECTION_NAME (decl))
16959 secname = DECL_SECTION_NAME (decl);
16960 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16961 {
16962 if (in_cold_section_p)
16963 {
16964 section *sec = current_function_section ();
16965 if (sec->common.flags & SECTION_NAMED)
16966 return sec->named.name;
16967 }
16968 secname = DECL_SECTION_NAME (current_function_decl);
16969 }
16970 else if (cfun && in_cold_section_p)
16971 secname = crtl->subsections.cold_section_label;
16972 else
16973 secname = text_section_label;
16974
16975 return secname;
16976 }
16977
16978 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16979
16980 static bool
16981 decl_by_reference_p (tree decl)
16982 {
16983 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16984 || VAR_P (decl))
16985 && DECL_BY_REFERENCE (decl));
16986 }
16987
16988 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16989 for VARLOC. */
16990
16991 static dw_loc_descr_ref
16992 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16993 enum var_init_status initialized)
16994 {
16995 int have_address = 0;
16996 dw_loc_descr_ref descr;
16997 machine_mode mode;
16998
16999 if (want_address != 2)
17000 {
17001 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
17002 /* Single part. */
17003 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17004 {
17005 varloc = PAT_VAR_LOCATION_LOC (varloc);
17006 if (GET_CODE (varloc) == EXPR_LIST)
17007 varloc = XEXP (varloc, 0);
17008 mode = GET_MODE (varloc);
17009 if (MEM_P (varloc))
17010 {
17011 rtx addr = XEXP (varloc, 0);
17012 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
17013 mode, initialized);
17014 if (descr)
17015 have_address = 1;
17016 else
17017 {
17018 rtx x = avoid_constant_pool_reference (varloc);
17019 if (x != varloc)
17020 descr = mem_loc_descriptor (x, mode, VOIDmode,
17021 initialized);
17022 }
17023 }
17024 else
17025 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
17026 }
17027 else
17028 return 0;
17029 }
17030 else
17031 {
17032 if (GET_CODE (varloc) == VAR_LOCATION)
17033 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
17034 else
17035 mode = DECL_MODE (loc);
17036 descr = loc_descriptor (varloc, mode, initialized);
17037 have_address = 1;
17038 }
17039
17040 if (!descr)
17041 return 0;
17042
17043 if (want_address == 2 && !have_address
17044 && (dwarf_version >= 4 || !dwarf_strict))
17045 {
17046 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
17047 {
17048 expansion_failed (loc, NULL_RTX,
17049 "DWARF address size mismatch");
17050 return 0;
17051 }
17052 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
17053 have_address = 1;
17054 }
17055 /* Report the failure if we can't fill the request for an address. */
17056 if (want_address && !have_address)
17057 {
17058 expansion_failed (loc, NULL_RTX,
17059 "Want address and only have value");
17060 return 0;
17061 }
17062
17063 /* If we've got an address and don't want one, dereference. */
17064 if (!want_address && have_address)
17065 {
17066 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
17067 enum dwarf_location_atom op;
17068
17069 if (size > DWARF2_ADDR_SIZE || size == -1)
17070 {
17071 expansion_failed (loc, NULL_RTX,
17072 "DWARF address size mismatch");
17073 return 0;
17074 }
17075 else if (size == DWARF2_ADDR_SIZE)
17076 op = DW_OP_deref;
17077 else
17078 op = DW_OP_deref_size;
17079
17080 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17081 }
17082
17083 return descr;
17084 }
17085
17086 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17087 if it is not possible. */
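/* Illustrative sketch, not part of the original source: a byte-aligned
32-bit piece at bit offset 0 becomes DW_OP_piece 4, while a 13-bit piece
(or one at a nonzero bit offset) needs DW_OP_bit_piece 13, <offset>, which
is only emitted for DWARF 3 and later or when -gstrict-dwarf is not in
force. */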
17088
17089 static dw_loc_descr_ref
17090 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17091 {
17092 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17093 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17094 else if (dwarf_version >= 3 || !dwarf_strict)
17095 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17096 else
17097 return NULL;
17098 }
17099
17100 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17101 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
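/* Illustrative sketch, not part of the original source: for a variable split
by SRA whose first four bytes live in a register while the rest was optimized
out, the result is roughly DW_OP_reg<n>; DW_OP_piece 4; DW_OP_piece <rest>,
where the trailing piece with no preceding location marks the optimized-out
bits. */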
17102
17103 static dw_loc_descr_ref
17104 dw_sra_loc_expr (tree decl, rtx loc)
17105 {
17106 rtx p;
17107 unsigned HOST_WIDE_INT padsize = 0;
17108 dw_loc_descr_ref descr, *descr_tail;
17109 unsigned HOST_WIDE_INT decl_size;
17110 rtx varloc;
17111 enum var_init_status initialized;
17112
17113 if (DECL_SIZE (decl) == NULL
17114 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17115 return NULL;
17116
17117 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17118 descr = NULL;
17119 descr_tail = &descr;
17120
17121 for (p = loc; p; p = XEXP (p, 1))
17122 {
17123 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17124 rtx loc_note = *decl_piece_varloc_ptr (p);
17125 dw_loc_descr_ref cur_descr;
17126 dw_loc_descr_ref *tail, last = NULL;
17127 unsigned HOST_WIDE_INT opsize = 0;
17128
17129 if (loc_note == NULL_RTX
17130 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17131 {
17132 padsize += bitsize;
17133 continue;
17134 }
17135 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17136 varloc = NOTE_VAR_LOCATION (loc_note);
17137 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17138 if (cur_descr == NULL)
17139 {
17140 padsize += bitsize;
17141 continue;
17142 }
17143
17144 /* Check that cur_descr either doesn't use
17145 DW_OP_*piece operations, or their sum is equal
17146 to bitsize. Otherwise we can't embed it. */
17147 for (tail = &cur_descr; *tail != NULL;
17148 tail = &(*tail)->dw_loc_next)
17149 if ((*tail)->dw_loc_opc == DW_OP_piece)
17150 {
17151 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17152 * BITS_PER_UNIT;
17153 last = *tail;
17154 }
17155 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17156 {
17157 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17158 last = *tail;
17159 }
17160
17161 if (last != NULL && opsize != bitsize)
17162 {
17163 padsize += bitsize;
17164 /* Discard the current piece of the descriptor and release any
17165 addr_table entries it uses. */
17166 remove_loc_list_addr_table_entries (cur_descr);
17167 continue;
17168 }
17169
17170 /* If there is a hole, add DW_OP_*piece after empty DWARF
17171 expression, which means that those bits are optimized out. */
17172 if (padsize)
17173 {
17174 if (padsize > decl_size)
17175 {
17176 remove_loc_list_addr_table_entries (cur_descr);
17177 goto discard_descr;
17178 }
17179 decl_size -= padsize;
17180 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17181 if (*descr_tail == NULL)
17182 {
17183 remove_loc_list_addr_table_entries (cur_descr);
17184 goto discard_descr;
17185 }
17186 descr_tail = &(*descr_tail)->dw_loc_next;
17187 padsize = 0;
17188 }
17189 *descr_tail = cur_descr;
17190 descr_tail = tail;
17191 if (bitsize > decl_size)
17192 goto discard_descr;
17193 decl_size -= bitsize;
17194 if (last == NULL)
17195 {
17196 HOST_WIDE_INT offset = 0;
17197 if (GET_CODE (varloc) == VAR_LOCATION
17198 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17199 {
17200 varloc = PAT_VAR_LOCATION_LOC (varloc);
17201 if (GET_CODE (varloc) == EXPR_LIST)
17202 varloc = XEXP (varloc, 0);
17203 }
17204 do
17205 {
17206 if (GET_CODE (varloc) == CONST
17207 || GET_CODE (varloc) == SIGN_EXTEND
17208 || GET_CODE (varloc) == ZERO_EXTEND)
17209 varloc = XEXP (varloc, 0);
17210 else if (GET_CODE (varloc) == SUBREG)
17211 varloc = SUBREG_REG (varloc);
17212 else
17213 break;
17214 }
17215 while (1);
17216 /* The DW_OP_bit_piece offset should be zero for register
17217 or implicit location descriptions and empty location
17218 descriptions, but for memory addresses it needs a big-endian
17219 adjustment. */
17220 if (MEM_P (varloc))
17221 {
17222 unsigned HOST_WIDE_INT memsize;
17223 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17224 goto discard_descr;
17225 memsize *= BITS_PER_UNIT;
17226 if (memsize != bitsize)
17227 {
17228 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17229 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17230 goto discard_descr;
17231 if (memsize < bitsize)
17232 goto discard_descr;
17233 if (BITS_BIG_ENDIAN)
17234 offset = memsize - bitsize;
17235 }
17236 }
17237
17238 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17239 if (*descr_tail == NULL)
17240 goto discard_descr;
17241 descr_tail = &(*descr_tail)->dw_loc_next;
17242 }
17243 }
17244
17245 /* If there were any non-empty expressions, add padding till the end of
17246 the decl. */
17247 if (descr != NULL && decl_size != 0)
17248 {
17249 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17250 if (*descr_tail == NULL)
17251 goto discard_descr;
17252 }
17253 return descr;
17254
17255 discard_descr:
17256 /* Discard the descriptor and release any addr_table entries it uses. */
17257 remove_loc_list_addr_table_entries (descr);
17258 return NULL;
17259 }
17260
17261 /* Return the DWARF representation of the location list LOC_LIST of
17262 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17263 function. */
17264
17265 static dw_loc_list_ref
17266 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17267 {
17268 const char *endname, *secname;
17269 var_loc_view endview;
17270 rtx varloc;
17271 enum var_init_status initialized;
17272 struct var_loc_node *node;
17273 dw_loc_descr_ref descr;
17274 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17275 dw_loc_list_ref list = NULL;
17276 dw_loc_list_ref *listp = &list;
17277
17278 /* Now that we know what section we are using for a base,
17279 actually construct the list of locations.
17280 The first location information is what is passed to the
17281 function that creates the location list, and the remaining
17282 locations just get added on to that list.
17283 Note that we only know the start address for a location
17284 (i.e. location changes), so to build the range, we use
17285 the range [current location start, next location start].
17286 This means we have to special case the last node, and generate
17287 a range of [last location start, end of function label]. */
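/* Illustrative sketch, not part of the original source: if a variable's
location changes at labels L1, L2 and L3 and the function ends at LFE,
the list entries cover [L1, L2), [L2, L3) and [L3, LFE), each paired with
the location expression in effect at its start (L1, L2, L3 and LFE are
hypothetical label names). */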
17288
17289 if (cfun && crtl->has_bb_partition)
17290 {
17291 bool save_in_cold_section_p = in_cold_section_p;
17292 in_cold_section_p = first_function_block_is_cold;
17293 if (loc_list->last_before_switch == NULL)
17294 in_cold_section_p = !in_cold_section_p;
17295 secname = secname_for_decl (decl);
17296 in_cold_section_p = save_in_cold_section_p;
17297 }
17298 else
17299 secname = secname_for_decl (decl);
17300
17301 for (node = loc_list->first; node; node = node->next)
17302 {
17303 bool range_across_switch = false;
17304 if (GET_CODE (node->loc) == EXPR_LIST
17305 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17306 {
17307 if (GET_CODE (node->loc) == EXPR_LIST)
17308 {
17309 descr = NULL;
17310 /* This requires DW_OP_{,bit_}piece, which is not usable
17311 inside DWARF expressions. */
17312 if (want_address == 2)
17313 descr = dw_sra_loc_expr (decl, node->loc);
17314 }
17315 else
17316 {
17317 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17318 varloc = NOTE_VAR_LOCATION (node->loc);
17319 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17320 }
17321 if (descr)
17322 {
17323 /* If section switch happens in between node->label
17324 and node->next->label (or end of function) and
17325 we can't emit it as a single entry list,
17326 emit two ranges, first one ending at the end
17327 of first partition and second one starting at the
17328 beginning of second partition. */
17329 if (node == loc_list->last_before_switch
17330 && (node != loc_list->first || loc_list->first->next
17331 /* If we are to emit a view number, we will emit
17332 a loclist rather than a single location
17333 expression for the entire function (see
17334 loc_list_has_views), so we have to split the
17335 range that straddles across partitions. */
17336 || !ZERO_VIEW_P (node->view))
17337 && current_function_decl)
17338 {
17339 endname = cfun->fde->dw_fde_end;
17340 endview = 0;
17341 range_across_switch = true;
17342 }
17343 /* The variable has a location between NODE->LABEL and
17344 NODE->NEXT->LABEL. */
17345 else if (node->next)
17346 endname = node->next->label, endview = node->next->view;
17347 /* If the variable has a location at the last label
17348 it keeps its location until the end of function. */
17349 else if (!current_function_decl)
17350 endname = text_end_label, endview = 0;
17351 else
17352 {
17353 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17354 current_function_funcdef_no);
17355 endname = ggc_strdup (label_id);
17356 endview = 0;
17357 }
17358
17359 *listp = new_loc_list (descr, node->label, node->view,
17360 endname, endview, secname);
17361 if (TREE_CODE (decl) == PARM_DECL
17362 && node == loc_list->first
17363 && NOTE_P (node->loc)
17364 && strcmp (node->label, endname) == 0)
17365 (*listp)->force = true;
17366 listp = &(*listp)->dw_loc_next;
17367 }
17368 }
17369
17370 if (cfun
17371 && crtl->has_bb_partition
17372 && node == loc_list->last_before_switch)
17373 {
17374 bool save_in_cold_section_p = in_cold_section_p;
17375 in_cold_section_p = !first_function_block_is_cold;
17376 secname = secname_for_decl (decl);
17377 in_cold_section_p = save_in_cold_section_p;
17378 }
17379
17380 if (range_across_switch)
17381 {
17382 if (GET_CODE (node->loc) == EXPR_LIST)
17383 descr = dw_sra_loc_expr (decl, node->loc);
17384 else
17385 {
17386 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17387 varloc = NOTE_VAR_LOCATION (node->loc);
17388 descr = dw_loc_list_1 (decl, varloc, want_address,
17389 initialized);
17390 }
17391 gcc_assert (descr);
17392 /* The variable has a location between NODE->LABEL and
17393 NODE->NEXT->LABEL. */
17394 if (node->next)
17395 endname = node->next->label, endview = node->next->view;
17396 else
17397 endname = cfun->fde->dw_fde_second_end, endview = 0;
17398 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17399 endname, endview, secname);
17400 listp = &(*listp)->dw_loc_next;
17401 }
17402 }
17403
17404 /* Try to avoid the overhead of a location list by emitting a single
17405 location expression instead, but only if we didn't have more than one
17406 location entry in the first place. If some entries were not
17407 representable, we don't want to pretend that a single entry that was
17408 representable applies to the entire scope in which the variable is
17409 available. */
17410 if (list && loc_list->first->next)
17411 gen_llsym (list);
17412 else
17413 maybe_gen_llsym (list);
17414
17415 return list;
17416 }
17417
17418 /* Return true if the loc_list has only a single element and thus can be
17419 represented as a location description. */
17420
17421 static bool
17422 single_element_loc_list_p (dw_loc_list_ref list)
17423 {
17424 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17425 return !list->ll_symbol;
17426 }
17427
17428 /* Duplicate a single element of location list. */
17429
17430 static inline dw_loc_descr_ref
17431 copy_loc_descr (dw_loc_descr_ref ref)
17432 {
17433 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17434 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17435 return copy;
17436 }
17437
17438 /* To each location in list LIST append loc descr REF. */
17439
17440 static void
17441 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17442 {
17443 dw_loc_descr_ref copy;
17444 add_loc_descr (&list->expr, ref);
17445 list = list->dw_loc_next;
17446 while (list)
17447 {
17448 copy = copy_loc_descr (ref);
17449 add_loc_descr (&list->expr, copy);
17450 while (copy->dw_loc_next)
17451 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17452 list = list->dw_loc_next;
17453 }
17454 }
17455
17456 /* To each location in list LIST prepend loc descr REF. */
17457
17458 static void
17459 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17460 {
17461 dw_loc_descr_ref copy;
17462 dw_loc_descr_ref ref_end = list->expr;
17463 add_loc_descr (&ref, list->expr);
17464 list->expr = ref;
17465 list = list->dw_loc_next;
17466 while (list)
17467 {
17468 dw_loc_descr_ref end = list->expr;
17469 list->expr = copy = copy_loc_descr (ref);
17470 while (copy->dw_loc_next != ref_end)
17471 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17472 copy->dw_loc_next = end;
17473 list = list->dw_loc_next;
17474 }
17475 }
17476
17477 /* Given two lists RET and LIST,
17478 produce the location list that results from adding the expression in LIST
17479 to the expression in RET at each position in the program.
17480 Might be destructive on both RET and LIST.
17481
17482 TODO: We handle only the simple cases of RET or LIST having at most one
17483 element. The general case would involve sorting the lists in program order
17484 and merging them, which will need some additional work.
17485 Adding that will improve the quality of debug info, especially for SRA-ed
17486 structures. */
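/* A purely illustrative sketch (hypothetical labels and expressions): if
   *RET is the single-entry list { [L1, L2): DW_OP_fbreg -8 } and LIST is the
   single-entry list { DW_OP_lit4 }, the result appends DW_OP_lit4 to each
   entry of *RET, giving { [L1, L2): DW_OP_fbreg -8; DW_OP_lit4 }.  Callers
   such as the do_binop path below then append the operator (e.g. DW_OP_plus)
   to each entry as well.  */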
17487
17488 static void
17489 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17490 {
17491 if (!list)
17492 return;
17493 if (!*ret)
17494 {
17495 *ret = list;
17496 return;
17497 }
17498 if (!list->dw_loc_next)
17499 {
17500 add_loc_descr_to_each (*ret, list->expr);
17501 return;
17502 }
17503 if (!(*ret)->dw_loc_next)
17504 {
17505 prepend_loc_descr_to_each (list, (*ret)->expr);
17506 *ret = list;
17507 return;
17508 }
17509 expansion_failed (NULL_TREE, NULL_RTX,
17510 "Don't know how to merge two non-trivial"
17511 " location lists.\n");
17512 *ret = NULL;
17513 return;
17514 }
17515
17516 /* LOC is a constant expression. Try our luck: look it up in the constant
17517 pool and return the loc_descr of its address. */
17518
17519 static dw_loc_descr_ref
17520 cst_pool_loc_descr (tree loc)
17521 {
17522 /* Get an RTL for this, if something has been emitted. */
17523 rtx rtl = lookup_constant_def (loc);
17524
17525 if (!rtl || !MEM_P (rtl))
17526 {
17527 gcc_assert (!rtl);
17528 return 0;
17529 }
17530 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17531
17532 /* TODO: We might get more coverage if we were actually delaying expansion
17533 of all expressions till the end of compilation, when constant pools are
17534 fully populated. */
17535 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17536 {
17537 expansion_failed (loc, NULL_RTX,
17538 "CST value in contant pool but not marked.");
17539 return 0;
17540 }
17541 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17542 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17543 }
17544
17545 /* Return a dw_loc_list representing the address of addr_expr LOC
17546 by looking for an inner INDIRECT_REF expression and turning
17547 it into simple arithmetic.
17548
17549 See loc_list_from_tree for the meaning of CONTEXT. */
17550
17551 static dw_loc_list_ref
17552 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17553 loc_descr_context *context)
17554 {
17555 tree obj, offset;
17556 poly_int64 bitsize, bitpos, bytepos;
17557 machine_mode mode;
17558 int unsignedp, reversep, volatilep = 0;
17559 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17560
17561 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17562 &bitsize, &bitpos, &offset, &mode,
17563 &unsignedp, &reversep, &volatilep);
17564 STRIP_NOPS (obj);
17565 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17566 {
17567 expansion_failed (loc, NULL_RTX, "bitfield access");
17568 return 0;
17569 }
17570 if (!INDIRECT_REF_P (obj))
17571 {
17572 expansion_failed (obj,
17573 NULL_RTX, "no indirect ref in inner refrence");
17574 return 0;
17575 }
17576 if (!offset && known_eq (bitpos, 0))
17577 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17578 context);
17579 else if (toplev
17580 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17581 && (dwarf_version >= 4 || !dwarf_strict))
17582 {
17583 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17584 if (!list_ret)
17585 return 0;
17586 if (offset)
17587 {
17588 /* Variable offset. */
17589 list_ret1 = loc_list_from_tree (offset, 0, context);
17590 if (list_ret1 == 0)
17591 return 0;
17592 add_loc_list (&list_ret, list_ret1);
17593 if (!list_ret)
17594 return 0;
17595 add_loc_descr_to_each (list_ret,
17596 new_loc_descr (DW_OP_plus, 0, 0));
17597 }
17598 HOST_WIDE_INT value;
17599 if (bytepos.is_constant (&value) && value > 0)
17600 add_loc_descr_to_each (list_ret,
17601 new_loc_descr (DW_OP_plus_uconst, value, 0));
17602 else if (maybe_ne (bytepos, 0))
17603 loc_list_plus_const (list_ret, bytepos);
17604 add_loc_descr_to_each (list_ret,
17605 new_loc_descr (DW_OP_stack_value, 0, 0));
17606 }
17607 return list_ret;
17608 }
17609
17610 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17611 all operations from LOC are nops, move to the last one. Insert into NOPS all
17612 operations that are skipped. */
17613
17614 static void
17615 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17616 hash_set<dw_loc_descr_ref> &nops)
17617 {
17618 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17619 {
17620 nops.add (loc);
17621 loc = loc->dw_loc_next;
17622 }
17623 }
17624
17625 /* Helper for loc_descr_without_nops: free the location description operation
17626 P. */
17627
17628 bool
17629 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17630 {
17631 ggc_free (loc);
17632 return true;
17633 }
17634
17635 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17636 finishes LOC. */
17637
17638 static void
17639 loc_descr_without_nops (dw_loc_descr_ref &loc)
17640 {
17641 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17642 return;
17643
17644 /* Set of all DW_OP_nop operations we remove. */
17645 hash_set<dw_loc_descr_ref> nops;
17646
17647 /* First, strip all prefix NOP operations in order to keep the head of the
17648 operations list. */
17649 loc_descr_to_next_no_nop (loc, nops);
17650
17651 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17652 {
17653 /* For control flow operations: strip "prefix" nops in destination
17654 labels. */
17655 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17656 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17657 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17658 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17659
17660 /* Do the same for the operations that follow, then move to the next
17661 iteration. */
17662 if (cur->dw_loc_next != NULL)
17663 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17664 cur = cur->dw_loc_next;
17665 }
17666
17667 nops.traverse<void *, free_loc_descr> (NULL);
17668 }
17669
17670
17671 struct dwarf_procedure_info;
17672
17673 /* Helper structure for location descriptions generation. */
17674 struct loc_descr_context
17675 {
17676 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17677 NULL_TREE if DW_OP_push_object_address is invalid for this location
17678 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17679 tree context_type;
17680 /* The ..._DECL node that should be translated as a
17681 DW_OP_push_object_address operation. */
17682 tree base_decl;
17683 /* Information about the DWARF procedure we are currently generating. NULL if
17684 we are not generating a DWARF procedure. */
17685 struct dwarf_procedure_info *dpi;
17686 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17687 by consumer. Used for DW_TAG_generic_subrange attributes. */
17688 bool placeholder_arg;
17689 /* True if PLACEHOLDER_EXPR has been seen. */
17690 bool placeholder_seen;
17691 };
17692
17693 /* DWARF procedures generation
17694
17695 DWARF expressions (aka. location descriptions) are used to encode variable
17696 things such as sizes or offsets. Such computations can have redundant parts
17697 that can be factorized in order to reduce the size of the output debug
17698 information. This is the whole point of DWARF procedures.
17699
17700 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17701 already factorized into functions ("size functions") in order to handle very
17702 big and complex types. Such functions are quite simple: they have integral
17703 arguments, they return an integral result and their body contains only a
17704 return statement with arithmetic expressions. This is the only kind of
17705 function we are interested in translating into DWARF procedures, here.
17706
17707 DWARF expressions and DWARF procedures are executed using a stack, so we have
17708 to define some calling convention for them to interact. Let's say that:
17709
17710 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17711 all arguments in reverse order (right-to-left) so that when the DWARF
17712 procedure execution starts, the first argument is the top of the stack.
17713
17714 - Then, when returning, the DWARF procedure must have consumed all arguments
17715 on the stack, must have pushed the result and touched nothing else.
17716
17717 - Each integral argument and the result have integral types that can be held
17718 in a single stack slot.
17719
17720 - We call "frame offset" the number of stack slots that are "under DWARF
17721 procedure control": it includes the arguments slots, the temporaries and
17722 the result slot. Thus, it is equal to the number of arguments when the
17723 procedure execution starts and must be equal to one (the result) when it
17724 returns. */
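
/* A minimal sketch of this convention, using a hypothetical DWARF procedure
   P that takes two integral arguments A and B and returns A + B:

     caller expression:              P's DW_AT_location:
       <push B>                        DW_OP_plus   ; pops A and B,
       <push A>                                     ; pushes A + B
       DW_OP_call4 <P>

   When P starts executing, its frame offset is 2 (the two argument slots);
   when it returns, the frame offset is 1 (just the result slot).  */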
17725
17726 /* Helper structure used when generating operations for a DWARF procedure. */
17727 struct dwarf_procedure_info
17728 {
17729 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17730 currently translated. */
17731 tree fndecl;
17732 /* The number of arguments FNDECL takes. */
17733 unsigned args_count;
17734 };
17735
17736 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17737 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17738 equate it to this DIE. */
17739
17740 static dw_die_ref
17741 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17742 dw_die_ref parent_die)
17743 {
17744 dw_die_ref dwarf_proc_die;
17745
17746 if ((dwarf_version < 3 && dwarf_strict)
17747 || location == NULL)
17748 return NULL;
17749
17750 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17751 if (fndecl)
17752 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17753 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17754 return dwarf_proc_die;
17755 }
17756
17757 /* Return whether TYPE is a supported type as a DWARF procedure argument
17758 type or return type (we handle only scalar types and pointer types that
17759 aren't wider than the DWARF expression evaluation stack). */
17760
17761 static bool
17762 is_handled_procedure_type (tree type)
17763 {
17764 return ((INTEGRAL_TYPE_P (type)
17765 || TREE_CODE (type) == OFFSET_TYPE
17766 || TREE_CODE (type) == POINTER_TYPE)
17767 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17768 }
17769
17770 /* Helper for resolve_args_picking: do the same but stop when coming across
17771 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17772 offset *before* evaluating the corresponding operation. */
17773
17774 static bool
17775 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17776 struct dwarf_procedure_info *dpi,
17777 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17778 {
17779 /* The "frame_offset" identifier is already used to name a macro... */
17780 unsigned frame_offset_ = initial_frame_offset;
17781 dw_loc_descr_ref l;
17782
17783 for (l = loc; l != NULL;)
17784 {
17785 bool existed;
17786 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17787
17788 /* If we already met this node, there is nothing to compute anymore. */
17789 if (existed)
17790 {
17791 /* Make sure that the stack size is consistent wherever the execution
17792 flow comes from. */
17793 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17794 break;
17795 }
17796 l_frame_offset = frame_offset_;
17797
17798 /* If needed, relocate the picking offset with respect to the frame
17799 offset. */
17800 if (l->frame_offset_rel)
17801 {
17802 unsigned HOST_WIDE_INT off;
17803 switch (l->dw_loc_opc)
17804 {
17805 case DW_OP_pick:
17806 off = l->dw_loc_oprnd1.v.val_unsigned;
17807 break;
17808 case DW_OP_dup:
17809 off = 0;
17810 break;
17811 case DW_OP_over:
17812 off = 1;
17813 break;
17814 default:
17815 gcc_unreachable ();
17816 }
17817 /* frame_offset_ is the size of the current stack frame, including
17818 incoming arguments. Besides, the arguments are pushed
17819 right-to-left. Thus, in order to access the Nth argument from
17820 this operation node, the picking has to skip temporaries *plus*
17821 one stack slot per argument (0 for the first one, 1 for the second
17822 one, etc.).
17823
17824 The targeted argument number (N) is already set as the operand,
17825 and the number of temporaries can be computed with:
17826 frame_offset_ - dpi->args_count */
17827 off += frame_offset_ - dpi->args_count;
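
/* Purely illustrative numbers: with dpi->args_count == 2 and a current
   frame_offset_ of 3 (one temporary above the arguments), argument 0
   becomes off = 0 + 1 = 1 (rewritten to DW_OP_over below) and argument 1
   becomes off = 1 + 1 = 2 (rewritten to DW_OP_pick 2).  */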
17828
17829 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17830 if (off > 255)
17831 return false;
17832
17833 if (off == 0)
17834 {
17835 l->dw_loc_opc = DW_OP_dup;
17836 l->dw_loc_oprnd1.v.val_unsigned = 0;
17837 }
17838 else if (off == 1)
17839 {
17840 l->dw_loc_opc = DW_OP_over;
17841 l->dw_loc_oprnd1.v.val_unsigned = 0;
17842 }
17843 else
17844 {
17845 l->dw_loc_opc = DW_OP_pick;
17846 l->dw_loc_oprnd1.v.val_unsigned = off;
17847 }
17848 }
17849
17850 /* Update frame_offset according to the effect the current operation has
17851 on the stack. */
17852 switch (l->dw_loc_opc)
17853 {
17854 case DW_OP_deref:
17855 case DW_OP_swap:
17856 case DW_OP_rot:
17857 case DW_OP_abs:
17858 case DW_OP_neg:
17859 case DW_OP_not:
17860 case DW_OP_plus_uconst:
17861 case DW_OP_skip:
17862 case DW_OP_reg0:
17863 case DW_OP_reg1:
17864 case DW_OP_reg2:
17865 case DW_OP_reg3:
17866 case DW_OP_reg4:
17867 case DW_OP_reg5:
17868 case DW_OP_reg6:
17869 case DW_OP_reg7:
17870 case DW_OP_reg8:
17871 case DW_OP_reg9:
17872 case DW_OP_reg10:
17873 case DW_OP_reg11:
17874 case DW_OP_reg12:
17875 case DW_OP_reg13:
17876 case DW_OP_reg14:
17877 case DW_OP_reg15:
17878 case DW_OP_reg16:
17879 case DW_OP_reg17:
17880 case DW_OP_reg18:
17881 case DW_OP_reg19:
17882 case DW_OP_reg20:
17883 case DW_OP_reg21:
17884 case DW_OP_reg22:
17885 case DW_OP_reg23:
17886 case DW_OP_reg24:
17887 case DW_OP_reg25:
17888 case DW_OP_reg26:
17889 case DW_OP_reg27:
17890 case DW_OP_reg28:
17891 case DW_OP_reg29:
17892 case DW_OP_reg30:
17893 case DW_OP_reg31:
17894 case DW_OP_bregx:
17895 case DW_OP_piece:
17896 case DW_OP_deref_size:
17897 case DW_OP_nop:
17898 case DW_OP_bit_piece:
17899 case DW_OP_implicit_value:
17900 case DW_OP_stack_value:
17901 break;
17902
17903 case DW_OP_addr:
17904 case DW_OP_const1u:
17905 case DW_OP_const1s:
17906 case DW_OP_const2u:
17907 case DW_OP_const2s:
17908 case DW_OP_const4u:
17909 case DW_OP_const4s:
17910 case DW_OP_const8u:
17911 case DW_OP_const8s:
17912 case DW_OP_constu:
17913 case DW_OP_consts:
17914 case DW_OP_dup:
17915 case DW_OP_over:
17916 case DW_OP_pick:
17917 case DW_OP_lit0:
17918 case DW_OP_lit1:
17919 case DW_OP_lit2:
17920 case DW_OP_lit3:
17921 case DW_OP_lit4:
17922 case DW_OP_lit5:
17923 case DW_OP_lit6:
17924 case DW_OP_lit7:
17925 case DW_OP_lit8:
17926 case DW_OP_lit9:
17927 case DW_OP_lit10:
17928 case DW_OP_lit11:
17929 case DW_OP_lit12:
17930 case DW_OP_lit13:
17931 case DW_OP_lit14:
17932 case DW_OP_lit15:
17933 case DW_OP_lit16:
17934 case DW_OP_lit17:
17935 case DW_OP_lit18:
17936 case DW_OP_lit19:
17937 case DW_OP_lit20:
17938 case DW_OP_lit21:
17939 case DW_OP_lit22:
17940 case DW_OP_lit23:
17941 case DW_OP_lit24:
17942 case DW_OP_lit25:
17943 case DW_OP_lit26:
17944 case DW_OP_lit27:
17945 case DW_OP_lit28:
17946 case DW_OP_lit29:
17947 case DW_OP_lit30:
17948 case DW_OP_lit31:
17949 case DW_OP_breg0:
17950 case DW_OP_breg1:
17951 case DW_OP_breg2:
17952 case DW_OP_breg3:
17953 case DW_OP_breg4:
17954 case DW_OP_breg5:
17955 case DW_OP_breg6:
17956 case DW_OP_breg7:
17957 case DW_OP_breg8:
17958 case DW_OP_breg9:
17959 case DW_OP_breg10:
17960 case DW_OP_breg11:
17961 case DW_OP_breg12:
17962 case DW_OP_breg13:
17963 case DW_OP_breg14:
17964 case DW_OP_breg15:
17965 case DW_OP_breg16:
17966 case DW_OP_breg17:
17967 case DW_OP_breg18:
17968 case DW_OP_breg19:
17969 case DW_OP_breg20:
17970 case DW_OP_breg21:
17971 case DW_OP_breg22:
17972 case DW_OP_breg23:
17973 case DW_OP_breg24:
17974 case DW_OP_breg25:
17975 case DW_OP_breg26:
17976 case DW_OP_breg27:
17977 case DW_OP_breg28:
17978 case DW_OP_breg29:
17979 case DW_OP_breg30:
17980 case DW_OP_breg31:
17981 case DW_OP_fbreg:
17982 case DW_OP_push_object_address:
17983 case DW_OP_call_frame_cfa:
17984 case DW_OP_GNU_variable_value:
17985 case DW_OP_GNU_addr_index:
17986 case DW_OP_GNU_const_index:
17987 ++frame_offset_;
17988 break;
17989
17990 case DW_OP_drop:
17991 case DW_OP_xderef:
17992 case DW_OP_and:
17993 case DW_OP_div:
17994 case DW_OP_minus:
17995 case DW_OP_mod:
17996 case DW_OP_mul:
17997 case DW_OP_or:
17998 case DW_OP_plus:
17999 case DW_OP_shl:
18000 case DW_OP_shr:
18001 case DW_OP_shra:
18002 case DW_OP_xor:
18003 case DW_OP_bra:
18004 case DW_OP_eq:
18005 case DW_OP_ge:
18006 case DW_OP_gt:
18007 case DW_OP_le:
18008 case DW_OP_lt:
18009 case DW_OP_ne:
18010 case DW_OP_regx:
18011 case DW_OP_xderef_size:
18012 --frame_offset_;
18013 break;
18014
18015 case DW_OP_call2:
18016 case DW_OP_call4:
18017 case DW_OP_call_ref:
18018 {
18019 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
18020 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
18021
18022 if (stack_usage == NULL)
18023 return false;
18024 frame_offset_ += *stack_usage;
18025 break;
18026 }
18027
18028 case DW_OP_implicit_pointer:
18029 case DW_OP_entry_value:
18030 case DW_OP_const_type:
18031 case DW_OP_regval_type:
18032 case DW_OP_deref_type:
18033 case DW_OP_convert:
18034 case DW_OP_reinterpret:
18035 case DW_OP_form_tls_address:
18036 case DW_OP_GNU_push_tls_address:
18037 case DW_OP_GNU_uninit:
18038 case DW_OP_GNU_encoded_addr:
18039 case DW_OP_GNU_implicit_pointer:
18040 case DW_OP_GNU_entry_value:
18041 case DW_OP_GNU_const_type:
18042 case DW_OP_GNU_regval_type:
18043 case DW_OP_GNU_deref_type:
18044 case DW_OP_GNU_convert:
18045 case DW_OP_GNU_reinterpret:
18046 case DW_OP_GNU_parameter_ref:
18047 /* loc_list_from_tree will probably not output these operations for
18048 size functions, so assume they will not appear here. */
18049 /* Fall through... */
18050
18051 default:
18052 gcc_unreachable ();
18053 }
18054
18055 /* Now, follow the control flow (except subroutine calls). */
18056 switch (l->dw_loc_opc)
18057 {
18058 case DW_OP_bra:
18059 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
18060 frame_offsets))
18061 return false;
18062 /* Fall through. */
18063
18064 case DW_OP_skip:
18065 l = l->dw_loc_oprnd1.v.val_loc;
18066 break;
18067
18068 case DW_OP_stack_value:
18069 return true;
18070
18071 default:
18072 l = l->dw_loc_next;
18073 break;
18074 }
18075 }
18076
18077 return true;
18078 }
18079
18080 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18081 operations) in order to resolve the operand of DW_OP_pick operations that
18082 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18083 offset *before* LOC is executed. Return whether all relocations were
18084 successful. */
18085
18086 static bool
18087 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18088 struct dwarf_procedure_info *dpi)
18089 {
18090 /* Associate to all visited operations the frame offset *before* evaluating
18091 this operation. */
18092 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18093
18094 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18095 frame_offsets);
18096 }
18097
18098 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18099 Return NULL if it is not possible. */
18100
18101 static dw_die_ref
18102 function_to_dwarf_procedure (tree fndecl)
18103 {
18104 struct loc_descr_context ctx;
18105 struct dwarf_procedure_info dpi;
18106 dw_die_ref dwarf_proc_die;
18107 tree tree_body = DECL_SAVED_TREE (fndecl);
18108 dw_loc_descr_ref loc_body, epilogue;
18109
18110 tree cursor;
18111 unsigned i;
18112
18113 /* Do not generate multiple DWARF procedures for the same function
18114 declaration. */
18115 dwarf_proc_die = lookup_decl_die (fndecl);
18116 if (dwarf_proc_die != NULL)
18117 return dwarf_proc_die;
18118
18119 /* DWARF procedures are available starting with the DWARFv3 standard. */
18120 if (dwarf_version < 3 && dwarf_strict)
18121 return NULL;
18122
18123 /* We handle only functions for which we still have a body, that return a
18124 supported type and that take arguments with supported types. Note that
18125 there is no point translating functions that return nothing. */
18126 if (tree_body == NULL_TREE
18127 || DECL_RESULT (fndecl) == NULL_TREE
18128 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18129 return NULL;
18130
18131 for (cursor = DECL_ARGUMENTS (fndecl);
18132 cursor != NULL_TREE;
18133 cursor = TREE_CHAIN (cursor))
18134 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18135 return NULL;
18136
18137 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18138 if (TREE_CODE (tree_body) != RETURN_EXPR)
18139 return NULL;
18140 tree_body = TREE_OPERAND (tree_body, 0);
18141 if (TREE_CODE (tree_body) != MODIFY_EXPR
18142 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18143 return NULL;
18144 tree_body = TREE_OPERAND (tree_body, 1);
18145
18146 /* Try to translate the body expression itself. Note that this will probably
18147 cause an infinite recursion if its call graph has a cycle. This is very
18148 unlikely for size functions, however, so don't bother with such things at
18149 the moment. */
18150 ctx.context_type = NULL_TREE;
18151 ctx.base_decl = NULL_TREE;
18152 ctx.dpi = &dpi;
18153 ctx.placeholder_arg = false;
18154 ctx.placeholder_seen = false;
18155 dpi.fndecl = fndecl;
18156 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18157 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18158 if (!loc_body)
18159 return NULL;
18160
18161 /* After evaluating all operands in "loc_body", we should still have on the
18162 stack all arguments plus the desired function result (top of the stack).
18163 Generate code in order to keep only the result in our stack frame. */
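/* For instance (sketch only): with two arguments, the epilogue built below
   is DW_OP_swap; DW_OP_drop; DW_OP_swap; DW_OP_drop, each pair discarding
   one argument slot from under the result.  */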
18164 epilogue = NULL;
18165 for (i = 0; i < dpi.args_count; ++i)
18166 {
18167 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18168 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18169 op_couple->dw_loc_next->dw_loc_next = epilogue;
18170 epilogue = op_couple;
18171 }
18172 add_loc_descr (&loc_body, epilogue);
18173 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18174 return NULL;
18175
18176 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
18177 because they are considered useful. Now that there is an epilogue, they
18178 no longer are, so give it another try. */
18179 loc_descr_without_nops (loc_body);
18180
18181 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18182 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
18183 though, given that size functions do not come from source, so they should
18184 not have a dedicated DW_TAG_subprogram DIE. */
18185 dwarf_proc_die
18186 = new_dwarf_proc_die (loc_body, fndecl,
18187 get_context_die (DECL_CONTEXT (fndecl)));
18188
18189 /* The called DWARF procedure consumes one stack slot per argument and
18190 returns one stack slot. */
18191 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18192
18193 return dwarf_proc_die;
18194 }
18195
18196
18197 /* Generate a DWARF location list representing LOC.
18198 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
18199 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18200 If WANT_ADDRESS is 2, an expression computing an address usable in a
18201 location description is returned (i.e. DW_OP_reg can be used
18202 to refer to register values).
18203
18204 CONTEXT provides information to customize the location descriptions
18205 generation. Its context_type field specifies what type is implicitly
18206 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18207 will not be generated.
18208
18209 Its DPI field determines whether we are generating a DWARF expression for a
18210 DWARF procedure, so PARM_DECL references are processed specifically.
18211
18212 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18213 and dpi fields were null. */
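
/* A purely illustrative example (hypothetical location): for a variable
   whose location expression is DW_OP_fbreg -16 (an address), WANT_ADDRESS
   of 1 or 2 yields DW_OP_fbreg -16 itself, while WANT_ADDRESS of 0 yields
   DW_OP_fbreg -16; DW_OP_deref for an address-sized value, since the
   address must be dereferenced to obtain the value (see the "dereference"
   step at the end of this function).  */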
18214
18215 static dw_loc_list_ref
18216 loc_list_from_tree_1 (tree loc, int want_address,
18217 struct loc_descr_context *context)
18218 {
18219 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18220 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18221 int have_address = 0;
18222 enum dwarf_location_atom op;
18223
18224 /* ??? Most of the time we do not take proper care of sign/zero
18225 extending the values. Hopefully this won't be a real
18226 problem... */
18227
18228 if (context != NULL
18229 && context->base_decl == loc
18230 && want_address == 0)
18231 {
18232 if (dwarf_version >= 3 || !dwarf_strict)
18233 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18234 NULL, 0, NULL, 0, NULL);
18235 else
18236 return NULL;
18237 }
18238
18239 switch (TREE_CODE (loc))
18240 {
18241 case ERROR_MARK:
18242 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18243 return 0;
18244
18245 case PLACEHOLDER_EXPR:
18246 /* This case involves extracting fields from an object to determine the
18247 position of other fields. It is supposed to appear only as the first
18248 operand of COMPONENT_REF nodes and to reference precisely the type
18249 that the context allows or its enclosing type. */
18250 if (context != NULL
18251 && (TREE_TYPE (loc) == context->context_type
18252 || TREE_TYPE (loc) == TYPE_CONTEXT (context->context_type))
18253 && want_address >= 1)
18254 {
18255 if (dwarf_version >= 3 || !dwarf_strict)
18256 {
18257 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18258 have_address = 1;
18259 break;
18260 }
18261 else
18262 return NULL;
18263 }
18264 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18265 the single argument passed by consumer. */
18266 else if (context != NULL
18267 && context->placeholder_arg
18268 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18269 && want_address == 0)
18270 {
18271 ret = new_loc_descr (DW_OP_pick, 0, 0);
18272 ret->frame_offset_rel = 1;
18273 context->placeholder_seen = true;
18274 break;
18275 }
18276 else
18277 expansion_failed (loc, NULL_RTX,
18278 "PLACEHOLDER_EXPR for an unexpected type");
18279 break;
18280
18281 case CALL_EXPR:
18282 {
18283 const int nargs = call_expr_nargs (loc);
18284 tree callee = get_callee_fndecl (loc);
18285 int i;
18286 dw_die_ref dwarf_proc;
18287
18288 if (callee == NULL_TREE)
18289 goto call_expansion_failed;
18290
18291 /* We handle only functions that return an integer. */
18292 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18293 goto call_expansion_failed;
18294
18295 dwarf_proc = function_to_dwarf_procedure (callee);
18296 if (dwarf_proc == NULL)
18297 goto call_expansion_failed;
18298
18299 /* Evaluate arguments right-to-left so that the first argument will
18300 be the top-most one on the stack. */
18301 for (i = nargs - 1; i >= 0; --i)
18302 {
18303 dw_loc_descr_ref loc_descr
18304 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18305 context);
18306
18307 if (loc_descr == NULL)
18308 goto call_expansion_failed;
18309
18310 add_loc_descr (&ret, loc_descr);
18311 }
18312
18313 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18314 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18315 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18316 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18317 add_loc_descr (&ret, ret1);
18318 break;
18319
18320 call_expansion_failed:
18321 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18322 /* There are no opcodes for these operations. */
18323 return 0;
18324 }
18325
18326 case PREINCREMENT_EXPR:
18327 case PREDECREMENT_EXPR:
18328 case POSTINCREMENT_EXPR:
18329 case POSTDECREMENT_EXPR:
18330 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18331 /* There are no opcodes for these operations. */
18332 return 0;
18333
18334 case ADDR_EXPR:
18335 /* If we already want an address, see if there is INDIRECT_REF inside
18336 e.g. for &this->field. */
18337 if (want_address)
18338 {
18339 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18340 (loc, want_address == 2, context);
18341 if (list_ret)
18342 have_address = 1;
18343 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18344 && (ret = cst_pool_loc_descr (loc)))
18345 have_address = 1;
18346 }
18347 /* Otherwise, process the argument and look for the address. */
18348 if (!list_ret && !ret)
18349 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18350 else
18351 {
18352 if (want_address)
18353 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18354 return NULL;
18355 }
18356 break;
18357
18358 case VAR_DECL:
18359 if (DECL_THREAD_LOCAL_P (loc))
18360 {
18361 rtx rtl;
18362 enum dwarf_location_atom tls_op;
18363 enum dtprel_bool dtprel = dtprel_false;
18364
18365 if (targetm.have_tls)
18366 {
18367 /* If this is not defined, we have no way to emit the
18368 data. */
18369 if (!targetm.asm_out.output_dwarf_dtprel)
18370 return 0;
18371
18372 /* The way DW_OP_GNU_push_tls_address is specified, we
18373 can only look up addresses of objects in the current
18374 module. We used DW_OP_addr as first op, but that's
18375 wrong, because DW_OP_addr is relocated by the debug
18376 info consumer, while DW_OP_GNU_push_tls_address
18377 operand shouldn't be. */
18378 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18379 return 0;
18380 dtprel = dtprel_true;
18381 /* We check for DWARF 5 here because gdb did not implement
18382 DW_OP_form_tls_address until after 7.12. */
18383 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18384 : DW_OP_GNU_push_tls_address);
18385 }
18386 else
18387 {
18388 if (!targetm.emutls.debug_form_tls_address
18389 || !(dwarf_version >= 3 || !dwarf_strict))
18390 return 0;
18391 /* We stuffed the control variable into the DECL_VALUE_EXPR
18392 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18393 no longer appear in gimple code. We used the control
18394 variable specifically so that we could pick it up here. */
18395 loc = DECL_VALUE_EXPR (loc);
18396 tls_op = DW_OP_form_tls_address;
18397 }
18398
18399 rtl = rtl_for_decl_location (loc);
18400 if (rtl == NULL_RTX)
18401 return 0;
18402
18403 if (!MEM_P (rtl))
18404 return 0;
18405 rtl = XEXP (rtl, 0);
18406 if (! CONSTANT_P (rtl))
18407 return 0;
18408
18409 ret = new_addr_loc_descr (rtl, dtprel);
18410 ret1 = new_loc_descr (tls_op, 0, 0);
18411 add_loc_descr (&ret, ret1);
18412
18413 have_address = 1;
18414 break;
18415 }
18416 /* FALLTHRU */
18417
18418 case PARM_DECL:
18419 if (context != NULL && context->dpi != NULL
18420 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18421 {
18422 /* We are generating code for a DWARF procedure and we want to access
18423 one of its arguments: find the appropriate argument offset and let
18424 the resolve_args_picking pass compute the offset that complies
18425 with the stack frame size. */
18426 unsigned i = 0;
18427 tree cursor;
18428
18429 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18430 cursor != NULL_TREE && cursor != loc;
18431 cursor = TREE_CHAIN (cursor), ++i)
18432 ;
18433 /* If we are translating a DWARF procedure, all referenced parameters
18434 must belong to the current function. */
18435 gcc_assert (cursor != NULL_TREE);
18436
18437 ret = new_loc_descr (DW_OP_pick, i, 0);
18438 ret->frame_offset_rel = 1;
18439 break;
18440 }
18441 /* FALLTHRU */
18442
18443 case RESULT_DECL:
18444 if (DECL_HAS_VALUE_EXPR_P (loc))
18445 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18446 want_address, context);
18447 /* FALLTHRU */
18448
18449 case FUNCTION_DECL:
18450 {
18451 rtx rtl;
18452 var_loc_list *loc_list = lookup_decl_loc (loc);
18453
18454 if (loc_list && loc_list->first)
18455 {
18456 list_ret = dw_loc_list (loc_list, loc, want_address);
18457 have_address = want_address != 0;
18458 break;
18459 }
18460 rtl = rtl_for_decl_location (loc);
18461 if (rtl == NULL_RTX)
18462 {
18463 if (TREE_CODE (loc) != FUNCTION_DECL
18464 && early_dwarf
18465 && current_function_decl
18466 && want_address != 1
18467 && ! DECL_IGNORED_P (loc)
18468 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18469 || POINTER_TYPE_P (TREE_TYPE (loc)))
18470 && DECL_CONTEXT (loc) == current_function_decl
18471 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18472 <= DWARF2_ADDR_SIZE))
18473 {
18474 dw_die_ref ref = lookup_decl_die (loc);
18475 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18476 if (ref)
18477 {
18478 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18479 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18480 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18481 }
18482 else
18483 {
18484 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18485 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18486 }
18487 break;
18488 }
18489 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18490 return 0;
18491 }
18492 else if (CONST_INT_P (rtl))
18493 {
18494 HOST_WIDE_INT val = INTVAL (rtl);
18495 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18496 val &= GET_MODE_MASK (DECL_MODE (loc));
18497 ret = int_loc_descriptor (val);
18498 }
18499 else if (GET_CODE (rtl) == CONST_STRING)
18500 {
18501 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18502 return 0;
18503 }
18504 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18505 ret = new_addr_loc_descr (rtl, dtprel_false);
18506 else
18507 {
18508 machine_mode mode, mem_mode;
18509
18510 /* Certain constructs can only be represented at top-level. */
18511 if (want_address == 2)
18512 {
18513 ret = loc_descriptor (rtl, VOIDmode,
18514 VAR_INIT_STATUS_INITIALIZED);
18515 have_address = 1;
18516 }
18517 else
18518 {
18519 mode = GET_MODE (rtl);
18520 mem_mode = VOIDmode;
18521 if (MEM_P (rtl))
18522 {
18523 mem_mode = mode;
18524 mode = get_address_mode (rtl);
18525 rtl = XEXP (rtl, 0);
18526 have_address = 1;
18527 }
18528 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18529 VAR_INIT_STATUS_INITIALIZED);
18530 }
18531 if (!ret)
18532 expansion_failed (loc, rtl,
18533 "failed to produce loc descriptor for rtl");
18534 }
18535 }
18536 break;
18537
18538 case MEM_REF:
18539 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18540 {
18541 have_address = 1;
18542 goto do_plus;
18543 }
18544 /* Fallthru. */
18545 case INDIRECT_REF:
18546 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18547 have_address = 1;
18548 break;
18549
18550 case TARGET_MEM_REF:
18551 case SSA_NAME:
18552 case DEBUG_EXPR_DECL:
18553 return NULL;
18554
18555 case COMPOUND_EXPR:
18556 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18557 context);
18558
18559 CASE_CONVERT:
18560 case VIEW_CONVERT_EXPR:
18561 case SAVE_EXPR:
18562 case MODIFY_EXPR:
18563 case NON_LVALUE_EXPR:
18564 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18565 context);
18566
18567 case COMPONENT_REF:
18568 case BIT_FIELD_REF:
18569 case ARRAY_REF:
18570 case ARRAY_RANGE_REF:
18571 case REALPART_EXPR:
18572 case IMAGPART_EXPR:
18573 {
18574 tree obj, offset;
18575 poly_int64 bitsize, bitpos, bytepos;
18576 machine_mode mode;
18577 int unsignedp, reversep, volatilep = 0;
18578
18579 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18580 &unsignedp, &reversep, &volatilep);
18581
18582 gcc_assert (obj != loc);
18583
18584 list_ret = loc_list_from_tree_1 (obj,
18585 want_address == 2
18586 && known_eq (bitpos, 0)
18587 && !offset ? 2 : 1,
18588 context);
18589 /* TODO: We can extract the value of a small expression via shifting even
18590 for a nonzero bitpos. */
18591 if (list_ret == 0)
18592 return 0;
18593 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18594 || !multiple_p (bitsize, BITS_PER_UNIT))
18595 {
18596 expansion_failed (loc, NULL_RTX,
18597 "bitfield access");
18598 return 0;
18599 }
18600
18601 if (offset != NULL_TREE)
18602 {
18603 /* Variable offset. */
18604 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18605 if (list_ret1 == 0)
18606 return 0;
18607 add_loc_list (&list_ret, list_ret1);
18608 if (!list_ret)
18609 return 0;
18610 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18611 }
18612
18613 HOST_WIDE_INT value;
18614 if (bytepos.is_constant (&value) && value > 0)
18615 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18616 value, 0));
18617 else if (maybe_ne (bytepos, 0))
18618 loc_list_plus_const (list_ret, bytepos);
18619
18620 have_address = 1;
18621 break;
18622 }
18623
18624 case INTEGER_CST:
18625 if ((want_address || !tree_fits_shwi_p (loc))
18626 && (ret = cst_pool_loc_descr (loc)))
18627 have_address = 1;
18628 else if (want_address == 2
18629 && tree_fits_shwi_p (loc)
18630 && (ret = address_of_int_loc_descriptor
18631 (int_size_in_bytes (TREE_TYPE (loc)),
18632 tree_to_shwi (loc))))
18633 have_address = 1;
18634 else if (tree_fits_shwi_p (loc))
18635 ret = int_loc_descriptor (tree_to_shwi (loc));
18636 else if (tree_fits_uhwi_p (loc))
18637 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18638 else
18639 {
18640 expansion_failed (loc, NULL_RTX,
18641 "Integer operand is not host integer");
18642 return 0;
18643 }
18644 break;
18645
18646 case POLY_INT_CST:
18647 {
18648 if (want_address)
18649 {
18650 expansion_failed (loc, NULL_RTX,
18651 "constant address with a runtime component");
18652 return 0;
18653 }
18654 poly_int64 value;
18655 if (!poly_int_tree_p (loc, &value))
18656 {
18657 expansion_failed (loc, NULL_RTX, "constant too big");
18658 return 0;
18659 }
18660 ret = int_loc_descriptor (value);
18661 }
18662 break;
18663
18664 case CONSTRUCTOR:
18665 case REAL_CST:
18666 case STRING_CST:
18667 case COMPLEX_CST:
18668 if ((ret = cst_pool_loc_descr (loc)))
18669 have_address = 1;
18670 else if (TREE_CODE (loc) == CONSTRUCTOR)
18671 {
18672 tree type = TREE_TYPE (loc);
18673 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18674 unsigned HOST_WIDE_INT offset = 0;
18675 unsigned HOST_WIDE_INT cnt;
18676 constructor_elt *ce;
18677
18678 if (TREE_CODE (type) == RECORD_TYPE)
18679 {
18680 /* This is very limited, but it's enough to output
18681 pointers to member functions, as long as the
18682 referenced function is defined in the current
18683 translation unit. */
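/* The loop below walks the fields in order, emitting a padding DW_OP_piece
   for any gap before a field, then the field's value (if any) followed by a
   DW_OP_piece of the field's size, and, if needed, a final padding
   DW_OP_piece up to the full record size.  */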
18684 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18685 {
18686 tree val = ce->value;
18687
18688 tree field = ce->index;
18689
18690 if (val)
18691 STRIP_NOPS (val);
18692
18693 if (!field || DECL_BIT_FIELD (field))
18694 {
18695 expansion_failed (loc, NULL_RTX,
18696 "bitfield in record type constructor");
18697 size = offset = (unsigned HOST_WIDE_INT)-1;
18698 ret = NULL;
18699 break;
18700 }
18701
18702 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18703 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18704 gcc_assert (pos + fieldsize <= size);
18705 if (pos < offset)
18706 {
18707 expansion_failed (loc, NULL_RTX,
18708 "out-of-order fields in record constructor");
18709 size = offset = (unsigned HOST_WIDE_INT)-1;
18710 ret = NULL;
18711 break;
18712 }
18713 if (pos > offset)
18714 {
18715 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18716 add_loc_descr (&ret, ret1);
18717 offset = pos;
18718 }
18719 if (val && fieldsize != 0)
18720 {
18721 ret1 = loc_descriptor_from_tree (val, want_address, context);
18722 if (!ret1)
18723 {
18724 expansion_failed (loc, NULL_RTX,
18725 "unsupported expression in field");
18726 size = offset = (unsigned HOST_WIDE_INT)-1;
18727 ret = NULL;
18728 break;
18729 }
18730 add_loc_descr (&ret, ret1);
18731 }
18732 if (fieldsize)
18733 {
18734 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18735 add_loc_descr (&ret, ret1);
18736 offset = pos + fieldsize;
18737 }
18738 }
18739
18740 if (offset != size)
18741 {
18742 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18743 add_loc_descr (&ret, ret1);
18744 offset = size;
18745 }
18746
18747 have_address = !!want_address;
18748 }
18749 else
18750 expansion_failed (loc, NULL_RTX,
18751 "constructor of non-record type");
18752 }
18753 else
18754 /* We can construct small constants here using int_loc_descriptor. */
18755 expansion_failed (loc, NULL_RTX,
18756 "constructor or constant not in constant pool");
18757 break;
18758
18759 case TRUTH_AND_EXPR:
18760 case TRUTH_ANDIF_EXPR:
18761 case BIT_AND_EXPR:
18762 op = DW_OP_and;
18763 goto do_binop;
18764
18765 case TRUTH_XOR_EXPR:
18766 case BIT_XOR_EXPR:
18767 op = DW_OP_xor;
18768 goto do_binop;
18769
18770 case TRUTH_OR_EXPR:
18771 case TRUTH_ORIF_EXPR:
18772 case BIT_IOR_EXPR:
18773 op = DW_OP_or;
18774 goto do_binop;
18775
18776 case FLOOR_DIV_EXPR:
18777 case CEIL_DIV_EXPR:
18778 case ROUND_DIV_EXPR:
18779 case TRUNC_DIV_EXPR:
18780 case EXACT_DIV_EXPR:
18781 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18782 return 0;
18783 op = DW_OP_div;
18784 goto do_binop;
18785
18786 case MINUS_EXPR:
18787 op = DW_OP_minus;
18788 goto do_binop;
18789
18790 case FLOOR_MOD_EXPR:
18791 case CEIL_MOD_EXPR:
18792 case ROUND_MOD_EXPR:
18793 case TRUNC_MOD_EXPR:
18794 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18795 {
18796 op = DW_OP_mod;
18797 goto do_binop;
18798 }
18799 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18800 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18801 if (list_ret == 0 || list_ret1 == 0)
18802 return 0;
18803
18804 add_loc_list (&list_ret, list_ret1);
18805 if (list_ret == 0)
18806 return 0;
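/* Signed modulo: compute first - (first / second) * second, i.e. duplicate
   both operands (over; over), divide, multiply, then subtract.  */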
18807 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18808 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18809 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18810 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18811 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18812 break;
18813
18814 case MULT_EXPR:
18815 op = DW_OP_mul;
18816 goto do_binop;
18817
18818 case LSHIFT_EXPR:
18819 op = DW_OP_shl;
18820 goto do_binop;
18821
18822 case RSHIFT_EXPR:
18823 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18824 goto do_binop;
18825
18826 case POINTER_PLUS_EXPR:
18827 case PLUS_EXPR:
18828 do_plus:
18829 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18830 {
18831 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18832 smarter to encode their opposite. The DW_OP_plus_uconst operation
18833 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18834 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18835 bytes, Y being the size of the operation that pushes the opposite
18836 of the addend. So let's choose the smallest representation. */
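/* For instance (illustrative, assuming a 64-bit target): an addend of -8
   appears as 0xfffffffffffffff8, which DW_OP_plus_uconst would encode as a
   10-byte ULEB128 (11 bytes in total), whereas DW_OP_lit8; DW_OP_minus
   takes only 2 bytes.  */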
18837 const tree tree_addend = TREE_OPERAND (loc, 1);
18838 offset_int wi_addend;
18839 HOST_WIDE_INT shwi_addend;
18840 dw_loc_descr_ref loc_naddend;
18841
18842 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18843 if (list_ret == 0)
18844 return 0;
18845
18846 /* Try to get the literal to push. It is the opposite of the addend,
18847 so, as we rely on wrapping during DWARF evaluation, first decode
18848 the literal as a "DWARF-sized" signed number. */
18849 wi_addend = wi::to_offset (tree_addend);
18850 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18851 shwi_addend = wi_addend.to_shwi ();
18852 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18853 ? int_loc_descriptor (-shwi_addend)
18854 : NULL;
18855
18856 if (loc_naddend != NULL
18857 && ((unsigned) size_of_uleb128 (shwi_addend)
18858 > size_of_loc_descr (loc_naddend)))
18859 {
18860 add_loc_descr_to_each (list_ret, loc_naddend);
18861 add_loc_descr_to_each (list_ret,
18862 new_loc_descr (DW_OP_minus, 0, 0));
18863 }
18864 else
18865 {
18866 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18867 {
18868 loc_naddend = loc_cur;
18869 loc_cur = loc_cur->dw_loc_next;
18870 ggc_free (loc_naddend);
18871 }
18872 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18873 }
18874 break;
18875 }
18876
18877 op = DW_OP_plus;
18878 goto do_binop;
18879
18880 case LE_EXPR:
18881 op = DW_OP_le;
18882 goto do_comp_binop;
18883
18884 case GE_EXPR:
18885 op = DW_OP_ge;
18886 goto do_comp_binop;
18887
18888 case LT_EXPR:
18889 op = DW_OP_lt;
18890 goto do_comp_binop;
18891
18892 case GT_EXPR:
18893 op = DW_OP_gt;
18894 goto do_comp_binop;
18895
18896 do_comp_binop:
18897 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18898 {
18899 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18900 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18901 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18902 TREE_CODE (loc));
18903 break;
18904 }
18905 else
18906 goto do_binop;
18907
18908 case EQ_EXPR:
18909 op = DW_OP_eq;
18910 goto do_binop;
18911
18912 case NE_EXPR:
18913 op = DW_OP_ne;
18914 goto do_binop;
18915
18916 do_binop:
18917 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18918 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18919 if (list_ret == 0 || list_ret1 == 0)
18920 return 0;
18921
18922 add_loc_list (&list_ret, list_ret1);
18923 if (list_ret == 0)
18924 return 0;
18925 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18926 break;
18927
18928 case TRUTH_NOT_EXPR:
18929 case BIT_NOT_EXPR:
18930 op = DW_OP_not;
18931 goto do_unop;
18932
18933 case ABS_EXPR:
18934 op = DW_OP_abs;
18935 goto do_unop;
18936
18937 case NEGATE_EXPR:
18938 op = DW_OP_neg;
18939 goto do_unop;
18940
18941 do_unop:
18942 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18943 if (list_ret == 0)
18944 return 0;
18945
18946 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18947 break;
18948
18949 case MIN_EXPR:
18950 case MAX_EXPR:
18951 {
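/* Lower MIN_EXPR/MAX_EXPR to a COND_EXPR: MIN (a, b) becomes a > b ? b : a
   and MAX (a, b) becomes a < b ? b : a.  */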
18952 const enum tree_code code =
18953 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18954
18955 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18956 build2 (code, integer_type_node,
18957 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18958 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18959 }
18960
18961 /* fall through */
18962
18963 case COND_EXPR:
18964 {
18965 dw_loc_descr_ref lhs
18966 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18967 dw_loc_list_ref rhs
18968 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18969 dw_loc_descr_ref bra_node, jump_node, tmp;
18970
18971 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18972 if (list_ret == 0 || lhs == 0 || rhs == 0)
18973 return 0;
18974
18975 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18976 add_loc_descr_to_each (list_ret, bra_node);
18977
18978 add_loc_list (&list_ret, rhs);
18979 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18980 add_loc_descr_to_each (list_ret, jump_node);
18981
18982 add_loc_descr_to_each (list_ret, lhs);
18983 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18984 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18985
18986 /* ??? Need a node to point the skip at. Use a nop. */
18987 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18988 add_loc_descr_to_each (list_ret, tmp);
18989 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18990 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18991 }
18992 break;
18993
18994 case FIX_TRUNC_EXPR:
18995 return 0;
18996
18997 default:
18998 /* Leave front-end specific codes as simply unknown. This comes
18999 up, for instance, with the C STMT_EXPR. */
19000 if ((unsigned int) TREE_CODE (loc)
19001 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
19002 {
19003 expansion_failed (loc, NULL_RTX,
19004 "language specific tree node");
19005 return 0;
19006 }
19007
19008 /* Otherwise this is a generic code; we should just list all of
19009 these explicitly. We forgot one. */
19010 if (flag_checking)
19011 gcc_unreachable ();
19012
19013 /* In a release build, we want to degrade gracefully: better to
19014 generate incomplete debugging information than to crash. */
19015 return NULL;
19016 }
19017
19018 if (!ret && !list_ret)
19019 return 0;
19020
19021 if (want_address == 2 && !have_address
19022 && (dwarf_version >= 4 || !dwarf_strict))
19023 {
19024 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
19025 {
19026 expansion_failed (loc, NULL_RTX,
19027 "DWARF address size mismatch");
19028 return 0;
19029 }
19030 if (ret)
19031 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
19032 else
19033 add_loc_descr_to_each (list_ret,
19034 new_loc_descr (DW_OP_stack_value, 0, 0));
19035 have_address = 1;
19036 }
19037 /* Show if we can't fill the request for an address. */
19038 if (want_address && !have_address)
19039 {
19040 expansion_failed (loc, NULL_RTX,
19041 "Want address and only have value");
19042 return 0;
19043 }
19044
19045 gcc_assert (!ret || !list_ret);
19046
19047 /* If we've got an address and don't want one, dereference. */
19048 if (!want_address && have_address)
19049 {
19050 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
19051
19052 if (size > DWARF2_ADDR_SIZE || size == -1)
19053 {
19054 expansion_failed (loc, NULL_RTX,
19055 "DWARF address size mismatch");
19056 return 0;
19057 }
19058 else if (size == DWARF2_ADDR_SIZE)
19059 op = DW_OP_deref;
19060 else
19061 op = DW_OP_deref_size;
19062
19063 if (ret)
19064 add_loc_descr (&ret, new_loc_descr (op, size, 0));
19065 else
19066 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
19067 }
19068 if (ret)
19069 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
19070
19071 return list_ret;
19072 }
19073
19074 /* Likewise, but strip useless DW_OP_nop operations in the resulting
19075 expressions. */
19076
19077 static dw_loc_list_ref
19078 loc_list_from_tree (tree loc, int want_address,
19079 struct loc_descr_context *context)
19080 {
19081 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
19082
19083 for (dw_loc_list_ref loc_cur = result;
19084 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
19085 loc_descr_without_nops (loc_cur->expr);
19086 return result;
19087 }
19088
19089 /* Same as above but return only single location expression. */
19090 static dw_loc_descr_ref
19091 loc_descriptor_from_tree (tree loc, int want_address,
19092 struct loc_descr_context *context)
19093 {
19094 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
19095 if (!ret)
19096 return NULL;
19097 if (ret->dw_loc_next)
19098 {
19099 expansion_failed (loc, NULL_RTX,
19100 "Location list where only loc descriptor needed");
19101 return NULL;
19102 }
19103 return ret->expr;
19104 }
19105
19106 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19107 pointer to the declared type for the relevant field variable, or return
19108 `integer_type_node' if the given node turns out to be an
19109 ERROR_MARK node. */
19110
19111 static inline tree
19112 field_type (const_tree decl)
19113 {
19114 tree type;
19115
19116 if (TREE_CODE (decl) == ERROR_MARK)
19117 return integer_type_node;
19118
19119 type = DECL_BIT_FIELD_TYPE (decl);
19120 if (type == NULL_TREE)
19121 type = TREE_TYPE (decl);
19122
19123 return type;
19124 }
19125
19126 /* Given a pointer to a tree node, return the alignment in bits for
19127 it, or else return BITS_PER_WORD if the node actually turns out to
19128 be an ERROR_MARK node. */
19129
19130 static inline unsigned
19131 simple_type_align_in_bits (const_tree type)
19132 {
19133 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19134 }
19135
19136 static inline unsigned
19137 simple_decl_align_in_bits (const_tree decl)
19138 {
19139 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19140 }
19141
19142 /* Return the result of rounding T up to ALIGN. */
19143
19144 static inline offset_int
19145 round_up_to_align (const offset_int &t, unsigned int align)
19146 {
19147 return wi::udiv_trunc (t + align - 1, align) * align;
19148 }
19149
19150 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19151 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19152 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19153 if we fail to return the size in one of these two forms. */
19154
19155 static dw_loc_descr_ref
19156 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19157 {
19158 tree tree_size;
19159 struct loc_descr_context ctx;
19160
19161 /* Prefer to return a constant integer, if possible. */
19162 *cst_size = int_size_in_bytes (type);
19163 if (*cst_size != -1)
19164 return NULL;
19165
19166 ctx.context_type = const_cast<tree> (type);
19167 ctx.base_decl = NULL_TREE;
19168 ctx.dpi = NULL;
19169 ctx.placeholder_arg = false;
19170 ctx.placeholder_seen = false;
19171
19172 type = TYPE_MAIN_VARIANT (type);
19173 tree_size = TYPE_SIZE_UNIT (type);
19174 return ((tree_size != NULL_TREE)
19175 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19176 : NULL);
19177 }
19178
19179 /* Helper structure for RECORD_TYPE processing. */
19180 struct vlr_context
19181 {
19182 /* Root RECORD_TYPE. It is needed to generate data member location
19183 descriptions in variable-length records (VLR), but also to cope with
19184 variants, which are composed of nested structures multiplexed with
19185 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19186 function processing a FIELD_DECL, it is required to be non null. */
19187 tree struct_type;
19188
19189 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19190 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19191 this variant part as part of the root record (in storage units). For
19192 regular records, it must be NULL_TREE. */
19193 tree variant_part_offset;
19194 };
19195
19196 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19197 addressed byte of the "containing object" for the given FIELD_DECL. If
19198 possible, return a native constant through CST_OFFSET (in which case NULL is
19199 returned); otherwise return a DWARF expression that computes the offset.
19200
19201 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19202 that offset is, either because the argument turns out to be a pointer to an
19203 ERROR_MARK node, or because the offset expression is too complex for us.
19204
19205 CTX is required: see the comment for VLR_CONTEXT. */
19206
19207 static dw_loc_descr_ref
19208 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19209 HOST_WIDE_INT *cst_offset)
19210 {
19211 tree tree_result;
19212 dw_loc_list_ref loc_result;
19213
19214 *cst_offset = 0;
19215
19216 if (TREE_CODE (decl) == ERROR_MARK)
19217 return NULL;
19218 else
19219 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19220
19221 /* We cannot handle variable bit offsets at the moment, so abort if it's the
19222 case. */
19223 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19224 return NULL;
19225
19226 /* We used to handle only constant offsets in all cases. Now, we properly
19227 handle dynamic byte offsets, but only when the PCC bitfield type layout
19228 doesn't matter. */
19229 if (PCC_BITFIELD_TYPE_MATTERS
19230 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19231 {
19232 offset_int object_offset_in_bits;
19233 offset_int object_offset_in_bytes;
19234 offset_int bitpos_int;
19235 tree type;
19236 tree field_size_tree;
19237 offset_int deepest_bitpos;
19238 offset_int field_size_in_bits;
19239 unsigned int type_align_in_bits;
19240 unsigned int decl_align_in_bits;
19241 offset_int type_size_in_bits;
19242
19243 bitpos_int = wi::to_offset (bit_position (decl));
19244 type = field_type (decl);
19245 type_size_in_bits = offset_int_type_size_in_bits (type);
19246 type_align_in_bits = simple_type_align_in_bits (type);
19247
19248 field_size_tree = DECL_SIZE (decl);
19249
19250 /* The size could be unspecified if there was an error, or for
19251 a flexible array member. */
19252 if (!field_size_tree)
19253 field_size_tree = bitsize_zero_node;
19254
19255 /* If the size of the field is not constant, use the type size. */
19256 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19257 field_size_in_bits = wi::to_offset (field_size_tree);
19258 else
19259 field_size_in_bits = type_size_in_bits;
19260
19261 decl_align_in_bits = simple_decl_align_in_bits (decl);
19262
19263 /* The GCC front-end doesn't make any attempt to keep track of the
19264 starting bit offset (relative to the start of the containing
19265 structure type) of the hypothetical "containing object" for a
19266 bit-field. Thus, when computing the byte offset value for the
19267 start of the "containing object" of a bit-field, we must deduce
19268 this information on our own. This can be rather tricky to do in
19269 some cases. For example, handling the following structure type
19270 definition when compiling for an i386/i486 target (which only
19271 aligns long long's to 32-bit boundaries) can be very tricky:
19272
19273 struct S { int field1; long long field2:31; };
19274
19275 Fortunately, there is a simple rule-of-thumb which can be used
19276 in such cases. When compiling for an i386/i486, GCC will
19277 allocate 8 bytes for the structure shown above. It decides to
19278 do this based upon one simple rule for bit-field allocation.
19279 GCC allocates each "containing object" for each bit-field at
19280 the first (i.e. lowest addressed) legitimate alignment boundary
19281 (based upon the required minimum alignment for the declared
19282 type of the field) which it can possibly use, subject to the
19283 condition that there is still enough available space remaining
19284 in the containing object (when allocated at the selected point)
19285 to fully accommodate all of the bits of the bit-field itself.
19286
19287 This simple rule makes it obvious why GCC allocates 8 bytes for
19288 each object of the structure type shown above. When looking
19289 for a place to allocate the "containing object" for `field2',
19290 the compiler simply tries to allocate a 64-bit "containing
19291 object" at each successive 32-bit boundary (starting at zero)
19292 until it finds a place to allocate that 64-bit field such that
19293 at least 31 contiguous (and previously unallocated) bits remain
19294 within that selected 64-bit field. (As it turns out, for the
19295 example above, the compiler finds it is OK to allocate the
19296 "containing object" 64-bit field at bit-offset zero within the
19297 structure type.)
19298
19299 Here we attempt to work backwards from the limited set of facts
19300 we're given, and we try to deduce from those facts, where GCC
19301 must have believed that the containing object started (within
19302 the structure type). The value we deduce is then used (by the
19303 callers of this routine) to generate DW_AT_location and
19304 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19305 the case of DW_AT_location, regular fields as well). */
19306
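/* As a hypothetical illustration of the deduction below: for a plain
   (non-bit-field) `int' field preceded by a single `char' in a struct,
   with 32-bit `int' alignment, bitpos_int is 32 and both the field and
   type sizes are 32 bits; deepest_bitpos is then 64,
   object_offset_in_bits is 64 - 32 = 32, already 32-bit aligned and
   not greater than bitpos_int, giving a byte offset of 4, the same
   answer byte_position would give.  For real bit-fields the
   subtraction and re-alignment below is what recovers the start of the
   hypothetical containing object.  */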
19307 /* Figure out the bit-distance from the start of the structure to
19308 the "deepest" bit of the bit-field. */
19309 deepest_bitpos = bitpos_int + field_size_in_bits;
19310
19311 /* This is the tricky part. Use some fancy footwork to deduce
19312 where the lowest addressed bit of the containing object must
19313 be. */
19314 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19315
19316 /* Round up to type_align by default. This works best for
19317 bitfields. */
19318 object_offset_in_bits
19319 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19320
19321 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19322 {
19323 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19324
19325 /* Round up to decl_align instead. */
19326 object_offset_in_bits
19327 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19328 }
19329
19330 object_offset_in_bytes
19331 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19332 if (ctx->variant_part_offset == NULL_TREE)
19333 {
19334 *cst_offset = object_offset_in_bytes.to_shwi ();
19335 return NULL;
19336 }
19337 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19338 }
19339 else
19340 tree_result = byte_position (decl);
19341
19342 if (ctx->variant_part_offset != NULL_TREE)
19343 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19344 ctx->variant_part_offset, tree_result);
19345
19346 /* If the byte offset is a constant, it's simpler to handle a native
19347 constant rather than a DWARF expression. */
19348 if (TREE_CODE (tree_result) == INTEGER_CST)
19349 {
19350 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19351 return NULL;
19352 }
19353 struct loc_descr_context loc_ctx = {
19354 ctx->struct_type, /* context_type */
19355 NULL_TREE, /* base_decl */
19356 NULL, /* dpi */
19357 false, /* placeholder_arg */
19358 false /* placeholder_seen */
19359 };
19360 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19361
19362 /* We want a DWARF expression: abort if we only have a location list with
19363 multiple elements. */
19364 if (!loc_result || !single_element_loc_list_p (loc_result))
19365 return NULL;
19366 else
19367 return loc_result->expr;
19368 }
19369 \f
19370 /* The following routines define various Dwarf attributes and any data
19371 associated with them. */
19372
19373 /* Add a location description attribute value to a DIE.
19374
19375 This emits location attributes suitable for whole variables and
19376 whole parameters. Note that the location attributes for struct fields are
19377 generated by the routine `data_member_location_attribute' below. */
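/* Concretely: a DESCR that covers a single range is attached as a
   self-contained DWARF expression, whereas one with several ranges is
   attached as a reference into the location list section, possibly
   accompanied by a DW_AT_GNU_locviews attribute when location views
   are emitted in-attribute.  */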
19378
19379 static inline void
19380 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19381 dw_loc_list_ref descr)
19382 {
19383 bool check_no_locviews = true;
19384 if (descr == 0)
19385 return;
19386 if (single_element_loc_list_p (descr))
19387 add_AT_loc (die, attr_kind, descr->expr);
19388 else
19389 {
19390 add_AT_loc_list (die, attr_kind, descr);
19391 gcc_assert (descr->ll_symbol);
19392 if (attr_kind == DW_AT_location && descr->vl_symbol
19393 && dwarf2out_locviews_in_attribute ())
19394 {
19395 add_AT_view_list (die, DW_AT_GNU_locviews);
19396 check_no_locviews = false;
19397 }
19398 }
19399
19400 if (check_no_locviews)
19401 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19402 }
19403
19404 /* Add DW_AT_accessibility attribute to DIE if needed. */
19405
19406 static void
19407 add_accessibility_attribute (dw_die_ref die, tree decl)
19408 {
19409 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19410 children, otherwise the default is DW_ACCESS_public. In DWARF2
19411 the default has always been DW_ACCESS_public. */
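/* For example, at DWARF 3 or later a private member whose parent DIE is
   a DW_TAG_class_type needs no attribute at all (DW_ACCESS_private is
   the default there), a public member of that class gets an explicit
   DW_ACCESS_public, and a private member of a structure type still gets
   an explicit DW_ACCESS_private.  */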
19412 if (TREE_PROTECTED (decl))
19413 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19414 else if (TREE_PRIVATE (decl))
19415 {
19416 if (dwarf_version == 2
19417 || die->die_parent == NULL
19418 || die->die_parent->die_tag != DW_TAG_class_type)
19419 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19420 }
19421 else if (dwarf_version > 2
19422 && die->die_parent
19423 && die->die_parent->die_tag == DW_TAG_class_type)
19424 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19425 }
19426
19427 /* Attach the specialized form of location attribute used for data members of
19428 struct and union types. In the special case of a FIELD_DECL node which
19429 represents a bit-field, the "offset" part of this special location
19430 descriptor must indicate the distance in bytes from the lowest-addressed
19431 byte of the containing struct or union type to the lowest-addressed byte of
19432 the "containing object" for the bit-field. (See the `field_byte_offset'
19433 function above).
19434
19435 For any given bit-field, the "containing object" is a hypothetical object
19436 (of some integral or enum type) within which the given bit-field lives. The
19437 type of this hypothetical "containing object" is always the same as the
19438 declared type of the individual bit-field itself (for GCC anyway... the
19439 DWARF spec doesn't actually mandate this). Note that it is the size (in
19440 bytes) of the hypothetical "containing object" which will be given in the
19441 DW_AT_byte_size attribute for this bit-field. (See the
19442 `byte_size_attribute' function below.) It is also used when calculating the
19443 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19444 function below.)
19445
19446 CTX is required: see the comment for VLR_CONTEXT. */
19447
19448 static void
19449 add_data_member_location_attribute (dw_die_ref die,
19450 tree decl,
19451 struct vlr_context *ctx)
19452 {
19453 HOST_WIDE_INT offset;
19454 dw_loc_descr_ref loc_descr = 0;
19455
19456 if (TREE_CODE (decl) == TREE_BINFO)
19457 {
19458 /* We're working on the TAG_inheritance for a base class. */
19459 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19460 {
19461 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19462 aren't at a fixed offset from all (sub)objects of the same
19463 type. We need to extract the appropriate offset from our
19464 vtable. The following dwarf expression means
19465
19466 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19467
19468 This is specific to the V3 ABI, of course. */
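/* The opcode sequence built below is therefore roughly:

   DW_OP_dup, DW_OP_deref, DW_OP_const<n> <-offset>,
   DW_OP_minus, DW_OP_deref, DW_OP_plus

   where <-offset> is the (positive) negation of the vtable slot offset
   computed just below, and int_loc_descriptor picks whichever
   constant-pushing opcode is most compact for it.  This is a sketch of
   the emitted form, not a literal dump.  */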
19469
19470 dw_loc_descr_ref tmp;
19471
19472 /* Make a copy of the object address. */
19473 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19474 add_loc_descr (&loc_descr, tmp);
19475
19476 /* Extract the vtable address. */
19477 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19478 add_loc_descr (&loc_descr, tmp);
19479
19480 /* Calculate the address of the offset. */
19481 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19482 gcc_assert (offset < 0);
19483
19484 tmp = int_loc_descriptor (-offset);
19485 add_loc_descr (&loc_descr, tmp);
19486 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19487 add_loc_descr (&loc_descr, tmp);
19488
19489 /* Extract the offset. */
19490 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19491 add_loc_descr (&loc_descr, tmp);
19492
19493 /* Add it to the object address. */
19494 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19495 add_loc_descr (&loc_descr, tmp);
19496 }
19497 else
19498 offset = tree_to_shwi (BINFO_OFFSET (decl));
19499 }
19500 else
19501 {
19502 loc_descr = field_byte_offset (decl, ctx, &offset);
19503
19504 /* If loc_descr is available then we know the field offset is dynamic.
19505 However, GDB does not handle dynamic field offsets very well at the
19506 moment. */
19507 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19508 {
19509 loc_descr = NULL;
19510 offset = 0;
19511 }
19512
19513 /* Data member location evaluation starts with the base address on the
19514 stack. Compute the field offset and add it to this base address. */
19515 else if (loc_descr != NULL)
19516 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19517 }
19518
19519 if (! loc_descr)
19520 {
19521 /* While DW_AT_data_bit_offset was already added in DWARF4, consumer
19522 support lagged behind; GDB, for example, only added it in November 2016.
19523 For DWARF5 we need newer debug info consumers anyway. We might change
19524 this to dwarf_version >= 4 once most consumers have caught up. */
19525 if (dwarf_version >= 5
19526 && TREE_CODE (decl) == FIELD_DECL
19527 && DECL_BIT_FIELD_TYPE (decl)
19528 && (ctx->variant_part_offset == NULL_TREE
19529 || TREE_CODE (ctx->variant_part_offset) == INTEGER_CST))
19530 {
19531 tree off = bit_position (decl);
19532 if (ctx->variant_part_offset)
19533 off = bit_from_pos (ctx->variant_part_offset, off);
19534 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19535 {
19536 remove_AT (die, DW_AT_byte_size);
19537 remove_AT (die, DW_AT_bit_offset);
19538 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19539 return;
19540 }
19541 }
19542 if (dwarf_version > 2)
19543 {
19544 /* Don't need to output a location expression, just the constant. */
19545 if (offset < 0)
19546 add_AT_int (die, DW_AT_data_member_location, offset);
19547 else
19548 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19549 return;
19550 }
19551 else
19552 {
19553 enum dwarf_location_atom op;
19554
19555 /* The DWARF2 standard says that we should assume that the structure
19556 address is already on the stack, so we can specify a structure
19557 field address by using DW_OP_plus_uconst. */
19558 op = DW_OP_plus_uconst;
19559 loc_descr = new_loc_descr (op, offset, 0);
19560 }
19561 }
19562
19563 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19564 }
19565
19566 /* Writes integer values to dw_vec_const array. */
19567
19568 static void
19569 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19570 {
19571 while (size != 0)
19572 {
19573 *dest++ = val & 0xff;
19574 val >>= 8;
19575 --size;
19576 }
19577 }
19578
19579 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19580
19581 static HOST_WIDE_INT
19582 extract_int (const unsigned char *src, unsigned int size)
19583 {
19584 HOST_WIDE_INT val = 0;
19585
19586 src += size;
19587 while (size != 0)
19588 {
19589 val <<= 8;
19590 val |= *--src & 0xff;
19591 --size;
19592 }
19593 return val;
19594 }
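/* For example, insert_int (0x1234, 2, buf) stores buf[0] = 0x34 and
   buf[1] = 0x12 (least significant byte first), and extract_int (buf, 2)
   recovers 0x1234.  */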
19595
19596 /* Writes wide_int values to dw_vec_const array. */
19597
19598 static void
19599 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19600 {
19601 int i;
19602
19603 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19604 {
19605 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19606 return;
19607 }
19608
19609 /* We'd have to extend this code to support odd sizes. */
19610 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19611
19612 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19613
19614 if (WORDS_BIG_ENDIAN)
19615 for (i = n - 1; i >= 0; i--)
19616 {
19617 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19618 dest += sizeof (HOST_WIDE_INT);
19619 }
19620 else
19621 for (i = 0; i < n; i++)
19622 {
19623 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19624 dest += sizeof (HOST_WIDE_INT);
19625 }
19626 }
19627
19628 /* Writes floating point values to dw_vec_const array. */
19629
19630 static void
19631 insert_float (const_rtx rtl, unsigned char *array)
19632 {
19633 long val[4];
19634 int i;
19635 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19636
19637 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19638
19639 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19640 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19641 {
19642 insert_int (val[i], 4, array);
19643 array += 4;
19644 }
19645 }
19646
19647 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19648 does not have a "location" either in memory or in a register. These
19649 things can arise in GNU C when a constant is passed as an actual parameter
19650 to an inlined function. They can also arise in C++ where declared
19651 constants do not necessarily get memory "homes". */
19652
19653 static bool
19654 add_const_value_attribute (dw_die_ref die, rtx rtl)
19655 {
19656 switch (GET_CODE (rtl))
19657 {
19658 case CONST_INT:
19659 {
19660 HOST_WIDE_INT val = INTVAL (rtl);
19661
19662 if (val < 0)
19663 add_AT_int (die, DW_AT_const_value, val);
19664 else
19665 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19666 }
19667 return true;
19668
19669 case CONST_WIDE_INT:
19670 {
19671 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19672 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19673 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19674 wide_int w = wi::zext (w1, prec);
19675 add_AT_wide (die, DW_AT_const_value, w);
19676 }
19677 return true;
19678
19679 case CONST_DOUBLE:
19680 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19681 floating-point constant. A CONST_DOUBLE is used whenever the
19682 constant requires more than one word in order to be adequately
19683 represented. */
19684 if (TARGET_SUPPORTS_WIDE_INT == 0
19685 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19686 add_AT_double (die, DW_AT_const_value,
19687 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19688 else
19689 {
19690 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19691 unsigned int length = GET_MODE_SIZE (mode);
19692 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19693
19694 insert_float (rtl, array);
19695 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19696 }
19697 return true;
19698
19699 case CONST_VECTOR:
19700 {
19701 unsigned int length;
19702 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19703 return false;
19704
19705 machine_mode mode = GET_MODE (rtl);
19706 /* The combination of a length and byte elt_size doesn't extend
19707 naturally to boolean vectors, where several elements are packed
19708 into the same byte. */
19709 if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
19710 return false;
19711
19712 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19713 unsigned char *array
19714 = ggc_vec_alloc<unsigned char> (length * elt_size);
19715 unsigned int i;
19716 unsigned char *p;
19717 machine_mode imode = GET_MODE_INNER (mode);
19718
19719 switch (GET_MODE_CLASS (mode))
19720 {
19721 case MODE_VECTOR_INT:
19722 for (i = 0, p = array; i < length; i++, p += elt_size)
19723 {
19724 rtx elt = CONST_VECTOR_ELT (rtl, i);
19725 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19726 }
19727 break;
19728
19729 case MODE_VECTOR_FLOAT:
19730 for (i = 0, p = array; i < length; i++, p += elt_size)
19731 {
19732 rtx elt = CONST_VECTOR_ELT (rtl, i);
19733 insert_float (elt, p);
19734 }
19735 break;
19736
19737 default:
19738 gcc_unreachable ();
19739 }
19740
19741 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19742 }
19743 return true;
19744
19745 case CONST_STRING:
19746 if (dwarf_version >= 4 || !dwarf_strict)
19747 {
19748 dw_loc_descr_ref loc_result;
19749 resolve_one_addr (&rtl);
19750 rtl_addr:
19751 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19752 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19753 add_AT_loc (die, DW_AT_location, loc_result);
19754 vec_safe_push (used_rtx_array, rtl);
19755 return true;
19756 }
19757 return false;
19758
19759 case CONST:
19760 if (CONSTANT_P (XEXP (rtl, 0)))
19761 return add_const_value_attribute (die, XEXP (rtl, 0));
19762 /* FALLTHROUGH */
19763 case SYMBOL_REF:
19764 if (!const_ok_for_output (rtl))
19765 return false;
19766 /* FALLTHROUGH */
19767 case LABEL_REF:
19768 if (dwarf_version >= 4 || !dwarf_strict)
19769 goto rtl_addr;
19770 return false;
19771
19772 case PLUS:
19773 /* In cases where an inlined instance of an inline function is passed
19774 the address of an `auto' variable (which is local to the caller) we
19775 can get a situation where the DECL_RTL of the artificial local
19776 variable (for the inlining) which acts as a stand-in for the
19777 corresponding formal parameter (of the inline function) will look
19778 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19779 exactly a compile-time constant expression, but it isn't the address
19780 of the (artificial) local variable either. Rather, it represents the
19781 *value* which the artificial local variable always has during its
19782 lifetime. We currently have no way to represent such quasi-constant
19783 values in Dwarf, so for now we just punt and generate nothing. */
19784 return false;
19785
19786 case HIGH:
19787 case CONST_FIXED:
19788 case MINUS:
19789 case SIGN_EXTEND:
19790 case ZERO_EXTEND:
19791 case CONST_POLY_INT:
19792 return false;
19793
19794 case MEM:
19795 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19796 && MEM_READONLY_P (rtl)
19797 && GET_MODE (rtl) == BLKmode)
19798 {
19799 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19800 return true;
19801 }
19802 return false;
19803
19804 default:
19805 /* No other kinds of rtx should be possible here. */
19806 gcc_unreachable ();
19807 }
19808 return false;
19809 }
19810
19811 /* Determine whether the evaluation of EXPR references any variables
19812 or functions which aren't otherwise used (and therefore may not be
19813 output). */
19814 static tree
19815 reference_to_unused (tree * tp, int * walk_subtrees,
19816 void * data ATTRIBUTE_UNUSED)
19817 {
19818 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19819 *walk_subtrees = 0;
19820
19821 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19822 && ! TREE_ASM_WRITTEN (*tp))
19823 return *tp;
19824 /* ??? The C++ FE emits debug information for using decls, so
19825 putting gcc_unreachable here falls over. See PR31899. For now
19826 be conservative. */
19827 else if (!symtab->global_info_ready && VAR_P (*tp))
19828 return *tp;
19829 else if (VAR_P (*tp))
19830 {
19831 varpool_node *node = varpool_node::get (*tp);
19832 if (!node || !node->definition)
19833 return *tp;
19834 }
19835 else if (TREE_CODE (*tp) == FUNCTION_DECL
19836 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19837 {
19838 /* The call graph machinery must have finished analyzing,
19839 optimizing and gimplifying the CU by now.
19840 So if *TP has no call graph node associated
19841 to it, it means *TP will not be emitted. */
19842 if (!symtab->global_info_ready || !cgraph_node::get (*tp))
19843 return *tp;
19844 }
19845 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19846 return *tp;
19847
19848 return NULL_TREE;
19849 }
19850
19851 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19852 for use in a later add_const_value_attribute call. */
19853
19854 static rtx
19855 rtl_for_decl_init (tree init, tree type)
19856 {
19857 rtx rtl = NULL_RTX;
19858
19859 STRIP_NOPS (init);
19860
19861 /* If a variable is initialized with a string constant without embedded
19862 zeros, build CONST_STRING. */
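/* For example, `static const char msg[6] = "hello";' qualifies: the
   domain runs from 0 to 5 and TREE_STRING_LENGTH is 6, which equals
   strlen ("hello") + 1, so we wrap a CONST_STRING in a read-only
   BLKmode MEM.  An initializer with embedded zeros, or one shorter
   than the array it initializes, fails the checks below and ends up
   yielding NULL_RTX instead.  */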
19863 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19864 {
19865 tree enttype = TREE_TYPE (type);
19866 tree domain = TYPE_DOMAIN (type);
19867 scalar_int_mode mode;
19868
19869 if (is_int_mode (TYPE_MODE (enttype), &mode)
19870 && GET_MODE_SIZE (mode) == 1
19871 && domain
19872 && TYPE_MAX_VALUE (domain)
19873 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19874 && integer_zerop (TYPE_MIN_VALUE (domain))
19875 && compare_tree_int (TYPE_MAX_VALUE (domain),
19876 TREE_STRING_LENGTH (init) - 1) == 0
19877 && ((size_t) TREE_STRING_LENGTH (init)
19878 == strlen (TREE_STRING_POINTER (init)) + 1))
19879 {
19880 rtl = gen_rtx_CONST_STRING (VOIDmode,
19881 ggc_strdup (TREE_STRING_POINTER (init)));
19882 rtl = gen_rtx_MEM (BLKmode, rtl);
19883 MEM_READONLY_P (rtl) = 1;
19884 }
19885 }
19886 /* Other aggregates, and complex values, could be represented using
19887 CONCAT: FIXME! */
19888 else if (AGGREGATE_TYPE_P (type)
19889 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19890 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19891 || TREE_CODE (type) == COMPLEX_TYPE)
19892 ;
19893 /* Vectors only work if their mode is supported by the target.
19894 FIXME: generic vectors ought to work too. */
19895 else if (TREE_CODE (type) == VECTOR_TYPE
19896 && !VECTOR_MODE_P (TYPE_MODE (type)))
19897 ;
19898 /* If the initializer is something that we know will expand into an
19899 immediate RTL constant, expand it now. We must be careful not to
19900 reference variables which won't be output. */
19901 else if (initializer_constant_valid_p (init, type)
19902 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19903 {
19904 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19905 possible. */
19906 if (TREE_CODE (type) == VECTOR_TYPE)
19907 switch (TREE_CODE (init))
19908 {
19909 case VECTOR_CST:
19910 break;
19911 case CONSTRUCTOR:
19912 if (TREE_CONSTANT (init))
19913 {
19914 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19915 bool constant_p = true;
19916 tree value;
19917 unsigned HOST_WIDE_INT ix;
19918
19919 /* Even when ctor is constant, it might contain non-*_CST
19920 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19921 belong into VECTOR_CST nodes. */
19922 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19923 if (!CONSTANT_CLASS_P (value))
19924 {
19925 constant_p = false;
19926 break;
19927 }
19928
19929 if (constant_p)
19930 {
19931 init = build_vector_from_ctor (type, elts);
19932 break;
19933 }
19934 }
19935 /* FALLTHRU */
19936
19937 default:
19938 return NULL;
19939 }
19940
19941 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19942
19943 /* If expand_expr returns a MEM, it wasn't immediate. */
19944 gcc_assert (!rtl || !MEM_P (rtl));
19945 }
19946
19947 return rtl;
19948 }
19949
19950 /* Generate RTL for the variable DECL to represent its location. */
19951
19952 static rtx
19953 rtl_for_decl_location (tree decl)
19954 {
19955 rtx rtl;
19956
19957 /* Here we have to decide where we are going to say the parameter "lives"
19958 (as far as the debugger is concerned). We only have a couple of
19959 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19960
19961 DECL_RTL normally indicates where the parameter lives during most of the
19962 activation of the function. If optimization is enabled however, this
19963 could be either NULL or else a pseudo-reg. Both of those cases indicate
19964 that the parameter doesn't really live anywhere (as far as the code
19965 generation parts of GCC are concerned) during most of the function's
19966 activation. That will happen (for example) if the parameter is never
19967 referenced within the function.
19968
19969 We could just generate a location descriptor here for all non-NULL
19970 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19971 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19972 where DECL_RTL is NULL or is a pseudo-reg.
19973
19974 Note however that we can only get away with using DECL_INCOMING_RTL as
19975 a backup substitute for DECL_RTL in certain limited cases. In cases
19976 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19977 we can be sure that the parameter was passed using the same type as it is
19978 declared to have within the function, and that its DECL_INCOMING_RTL
19979 points us to a place where a value of that type is passed.
19980
19981 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19982 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19983 because in these cases DECL_INCOMING_RTL points us to a value of some
19984 type which is *different* from the type of the parameter itself. Thus,
19985 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19986 such cases, the debugger would end up (for example) trying to fetch a
19987 `float' from a place which actually contains the first part of a
19988 `double'. That would lead to really incorrect and confusing
19989 output at debug-time.
19990
19991 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19992 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19993 are a couple of exceptions however. On little-endian machines we can
19994 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19995 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19996 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19997 when (on a little-endian machine) a non-prototyped function has a
19998 parameter declared to be of type `short' or `char'. In such cases,
19999 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
20000 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
20001 passed `int' value. If the debugger then uses that address to fetch
20002 a `short' or a `char' (on a little-endian machine) the result will be
20003 the correct data, so we allow for such exceptional cases below.
20004
20005 Note that our goal here is to describe the place where the given formal
20006 parameter lives during most of the function's activation (i.e. between the
20007 end of the prologue and the start of the epilogue). We'll do that as best
20008 as we can. Note however that if the given formal parameter is modified
20009 sometime during the execution of the function, then a stack backtrace (at
20010 debug-time) will show the function as having been called with the *new*
20011 value rather than the value which was originally passed in. This happens
20012 rarely enough that it is not a major problem, but it *is* a problem, and
20013 I'd like to fix it.
20014
20015 A future version of dwarf2out.c may generate two additional attributes for
20016 any given DW_TAG_formal_parameter DIE which will describe the "passed
20017 type" and the "passed location" for the given formal parameter in addition
20018 to the attributes we now generate to indicate the "declared type" and the
20019 "active location" for each parameter. This additional set of attributes
20020 could be used by debuggers for stack backtraces. Separately, note that
20021 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
20022 This happens (for example) for inlined instances of inline function formal
20023 parameters which are never referenced. This really shouldn't be
20024 happening. All PARM_DECL nodes should get valid non-NULL
20025 DECL_INCOMING_RTL values. FIXME. */
20026
20027 /* Use DECL_RTL as the "location" unless we find something better. */
20028 rtl = DECL_RTL_IF_SET (decl);
20029
20030 /* When generating abstract instances, ignore everything except
20031 constants, symbols living in memory, and symbols living in
20032 fixed registers. */
20033 if (! reload_completed)
20034 {
20035 if (rtl
20036 && (CONSTANT_P (rtl)
20037 || (MEM_P (rtl)
20038 && CONSTANT_P (XEXP (rtl, 0)))
20039 || (REG_P (rtl)
20040 && VAR_P (decl)
20041 && TREE_STATIC (decl))))
20042 {
20043 rtl = targetm.delegitimize_address (rtl);
20044 return rtl;
20045 }
20046 rtl = NULL_RTX;
20047 }
20048 else if (TREE_CODE (decl) == PARM_DECL)
20049 {
20050 if (rtl == NULL_RTX
20051 || is_pseudo_reg (rtl)
20052 || (MEM_P (rtl)
20053 && is_pseudo_reg (XEXP (rtl, 0))
20054 && DECL_INCOMING_RTL (decl)
20055 && MEM_P (DECL_INCOMING_RTL (decl))
20056 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
20057 {
20058 tree declared_type = TREE_TYPE (decl);
20059 tree passed_type = DECL_ARG_TYPE (decl);
20060 machine_mode dmode = TYPE_MODE (declared_type);
20061 machine_mode pmode = TYPE_MODE (passed_type);
20062
20063 /* This decl represents a formal parameter which was optimized out.
20064 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
20065 all cases where (rtl == NULL_RTX) just below. */
20066 if (dmode == pmode)
20067 rtl = DECL_INCOMING_RTL (decl);
20068 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
20069 && SCALAR_INT_MODE_P (dmode)
20070 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
20071 && DECL_INCOMING_RTL (decl))
20072 {
20073 rtx inc = DECL_INCOMING_RTL (decl);
20074 if (REG_P (inc))
20075 rtl = inc;
20076 else if (MEM_P (inc))
20077 {
20078 if (BYTES_BIG_ENDIAN)
20079 rtl = adjust_address_nv (inc, dmode,
20080 GET_MODE_SIZE (pmode)
20081 - GET_MODE_SIZE (dmode));
20082 else
20083 rtl = inc;
20084 }
20085 }
20086 }
20087
20088 /* If the parm was passed in registers, but lives on the stack, then
20089 make a big endian correction if the mode of the type of the
20090 parameter is not the same as the mode of the rtl. */
20091 /* ??? This is the same series of checks that are made in dbxout.c before
20092 we reach the big endian correction code there. It isn't clear if all
20093 of these checks are necessary here, but keeping them all is the safe
20094 thing to do. */
20095 else if (MEM_P (rtl)
20096 && XEXP (rtl, 0) != const0_rtx
20097 && ! CONSTANT_P (XEXP (rtl, 0))
20098 /* Not passed in memory. */
20099 && !MEM_P (DECL_INCOMING_RTL (decl))
20100 /* Not passed by invisible reference. */
20101 && (!REG_P (XEXP (rtl, 0))
20102 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
20103 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
20104 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
20105 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
20106 #endif
20107 )
20108 /* Big endian correction check. */
20109 && BYTES_BIG_ENDIAN
20110 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20111 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20112 UNITS_PER_WORD))
20113 {
20114 machine_mode addr_mode = get_address_mode (rtl);
20115 poly_int64 offset = (UNITS_PER_WORD
20116 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20117
20118 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20119 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20120 }
20121 }
20122 else if (VAR_P (decl)
20123 && rtl
20124 && MEM_P (rtl)
20125 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20126 {
20127 machine_mode addr_mode = get_address_mode (rtl);
20128 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20129 GET_MODE (rtl));
20130
20131 /* If a variable is declared "register" yet is smaller than
20132 a register, then if we store the variable to memory, it
20133 looks like we're storing a register-sized value, when in
20134 fact we are not. We need to adjust the offset of the
20135 storage location to reflect the actual value's bytes,
20136 else gdb will not be able to display it. */
20137 if (maybe_ne (offset, 0))
20138 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20139 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20140 }
20141
20142 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20143 and will have been substituted directly into all expressions that use it.
20144 C does not have such a concept, but C++ and other languages do. */
20145 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20146 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20147
20148 if (rtl)
20149 rtl = targetm.delegitimize_address (rtl);
20150
20151 /* If we don't look past the constant pool, we risk emitting a
20152 reference to a constant pool entry that isn't referenced from
20153 code, and thus is not emitted. */
20154 if (rtl)
20155 rtl = avoid_constant_pool_reference (rtl);
20156
20157 /* Try harder to get a rtl. If this symbol ends up not being emitted
20158 in the current CU, resolve_addr will remove the expression referencing
20159 it. */
20160 if (rtl == NULL_RTX
20161 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20162 && VAR_P (decl)
20163 && !DECL_EXTERNAL (decl)
20164 && TREE_STATIC (decl)
20165 && DECL_NAME (decl)
20166 && !DECL_HARD_REGISTER (decl)
20167 && DECL_MODE (decl) != VOIDmode)
20168 {
20169 rtl = make_decl_rtl_for_debug (decl);
20170 if (!MEM_P (rtl)
20171 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20172 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20173 rtl = NULL_RTX;
20174 }
20175
20176 return rtl;
20177 }
20178
20179 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20180 returned. If so, the decl for the COMMON block is returned, and the
20181 value is the offset into the common block for the symbol. */
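/* For example, given `COMMON /blk/ a, b' with `a' declared DOUBLE
   PRECISION, the variable `b' has a DECL_VALUE_EXPR that is a
   COMPONENT_REF into the public VAR_DECL representing the /blk/
   storage; we return that VAR_DECL and set *VALUE to b's byte offset
   within the block (8 in this hypothetical layout).  */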
20182
20183 static tree
20184 fortran_common (tree decl, HOST_WIDE_INT *value)
20185 {
20186 tree val_expr, cvar;
20187 machine_mode mode;
20188 poly_int64 bitsize, bitpos;
20189 tree offset;
20190 HOST_WIDE_INT cbitpos;
20191 int unsignedp, reversep, volatilep = 0;
20192
20193 /* If the decl isn't a VAR_DECL, or if it isn't static, or if it does
20194 not have a value expression (giving the offset into the common area),
20195 or if we aren't compiling Fortran at all, then it isn't a COMMON symbol
20196 and shouldn't be handled as such. */
20197 if (!VAR_P (decl)
20198 || !TREE_STATIC (decl)
20199 || !DECL_HAS_VALUE_EXPR_P (decl)
20200 || !is_fortran ())
20201 return NULL_TREE;
20202
20203 val_expr = DECL_VALUE_EXPR (decl);
20204 if (TREE_CODE (val_expr) != COMPONENT_REF)
20205 return NULL_TREE;
20206
20207 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20208 &unsignedp, &reversep, &volatilep);
20209
20210 if (cvar == NULL_TREE
20211 || !VAR_P (cvar)
20212 || DECL_ARTIFICIAL (cvar)
20213 || !TREE_PUBLIC (cvar)
20214 /* We don't expect to have to cope with variable offsets,
20215 since at present all static data must have a constant size. */
20216 || !bitpos.is_constant (&cbitpos))
20217 return NULL_TREE;
20218
20219 *value = 0;
20220 if (offset != NULL)
20221 {
20222 if (!tree_fits_shwi_p (offset))
20223 return NULL_TREE;
20224 *value = tree_to_shwi (offset);
20225 }
20226 if (cbitpos != 0)
20227 *value += cbitpos / BITS_PER_UNIT;
20228
20229 return cvar;
20230 }
20231
20232 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20233 data attribute for a variable or a parameter. We generate the
20234 DW_AT_const_value attribute only in those cases where the given variable
20235 or parameter does not have a true "location" either in memory or in a
20236 register. This can happen (for example) when a constant is passed as an
20237 actual argument in a call to an inline function. (It's possible that
20238 these things can crop up in other ways also.) Note that one type of
20239 constant value which can be passed into an inlined function is a constant
20240 pointer. This can happen for example if an actual argument in an inlined
20241 function call evaluates to a compile-time constant address.
20242
20243 CACHE_P is true if it is worth caching the location list for DECL,
20244 so that future calls can reuse it rather than regenerate it from scratch.
20245 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20246 since we will need to refer to them each time the function is inlined. */
20247
20248 static bool
20249 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20250 {
20251 rtx rtl;
20252 dw_loc_list_ref list;
20253 var_loc_list *loc_list;
20254 cached_dw_loc_list *cache;
20255
20256 if (early_dwarf)
20257 return false;
20258
20259 if (TREE_CODE (decl) == ERROR_MARK)
20260 return false;
20261
20262 if (get_AT (die, DW_AT_location)
20263 || get_AT (die, DW_AT_const_value))
20264 return true;
20265
20266 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20267 || TREE_CODE (decl) == RESULT_DECL);
20268
20269 /* Try to get some constant RTL for this decl, and use that as the value of
20270 the location. */
20271
20272 rtl = rtl_for_decl_location (decl);
20273 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20274 && add_const_value_attribute (die, rtl))
20275 return true;
20276
20277 /* See if we have a single element location list that is equivalent to
20278 a constant value. In that case it is better to use
20279 add_const_value_attribute than to expand the equivalent constant value. */
20280 loc_list = lookup_decl_loc (decl);
20281 if (loc_list
20282 && loc_list->first
20283 && loc_list->first->next == NULL
20284 && NOTE_P (loc_list->first->loc)
20285 && NOTE_VAR_LOCATION (loc_list->first->loc)
20286 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20287 {
20288 struct var_loc_node *node;
20289
20290 node = loc_list->first;
20291 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20292 if (GET_CODE (rtl) == EXPR_LIST)
20293 rtl = XEXP (rtl, 0);
20294 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20295 && add_const_value_attribute (die, rtl))
20296 return true;
20297 }
20298 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20299 list several times. See if we've already cached the contents. */
20300 list = NULL;
20301 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20302 cache_p = false;
20303 if (cache_p)
20304 {
20305 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20306 if (cache)
20307 list = cache->loc_list;
20308 }
20309 if (list == NULL)
20310 {
20311 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20312 NULL);
20313 /* It is usually worth caching this result if the decl is from
20314 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20315 if (cache_p && list && list->dw_loc_next)
20316 {
20317 cached_dw_loc_list **slot
20318 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20319 DECL_UID (decl),
20320 INSERT);
20321 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20322 cache->decl_id = DECL_UID (decl);
20323 cache->loc_list = list;
20324 *slot = cache;
20325 }
20326 }
20327 if (list)
20328 {
20329 add_AT_location_description (die, DW_AT_location, list);
20330 return true;
20331 }
20332 /* None of that worked, so it must not really have a location;
20333 try adding a constant value attribute from the DECL_INITIAL. */
20334 return tree_add_const_value_attribute_for_decl (die, decl);
20335 }
20336
20337 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20338 attribute is the const value T. */
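/* For instance, for a constant whose initializer is the aggregate
   `{ 1, 2 }' of type `struct { short a; short b; }', the INTEGER_CST
   paths below do not apply and no RTL constant is produced, but
   native_encode_initializer can still render it as a 4-byte
   target-order blob attached via add_AT_vec.  (Hypothetical example;
   the size assumes 16-bit shorts.)  */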
20339
20340 static bool
20341 tree_add_const_value_attribute (dw_die_ref die, tree t)
20342 {
20343 tree init;
20344 tree type = TREE_TYPE (t);
20345 rtx rtl;
20346
20347 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20348 return false;
20349
20350 init = t;
20351 gcc_assert (!DECL_P (init));
20352
20353 if (TREE_CODE (init) == INTEGER_CST)
20354 {
20355 if (tree_fits_uhwi_p (init))
20356 {
20357 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20358 return true;
20359 }
20360 if (tree_fits_shwi_p (init))
20361 {
20362 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20363 return true;
20364 }
20365 }
20366 /* Generate the RTL even if early_dwarf to force mangling of all
20367 referred-to symbols. */
20368 rtl = rtl_for_decl_init (init, type);
20369 if (rtl && !early_dwarf)
20370 return add_const_value_attribute (die, rtl);
20371 /* If the host and target are sane, try harder. */
20372 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20373 && initializer_constant_valid_p (init, type))
20374 {
20375 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20376 if (size > 0 && (int) size == size)
20377 {
20378 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20379
20380 if (native_encode_initializer (init, array, size) == size)
20381 {
20382 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20383 return true;
20384 }
20385 ggc_free (array);
20386 }
20387 }
20388 return false;
20389 }
20390
20391 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20392 attribute is the const value of T, where T is an integral constant
20393 variable with static storage duration
20394 (so it can't be a PARM_DECL or a RESULT_DECL). */
20395
20396 static bool
20397 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20398 {
20399
20400 if (!decl
20401 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20402 || (VAR_P (decl) && !TREE_STATIC (decl)))
20403 return false;
20404
20405 if (TREE_READONLY (decl)
20406 && ! TREE_THIS_VOLATILE (decl)
20407 && DECL_INITIAL (decl))
20408 /* OK */;
20409 else
20410 return false;
20411
20412 /* Don't add DW_AT_const_value if abstract origin already has one. */
20413 if (get_AT (var_die, DW_AT_const_value))
20414 return false;
20415
20416 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20417 }
20418
20419 /* Convert the CFI instructions for the current function into a
20420 location list. This is used for DW_AT_frame_base when we are targeting
20421 a dwarf2 consumer that does not support the dwarf3
20422 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20423 expressions. */
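/* As a hypothetical illustration: for a function whose CFA is SP + 4 on
   entry and FP + 8 once the frame pointer has been established, the
   resulting list contains one entry covering the prologue with the
   SP-based expression and one covering the rest of the function with
   the FP-based expression, the ranges being delimited by the labels
   recorded at the CFI advance opcodes.  */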
20424
20425 static dw_loc_list_ref
20426 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20427 {
20428 int ix;
20429 dw_fde_ref fde;
20430 dw_loc_list_ref list, *list_tail;
20431 dw_cfi_ref cfi;
20432 dw_cfa_location last_cfa, next_cfa;
20433 const char *start_label, *last_label, *section;
20434 dw_cfa_location remember;
20435
20436 fde = cfun->fde;
20437 gcc_assert (fde != NULL);
20438
20439 section = secname_for_decl (current_function_decl);
20440 list_tail = &list;
20441 list = NULL;
20442
20443 memset (&next_cfa, 0, sizeof (next_cfa));
20444 next_cfa.reg = INVALID_REGNUM;
20445 remember = next_cfa;
20446
20447 start_label = fde->dw_fde_begin;
20448
20449 /* ??? Bald assumption that the CIE opcode list does not contain
20450 advance opcodes. */
20451 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20452 lookup_cfa_1 (cfi, &next_cfa, &remember);
20453
20454 last_cfa = next_cfa;
20455 last_label = start_label;
20456
20457 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20458 {
20459 /* If the first partition contained no CFI adjustments, the
20460 CIE opcodes apply to the whole first partition. */
20461 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20462 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20463 list_tail =&(*list_tail)->dw_loc_next;
20464 start_label = last_label = fde->dw_fde_second_begin;
20465 }
20466
20467 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20468 {
20469 switch (cfi->dw_cfi_opc)
20470 {
20471 case DW_CFA_set_loc:
20472 case DW_CFA_advance_loc1:
20473 case DW_CFA_advance_loc2:
20474 case DW_CFA_advance_loc4:
20475 if (!cfa_equal_p (&last_cfa, &next_cfa))
20476 {
20477 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20478 start_label, 0, last_label, 0, section);
20479
20480 list_tail = &(*list_tail)->dw_loc_next;
20481 last_cfa = next_cfa;
20482 start_label = last_label;
20483 }
20484 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20485 break;
20486
20487 case DW_CFA_advance_loc:
20488 /* The encoding is complex enough that we should never emit this. */
20489 gcc_unreachable ();
20490
20491 default:
20492 lookup_cfa_1 (cfi, &next_cfa, &remember);
20493 break;
20494 }
20495 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20496 {
20497 if (!cfa_equal_p (&last_cfa, &next_cfa))
20498 {
20499 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20500 start_label, 0, last_label, 0, section);
20501
20502 list_tail = &(*list_tail)->dw_loc_next;
20503 last_cfa = next_cfa;
20504 start_label = last_label;
20505 }
20506 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20507 start_label, 0, fde->dw_fde_end, 0, section);
20508 list_tail = &(*list_tail)->dw_loc_next;
20509 start_label = last_label = fde->dw_fde_second_begin;
20510 }
20511 }
20512
20513 if (!cfa_equal_p (&last_cfa, &next_cfa))
20514 {
20515 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20516 start_label, 0, last_label, 0, section);
20517 list_tail = &(*list_tail)->dw_loc_next;
20518 start_label = last_label;
20519 }
20520
20521 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20522 start_label, 0,
20523 fde->dw_fde_second_begin
20524 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20525 section);
20526
20527 maybe_gen_llsym (list);
20528
20529 return list;
20530 }
20531
20532 /* Compute a displacement from the "steady-state frame pointer" to the
20533 frame base (often the same as the CFA), and store it in
20534 frame_pointer_fb_offset. OFFSET is added to the displacement
20535 before the latter is negated. */
20536
20537 static void
20538 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20539 {
20540 rtx reg, elim;
20541
20542 #ifdef FRAME_POINTER_CFA_OFFSET
20543 reg = frame_pointer_rtx;
20544 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20545 #else
20546 reg = arg_pointer_rtx;
20547 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20548 #endif
20549
20550 elim = (ira_use_lra_p
20551 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20552 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20553 elim = strip_offset_and_add (elim, &offset);
20554
20555 frame_pointer_fb_offset = -offset;
20556
20557 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20558 in which to eliminate. This is because its stack pointer isn't
20559 directly accessible as a register within the ISA. To work around
20560 this, assume that while we cannot provide a proper value for
20561 frame_pointer_fb_offset, we won't need one either. We can still refer
20562 to the hard frame pointer in debug info even if no frame pointer is
20563 used, since such references are encoded with DW_OP_fbreg, which goes
20564 through the DW_AT_frame_base attribute rather than through the hard
20565 frame pointer directly. */
20566 frame_pointer_fb_offset_valid
20567 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20568 }
20569
20570 /* Generate a DW_AT_name attribute given some string value to be included as
20571 the value of the attribute. */
20572
20573 static void
20574 add_name_attribute (dw_die_ref die, const char *name_string)
20575 {
20576 if (name_string != NULL && *name_string != 0)
20577 {
20578 if (demangle_name_func)
20579 name_string = (*demangle_name_func) (name_string);
20580
20581 add_AT_string (die, DW_AT_name, name_string);
20582 }
20583 }
20584
20585 /* Generate a DW_AT_name attribute given some string value representing a
20586 file or filepath to be included as value of the attribute. */
20587 static void
20588 add_filename_attribute (dw_die_ref die, const char *name_string)
20589 {
20590 if (name_string != NULL && *name_string != 0)
20591 add_filepath_AT_string (die, DW_AT_name, name_string);
20592 }
20593
20594 /* Generate a DW_AT_description attribute given some string value to be included
20595 as the value of the attribute. */
20596
20597 static void
20598 add_desc_attribute (dw_die_ref die, const char *name_string)
20599 {
20600 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20601 return;
20602
20603 if (name_string == NULL || *name_string == 0)
20604 return;
20605
20606 if (demangle_name_func)
20607 name_string = (*demangle_name_func) (name_string);
20608
20609 add_AT_string (die, DW_AT_description, name_string);
20610 }
20611
20612 /* Generate a DW_AT_description attribute given some decl to be included
20613 as the value of the attribute. */
20614
20615 static void
20616 add_desc_attribute (dw_die_ref die, tree decl)
20617 {
20618 tree decl_name;
20619
20620 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20621 return;
20622
20623 if (decl == NULL_TREE || !DECL_P (decl))
20624 return;
20625 decl_name = DECL_NAME (decl);
20626
20627 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20628 {
20629 const char *name = dwarf2_name (decl, 0);
20630 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20631 }
20632 else
20633 {
20634 char *desc = print_generic_expr_to_str (decl);
20635 add_desc_attribute (die, desc);
20636 free (desc);
20637 }
20638 }
20639
20640 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20641 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20642 of TYPE accordingly.
20643
20644 ??? This is a temporary measure until after we're able to generate
20645 regular DWARF for the complex Ada type system. */
20646
20647 static void
20648 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20649 dw_die_ref context_die)
20650 {
20651 tree dtype;
20652 dw_die_ref dtype_die;
20653
20654 if (!lang_hooks.types.descriptive_type)
20655 return;
20656
20657 dtype = lang_hooks.types.descriptive_type (type);
20658 if (!dtype)
20659 return;
20660
20661 dtype_die = lookup_type_die (dtype);
20662 if (!dtype_die)
20663 {
20664 gen_type_die (dtype, context_die);
20665 dtype_die = lookup_type_die (dtype);
20666 gcc_assert (dtype_die);
20667 }
20668
20669 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20670 }
20671
20672 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20673
20674 static const char *
20675 comp_dir_string (void)
20676 {
20677 const char *wd;
20678 char *wd_plus_sep = NULL;
20679 static const char *cached_wd = NULL;
20680
20681 if (cached_wd != NULL)
20682 return cached_wd;
20683
20684 wd = get_src_pwd ();
20685 if (wd == NULL)
20686 return NULL;
20687
20688 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20689 {
20690 size_t wdlen = strlen (wd);
20691 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20692 strcpy (wd_plus_sep, wd);
20693 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20694 wd_plus_sep [wdlen + 1] = 0;
20695 wd = wd_plus_sep;
20696 }
20697
20698 cached_wd = remap_debug_filename (wd);
20699
20700 /* remap_debug_filename can just pass through wd or return a new gc string.
20701 These two types can't both be stored in a GTY(())-tagged string, but since
20702 the cached value lives forever just copy it if needed. */
20703 if (cached_wd != wd)
20704 {
20705 cached_wd = xstrdup (cached_wd);
20706 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20707 free (wd_plus_sep);
20708 }
20709
20710 return cached_wd;
20711 }
20712
20713 /* Generate a DW_AT_comp_dir attribute for DIE. */
20714
20715 static void
20716 add_comp_dir_attribute (dw_die_ref die)
20717 {
20718 const char * wd = comp_dir_string ();
20719 if (wd != NULL)
20720 add_filepath_AT_string (die, DW_AT_comp_dir, wd);
20721 }
20722
20723 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20724 pointer computation, ...), output a representation for that bound according
20725 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20726 loc_list_from_tree for the meaning of CONTEXT. */
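/* For instance, an Ada array whose upper bound is stored in a record
   discriminant may have that bound emitted as a reference to the
   discriminant's DIE (dw_scalar_form_reference); a literal bound such
   as 9 is emitted as a plain constant (dw_scalar_form_constant); and a
   computed bound may be emitted as a DWARF expression, or as an
   artificial variable DIE holding its location, when
   dw_scalar_form_exprloc is permitted.  */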
20727
20728 static void
20729 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20730 int forms, struct loc_descr_context *context)
20731 {
20732 dw_die_ref context_die, decl_die = NULL;
20733 dw_loc_list_ref list;
20734 bool strip_conversions = true;
20735 bool placeholder_seen = false;
20736
20737 while (strip_conversions)
20738 switch (TREE_CODE (value))
20739 {
20740 case ERROR_MARK:
20741 case SAVE_EXPR:
20742 return;
20743
20744 CASE_CONVERT:
20745 case VIEW_CONVERT_EXPR:
20746 value = TREE_OPERAND (value, 0);
20747 break;
20748
20749 default:
20750 strip_conversions = false;
20751 break;
20752 }
20753
20754 /* If possible and permitted, output the attribute as a constant. */
20755 if ((forms & dw_scalar_form_constant) != 0
20756 && TREE_CODE (value) == INTEGER_CST)
20757 {
20758 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20759
20760 /* If HOST_WIDE_INT is big enough then represent the bound as
20761 a constant value. We need to choose a form based on
20762 whether the type is signed or unsigned. We cannot just
20763 call add_AT_unsigned if the value itself is positive
20764 (add_AT_unsigned might add the unsigned value encoded as
20765 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20766 bounds type and then sign extend any unsigned values found
20767 for signed types. This is needed only for
20768 DW_AT_{lower,upper}_bound, since for most other attributes,
20769 consumers will treat DW_FORM_data[1248] as unsigned values,
20770 regardless of the underlying type. */
20771 if (prec <= HOST_BITS_PER_WIDE_INT
20772 || tree_fits_uhwi_p (value))
20773 {
20774 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20775 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20776 else
20777 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20778 }
20779 else if (dwarf_version >= 5
20780 && TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (value))) == 128)
20781 /* Otherwise represent the bound as an unsigned value with
20782 the precision of its type. The precision and signedness
20783 of the type will be necessary to re-interpret it
20784 unambiguously. */
20785 add_AT_wide (die, attr, wi::to_wide (value));
20786 else
20787 {
20788 rtx v = immed_wide_int_const (wi::to_wide (value),
20789 TYPE_MODE (TREE_TYPE (value)));
20790 dw_loc_descr_ref loc
20791 = loc_descriptor (v, TYPE_MODE (TREE_TYPE (value)),
20792 VAR_INIT_STATUS_INITIALIZED);
20793 if (loc)
20794 add_AT_loc (die, attr, loc);
20795 }
20796 return;
20797 }
20798
20799 /* Otherwise, if it's possible and permitted too, output a reference to
20800 another DIE. */
20801 if ((forms & dw_scalar_form_reference) != 0)
20802 {
20803 tree decl = NULL_TREE;
20804
20805 /* Some type attributes reference an outer type. For instance, the upper
20806 bound of an array may reference an embedding record (this happens in
20807 Ada). */
20808 if (TREE_CODE (value) == COMPONENT_REF
20809 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20810 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20811 decl = TREE_OPERAND (value, 1);
20812
20813 else if (VAR_P (value)
20814 || TREE_CODE (value) == PARM_DECL
20815 || TREE_CODE (value) == RESULT_DECL)
20816 decl = value;
20817
20818 if (decl != NULL_TREE)
20819 {
20820 decl_die = lookup_decl_die (decl);
20821
20822 /* ??? Can this happen, or should the variable have been bound
20823 first? Probably it can, since I imagine that we try to create
20824 the types of parameters in the order in which they exist in
20825 the list, and won't have created a forward reference to a
20826 later parameter. */
20827 if (decl_die != NULL)
20828 {
20829 if (get_AT (decl_die, DW_AT_location)
20830 || get_AT (decl_die, DW_AT_data_member_location)
20831 || get_AT (decl_die, DW_AT_const_value))
20832 {
20833 add_AT_die_ref (die, attr, decl_die);
20834 return;
20835 }
20836 }
20837 }
20838 }
20839
20840 /* Last chance: try to create a stack operation procedure to evaluate the
20841 value. Do nothing if even that is not possible or permitted. */
20842 if ((forms & dw_scalar_form_exprloc) == 0)
20843 return;
20844
20845 list = loc_list_from_tree (value, 2, context);
20846 if (context && context->placeholder_arg)
20847 {
20848 placeholder_seen = context->placeholder_seen;
20849 context->placeholder_seen = false;
20850 }
20851 if (list == NULL || single_element_loc_list_p (list))
20852 {
20853 /* If this attribute is neither a reference nor a constant, it is
20854 a DWARF expression rather than a location description. For that,
20855 loc_list_from_tree (value, 0, &context) is needed. */
20856 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20857 if (list2 && single_element_loc_list_p (list2))
20858 {
20859 if (placeholder_seen)
20860 {
20861 struct dwarf_procedure_info dpi;
20862 dpi.fndecl = NULL_TREE;
20863 dpi.args_count = 1;
20864 if (!resolve_args_picking (list2->expr, 1, &dpi))
20865 return;
20866 }
20867 add_AT_loc (die, attr, list2->expr);
20868 return;
20869 }
20870 }
20871
20872 /* If that failed to give a single element location list, fall back to
20873 outputting this as a reference... still if permitted. */
20874 if (list == NULL
20875 || (forms & dw_scalar_form_reference) == 0
20876 || placeholder_seen)
20877 return;
20878
20879 if (!decl_die)
20880 {
20881 if (current_function_decl == 0)
20882 context_die = comp_unit_die ();
20883 else
20884 context_die = lookup_decl_die (current_function_decl);
20885
20886 decl_die = new_die (DW_TAG_variable, context_die, value);
20887 add_AT_flag (decl_die, DW_AT_artificial, 1);
20888 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20889 context_die);
20890 }
20891
20892 add_AT_location_description (decl_die, DW_AT_location, list);
20893 add_AT_die_ref (die, attr, decl_die);
20894 }
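
/* Illustrative sketch only (values are hypothetical, not literal output of
this function): depending on the FORMS mask and on what VALUE folds to,
the attribute added above ends up in one of three shapes, e.g. for an
array upper bound:
constant: DW_AT_upper_bound DW_FORM_data1 9
reference: DW_AT_upper_bound DW_FORM_ref4 <DIE of the bound decl>
exprloc: DW_AT_upper_bound DW_FORM_exprloc <DWARF expression> */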
20895
20896 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20897 default. */
20898
20899 static int
20900 lower_bound_default (void)
20901 {
20902 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20903 {
20904 case DW_LANG_C:
20905 case DW_LANG_C89:
20906 case DW_LANG_C99:
20907 case DW_LANG_C11:
20908 case DW_LANG_C_plus_plus:
20909 case DW_LANG_C_plus_plus_11:
20910 case DW_LANG_C_plus_plus_14:
20911 case DW_LANG_ObjC:
20912 case DW_LANG_ObjC_plus_plus:
20913 return 0;
20914 case DW_LANG_Fortran77:
20915 case DW_LANG_Fortran90:
20916 case DW_LANG_Fortran95:
20917 case DW_LANG_Fortran03:
20918 case DW_LANG_Fortran08:
20919 return 1;
20920 case DW_LANG_UPC:
20921 case DW_LANG_D:
20922 case DW_LANG_Python:
20923 return dwarf_version >= 4 ? 0 : -1;
20924 case DW_LANG_Ada95:
20925 case DW_LANG_Ada83:
20926 case DW_LANG_Cobol74:
20927 case DW_LANG_Cobol85:
20928 case DW_LANG_Modula2:
20929 case DW_LANG_PLI:
20930 return dwarf_version >= 4 ? 1 : -1;
20931 default:
20932 return -1;
20933 }
20934 }
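
/* For example (a sketch of the effect, not literal compiler output): in a
C99 compilation unit the default above is 0, so the DW_TAG_subrange_type
for "int a[10];" can omit DW_AT_lower_bound and carry only
DW_AT_upper_bound 9, while a Fortran unit, whose default is 1, can
describe "integer :: a(10)" with just DW_AT_upper_bound 10. */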
20935
20936 /* Given a tree node describing an array bound (either lower or upper) output
20937 a representation for that bound. */
20938
20939 static void
20940 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20941 tree bound, struct loc_descr_context *context)
20942 {
20943 int dflt;
20944
20945 while (1)
20946 switch (TREE_CODE (bound))
20947 {
20948 /* Strip all conversions. */
20949 CASE_CONVERT:
20950 case VIEW_CONVERT_EXPR:
20951 bound = TREE_OPERAND (bound, 0);
20952 break;
20953
20954 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20955 are even omitted when they are the default. */
20956 case INTEGER_CST:
20957 /* If the value for this bound is the default one, we can even omit the
20958 attribute. */
20959 if (bound_attr == DW_AT_lower_bound
20960 && tree_fits_shwi_p (bound)
20961 && (dflt = lower_bound_default ()) != -1
20962 && tree_to_shwi (bound) == dflt)
20963 return;
20964
20965 /* FALLTHRU */
20966
20967 default:
20968 /* Because of the complex interactions there can be with other GNAT
20969 encodings, GDB is not yet ready to handle a proper DWARF description
20970 for self-referential subrange bounds: let GNAT encodings do the
20971 magic in such a case. */
20972 if (is_ada ()
20973 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20974 && contains_placeholder_p (bound))
20975 return;
20976
20977 add_scalar_info (subrange_die, bound_attr, bound,
20978 dw_scalar_form_constant
20979 | dw_scalar_form_exprloc
20980 | dw_scalar_form_reference,
20981 context);
20982 return;
20983 }
20984 }
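
/* Sketch of the variable-bound case (hypothetical GNU C source):
void f (int n) { int a[n]; ... }
Here the upper bound "n - 1" is not an INTEGER_CST, so the default case
above hands it to add_scalar_info, which typically emits either a
reference to an artificial variable holding the bound or a DWARF
expression that computes it. */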
20985
20986 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20987 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20988 Note that the block of subscript information for an array type also
20989 includes information about the element type of the given array type.
20990
20991 This function reuses previously set type and bound information if
20992 available. */
20993
20994 static void
20995 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20996 {
20997 unsigned dimension_number;
20998 tree lower, upper;
20999 dw_die_ref child = type_die->die_child;
21000
21001 for (dimension_number = 0;
21002 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
21003 type = TREE_TYPE (type), dimension_number++)
21004 {
21005 tree domain = TYPE_DOMAIN (type);
21006
21007 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
21008 break;
21009
21010 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
21011 and (in GNU C only) variable bounds. Handle all three forms
21012 here. */
21013
21014 /* Find and reuse a previously generated DW_TAG_subrange_type if
21015 available.
21016
21017 For multi-dimensional arrays, as we iterate through the
21018 various dimensions in the enclosing for loop above, we also
21019 iterate through the DIE children and pick up each
21020 DW_TAG_subrange_type previously generated (if available).
21021 Each child DW_TAG_subrange_type DIE describes the range of
21022 the current dimension. At this point we should have as many
21023 DW_TAG_subrange_type's as we have dimensions in the
21024 array. */
21025 dw_die_ref subrange_die = NULL;
21026 if (child)
21027 while (1)
21028 {
21029 child = child->die_sib;
21030 if (child->die_tag == DW_TAG_subrange_type)
21031 subrange_die = child;
21032 if (child == type_die->die_child)
21033 {
21034 /* If we wrapped around, stop looking next time. */
21035 child = NULL;
21036 break;
21037 }
21038 if (child->die_tag == DW_TAG_subrange_type)
21039 break;
21040 }
21041 if (!subrange_die)
21042 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
21043
21044 if (domain)
21045 {
21046 /* We have an array type with specified bounds. */
21047 lower = TYPE_MIN_VALUE (domain);
21048 upper = TYPE_MAX_VALUE (domain);
21049
21050 /* Define the index type. */
21051 if (TREE_TYPE (domain)
21052 && !get_AT (subrange_die, DW_AT_type))
21053 {
21054 /* ??? This is probably an Ada unnamed subrange type. Ignore the
21055 TREE_TYPE field. We can't emit debug info for this
21056 because it is an unnamed integral type. */
21057 if (TREE_CODE (domain) == INTEGER_TYPE
21058 && TYPE_NAME (domain) == NULL_TREE
21059 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
21060 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
21061 ;
21062 else
21063 add_type_attribute (subrange_die, TREE_TYPE (domain),
21064 TYPE_UNQUALIFIED, false, type_die);
21065 }
21066
21067 /* ??? If upper is NULL, the array has unspecified length,
21068 but it does have a lower bound. This happens with Fortran:
21069 dimension arr(N:*).
21070 Since the debugger is definitely going to need to know N
21071 to produce useful results, go ahead and output the lower
21072 bound solo, and hope the debugger can cope. */
21073
21074 if (!get_AT (subrange_die, DW_AT_lower_bound))
21075 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21076 if (!get_AT (subrange_die, DW_AT_upper_bound)
21077 && !get_AT (subrange_die, DW_AT_count))
21078 {
21079 if (upper)
21080 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21081 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
21082 /* Zero-length array. */
21083 add_bound_info (subrange_die, DW_AT_count,
21084 build_int_cst (TREE_TYPE (lower), 0), NULL);
21085 }
21086 }
21087
21088 /* Otherwise we have an array type with an unspecified length. The
21089 DWARF-2 spec does not say how to handle this; let's just leave out the
21090 bounds. */
21091 }
21092 }
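
/* Illustrative sketch (C source assumed): for
int a[3][5];
COLLAPSE_P is true, so a single DW_TAG_array_type gets two
DW_TAG_subrange_type children with upper bounds 2 and 4; with COLLAPSE_P
false (the Ada case) each nested ARRAY_TYPE keeps its own array DIE with
a single subscript. */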
21093
21094 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21095
21096 static void
21097 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21098 {
21099 dw_die_ref decl_die;
21100 HOST_WIDE_INT size;
21101 dw_loc_descr_ref size_expr = NULL;
21102
21103 switch (TREE_CODE (tree_node))
21104 {
21105 case ERROR_MARK:
21106 size = 0;
21107 break;
21108 case ENUMERAL_TYPE:
21109 case RECORD_TYPE:
21110 case UNION_TYPE:
21111 case QUAL_UNION_TYPE:
21112 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21113 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21114 {
21115 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21116 return;
21117 }
21118 size_expr = type_byte_size (tree_node, &size);
21119 break;
21120 case FIELD_DECL:
21121 /* For a data member of a struct or union, the DW_AT_byte_size is
21122 generally given as the number of bytes normally allocated for an
21123 object of the *declared* type of the member itself. This is true
21124 even for bit-fields. */
21125 size = int_size_in_bytes (field_type (tree_node));
21126 break;
21127 default:
21128 gcc_unreachable ();
21129 }
21130
21131 /* Support for dynamically-sized objects was introduced by DWARFv3.
21132 At the moment, GDB does not handle variable byte sizes very well,
21133 though. */
21134 if ((dwarf_version >= 3 || !dwarf_strict)
21135 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21136 && size_expr != NULL)
21137 add_AT_loc (die, DW_AT_byte_size, size_expr);
21138
21139 /* Note that `size' might be -1 when we get to this point. If it is, that
21140 indicates that the byte size of the entity in question is variable and
21141 that we could not generate a DWARF expression that computes it. */
21142 if (size >= 0)
21143 add_AT_unsigned (die, DW_AT_byte_size, size);
21144 }
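
/* For instance (a sketch, sizes assume a typical 32-bit-int target):
"struct s { int i; char c; };" gets DW_AT_byte_size 8, padding included,
while a bit-field member declared "unsigned f : 3;" gets DW_AT_byte_size 4,
the size of its declared type, per the FIELD_DECL case above. */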
21145
21146 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21147 alignment. */
21148
21149 static void
21150 add_alignment_attribute (dw_die_ref die, tree tree_node)
21151 {
21152 if (dwarf_version < 5 && dwarf_strict)
21153 return;
21154
21155 unsigned align;
21156
21157 if (DECL_P (tree_node))
21158 {
21159 if (!DECL_USER_ALIGN (tree_node))
21160 return;
21161
21162 align = DECL_ALIGN_UNIT (tree_node);
21163 }
21164 else if (TYPE_P (tree_node))
21165 {
21166 if (!TYPE_USER_ALIGN (tree_node))
21167 return;
21168
21169 align = TYPE_ALIGN_UNIT (tree_node);
21170 }
21171 else
21172 gcc_unreachable ();
21173
21174 add_AT_unsigned (die, DW_AT_alignment, align);
21175 }
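
/* Sketch of the intended effect (hypothetical C source): a declaration
such as "_Alignas (16) int buf[4];" has DECL_USER_ALIGN set, so its DIE
gets DW_AT_alignment 16; a plain "int i;" with only the default ABI
alignment gets no DW_AT_alignment at all. */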
21176
21177 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21178 which specifies the distance in bits from the highest order bit of the
21179 "containing object" for the bit-field to the highest order bit of the
21180 bit-field itself.
21181
21182 For any given bit-field, the "containing object" is a hypothetical object
21183 (of some integral or enum type) within which the given bit-field lives. The
21184 type of this hypothetical "containing object" is always the same as the
21185 declared type of the individual bit-field itself. The determination of the
21186 exact location of the "containing object" for a bit-field is rather
21187 complicated. It's handled by the `field_byte_offset' function (above).
21188
21189 Note that it is the size (in bytes) of the hypothetical "containing object"
21190 which will be given in the DW_AT_byte_size attribute for this bit-field.
21191 (See `byte_size_attribute' above). */
21192
21193 static inline void
21194 add_bit_offset_attribute (dw_die_ref die, tree decl)
21195 {
21196 HOST_WIDE_INT object_offset_in_bytes;
21197 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21198 HOST_WIDE_INT bitpos_int;
21199 HOST_WIDE_INT highest_order_object_bit_offset;
21200 HOST_WIDE_INT highest_order_field_bit_offset;
21201 HOST_WIDE_INT bit_offset;
21202
21203 /* The containing object is within the DECL_CONTEXT. */
21204 struct vlr_context ctx = { DECL_CONTEXT (decl), NULL_TREE };
21205
21206 field_byte_offset (decl, &ctx, &object_offset_in_bytes);
21207
21208 /* Must be a field and a bit field. */
21209 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21210
21211 /* We can't yet handle bit-fields whose offsets are variable, so if we
21212 encounter such things, just return without generating any attribute
21213 whatsoever. Likewise for variable or too large size. */
21214 if (! tree_fits_shwi_p (bit_position (decl))
21215 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21216 return;
21217
21218 bitpos_int = int_bit_position (decl);
21219
21220 /* Note that the bit offset is always the distance (in bits) from the
21221 highest-order bit of the "containing object" to the highest-order bit of
21222 the bit-field itself. Since the "high-order end" of any object or field
21223 is different on big-endian and little-endian machines, the computation
21224 below must take account of these differences. */
21225 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21226 highest_order_field_bit_offset = bitpos_int;
21227
21228 if (! BYTES_BIG_ENDIAN)
21229 {
21230 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21231 highest_order_object_bit_offset +=
21232 simple_type_size_in_bits (original_type);
21233 }
21234
21235 bit_offset
21236 = (! BYTES_BIG_ENDIAN
21237 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21238 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21239
21240 if (bit_offset < 0)
21241 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21242 else
21243 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21244 }
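
/* Worked example (a sketch; exact numbers depend on the ABI): for
struct s { unsigned a : 3; unsigned b : 5; };
on a little-endian target with 32-bit int, member "b" has bitpos_int 3
and DECL_SIZE 5, its containing object starts at byte 0 and is 32 bits
wide, so the computation above yields
DW_AT_bit_offset = (0 + 32) - (3 + 5) = 24. */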
21245
21246 /* For a FIELD_DECL node which represents a bit field, output an attribute
21247 which specifies the length in bits of the given field. */
21248
21249 static inline void
21250 add_bit_size_attribute (dw_die_ref die, tree decl)
21251 {
21252 /* Must be a field and a bit field. */
21253 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21254 && DECL_BIT_FIELD_TYPE (decl));
21255
21256 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21257 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21258 }
21259
21260 /* If the compiled language is ANSI C, add a 'prototyped' attribute when
21261 argument types are given for the parameters of a function. */
21262
21263 static inline void
21264 add_prototyped_attribute (dw_die_ref die, tree func_type)
21265 {
21266 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21267 {
21268 case DW_LANG_C:
21269 case DW_LANG_C89:
21270 case DW_LANG_C99:
21271 case DW_LANG_C11:
21272 case DW_LANG_ObjC:
21273 if (prototype_p (func_type))
21274 add_AT_flag (die, DW_AT_prototyped, 1);
21275 break;
21276 default:
21277 break;
21278 }
21279 }
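
/* For example (sketched C semantics): "int f (void);" and
"int g (int, char *);" are prototypes, so their DIEs get
DW_AT_prototyped 1, while an old-style "int h ();" declaration does not
satisfy prototype_p and the attribute is omitted. */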
21280
21281 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21282 by looking in the type declaration, the object declaration equate table or
21283 the block mapping. */
21284
21285 static inline void
21286 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21287 {
21288 dw_die_ref origin_die = NULL;
21289
21290 /* For late LTO debug output we want to refer directly to the abstract
21291 DIE in the early debug rather than to the possibly existing concrete
21292 instance, and avoid creating one just for this purpose. */
21293 sym_off_pair *desc;
21294 if (in_lto_p
21295 && external_die_map
21296 && (desc = external_die_map->get (origin)))
21297 {
21298 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21299 desc->sym, desc->off);
21300 return;
21301 }
21302
21303 if (DECL_P (origin))
21304 origin_die = lookup_decl_die (origin);
21305 else if (TYPE_P (origin))
21306 origin_die = lookup_type_die (origin);
21307 else if (TREE_CODE (origin) == BLOCK)
21308 origin_die = lookup_block_die (origin);
21309
21310 /* XXX: Functions that are never lowered don't always have correct block
21311 trees (in the case of Java and some other languages, they simply have no
21312 block tree). For these functions, there is nothing we can really do to
21313 output correct debug info for inlined functions in all cases. Rather
21314 than die, we'll just produce deficient debug info now, in that we will
21315 have variables without a proper abstract origin. In the future, when all
21316 functions are lowered, we should re-add a gcc_assert (origin_die)
21317 here. */
21318
21319 if (origin_die)
21320 {
21321 dw_attr_node *a;
21322 /* Like above, if we already created a concrete instance DIE
21323 do not use that for the abstract origin but the early DIE
21324 if present. */
21325 if (in_lto_p
21326 && (a = get_AT (origin_die, DW_AT_abstract_origin)))
21327 origin_die = AT_ref (a);
21328 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21329 }
21330 }
21331
21332 /* We do not currently support the pure_virtual attribute. */
21333
21334 static inline void
21335 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21336 {
21337 if (DECL_VINDEX (func_decl))
21338 {
21339 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21340
21341 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21342 add_AT_loc (die, DW_AT_vtable_elem_location,
21343 new_loc_descr (DW_OP_constu,
21344 tree_to_shwi (DECL_VINDEX (func_decl)),
21345 0));
21346
21347 /* GNU extension: Record what type this method came from originally. */
21348 if (debug_info_level > DINFO_LEVEL_TERSE
21349 && DECL_CONTEXT (func_decl))
21350 add_AT_die_ref (die, DW_AT_containing_type,
21351 lookup_type_die (DECL_CONTEXT (func_decl)));
21352 }
21353 }
21354 \f
21355 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21356 given decl. This was a vendor extension until DWARF 4
21357 standardized it. */
21358
21359 static void
21360 add_linkage_attr (dw_die_ref die, tree decl)
21361 {
21362 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21363
21364 /* Mimic what assemble_name_raw does with a leading '*'. */
21365 if (name[0] == '*')
21366 name = &name[1];
21367
21368 if (dwarf_version >= 4)
21369 add_AT_string (die, DW_AT_linkage_name, name);
21370 else
21371 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21372 }
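
/* Sketch (hypothetical C++ source, Itanium mangling assumed): for
"namespace N { int f (int); }" the DIE for f typically carries
DW_AT_name "f" plus DW_AT_linkage_name "_ZN1N1fEi"; strict pre-DWARF-4
output falls back to DW_AT_MIPS_linkage_name as above. */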
21373
21374 /* Add source coordinate attributes for the given decl. */
21375
21376 static void
21377 add_src_coords_attributes (dw_die_ref die, tree decl)
21378 {
21379 expanded_location s;
21380
21381 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21382 return;
21383 s = expand_location (DECL_SOURCE_LOCATION (decl));
21384 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21385 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21386 if (debug_column_info && s.column)
21387 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21388 }
21389
21390 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21391
21392 static void
21393 add_linkage_name_raw (dw_die_ref die, tree decl)
21394 {
21395 /* Defer until we have an assembler name set. */
21396 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21397 {
21398 limbo_die_node *asm_name;
21399
21400 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21401 asm_name->die = die;
21402 asm_name->created_for = decl;
21403 asm_name->next = deferred_asm_name;
21404 deferred_asm_name = asm_name;
21405 }
21406 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21407 add_linkage_attr (die, decl);
21408 }
21409
21410 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21411
21412 static void
21413 add_linkage_name (dw_die_ref die, tree decl)
21414 {
21415 if (debug_info_level > DINFO_LEVEL_NONE
21416 && VAR_OR_FUNCTION_DECL_P (decl)
21417 && TREE_PUBLIC (decl)
21418 && !(VAR_P (decl) && DECL_REGISTER (decl))
21419 && die->die_tag != DW_TAG_member)
21420 add_linkage_name_raw (die, decl);
21421 }
21422
21423 /* Add a DW_AT_name attribute and source coordinate attribute for the
21424 given decl, but only if it actually has a name. */
21425
21426 static void
21427 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21428 bool no_linkage_name)
21429 {
21430 tree decl_name;
21431
21432 decl_name = DECL_NAME (decl);
21433 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21434 {
21435 const char *name = dwarf2_name (decl, 0);
21436 if (name)
21437 add_name_attribute (die, name);
21438 else
21439 add_desc_attribute (die, decl);
21440
21441 if (! DECL_ARTIFICIAL (decl))
21442 add_src_coords_attributes (die, decl);
21443
21444 if (!no_linkage_name)
21445 add_linkage_name (die, decl);
21446 }
21447 else
21448 add_desc_attribute (die, decl);
21449
21450 #ifdef VMS_DEBUGGING_INFO
21451 /* Get the function's name, as described by its RTL. This may be different
21452 from the DECL_NAME name used in the source file. */
21453 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21454 {
21455 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21456 XEXP (DECL_RTL (decl), 0), false);
21457 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21458 }
21459 #endif /* VMS_DEBUGGING_INFO */
21460 }
21461
21462 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21463
21464 static void
21465 add_discr_value (dw_die_ref die, dw_discr_value *value)
21466 {
21467 dw_attr_node attr;
21468
21469 attr.dw_attr = DW_AT_discr_value;
21470 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21471 attr.dw_attr_val.val_entry = NULL;
21472 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21473 if (value->pos)
21474 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21475 else
21476 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21477 add_dwarf_attr (die, &attr);
21478 }
21479
21480 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21481
21482 static void
21483 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21484 {
21485 dw_attr_node attr;
21486
21487 attr.dw_attr = DW_AT_discr_list;
21488 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21489 attr.dw_attr_val.val_entry = NULL;
21490 attr.dw_attr_val.v.val_discr_list = discr_list;
21491 add_dwarf_attr (die, &attr);
21492 }
21493
21494 static inline dw_discr_list_ref
21495 AT_discr_list (dw_attr_node *attr)
21496 {
21497 return attr->dw_attr_val.v.val_discr_list;
21498 }
21499
21500 #ifdef VMS_DEBUGGING_INFO
21501 /* Output the debug main pointer die for VMS */
21502
21503 void
21504 dwarf2out_vms_debug_main_pointer (void)
21505 {
21506 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21507 dw_die_ref die;
21508
21509 /* Allocate the VMS debug main subprogram die. */
21510 die = new_die_raw (DW_TAG_subprogram);
21511 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21512 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21513 current_function_funcdef_no);
21514 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21515
21516 /* Make it the first child of comp_unit_die (). */
21517 die->die_parent = comp_unit_die ();
21518 if (comp_unit_die ()->die_child)
21519 {
21520 die->die_sib = comp_unit_die ()->die_child->die_sib;
21521 comp_unit_die ()->die_child->die_sib = die;
21522 }
21523 else
21524 {
21525 die->die_sib = die;
21526 comp_unit_die ()->die_child = die;
21527 }
21528 }
21529 #endif /* VMS_DEBUGGING_INFO */
21530
21531 /* walk_tree helper function for uses_local_type, below. */
21532
21533 static tree
21534 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21535 {
21536 if (!TYPE_P (*tp))
21537 *walk_subtrees = 0;
21538 else
21539 {
21540 tree name = TYPE_NAME (*tp);
21541 if (name && DECL_P (name) && decl_function_context (name))
21542 return *tp;
21543 }
21544 return NULL_TREE;
21545 }
21546
21547 /* If TYPE involves a function-local type (including a local typedef to a
21548 non-local type), returns that type; otherwise returns NULL_TREE. */
21549
21550 static tree
21551 uses_local_type (tree type)
21552 {
21553 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21554 return used;
21555 }
21556
21557 /* Return the DIE for the scope that immediately contains this type.
21558 Non-named types that do not involve a function-local type get global
21559 scope. Named types nested in namespaces or other types get their
21560 containing scope. All other types (i.e. function-local named types) get
21561 the current active scope. */
21562
21563 static dw_die_ref
21564 scope_die_for (tree t, dw_die_ref context_die)
21565 {
21566 dw_die_ref scope_die = NULL;
21567 tree containing_scope;
21568
21569 /* Non-types always go in the current scope. */
21570 gcc_assert (TYPE_P (t));
21571
21572 /* Use the scope of the typedef, rather than the scope of the type
21573 it refers to. */
21574 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21575 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21576 else
21577 containing_scope = TYPE_CONTEXT (t);
21578
21579 /* Use the containing namespace if there is one. */
21580 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21581 {
21582 if (context_die == lookup_decl_die (containing_scope))
21583 /* OK */;
21584 else if (debug_info_level > DINFO_LEVEL_TERSE)
21585 context_die = get_context_die (containing_scope);
21586 else
21587 containing_scope = NULL_TREE;
21588 }
21589
21590 /* Ignore function type "scopes" from the C frontend. They mean that
21591 a tagged type is local to a parmlist of a function declarator, but
21592 that isn't useful to DWARF. */
21593 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21594 containing_scope = NULL_TREE;
21595
21596 if (SCOPE_FILE_SCOPE_P (containing_scope))
21597 {
21598 /* If T uses a local type keep it local as well, to avoid references
21599 to function-local DIEs from outside the function. */
21600 if (current_function_decl && uses_local_type (t))
21601 scope_die = context_die;
21602 else
21603 scope_die = comp_unit_die ();
21604 }
21605 else if (TYPE_P (containing_scope))
21606 {
21607 /* For types, we can just look up the appropriate DIE. */
21608 if (debug_info_level > DINFO_LEVEL_TERSE)
21609 scope_die = get_context_die (containing_scope);
21610 else
21611 {
21612 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21613 if (scope_die == NULL)
21614 scope_die = comp_unit_die ();
21615 }
21616 }
21617 else
21618 scope_die = context_die;
21619
21620 return scope_die;
21621 }
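
/* Sketch of the rules above (hypothetical C++ source): for
"struct Outer { struct Inner { int i; }; };" the DIE for Inner is
parented under the DIE for Outer; a plain file-scope struct lands
directly under comp_unit_die (); and a named struct declared inside a
function stays in the current active scope, typically that function's
DIE. */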
21622
21623 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21624
21625 static inline int
21626 local_scope_p (dw_die_ref context_die)
21627 {
21628 for (; context_die; context_die = context_die->die_parent)
21629 if (context_die->die_tag == DW_TAG_inlined_subroutine
21630 || context_die->die_tag == DW_TAG_subprogram)
21631 return 1;
21632
21633 return 0;
21634 }
21635
21636 /* Returns nonzero if CONTEXT_DIE is a class. */
21637
21638 static inline int
21639 class_scope_p (dw_die_ref context_die)
21640 {
21641 return (context_die
21642 && (context_die->die_tag == DW_TAG_structure_type
21643 || context_die->die_tag == DW_TAG_class_type
21644 || context_die->die_tag == DW_TAG_interface_type
21645 || context_die->die_tag == DW_TAG_union_type));
21646 }
21647
21648 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21649 whether or not to treat a DIE in this context as a declaration. */
21650
21651 static inline int
21652 class_or_namespace_scope_p (dw_die_ref context_die)
21653 {
21654 return (class_scope_p (context_die)
21655 || (context_die && context_die->die_tag == DW_TAG_namespace));
21656 }
21657
21658 /* Many forms of DIEs require a "type description" attribute. This
21659 routine locates the proper "type descriptor" die for the type given
21660 by 'type' plus any additional qualifiers given by 'cv_quals', and
21661 adds a DW_AT_type attribute below the given die. */
21662
21663 static void
21664 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21665 bool reverse, dw_die_ref context_die)
21666 {
21667 enum tree_code code = TREE_CODE (type);
21668 dw_die_ref type_die = NULL;
21669
21670 if (debug_info_level <= DINFO_LEVEL_TERSE)
21671 return;
21672
21673 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21674 or fixed-point type, use the inner type. This is because we have no
21675 support for unnamed types in base_type_die. This can happen if this is
21676 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21677 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21678 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21679 type = TREE_TYPE (type), code = TREE_CODE (type);
21680
21681 if (code == ERROR_MARK
21682 /* Handle a special case. For functions whose return type is void, we
21683 generate *no* type attribute. (Note that no object may have type
21684 `void', so this only applies to function return types). */
21685 || code == VOID_TYPE)
21686 return;
21687
21688 type_die = modified_type_die (type,
21689 cv_quals | TYPE_QUALS (type),
21690 reverse,
21691 context_die);
21692
21693 if (type_die != NULL)
21694 add_AT_die_ref (object_die, DW_AT_type, type_die);
21695 }
21696
21697 /* Given an object die, add the calling convention attribute for the
21698 function call type. */
21699 static void
21700 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21701 {
21702 enum dwarf_calling_convention value = DW_CC_normal;
21703
21704 value = ((enum dwarf_calling_convention)
21705 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21706
21707 if (is_fortran ()
21708 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21709 {
21710 /* DWARF 2 doesn't provide a way to identify a program's source-level
21711 entry point. DW_AT_calling_convention attributes are only meant
21712 to describe functions' calling conventions. However, lacking a
21713 better way to signal the Fortran main program, we used this for
21714 a long time, following existing custom. Now, DWARF 4 has
21715 DW_AT_main_subprogram, which we add below, but some tools still
21716 rely on the old way, which we thus keep. */
21717 value = DW_CC_program;
21718
21719 if (dwarf_version >= 4 || !dwarf_strict)
21720 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21721 }
21722
21723 /* Only add the attribute if the backend requests it and the value
21724 is not DW_CC_normal. */
21725 if (value && (value != DW_CC_normal))
21726 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21727 }
21728
21729 /* Given a tree pointer to a struct, class, union, or enum type node, return
21730 a pointer to the (string) tag name for the given type, or zero if the type
21731 was declared without a tag. */
21732
21733 static const char *
21734 type_tag (const_tree type)
21735 {
21736 const char *name = 0;
21737
21738 if (TYPE_NAME (type) != 0)
21739 {
21740 tree t = 0;
21741
21742 /* Find the IDENTIFIER_NODE for the type name. */
21743 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21744 && !TYPE_NAMELESS (type))
21745 t = TYPE_NAME (type);
21746
21747 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21748 a TYPE_DECL node, regardless of whether or not a `typedef' was
21749 involved. */
21750 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21751 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21752 {
21753 /* We want to be extra verbose. Don't call dwarf_name if
21754 DECL_NAME isn't set. The default hook for decl_printable_name
21755 doesn't like that, and in this context it's correct to return
21756 0, instead of "<anonymous>" or the like. */
21757 if (DECL_NAME (TYPE_NAME (type))
21758 && !DECL_NAMELESS (TYPE_NAME (type)))
21759 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21760 }
21761
21762 /* Now get the name as a string, or invent one. */
21763 if (!name && t != 0)
21764 name = IDENTIFIER_POINTER (t);
21765 }
21766
21767 return (name == 0 || *name == '\0') ? 0 : name;
21768 }
21769
21770 /* Return the type associated with a data member, make a special check
21771 for bit field types. */
21772
21773 static inline tree
21774 member_declared_type (const_tree member)
21775 {
21776 return (DECL_BIT_FIELD_TYPE (member)
21777 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21778 }
21779
21780 /* Get the decl's label, as described by its RTL. This may be different
21781 from the DECL_NAME name used in the source file. */
21782
21783 #if 0
21784 static const char *
21785 decl_start_label (tree decl)
21786 {
21787 rtx x;
21788 const char *fnname;
21789
21790 x = DECL_RTL (decl);
21791 gcc_assert (MEM_P (x));
21792
21793 x = XEXP (x, 0);
21794 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21795
21796 fnname = XSTR (x, 0);
21797 return fnname;
21798 }
21799 #endif
21800 \f
21801 /* For variable-length arrays that have been previously generated, but
21802 may be incomplete due to missing subscript info, fill the subscript
21803 info. Return TRUE if this is one of those cases. */
21804 static bool
21805 fill_variable_array_bounds (tree type)
21806 {
21807 if (TREE_ASM_WRITTEN (type)
21808 && TREE_CODE (type) == ARRAY_TYPE
21809 && variably_modified_type_p (type, NULL))
21810 {
21811 dw_die_ref array_die = lookup_type_die (type);
21812 if (!array_die)
21813 return false;
21814 add_subscript_info (array_die, type, !is_ada ());
21815 return true;
21816 }
21817 return false;
21818 }
21819
21820 /* These routines generate the internal representation of the DIE's for
21821 the compilation unit. Debugging information is collected by walking
21822 the declaration trees passed in from dwarf2out_decl(). */
21823
21824 static void
21825 gen_array_type_die (tree type, dw_die_ref context_die)
21826 {
21827 dw_die_ref array_die;
21828
21829 /* GNU compilers represent multidimensional array types as sequences of
21830 one-dimensional array types whose element types are themselves array types.
21831 We sometimes squish that down to a single array_type DIE with multiple
21832 subscripts in the Dwarf debugging info. The draft Dwarf specification
21833 says that we are allowed to do this kind of compression in C, because
21834 there is no difference between an array of arrays and a multidimensional
21835 array. We don't do this for Ada, to remain as close as possible to the
21836 actual representation, which is especially important given the language's
21837 flexibility with respect to arrays of variable size. */
21838
21839 bool collapse_nested_arrays = !is_ada ();
21840
21841 if (fill_variable_array_bounds (type))
21842 return;
21843
21844 dw_die_ref scope_die = scope_die_for (type, context_die);
21845 tree element_type;
21846
21847 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21848 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21849 if (TREE_CODE (type) == ARRAY_TYPE
21850 && TYPE_STRING_FLAG (type)
21851 && is_fortran ()
21852 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21853 {
21854 HOST_WIDE_INT size;
21855
21856 array_die = new_die (DW_TAG_string_type, scope_die, type);
21857 add_name_attribute (array_die, type_tag (type));
21858 equate_type_number_to_die (type, array_die);
21859 size = int_size_in_bytes (type);
21860 if (size >= 0)
21861 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21862 /* ??? We can't annotate types late, but for LTO we may not
21863 generate a location early either (gfortran.dg/save_6.f90). */
21864 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21865 && TYPE_DOMAIN (type) != NULL_TREE
21866 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21867 {
21868 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21869 tree rszdecl = szdecl;
21870
21871 size = int_size_in_bytes (TREE_TYPE (szdecl));
21872 if (!DECL_P (szdecl))
21873 {
21874 if (TREE_CODE (szdecl) == INDIRECT_REF
21875 && DECL_P (TREE_OPERAND (szdecl, 0)))
21876 {
21877 rszdecl = TREE_OPERAND (szdecl, 0);
21878 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21879 != DWARF2_ADDR_SIZE)
21880 size = 0;
21881 }
21882 else
21883 size = 0;
21884 }
21885 if (size > 0)
21886 {
21887 dw_loc_list_ref loc
21888 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21889 NULL);
21890 if (loc)
21891 {
21892 add_AT_location_description (array_die, DW_AT_string_length,
21893 loc);
21894 if (size != DWARF2_ADDR_SIZE)
21895 add_AT_unsigned (array_die, dwarf_version >= 5
21896 ? DW_AT_string_length_byte_size
21897 : DW_AT_byte_size, size);
21898 }
21899 }
21900 }
21901 return;
21902 }
21903
21904 array_die = new_die (DW_TAG_array_type, scope_die, type);
21905 add_name_attribute (array_die, type_tag (type));
21906 equate_type_number_to_die (type, array_die);
21907
21908 if (TREE_CODE (type) == VECTOR_TYPE)
21909 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21910
21911 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21912 if (is_fortran ()
21913 && TREE_CODE (type) == ARRAY_TYPE
21914 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21915 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21916 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21917
21918 #if 0
21919 /* We default the array ordering. Debuggers will probably do the right
21920 things even if DW_AT_ordering is not present. It's not even an issue
21921 until we start to get into multidimensional arrays anyway. If a debugger
21922 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21923 then we'll have to put the DW_AT_ordering attribute back in. (But if
21924 and when we find out that we need to put these in, we will only do so
21925 for multidimensional arrays.) */
21926 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21927 #endif
21928
21929 if (TREE_CODE (type) == VECTOR_TYPE)
21930 {
21931 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21932 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21933 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21934 add_bound_info (subrange_die, DW_AT_upper_bound,
21935 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21936 }
21937 else
21938 add_subscript_info (array_die, type, collapse_nested_arrays);
21939
21940 /* Add representation of the type of the elements of this array type and
21941 emit the corresponding DIE if we haven't done it already. */
21942 element_type = TREE_TYPE (type);
21943 if (collapse_nested_arrays)
21944 while (TREE_CODE (element_type) == ARRAY_TYPE)
21945 {
21946 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21947 break;
21948 element_type = TREE_TYPE (element_type);
21949 }
21950
21951 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21952 TREE_CODE (type) == ARRAY_TYPE
21953 && TYPE_REVERSE_STORAGE_ORDER (type),
21954 context_die);
21955
21956 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21957 if (TYPE_ARTIFICIAL (type))
21958 add_AT_flag (array_die, DW_AT_artificial, 1);
21959
21960 if (get_AT (array_die, DW_AT_name))
21961 add_pubtype (type, array_die);
21962
21963 add_alignment_attribute (array_die, type);
21964 }
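
/* Sketch of the VECTOR_TYPE path above (GNU C vector extension assumed):
typedef int v4si __attribute__ ((vector_size (16)));
produces a DW_TAG_array_type carrying DW_AT_GNU_vector and a single
DW_TAG_subrange_type covering bounds 0 .. 3. */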
21965
21966 /* This routine generates the DIE for an array with a hidden descriptor;
21967 the details are filled into *info by a langhook. */
21968
21969 static void
21970 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21971 dw_die_ref context_die)
21972 {
21973 const dw_die_ref scope_die = scope_die_for (type, context_die);
21974 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21975 struct loc_descr_context context = { type, info->base_decl, NULL,
21976 false, false };
21977 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21978 int dim;
21979
21980 add_name_attribute (array_die, type_tag (type));
21981 equate_type_number_to_die (type, array_die);
21982
21983 if (info->ndimensions > 1)
21984 switch (info->ordering)
21985 {
21986 case array_descr_ordering_row_major:
21987 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21988 break;
21989 case array_descr_ordering_column_major:
21990 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21991 break;
21992 default:
21993 break;
21994 }
21995
21996 if (dwarf_version >= 3 || !dwarf_strict)
21997 {
21998 if (info->data_location)
21999 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
22000 dw_scalar_form_exprloc, &context);
22001 if (info->associated)
22002 add_scalar_info (array_die, DW_AT_associated, info->associated,
22003 dw_scalar_form_constant
22004 | dw_scalar_form_exprloc
22005 | dw_scalar_form_reference, &context);
22006 if (info->allocated)
22007 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
22008 dw_scalar_form_constant
22009 | dw_scalar_form_exprloc
22010 | dw_scalar_form_reference, &context);
22011 if (info->stride)
22012 {
22013 const enum dwarf_attribute attr
22014 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
22015 const int forms
22016 = (info->stride_in_bits)
22017 ? dw_scalar_form_constant
22018 : (dw_scalar_form_constant
22019 | dw_scalar_form_exprloc
22020 | dw_scalar_form_reference);
22021
22022 add_scalar_info (array_die, attr, info->stride, forms, &context);
22023 }
22024 }
22025 if (dwarf_version >= 5)
22026 {
22027 if (info->rank)
22028 {
22029 add_scalar_info (array_die, DW_AT_rank, info->rank,
22030 dw_scalar_form_constant
22031 | dw_scalar_form_exprloc, &context);
22032 subrange_tag = DW_TAG_generic_subrange;
22033 context.placeholder_arg = true;
22034 }
22035 }
22036
22037 add_gnat_descriptive_type_attribute (array_die, type, context_die);
22038
22039 for (dim = 0; dim < info->ndimensions; dim++)
22040 {
22041 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
22042
22043 if (info->dimen[dim].bounds_type)
22044 add_type_attribute (subrange_die,
22045 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
22046 false, context_die);
22047 if (info->dimen[dim].lower_bound)
22048 add_bound_info (subrange_die, DW_AT_lower_bound,
22049 info->dimen[dim].lower_bound, &context);
22050 if (info->dimen[dim].upper_bound)
22051 add_bound_info (subrange_die, DW_AT_upper_bound,
22052 info->dimen[dim].upper_bound, &context);
22053 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
22054 add_scalar_info (subrange_die, DW_AT_byte_stride,
22055 info->dimen[dim].stride,
22056 dw_scalar_form_constant
22057 | dw_scalar_form_exprloc
22058 | dw_scalar_form_reference,
22059 &context);
22060 }
22061
22062 gen_type_die (info->element_type, context_die);
22063 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
22064 TREE_CODE (type) == ARRAY_TYPE
22065 && TYPE_REVERSE_STORAGE_ORDER (type),
22066 context_die);
22067
22068 if (get_AT (array_die, DW_AT_name))
22069 add_pubtype (type, array_die);
22070
22071 add_alignment_attribute (array_die, type);
22072 }
22073
22074 #if 0
22075 static void
22076 gen_entry_point_die (tree decl, dw_die_ref context_die)
22077 {
22078 tree origin = decl_ultimate_origin (decl);
22079 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
22080
22081 if (origin != NULL)
22082 add_abstract_origin_attribute (decl_die, origin);
22083 else
22084 {
22085 add_name_and_src_coords_attributes (decl_die, decl);
22086 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
22087 TYPE_UNQUALIFIED, false, context_die);
22088 }
22089
22090 if (DECL_ABSTRACT_P (decl))
22091 equate_decl_number_to_die (decl, decl_die);
22092 else
22093 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
22094 }
22095 #endif
22096
22097 /* Walk through the list of incomplete types again, trying once more to
22098 emit full debugging info for them. */
22099
22100 static void
22101 retry_incomplete_types (void)
22102 {
22103 set_early_dwarf s;
22104 int i;
22105
22106 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22107 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22108 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22109 vec_safe_truncate (incomplete_types, 0);
22110 }
22111
22112 /* Determine what tag to use for a record type. */
22113
22114 static enum dwarf_tag
22115 record_type_tag (tree type)
22116 {
22117 if (! lang_hooks.types.classify_record)
22118 return DW_TAG_structure_type;
22119
22120 switch (lang_hooks.types.classify_record (type))
22121 {
22122 case RECORD_IS_STRUCT:
22123 return DW_TAG_structure_type;
22124
22125 case RECORD_IS_CLASS:
22126 return DW_TAG_class_type;
22127
22128 case RECORD_IS_INTERFACE:
22129 if (dwarf_version >= 3 || !dwarf_strict)
22130 return DW_TAG_interface_type;
22131 return DW_TAG_structure_type;
22132
22133 default:
22134 gcc_unreachable ();
22135 }
22136 }
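
/* For example (sketched): the C++ front end classifies "class C { };" as
RECORD_IS_CLASS, giving DW_TAG_class_type, while "struct S { };" maps to
RECORD_IS_STRUCT and DW_TAG_structure_type; front ends that provide no
classify_record hook always get DW_TAG_structure_type. */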
22137
22138 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22139 include all of the information about the enumeration values also. Each
22140 enumerated type name/value is listed as a child of the enumerated type
22141 DIE. */
22142
22143 static dw_die_ref
22144 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22145 {
22146 dw_die_ref type_die = lookup_type_die (type);
22147 dw_die_ref orig_type_die = type_die;
22148
22149 if (type_die == NULL)
22150 {
22151 type_die = new_die (DW_TAG_enumeration_type,
22152 scope_die_for (type, context_die), type);
22153 equate_type_number_to_die (type, type_die);
22154 add_name_attribute (type_die, type_tag (type));
22155 if ((dwarf_version >= 4 || !dwarf_strict)
22156 && ENUM_IS_SCOPED (type))
22157 add_AT_flag (type_die, DW_AT_enum_class, 1);
22158 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22159 add_AT_flag (type_die, DW_AT_declaration, 1);
22160 if (!dwarf_strict)
22161 add_AT_unsigned (type_die, DW_AT_encoding,
22162 TYPE_UNSIGNED (type)
22163 ? DW_ATE_unsigned
22164 : DW_ATE_signed);
22165 }
22166 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22167 return type_die;
22168 else
22169 remove_AT (type_die, DW_AT_declaration);
22170
22171 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22172 given enum type is incomplete, do not generate the DW_AT_byte_size
22173 attribute or the DW_AT_element_list attribute. */
22174 if (TYPE_SIZE (type))
22175 {
22176 tree link;
22177
22178 if (!ENUM_IS_OPAQUE (type))
22179 TREE_ASM_WRITTEN (type) = 1;
22180 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22181 add_byte_size_attribute (type_die, type);
22182 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22183 add_alignment_attribute (type_die, type);
22184 if ((dwarf_version >= 3 || !dwarf_strict)
22185 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22186 {
22187 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22188 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22189 context_die);
22190 }
22191 if (TYPE_STUB_DECL (type) != NULL_TREE)
22192 {
22193 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22194 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22195 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22196 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22197 }
22198
22199 /* If the first reference to this type was as the return type of an
22200 inline function, then it may not have a parent. Fix this now. */
22201 if (type_die->die_parent == NULL)
22202 add_child_die (scope_die_for (type, context_die), type_die);
22203
22204 for (link = TYPE_VALUES (type);
22205 link != NULL; link = TREE_CHAIN (link))
22206 {
22207 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22208 tree value = TREE_VALUE (link);
22209
22210 if (DECL_P (value))
22211 equate_decl_number_to_die (value, enum_die);
22212
22213 gcc_assert (!ENUM_IS_OPAQUE (type));
22214 add_name_attribute (enum_die,
22215 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22216
22217 if (TREE_CODE (value) == CONST_DECL)
22218 value = DECL_INITIAL (value);
22219
22220 if (simple_type_size_in_bits (TREE_TYPE (value))
22221 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22222 {
22223 /* For constant forms created by add_AT_unsigned, DWARF
22224 consumers (GDB, elfutils, etc.) always zero-extend
22225 the value. Only when the actual value is negative
22226 do we need to use add_AT_int to generate a constant
22227 form that can represent negative values. */
22228 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22229 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22230 add_AT_unsigned (enum_die, DW_AT_const_value,
22231 (unsigned HOST_WIDE_INT) val);
22232 else
22233 add_AT_int (enum_die, DW_AT_const_value, val);
22234 }
22235 else
22236 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22237 that here. TODO: This should be re-worked to use correct
22238 signed/unsigned double tags for all cases. */
22239 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22240 }
22241
22242 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22243 if (TYPE_ARTIFICIAL (type)
22244 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22245 add_AT_flag (type_die, DW_AT_artificial, 1);
22246 }
22247 else
22248 add_AT_flag (type_die, DW_AT_declaration, 1);
22249
22250 add_pubtype (type, type_die);
22251
22252 return type_die;
22253 }
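
/* Illustrative sketch (C source, simplified DIE dump):
enum color { RED, GREEN = 5, BLUE };
yields a DW_TAG_enumeration_type with DW_AT_byte_size (typically 4) and
three DW_TAG_enumerator children whose DW_AT_const_value attributes are
0, 5 and 6 respectively. */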
22254
22255 /* Generate a DIE to represent either a real live formal parameter decl or to
22256 represent just the type of some formal parameter position in some function
22257 type.
22258
22259 Note that this routine is a bit unusual because its argument may be a
22260 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22261 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22262 node. If it's the former then this function is being called to output a
22263 DIE to represent a formal parameter object (or some inlining thereof). If
22264 it's the latter, then this function is only being called to output a
22265 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22266 argument type of some subprogram type.
22267 If EMIT_NAME_P is true, name and source coordinate attributes
22268 are emitted. */
22269
22270 static dw_die_ref
22271 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22272 dw_die_ref context_die)
22273 {
22274 tree node_or_origin = node ? node : origin;
22275 tree ultimate_origin;
22276 dw_die_ref parm_die = NULL;
22277
22278 if (DECL_P (node_or_origin))
22279 {
22280 parm_die = lookup_decl_die (node);
22281
22282 /* If the contexts differ, we may not be talking about the same
22283 thing.
22284 ??? When in LTO the DIE parent is the "abstract" copy and the
22285 context_die is the specification "copy". */
22286 if (parm_die
22287 && parm_die->die_parent != context_die
22288 && (parm_die->die_parent->die_tag != DW_TAG_GNU_formal_parameter_pack
22289 || parm_die->die_parent->die_parent != context_die)
22290 && !in_lto_p)
22291 {
22292 gcc_assert (!DECL_ABSTRACT_P (node));
22293 /* This can happen when creating a concrete instance, in
22294 which case we need to create a new DIE that will get
22295 annotated with DW_AT_abstract_origin. */
22296 parm_die = NULL;
22297 }
22298
22299 if (parm_die && parm_die->die_parent == NULL)
22300 {
22301 /* Check that parm_die already has the right attributes that
22302 we would have added below. If any attributes are
22303 missing, fall through to add them. */
22304 if (! DECL_ABSTRACT_P (node_or_origin)
22305 && !get_AT (parm_die, DW_AT_location)
22306 && !get_AT (parm_die, DW_AT_const_value))
22307 /* We are missing location info, and are about to add it. */
22308 ;
22309 else
22310 {
22311 add_child_die (context_die, parm_die);
22312 return parm_die;
22313 }
22314 }
22315 }
22316
22317 /* If we have a previously generated DIE, use it, unless this is a
22318 concrete instance (origin != NULL), in which case we need a new
22319 DIE with a corresponding DW_AT_abstract_origin. */
22320 bool reusing_die;
22321 if (parm_die && origin == NULL)
22322 reusing_die = true;
22323 else
22324 {
22325 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22326 reusing_die = false;
22327 }
22328
22329 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22330 {
22331 case tcc_declaration:
22332 ultimate_origin = decl_ultimate_origin (node_or_origin);
22333 if (node || ultimate_origin)
22334 origin = ultimate_origin;
22335
22336 if (reusing_die)
22337 goto add_location;
22338
22339 if (origin != NULL)
22340 add_abstract_origin_attribute (parm_die, origin);
22341 else if (emit_name_p)
22342 add_name_and_src_coords_attributes (parm_die, node);
22343 if (origin == NULL
22344 || (! DECL_ABSTRACT_P (node_or_origin)
22345 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22346 decl_function_context
22347 (node_or_origin))))
22348 {
22349 tree type = TREE_TYPE (node_or_origin);
22350 if (decl_by_reference_p (node_or_origin))
22351 add_type_attribute (parm_die, TREE_TYPE (type),
22352 TYPE_UNQUALIFIED,
22353 false, context_die);
22354 else
22355 add_type_attribute (parm_die, type,
22356 decl_quals (node_or_origin),
22357 false, context_die);
22358 }
22359 if (origin == NULL && DECL_ARTIFICIAL (node))
22360 add_AT_flag (parm_die, DW_AT_artificial, 1);
22361 add_location:
22362 if (node && node != origin)
22363 equate_decl_number_to_die (node, parm_die);
22364 if (! DECL_ABSTRACT_P (node_or_origin))
22365 add_location_or_const_value_attribute (parm_die, node_or_origin,
22366 node == NULL);
22367
22368 break;
22369
22370 case tcc_type:
22371 /* We were called with some kind of a ..._TYPE node. */
22372 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22373 context_die);
22374 break;
22375
22376 default:
22377 gcc_unreachable ();
22378 }
22379
22380 return parm_die;
22381 }
22382
22383 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22384 children DW_TAG_formal_parameter DIEs representing the arguments of the
22385 parameter pack.
22386
22387 PARM_PACK must be a function parameter pack.
22388 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22389 must point to the subsequent arguments of the function PACK_ARG belongs to.
22390 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22391 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22392 following the last one for which a DIE was generated. */
22393
22394 static dw_die_ref
22395 gen_formal_parameter_pack_die (tree parm_pack,
22396 tree pack_arg,
22397 dw_die_ref subr_die,
22398 tree *next_arg)
22399 {
22400 tree arg;
22401 dw_die_ref parm_pack_die;
22402
22403 gcc_assert (parm_pack
22404 && lang_hooks.function_parameter_pack_p (parm_pack)
22405 && subr_die);
22406
22407 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22408 add_src_coords_attributes (parm_pack_die, parm_pack);
22409
22410 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22411 {
22412 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22413 parm_pack))
22414 break;
22415 gen_formal_parameter_die (arg, NULL,
22416 false /* Don't emit name attribute. */,
22417 parm_pack_die);
22418 }
22419 if (next_arg)
22420 *next_arg = arg;
22421 return parm_pack_die;
22422 }
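
/* Sketch (C++ source assumed): for an instantiation of
"template <class... T> void f (T... args);" the subprogram DIE can
contain a DW_TAG_GNU_formal_parameter_pack for "args" whose children are
the DW_TAG_formal_parameter DIEs of the expanded arguments. */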
22423
22424 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22425 at the end of an (ANSI prototyped) formal parameter list. */
22426
22427 static void
22428 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22429 {
22430 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22431 }
22432
22433 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22434 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22435 parameters as specified in some function type specification (except for
22436 those which appear as part of a function *definition*). */
22437
22438 static void
22439 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22440 {
22441 tree link;
22442 tree formal_type = NULL;
22443 tree first_parm_type;
22444 tree arg;
22445
22446 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22447 {
22448 arg = DECL_ARGUMENTS (function_or_method_type);
22449 function_or_method_type = TREE_TYPE (function_or_method_type);
22450 }
22451 else
22452 arg = NULL_TREE;
22453
22454 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22455
22456 /* Make our first pass over the list of formal parameter types and output a
22457 DW_TAG_formal_parameter DIE for each one. */
22458 for (link = first_parm_type; link; )
22459 {
22460 dw_die_ref parm_die;
22461
22462 formal_type = TREE_VALUE (link);
22463 if (formal_type == void_type_node)
22464 break;
22465
22466 /* Output a (nameless) DIE to represent the formal parameter itself. */
22467 parm_die = gen_formal_parameter_die (formal_type, NULL,
22468 true /* Emit name attribute. */,
22469 context_die);
22470 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22471 && link == first_parm_type)
22472 {
22473 add_AT_flag (parm_die, DW_AT_artificial, 1);
22474 if (dwarf_version >= 3 || !dwarf_strict)
22475 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22476 }
22477 else if (arg && DECL_ARTIFICIAL (arg))
22478 add_AT_flag (parm_die, DW_AT_artificial, 1);
22479
22480 link = TREE_CHAIN (link);
22481 if (arg)
22482 arg = DECL_CHAIN (arg);
22483 }
22484
22485 /* If this function type has an ellipsis, add a
22486 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22487 if (formal_type != void_type_node)
22488 gen_unspecified_parameters_die (function_or_method_type, context_die);
22489
22490 /* Make our second (and final) pass over the list of formal parameter types
22491 and output DIEs to represent those types (as necessary). */
22492 for (link = TYPE_ARG_TYPES (function_or_method_type);
22493 link && TREE_VALUE (link);
22494 link = TREE_CHAIN (link))
22495 gen_type_die (TREE_VALUE (link), context_die);
22496 }
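/* An illustrative sketch (hypothetical prototype): for a function type
   such as

     int dispatch (int code, const char *msg, ...);

   the first pass above emits one nameless DW_TAG_formal_parameter DIE for
   the int and one for the const char * parameter types, and because the
   TYPE_ARG_TYPES chain does not end in void_type_node, a
   DW_TAG_unspecified_parameters DIE is appended to stand in for the
   trailing ellipsis.  */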
22497
22498 /* We want to generate the DIE for TYPE so that we can generate the
22499 die for MEMBER, which has been defined; we will need to refer back
22500 to the member declaration nested within TYPE. If we're trying to
22501 generate minimal debug info for TYPE, processing TYPE won't do the
22502 trick; we need to attach the member declaration by hand. */
22503
22504 static void
22505 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22506 {
22507 gen_type_die (type, context_die);
22508
22509 /* If we're trying to avoid duplicate debug info, we may not have
22510 emitted the member decl for this function. Emit it now. */
22511 if (TYPE_STUB_DECL (type)
22512 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22513 && ! lookup_decl_die (member))
22514 {
22515 dw_die_ref type_die;
22516 gcc_assert (!decl_ultimate_origin (member));
22517
22518 type_die = lookup_type_die_strip_naming_typedef (type);
22519 if (TREE_CODE (member) == FUNCTION_DECL)
22520 gen_subprogram_die (member, type_die);
22521 else if (TREE_CODE (member) == FIELD_DECL)
22522 {
22523 /* Ignore the nameless fields that are used to skip bits, but handle
22524 C++ anonymous unions and structs. */
22525 if (DECL_NAME (member) != NULL_TREE
22526 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22527 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22528 {
22529 struct vlr_context vlr_ctx = {
22530 DECL_CONTEXT (member), /* struct_type */
22531 NULL_TREE /* variant_part_offset */
22532 };
22533 gen_type_die (member_declared_type (member), type_die);
22534 gen_field_die (member, &vlr_ctx, type_die);
22535 }
22536 }
22537 else
22538 gen_variable_die (member, NULL_TREE, type_die);
22539 }
22540 }
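/* An illustrative sketch (hypothetical type; names are made up): with
   debug info for the enclosing class suppressed, e.g.

     struct outer
     {
       struct { int raw; };   // anonymous struct member
       void touch ();
     };

   emitting debug info for outer::touch or for the anonymous member later
   requires attaching the member DIE by hand here, since processing the
   type alone would have left it out (TYPE_DECL_SUPPRESS_DEBUG).  */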
22541 \f
22542 /* Forward declare these functions, because they are mutually recursive
22543 with their set_block_* pairing functions. */
22544 static void set_decl_origin_self (tree);
22545
22546 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22547 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22548 that it points to the node itself, thus indicating that the node is its
22549 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22550 the given node is NULL, recursively descend the decl/block tree which
22551 it is the root of, and for each other ..._DECL or BLOCK node contained
22552 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22553 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22554 values to point to themselves. */
22555
22556 static void
22557 set_block_origin_self (tree stmt)
22558 {
22559 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22560 {
22561 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22562
22563 {
22564 tree local_decl;
22565
22566 for (local_decl = BLOCK_VARS (stmt);
22567 local_decl != NULL_TREE;
22568 local_decl = DECL_CHAIN (local_decl))
22569 /* Do not recurse on nested functions since the inlining status
22570 of parent and child can be different as per the DWARF spec. */
22571 if (TREE_CODE (local_decl) != FUNCTION_DECL
22572 && !DECL_EXTERNAL (local_decl))
22573 set_decl_origin_self (local_decl);
22574 }
22575
22576 {
22577 tree subblock;
22578
22579 for (subblock = BLOCK_SUBBLOCKS (stmt);
22580 subblock != NULL_TREE;
22581 subblock = BLOCK_CHAIN (subblock))
22582 set_block_origin_self (subblock); /* Recurse. */
22583 }
22584 }
22585 }
22586
22587 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22588 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22589 node so that it points to the node itself, thus indicating that the
22590 node represents its own (abstract) origin. Additionally, if the
22591 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22592 the decl/block tree of which the given node is the root, and for
22593 each other ..._DECL or BLOCK node contained therein whose
22594 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22595 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22596 point to themselves. */
22597
22598 static void
22599 set_decl_origin_self (tree decl)
22600 {
22601 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22602 {
22603 DECL_ABSTRACT_ORIGIN (decl) = decl;
22604 if (TREE_CODE (decl) == FUNCTION_DECL)
22605 {
22606 tree arg;
22607
22608 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22609 DECL_ABSTRACT_ORIGIN (arg) = arg;
22610 if (DECL_INITIAL (decl) != NULL_TREE
22611 && DECL_INITIAL (decl) != error_mark_node)
22612 set_block_origin_self (DECL_INITIAL (decl));
22613 }
22614 }
22615 }
22616 \f
22617 /* Mark the early DIE for DECL as the abstract instance. */
22618
22619 static void
22620 dwarf2out_abstract_function (tree decl)
22621 {
22622 dw_die_ref old_die;
22623
22624 /* Make sure we have the actual abstract inline, not a clone. */
22625 decl = DECL_ORIGIN (decl);
22626
22627 if (DECL_IGNORED_P (decl))
22628 return;
22629
22630 /* In LTO we're all set. We already created abstract instances
22631 early and we want to avoid creating a concrete instance of that
22632 if we don't output it. */
22633 if (in_lto_p)
22634 return;
22635
22636 old_die = lookup_decl_die (decl);
22637 gcc_assert (old_die != NULL);
22638 if (get_AT (old_die, DW_AT_inline))
22639 /* We've already generated the abstract instance. */
22640 return;
22641
22642 /* Go ahead and put DW_AT_inline on the DIE. */
22643 if (DECL_DECLARED_INLINE_P (decl))
22644 {
22645 if (cgraph_function_possibly_inlined_p (decl))
22646 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22647 else
22648 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22649 }
22650 else
22651 {
22652 if (cgraph_function_possibly_inlined_p (decl))
22653 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22654 else
22655 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22656 }
22657
22658 if (DECL_DECLARED_INLINE_P (decl)
22659 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22660 add_AT_flag (old_die, DW_AT_artificial, 1);
22661
22662 set_decl_origin_self (decl);
22663 }
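/* An illustrative summary of the DW_AT_inline values chosen above
   (no additional behaviour is implied):

     declared inline, possibly inlined (per cgraph) -> DW_INL_declared_inlined
     declared inline, never inlined                 -> DW_INL_declared_not_inlined
     not declared inline, possibly inlined          -> DW_INL_inlined
     not declared inline, never inlined             -> DW_INL_not_inlined

   So e.g. a "static inline int sq (int x) { return x * x; }" whose calls
   were all expanded inline is expected to carry DW_INL_declared_inlined
   on its abstract instance DIE.  */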
22664
22665 /* Helper function of premark_used_types() which gets called through
22666 hash_set::traverse.
22667
22668 Marks the DIE of the given TYPE as perennial, so it never gets
22669 marked as unused by prune_unused_types. */
22670
22671 bool
22672 premark_used_types_helper (tree const &type, void *)
22673 {
22674 dw_die_ref die;
22675
22676 die = lookup_type_die (type);
22677 if (die != NULL)
22678 die->die_perennial_p = 1;
22679 return true;
22680 }
22681
22682 /* Helper function of premark_types_used_by_global_vars which gets called
22683 through htab_traverse.
22684
22685 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22686 marked as unused by prune_unused_types. The DIE of the type is marked
22687 only if the global variable using the type will actually be emitted. */
22688
22689 int
22690 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22691 void *)
22692 {
22693 struct types_used_by_vars_entry *entry;
22694 dw_die_ref die;
22695
22696 entry = (struct types_used_by_vars_entry *) *slot;
22697 gcc_assert (entry->type != NULL
22698 && entry->var_decl != NULL);
22699 die = lookup_type_die (entry->type);
22700 if (die)
22701 {
22702 /* Ask cgraph if the global variable really is to be emitted.
22703 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22704 varpool_node *node = varpool_node::get (entry->var_decl);
22705 if (node && node->definition)
22706 {
22707 die->die_perennial_p = 1;
22708 /* Keep the parent DIEs as well. */
22709 while ((die = die->die_parent) && die->die_perennial_p == 0)
22710 die->die_perennial_p = 1;
22711 }
22712 }
22713 return 1;
22714 }
22715
22716 /* Mark all members of used_types_hash as perennial. */
22717
22718 static void
22719 premark_used_types (struct function *fun)
22720 {
22721 if (fun && fun->used_types_hash)
22722 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22723 }
22724
22725 /* Mark all members of types_used_by_vars_entry as perennial. */
22726
22727 static void
22728 premark_types_used_by_global_vars (void)
22729 {
22730 if (types_used_by_vars_hash)
22731 types_used_by_vars_hash
22732 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22733 }
22734
22735 /* Mark all variables used by the symtab as perennial. */
22736
22737 static void
22738 premark_used_variables (void)
22739 {
22740 /* Mark DIEs in the symtab as used. */
22741 varpool_node *var;
22742 FOR_EACH_VARIABLE (var)
22743 {
22744 dw_die_ref die = lookup_decl_die (var->decl);
22745 if (die)
22746 die->die_perennial_p = 1;
22747 }
22748 }
22749
22750 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22751 for CA_LOC call arg loc node. */
22752
22753 static dw_die_ref
22754 gen_call_site_die (tree decl, dw_die_ref subr_die,
22755 struct call_arg_loc_node *ca_loc)
22756 {
22757 dw_die_ref stmt_die = NULL, die;
22758 tree block = ca_loc->block;
22759
22760 while (block
22761 && block != DECL_INITIAL (decl)
22762 && TREE_CODE (block) == BLOCK)
22763 {
22764 stmt_die = lookup_block_die (block);
22765 if (stmt_die)
22766 break;
22767 block = BLOCK_SUPERCONTEXT (block);
22768 }
22769 if (stmt_die == NULL)
22770 stmt_die = subr_die;
22771 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22772 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22773 if (ca_loc->tail_call_p)
22774 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22775 if (ca_loc->symbol_ref)
22776 {
22777 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22778 if (tdie)
22779 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22780 else
22781 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22782 false);
22783 }
22784 return die;
22785 }
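/* An illustrative sketch (hypothetical callee): for a direct call such as
   "err = parse (buf);" the DIE built above is expected to look roughly
   like

     DW_TAG_call_site
       DW_AT_call_return_pc   <label after the call insn>
       DW_AT_call_origin      <reference to the DIE of parse>

   with DW_AT_call_tail_call added when the call was turned into a tail
   call, and an address instead of a DIE reference when no DIE exists for
   the callee's SYMBOL_REF.  */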
22786
22787 /* Generate a DIE to represent a declared function (either file-scope or
22788 block-local). */
22789
22790 static void
22791 gen_subprogram_die (tree decl, dw_die_ref context_die)
22792 {
22793 tree origin = decl_ultimate_origin (decl);
22794 dw_die_ref subr_die;
22795 dw_die_ref old_die = lookup_decl_die (decl);
22796 bool old_die_had_no_children = false;
22797
22798 /* This function gets called multiple times for different stages of
22799 the debug process. For example, for func() in this code:
22800
22801 namespace S
22802 {
22803 void func() { ... }
22804 }
22805
22806 ...we get called 4 times. Twice in early debug and twice in
22807 late debug:
22808
22809 Early debug
22810 -----------
22811
22812 1. Once while generating func() within the namespace. This is
22813 the declaration. The declaration bit below is set, as the
22814 context is the namespace.
22815
22816 A new DIE will be generated with DW_AT_declaration set.
22817
22818 2. Once for func() itself. This is the specification. The
22819 declaration bit below is clear as the context is the CU.
22820
22821 We will use the cached DIE from (1) to create a new DIE with
22822 DW_AT_specification pointing to the declaration in (1).
22823
22824 Late debug via rest_of_handle_final()
22825 -------------------------------------
22826
22827 3. Once generating func() within the namespace. This is also the
22828 declaration, as in (1), but this time we will early exit below
22829 as we have a cached DIE and a declaration needs no additional
22830 annotations (no locations), as the source declaration line
22831 info is enough.
22832
22833 4. Once for func() itself. As in (2), this is the specification,
22834 but this time we will re-use the cached DIE, and just annotate
22835 it with the location information that should now be available.
22836
22837 For something without namespaces, but with abstract instances, we
22838 are also called multiple times:
22839
22840 class Base
22841 {
22842 public:
22843 Base (); // constructor declaration (1)
22844 };
22845
22846 Base::Base () { } // constructor specification (2)
22847
22848 Early debug
22849 -----------
22850
22851 1. Once for the Base() constructor by virtue of it being a
22852 member of the Base class. This is done via
22853 rest_of_type_compilation.
22854
22855 This is a declaration, so a new DIE will be created with
22856 DW_AT_declaration.
22857
22858 2. Once for the Base() constructor definition, but this time
22859 while generating the abstract instance of the base
22860 constructor (__base_ctor) which is being generated via early
22861 debug of reachable functions.
22862
22863 Even though we have a cached version of the declaration (1),
22864 we will create a DW_AT_specification of the declaration DIE
22865 in (1).
22866
22867 3. Once for the __base_ctor itself, but this time, we generate
22868 a DW_AT_abstract_origin version of the DW_AT_specification in
22869 (2).
22870
22871 Late debug via rest_of_handle_final
22872 -----------------------------------
22873
22874 4. One final time for the __base_ctor (which will have a cached
22875 DIE with DW_AT_abstract_origin created in (3)). This time,
22876 we will just annotate the location information now
22877 available.
22878 */
22879 int declaration = (current_function_decl != decl
22880 || (!DECL_INITIAL (decl) && !origin)
22881 || class_or_namespace_scope_p (context_die));
22882
22883 /* A declaration that has been previously dumped needs no
22884 additional information. */
22885 if (old_die && declaration)
22886 return;
22887
22888 if (in_lto_p && old_die && old_die->die_child == NULL)
22889 old_die_had_no_children = true;
22890
22891 /* Now that the C++ front end lazily declares artificial member fns, we
22892 might need to retrofit the declaration into its class. */
22893 if (!declaration && !origin && !old_die
22894 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22895 && !class_or_namespace_scope_p (context_die)
22896 && debug_info_level > DINFO_LEVEL_TERSE)
22897 old_die = force_decl_die (decl);
22898
22899 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22900 if (origin != NULL)
22901 {
22902 gcc_assert (!declaration || local_scope_p (context_die));
22903
22904 /* Fixup die_parent for the abstract instance of a nested
22905 inline function. */
22906 if (old_die && old_die->die_parent == NULL)
22907 add_child_die (context_die, old_die);
22908
22909 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22910 {
22911 /* If we have a DW_AT_abstract_origin we have a working
22912 cached version. */
22913 subr_die = old_die;
22914 }
22915 else
22916 {
22917 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22918 add_abstract_origin_attribute (subr_die, origin);
22919 /* This is where the actual code for a cloned function is.
22920 Let's emit linkage name attribute for it. This helps
22921 debuggers to, e.g., set breakpoints on
22922 constructors/destructors when the user asks "break
22923 K::K". */
22924 add_linkage_name (subr_die, decl);
22925 }
22926 }
22927 /* A cached copy, possibly from early dwarf generation. Reuse as
22928 much as possible. */
22929 else if (old_die)
22930 {
22931 if (!get_AT_flag (old_die, DW_AT_declaration)
22932 /* We can have a normal definition following an inline one in the
22933 case of redefinition of GNU C extern inlines.
22934 It seems reasonable to use AT_specification in this case. */
22935 && !get_AT (old_die, DW_AT_inline))
22936 {
22937 /* Detect and ignore this case, where we are trying to output
22938 something we have already output. */
22939 if (get_AT (old_die, DW_AT_low_pc)
22940 || get_AT (old_die, DW_AT_ranges))
22941 return;
22942
22943 /* If we have no location information, this must be a
22944 partially generated DIE from early dwarf generation.
22945 Fall through and generate it. */
22946 }
22947
22948 /* If the definition comes from the same place as the declaration,
22949 maybe use the old DIE. We always want the DIE for this function
22950 that has the *_pc attributes to be under comp_unit_die so the
22951 debugger can find it. We also need to do this for abstract
22952 instances of inlines, since the spec requires the out-of-line copy
22953 to have the same parent. For local class methods, this doesn't
22954 apply; we just use the old DIE. */
22955 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22956 struct dwarf_file_data * file_index = lookup_filename (s.file);
22957 if (((is_unit_die (old_die->die_parent)
22958 /* This condition fixes the inconsistency/ICE with the
22959 following Fortran test (or some derivative thereof) while
22960 building libgfortran:
22961
22962 module some_m
22963 contains
22964 logical function funky (FLAG)
22965 funky = .true.
22966 end function
22967 end module
22968 */
22969 || (old_die->die_parent
22970 && old_die->die_parent->die_tag == DW_TAG_module)
22971 || local_scope_p (old_die->die_parent)
22972 || context_die == NULL)
22973 && (DECL_ARTIFICIAL (decl)
22974 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22975 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22976 == (unsigned) s.line)
22977 && (!debug_column_info
22978 || s.column == 0
22979 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22980 == (unsigned) s.column)))))
22981 /* With LTO if there's an abstract instance for
22982 the old DIE, this is a concrete instance and
22983 thus re-use the DIE. */
22984 || get_AT (old_die, DW_AT_abstract_origin))
22985 {
22986 subr_die = old_die;
22987
22988 /* Clear out the declaration attribute, but leave the
22989 parameters so they can be augmented with location
22990 information later. Unless this was a declaration, in
22991 which case, wipe out the nameless parameters and recreate
22992 them further down. */
22993 if (remove_AT (subr_die, DW_AT_declaration))
22994 {
22995
22996 remove_AT (subr_die, DW_AT_object_pointer);
22997 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22998 }
22999 }
23000 /* Make a specification pointing to the previously built
23001 declaration. */
23002 else
23003 {
23004 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23005 add_AT_specification (subr_die, old_die);
23006 add_pubname (decl, subr_die);
23007 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23008 add_AT_file (subr_die, DW_AT_decl_file, file_index);
23009 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23010 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
23011 if (debug_column_info
23012 && s.column
23013 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23014 != (unsigned) s.column))
23015 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
23016
23017 /* If the prototype had an 'auto' or 'decltype(auto)' in
23018 the return type, emit the real type on the definition die. */
23019 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
23020 {
23021 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
23022 while (die
23023 && (die->die_tag == DW_TAG_reference_type
23024 || die->die_tag == DW_TAG_rvalue_reference_type
23025 || die->die_tag == DW_TAG_pointer_type
23026 || die->die_tag == DW_TAG_const_type
23027 || die->die_tag == DW_TAG_volatile_type
23028 || die->die_tag == DW_TAG_restrict_type
23029 || die->die_tag == DW_TAG_array_type
23030 || die->die_tag == DW_TAG_ptr_to_member_type
23031 || die->die_tag == DW_TAG_subroutine_type))
23032 die = get_AT_ref (die, DW_AT_type);
23033 if (die == auto_die || die == decltype_auto_die)
23034 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23035 TYPE_UNQUALIFIED, false, context_die);
23036 }
23037
23038 /* When we process the method declaration, we haven't seen
23039 the out-of-class defaulted definition yet, so we have to
23040 recheck now. */
23041 if ((dwarf_version >= 5 || ! dwarf_strict)
23042 && !get_AT (subr_die, DW_AT_defaulted))
23043 {
23044 int defaulted
23045 = lang_hooks.decls.decl_dwarf_attribute (decl,
23046 DW_AT_defaulted);
23047 if (defaulted != -1)
23048 {
23049 /* Other values must have been handled before. */
23050 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
23051 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23052 }
23053 }
23054 }
23055 }
23056 /* Create a fresh DIE for anything else. */
23057 else
23058 {
23059 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23060
23061 if (TREE_PUBLIC (decl))
23062 add_AT_flag (subr_die, DW_AT_external, 1);
23063
23064 add_name_and_src_coords_attributes (subr_die, decl);
23065 add_pubname (decl, subr_die);
23066 if (debug_info_level > DINFO_LEVEL_TERSE)
23067 {
23068 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
23069 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23070 TYPE_UNQUALIFIED, false, context_die);
23071 }
23072
23073 add_pure_or_virtual_attribute (subr_die, decl);
23074 if (DECL_ARTIFICIAL (decl))
23075 add_AT_flag (subr_die, DW_AT_artificial, 1);
23076
23077 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
23078 add_AT_flag (subr_die, DW_AT_noreturn, 1);
23079
23080 add_alignment_attribute (subr_die, decl);
23081
23082 add_accessibility_attribute (subr_die, decl);
23083 }
23084
23085 /* Unless we have an existing non-declaration DIE, equate the new
23086 DIE. */
23087 if (!old_die || is_declaration_die (old_die))
23088 equate_decl_number_to_die (decl, subr_die);
23089
23090 if (declaration)
23091 {
23092 if (!old_die || !get_AT (old_die, DW_AT_inline))
23093 {
23094 add_AT_flag (subr_die, DW_AT_declaration, 1);
23095
23096 /* If this is an explicit function declaration then generate
23097 a DW_AT_explicit attribute. */
23098 if ((dwarf_version >= 3 || !dwarf_strict)
23099 && lang_hooks.decls.decl_dwarf_attribute (decl,
23100 DW_AT_explicit) == 1)
23101 add_AT_flag (subr_die, DW_AT_explicit, 1);
23102
23103 /* If this is a C++11 deleted special function member then generate
23104 a DW_AT_deleted attribute. */
23105 if ((dwarf_version >= 5 || !dwarf_strict)
23106 && lang_hooks.decls.decl_dwarf_attribute (decl,
23107 DW_AT_deleted) == 1)
23108 add_AT_flag (subr_die, DW_AT_deleted, 1);
23109
23110 /* If this is a C++11 defaulted special function member then
23111 generate a DW_AT_defaulted attribute. */
23112 if (dwarf_version >= 5 || !dwarf_strict)
23113 {
23114 int defaulted
23115 = lang_hooks.decls.decl_dwarf_attribute (decl,
23116 DW_AT_defaulted);
23117 if (defaulted != -1)
23118 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23119 }
23120
23121 /* If this is a C++11 non-static member function with & ref-qualifier
23122 then generate a DW_AT_reference attribute. */
23123 if ((dwarf_version >= 5 || !dwarf_strict)
23124 && lang_hooks.decls.decl_dwarf_attribute (decl,
23125 DW_AT_reference) == 1)
23126 add_AT_flag (subr_die, DW_AT_reference, 1);
23127
23128 /* If this is a C++11 non-static member function with &&
23129 ref-qualifier then generate a DW_AT_reference attribute. */
23130 if ((dwarf_version >= 5 || !dwarf_strict)
23131 && lang_hooks.decls.decl_dwarf_attribute (decl,
23132 DW_AT_rvalue_reference)
23133 == 1)
23134 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23135 }
23136 }
23137 /* For non-DECL_EXTERNAL decls, if range information is available, fill
23138 the DIE with it. */
23139 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23140 {
23141 HOST_WIDE_INT cfa_fb_offset;
23142
23143 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23144
23145 if (!crtl->has_bb_partition)
23146 {
23147 dw_fde_ref fde = fun->fde;
23148 if (fde->dw_fde_begin)
23149 {
23150 /* We have already generated the labels. */
23151 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23152 fde->dw_fde_end, false);
23153 }
23154 else
23155 {
23156 /* Create start/end labels and add the range. */
23157 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23158 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23159 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23160 current_function_funcdef_no);
23161 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23162 current_function_funcdef_no);
23163 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23164 false);
23165 }
23166
23167 #if VMS_DEBUGGING_INFO
23168 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23169 Section 2.3 Prologue and Epilogue Attributes:
23170 When a breakpoint is set on entry to a function, it is generally
23171 desirable for execution to be suspended, not on the very first
23172 instruction of the function, but rather at a point after the
23173 function's frame has been set up, after any language defined local
23174 declaration processing has been completed, and before execution of
23175 the first statement of the function begins. Debuggers generally
23176 cannot properly determine where this point is. Similarly for a
23177 breakpoint set on exit from a function. The prologue and epilogue
23178 attributes allow a compiler to communicate the location(s) to use. */
23179
23180 {
23181 if (fde->dw_fde_vms_end_prologue)
23182 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23183 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23184
23185 if (fde->dw_fde_vms_begin_epilogue)
23186 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23187 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23188 }
23189 #endif
23190
23191 }
23192 else
23193 {
23194 /* Generate pubnames entries for the split function code ranges. */
23195 dw_fde_ref fde = fun->fde;
23196
23197 if (fde->dw_fde_second_begin)
23198 {
23199 if (dwarf_version >= 3 || !dwarf_strict)
23200 {
23201 /* We should use ranges for non-contiguous code section
23202 addresses. Use the actual code range for the initial
23203 section, since the HOT/COLD labels might precede an
23204 alignment offset. */
23205 bool range_list_added = false;
23206 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23207 fde->dw_fde_end, &range_list_added,
23208 false);
23209 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23210 fde->dw_fde_second_end,
23211 &range_list_added, false);
23212 if (range_list_added)
23213 add_ranges (NULL);
23214 }
23215 else
23216 {
23217 /* There is no real support in DWARF2 for this, so we make
23218 a work-around. First, emit the pub name for the segment
23219 containing the function label. Then make and emit a
23220 simplified subprogram DIE for the second segment with the
23221 name prefixed by __second_sect_of_. We use the same
23222 linkage name for the second DIE so that gdb will find both
23223 sections when given "b foo". */
23224 const char *name = NULL;
23225 tree decl_name = DECL_NAME (decl);
23226 dw_die_ref seg_die;
23227
23228 /* Do the 'primary' section. */
23229 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23230 fde->dw_fde_end, false);
23231
23232 /* Build a minimal DIE for the secondary section. */
23233 seg_die = new_die (DW_TAG_subprogram,
23234 subr_die->die_parent, decl);
23235
23236 if (TREE_PUBLIC (decl))
23237 add_AT_flag (seg_die, DW_AT_external, 1);
23238
23239 if (decl_name != NULL
23240 && IDENTIFIER_POINTER (decl_name) != NULL)
23241 {
23242 name = dwarf2_name (decl, 1);
23243 if (! DECL_ARTIFICIAL (decl))
23244 add_src_coords_attributes (seg_die, decl);
23245
23246 add_linkage_name (seg_die, decl);
23247 }
23248 gcc_assert (name != NULL);
23249 add_pure_or_virtual_attribute (seg_die, decl);
23250 if (DECL_ARTIFICIAL (decl))
23251 add_AT_flag (seg_die, DW_AT_artificial, 1);
23252
23253 name = concat ("__second_sect_of_", name, NULL);
23254 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23255 fde->dw_fde_second_end, false);
23256 add_name_attribute (seg_die, name);
23257 if (want_pubnames ())
23258 add_pubname_string (name, seg_die);
23259 }
23260 }
23261 else
23262 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23263 false);
23264 }
23265
23266 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23267
23268 /* We define the "frame base" as the function's CFA. This is more
23269 convenient for several reasons: (1) It's stable across the prologue
23270 and epilogue, which makes it better than just a frame pointer,
23271 (2) With dwarf3, there exists a one-byte encoding that allows us
23272 to reference the .debug_frame data by proxy, but failing that,
23273 (3) We can at least reuse the code inspection and interpretation
23274 code that determines the CFA position at various points in the
23275 function. */
23276 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23277 {
23278 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23279 add_AT_loc (subr_die, DW_AT_frame_base, op);
23280 }
23281 else
23282 {
23283 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23284 if (list->dw_loc_next)
23285 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23286 else
23287 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23288 }
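/* An illustrative summary of the two shapes DW_AT_frame_base can take
   here (a restatement of the code above, not extra behaviour): with
   DWARF 3+ and DWARF2 unwind info the attribute is simply the one-byte
   expression DW_OP_call_frame_cfa; otherwise it is a location expression
   or location list derived from convert_cfa_to_fb_loc_list, tracking
   where the CFA lives as the prologue and epilogue move it around.  */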
23289
23290 /* Compute a displacement from the "steady-state frame pointer" to
23291 the CFA. The former is what all stack slots and argument slots
23292 will reference in the rtl; the latter is what we've told the
23293 debugger about. We'll need to adjust all frame_base references
23294 by this displacement. */
23295 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23296
23297 if (fun->static_chain_decl)
23298 {
23299 /* DWARF requires here a location expression that computes the
23300 address of the enclosing subprogram's frame base. The machinery
23301 in tree-nested.c is supposed to store this specific address in the
23302 last field of the FRAME record. */
23303 const tree frame_type
23304 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23305 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23306
23307 tree fb_expr
23308 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23309 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23310 fb_expr, fb_decl, NULL_TREE);
23311
23312 add_AT_location_description (subr_die, DW_AT_static_link,
23313 loc_list_from_tree (fb_expr, 0, NULL));
23314 }
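/* An illustrative sketch (GNU C nested function, hypothetical names):
   given

     int outer (int n)
     {
       int inner (int i) { return i + n; }   // uses outer's frame
       return inner (1);
     }

   inner's subprogram DIE gets a DW_AT_static_link whose expression loads
   the enclosing frame base address that tree-nested.c stored in the last
   field of outer's FRAME record.  */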
23315
23316 resolve_variable_values ();
23317 }
23318
23319 /* Generate child DIEs for template parameters. */
23320 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23321 gen_generic_params_dies (decl);
23322
23323 /* Now output descriptions of the arguments for this function. This gets
23324 (unnecessarily?) complex because the DECL_ARGUMENTS list for a
23325 FUNCTION_DECL doesn't indicate cases where there was a trailing
23326 `...' at the end of the formal parameter list. To find out whether
23327 there was a trailing ellipsis, we must instead look at the type
23328 associated with the FUNCTION_DECL. This will be a node of type
23329 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23330 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23331 an ellipsis at the end. */
23332
23333 /* In the case where we are describing a mere function declaration, all we
23334 need to do here (and all we *can* do here) is to describe the *types* of
23335 its formal parameters. */
23336 if (debug_info_level <= DINFO_LEVEL_TERSE)
23337 ;
23338 else if (declaration)
23339 gen_formal_types_die (decl, subr_die);
23340 else
23341 {
23342 /* Generate DIEs to represent all known formal parameters. */
23343 tree parm = DECL_ARGUMENTS (decl);
23344 tree generic_decl = early_dwarf
23345 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23346 tree generic_decl_parm = generic_decl
23347 ? DECL_ARGUMENTS (generic_decl)
23348 : NULL;
23349
23350 /* Now we want to walk the list of parameters of the function and
23351 emit their relevant DIEs.
23352
23353 We consider the case of DECL being an instance of a generic function
23354 as well as it being a normal function.
23355
23356 If DECL is an instance of a generic function we walk the
23357 parameters of the generic function declaration _and_ the parameters of
23358 DECL itself. This is useful because we want to emit specific DIEs for
23359 function parameter packs and those are declared as part of the
23360 generic function declaration. In that particular case,
23361 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23362 That DIE has child DIEs representing the set of arguments
23363 of the pack. Note that the set of pack arguments can be empty.
23364 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23365 child DIEs.
23366
23367 Otherwise, we just consider the parameters of DECL. */
23368 while (generic_decl_parm || parm)
23369 {
23370 if (generic_decl_parm
23371 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23372 gen_formal_parameter_pack_die (generic_decl_parm,
23373 parm, subr_die,
23374 &parm);
23375 else if (parm)
23376 {
23377 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23378
23379 if (early_dwarf
23380 && parm == DECL_ARGUMENTS (decl)
23381 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23382 && parm_die
23383 && (dwarf_version >= 3 || !dwarf_strict))
23384 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23385
23386 parm = DECL_CHAIN (parm);
23387 }
23388
23389 if (generic_decl_parm)
23390 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23391 }
23392
23393 /* Decide whether we need an unspecified_parameters DIE at the end.
23394 There are two more cases to do this for: 1) the ANSI `...' declaration -
23395 this is detectable when the end of the arg list is not a
23396 void_type_node; 2) an unprototyped function declaration (not a
23397 definition). The latter just means that we have no info about the
23398 parameters at all. */
23399 if (early_dwarf)
23400 {
23401 if (prototype_p (TREE_TYPE (decl)))
23402 {
23403 /* This is the prototyped case; check for a trailing ellipsis. */
23404 if (stdarg_p (TREE_TYPE (decl)))
23405 gen_unspecified_parameters_die (decl, subr_die);
23406 }
23407 else if (DECL_INITIAL (decl) == NULL_TREE)
23408 gen_unspecified_parameters_die (decl, subr_die);
23409 }
23410 else if ((subr_die != old_die || old_die_had_no_children)
23411 && prototype_p (TREE_TYPE (decl))
23412 && stdarg_p (TREE_TYPE (decl)))
23413 gen_unspecified_parameters_die (decl, subr_die);
23414 }
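/* An illustrative sketch (hypothetical declarations): a stdarg function
   such as

     void log_msg (const char *fmt, ...);

   gets a DW_TAG_unspecified_parameters child standing in for the trailing
   ellipsis, while an unprototyped declaration like "int old_style ();"
   gets one because nothing at all is known about its parameters.  */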
23415
23416 if (subr_die != old_die)
23417 /* Add the calling convention attribute if requested. */
23418 add_calling_convention_attribute (subr_die, decl);
23419
23420 /* Output Dwarf info for all of the stuff within the body of the function
23421 (if it has one - it may be just a declaration).
23422
23423 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23424 a function. This BLOCK actually represents the outermost binding contour
23425 for the function, i.e. the contour in which the function's formal
23426 parameters and labels get declared. Curiously, it appears that the front
23427 end doesn't actually put the PARM_DECL nodes for the current function onto
23428 the BLOCK_VARS list for this outer scope; instead they are strung off of
23429 the DECL_ARGUMENTS list for the function.
23430
23431 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23432 the LABEL_DECL nodes for the function however, and we output DWARF info
23433 for those in decls_for_scope. Just within the `outer_scope' there will be
23434 a BLOCK node representing the function's outermost pair of curly braces,
23435 and any blocks used for the base and member initializers of a C++
23436 constructor function. */
23437 tree outer_scope = DECL_INITIAL (decl);
23438 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23439 {
23440 int call_site_note_count = 0;
23441 int tail_call_site_note_count = 0;
23442
23443 /* Emit a DW_TAG_variable DIE for a named return value. */
23444 if (DECL_NAME (DECL_RESULT (decl)))
23445 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23446
23447 /* The first time through decls_for_scope we will generate the
23448 DIEs for the locals. The second time, we fill in the
23449 location info. */
23450 decls_for_scope (outer_scope, subr_die);
23451
23452 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23453 {
23454 struct call_arg_loc_node *ca_loc;
23455 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23456 {
23457 dw_die_ref die = NULL;
23458 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23459 rtx arg, next_arg;
23460 tree arg_decl = NULL_TREE;
23461
23462 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23463 ? XEXP (ca_loc->call_arg_loc_note, 0)
23464 : NULL_RTX);
23465 arg; arg = next_arg)
23466 {
23467 dw_loc_descr_ref reg, val;
23468 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23469 dw_die_ref cdie, tdie = NULL;
23470
23471 next_arg = XEXP (arg, 1);
23472 if (REG_P (XEXP (XEXP (arg, 0), 0))
23473 && next_arg
23474 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23475 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23476 && REGNO (XEXP (XEXP (arg, 0), 0))
23477 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23478 next_arg = XEXP (next_arg, 1);
23479 if (mode == VOIDmode)
23480 {
23481 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23482 if (mode == VOIDmode)
23483 mode = GET_MODE (XEXP (arg, 0));
23484 }
23485 if (mode == VOIDmode || mode == BLKmode)
23486 continue;
23487 /* Get dynamic information about call target only if we
23488 have no static information: we cannot generate both
23489 DW_AT_call_origin and DW_AT_call_target
23490 attributes. */
23491 if (ca_loc->symbol_ref == NULL_RTX)
23492 {
23493 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23494 {
23495 tloc = XEXP (XEXP (arg, 0), 1);
23496 continue;
23497 }
23498 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23499 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23500 {
23501 tlocc = XEXP (XEXP (arg, 0), 1);
23502 continue;
23503 }
23504 }
23505 reg = NULL;
23506 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23507 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23508 VAR_INIT_STATUS_INITIALIZED);
23509 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23510 {
23511 rtx mem = XEXP (XEXP (arg, 0), 0);
23512 reg = mem_loc_descriptor (XEXP (mem, 0),
23513 get_address_mode (mem),
23514 GET_MODE (mem),
23515 VAR_INIT_STATUS_INITIALIZED);
23516 }
23517 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23518 == DEBUG_PARAMETER_REF)
23519 {
23520 tree tdecl
23521 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23522 tdie = lookup_decl_die (tdecl);
23523 if (tdie == NULL)
23524 continue;
23525 arg_decl = tdecl;
23526 }
23527 else
23528 continue;
23529 if (reg == NULL
23530 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23531 != DEBUG_PARAMETER_REF)
23532 continue;
23533 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23534 VOIDmode,
23535 VAR_INIT_STATUS_INITIALIZED);
23536 if (val == NULL)
23537 continue;
23538 if (die == NULL)
23539 die = gen_call_site_die (decl, subr_die, ca_loc);
23540 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23541 NULL_TREE);
23542 add_desc_attribute (cdie, arg_decl);
23543 if (reg != NULL)
23544 add_AT_loc (cdie, DW_AT_location, reg);
23545 else if (tdie != NULL)
23546 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23547 tdie);
23548 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23549 if (next_arg != XEXP (arg, 1))
23550 {
23551 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23552 if (mode == VOIDmode)
23553 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23554 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23555 0), 1),
23556 mode, VOIDmode,
23557 VAR_INIT_STATUS_INITIALIZED);
23558 if (val != NULL)
23559 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23560 val);
23561 }
23562 }
23563 if (die == NULL
23564 && (ca_loc->symbol_ref || tloc))
23565 die = gen_call_site_die (decl, subr_die, ca_loc);
23566 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23567 {
23568 dw_loc_descr_ref tval = NULL;
23569
23570 if (tloc != NULL_RTX)
23571 tval = mem_loc_descriptor (tloc,
23572 GET_MODE (tloc) == VOIDmode
23573 ? Pmode : GET_MODE (tloc),
23574 VOIDmode,
23575 VAR_INIT_STATUS_INITIALIZED);
23576 if (tval)
23577 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23578 else if (tlocc != NULL_RTX)
23579 {
23580 tval = mem_loc_descriptor (tlocc,
23581 GET_MODE (tlocc) == VOIDmode
23582 ? Pmode : GET_MODE (tlocc),
23583 VOIDmode,
23584 VAR_INIT_STATUS_INITIALIZED);
23585 if (tval)
23586 add_AT_loc (die,
23587 dwarf_AT (DW_AT_call_target_clobbered),
23588 tval);
23589 }
23590 }
23591 if (die != NULL)
23592 {
23593 call_site_note_count++;
23594 if (ca_loc->tail_call_p)
23595 tail_call_site_note_count++;
23596 }
23597 }
23598 }
23599 call_arg_locations = NULL;
23600 call_arg_loc_last = NULL;
23601 if (tail_call_site_count >= 0
23602 && tail_call_site_count == tail_call_site_note_count
23603 && (!dwarf_strict || dwarf_version >= 5))
23604 {
23605 if (call_site_count >= 0
23606 && call_site_count == call_site_note_count)
23607 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23608 else
23609 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23610 }
23611 call_site_count = -1;
23612 tail_call_site_count = -1;
23613 }
23614
23615 /* Mark used types after we have created DIEs for the functions scopes. */
23616 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23617 }
23618
23619 /* Returns a hash value for X (which really is a die_struct). */
23620
23621 hashval_t
23622 block_die_hasher::hash (die_struct *d)
23623 {
23624 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23625 }
23626
23627 /* Return nonzero if decl_id and die_parent of die_struct X is the same
23628 as decl_id and die_parent of die_struct Y. */
23629
23630 bool
23631 block_die_hasher::equal (die_struct *x, die_struct *y)
23632 {
23633 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23634 }
23635
23636 /* Hold information about markers for inlined entry points. */
23637 struct GTY ((for_user)) inline_entry_data
23638 {
23639 /* The block that's the inlined_function_outer_scope for an inlined
23640 function. */
23641 tree block;
23642
23643 /* The label at the inlined entry point. */
23644 const char *label_pfx;
23645 unsigned int label_num;
23646
23647 /* The view number to be used as the inlined entry point. */
23648 var_loc_view view;
23649 };
23650
23651 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23652 {
23653 typedef tree compare_type;
23654 static inline hashval_t hash (const inline_entry_data *);
23655 static inline bool equal (const inline_entry_data *, const_tree);
23656 };
23657
23658 /* Hash table routines for inline_entry_data. */
23659
23660 inline hashval_t
23661 inline_entry_data_hasher::hash (const inline_entry_data *data)
23662 {
23663 return htab_hash_pointer (data->block);
23664 }
23665
23666 inline bool
23667 inline_entry_data_hasher::equal (const inline_entry_data *data,
23668 const_tree block)
23669 {
23670 return data->block == block;
23671 }
23672
23673 /* Inlined entry points pending DIE creation in this compilation unit. */
23674
23675 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23676
23677
23678 /* Return TRUE if DECL, which may have been previously generated as
23679 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23680 true if decl (or its origin) is either an extern declaration or a
23681 class/namespace scoped declaration.
23682
23683 The declare_in_namespace support causes us to get two DIEs for one
23684 variable, both of which are declarations. We want to avoid
23685 considering one to be a specification, so we must test for
23686 DECLARATION and DW_AT_declaration. */
23687 static inline bool
23688 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23689 {
23690 return (old_die && TREE_STATIC (decl) && !declaration
23691 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23692 }
23693
23694 /* Return true if DECL is a local static. */
23695
23696 static inline bool
23697 local_function_static (tree decl)
23698 {
23699 gcc_assert (VAR_P (decl));
23700 return TREE_STATIC (decl)
23701 && DECL_CONTEXT (decl)
23702 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23703 }
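/* For example (hypothetical names, shown only to illustrate the predicate
   above), "counter" in

     void bump (void) { static int counter; counter++; }

   is a local function static: it is TREE_STATIC and its DECL_CONTEXT is
   the FUNCTION_DECL for bump.  */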
23704
23705 /* Return true iff DECL overrides (presumably completes) the type of
23706 OLD_DIE within CONTEXT_DIE. */
23707
23708 static bool
23709 override_type_for_decl_p (tree decl, dw_die_ref old_die,
23710 dw_die_ref context_die)
23711 {
23712 tree type = TREE_TYPE (decl);
23713 int cv_quals;
23714
23715 if (decl_by_reference_p (decl))
23716 {
23717 type = TREE_TYPE (type);
23718 cv_quals = TYPE_UNQUALIFIED;
23719 }
23720 else
23721 cv_quals = decl_quals (decl);
23722
23723 dw_die_ref type_die = modified_type_die (type,
23724 cv_quals | TYPE_QUALS (type),
23725 false,
23726 context_die);
23727
23728 dw_die_ref old_type_die = get_AT_ref (old_die, DW_AT_type);
23729
23730 return type_die != old_type_die;
23731 }
23732
23733 /* Generate a DIE to represent a declared data object.
23734 Either DECL or ORIGIN must be non-null. */
23735
23736 static void
23737 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23738 {
23739 HOST_WIDE_INT off = 0;
23740 tree com_decl;
23741 tree decl_or_origin = decl ? decl : origin;
23742 tree ultimate_origin;
23743 dw_die_ref var_die;
23744 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23745 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23746 || class_or_namespace_scope_p (context_die));
23747 bool specialization_p = false;
23748 bool no_linkage_name = false;
23749
23750 /* While C++ inline static data members have definitions inside the
23751 class, force the first DIE to be a declaration, then let gen_member_die
23752 reparent it to the class context and call gen_variable_die again
23753 to create the outside-of-class DIE for the definition. */
23754 if (!declaration
23755 && old_die == NULL
23756 && decl
23757 && DECL_CONTEXT (decl)
23758 && TYPE_P (DECL_CONTEXT (decl))
23759 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23760 {
23761 declaration = true;
23762 if (dwarf_version < 5)
23763 no_linkage_name = true;
23764 }
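/* An illustrative sketch (hypothetical class): for a C++17 inline static
   data member such as

     struct config
     {
       static inline int verbosity = 0;
     };

   the branch above forces the first DIE to be a declaration inside the
   class; gen_member_die later reparents it there, and a second call to
   gen_variable_die creates the definition DIE outside the class with a
   DW_AT_specification pointing back at the in-class declaration.  */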
23765
23766 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23767 if (decl || ultimate_origin)
23768 origin = ultimate_origin;
23769 com_decl = fortran_common (decl_or_origin, &off);
23770
23771 /* Symbol in common gets emitted as a child of the common block, in the form
23772 of a data member. */
23773 if (com_decl)
23774 {
23775 dw_die_ref com_die;
23776 dw_loc_list_ref loc = NULL;
23777 die_node com_die_arg;
23778
23779 var_die = lookup_decl_die (decl_or_origin);
23780 if (var_die)
23781 {
23782 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23783 {
23784 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23785 if (loc)
23786 {
23787 if (off)
23788 {
23789 /* Optimize the common case. */
23790 if (single_element_loc_list_p (loc)
23791 && loc->expr->dw_loc_opc == DW_OP_addr
23792 && loc->expr->dw_loc_next == NULL
23793 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23794 == SYMBOL_REF)
23795 {
23796 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23797 loc->expr->dw_loc_oprnd1.v.val_addr
23798 = plus_constant (GET_MODE (x), x , off);
23799 }
23800 else
23801 loc_list_plus_const (loc, off);
23802 }
23803 add_AT_location_description (var_die, DW_AT_location, loc);
23804 remove_AT (var_die, DW_AT_declaration);
23805 }
23806 }
23807 return;
23808 }
23809
23810 if (common_block_die_table == NULL)
23811 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23812
23813 com_die_arg.decl_id = DECL_UID (com_decl);
23814 com_die_arg.die_parent = context_die;
23815 com_die = common_block_die_table->find (&com_die_arg);
23816 if (! early_dwarf)
23817 loc = loc_list_from_tree (com_decl, 2, NULL);
23818 if (com_die == NULL)
23819 {
23820 const char *cnam
23821 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23822 die_node **slot;
23823
23824 com_die = new_die (DW_TAG_common_block, context_die, decl);
23825 add_name_and_src_coords_attributes (com_die, com_decl);
23826 if (loc)
23827 {
23828 add_AT_location_description (com_die, DW_AT_location, loc);
23829 /* Avoid sharing the same loc descriptor between
23830 DW_TAG_common_block and DW_TAG_variable. */
23831 loc = loc_list_from_tree (com_decl, 2, NULL);
23832 }
23833 else if (DECL_EXTERNAL (decl_or_origin))
23834 add_AT_flag (com_die, DW_AT_declaration, 1);
23835 if (want_pubnames ())
23836 add_pubname_string (cnam, com_die); /* ??? needed? */
23837 com_die->decl_id = DECL_UID (com_decl);
23838 slot = common_block_die_table->find_slot (com_die, INSERT);
23839 *slot = com_die;
23840 }
23841 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23842 {
23843 add_AT_location_description (com_die, DW_AT_location, loc);
23844 loc = loc_list_from_tree (com_decl, 2, NULL);
23845 remove_AT (com_die, DW_AT_declaration);
23846 }
23847 var_die = new_die (DW_TAG_variable, com_die, decl);
23848 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23849 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23850 decl_quals (decl_or_origin), false,
23851 context_die);
23852 add_alignment_attribute (var_die, decl);
23853 add_AT_flag (var_die, DW_AT_external, 1);
23854 if (loc)
23855 {
23856 if (off)
23857 {
23858 /* Optimize the common case. */
23859 if (single_element_loc_list_p (loc)
23860 && loc->expr->dw_loc_opc == DW_OP_addr
23861 && loc->expr->dw_loc_next == NULL
23862 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23863 {
23864 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23865 loc->expr->dw_loc_oprnd1.v.val_addr
23866 = plus_constant (GET_MODE (x), x, off);
23867 }
23868 else
23869 loc_list_plus_const (loc, off);
23870 }
23871 add_AT_location_description (var_die, DW_AT_location, loc);
23872 }
23873 else if (DECL_EXTERNAL (decl_or_origin))
23874 add_AT_flag (var_die, DW_AT_declaration, 1);
23875 if (decl)
23876 equate_decl_number_to_die (decl, var_die);
23877 return;
23878 }
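/* An illustrative sketch of the common-block shape produced above (block
   and member names are made up): a Fortran common block /globals/ holding
   two variables is expected to come out as

     DW_TAG_common_block "globals"  DW_AT_location <address of the block>
       DW_TAG_variable "x"          DW_AT_location <block address + 0>
       DW_TAG_variable "y"          DW_AT_location <block address + offset>

   where the per-variable offset is folded into the DW_OP_addr when the
   location is a single address, and added with loc_list_plus_const
   otherwise.  */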
23879
23880 if (old_die)
23881 {
23882 if (declaration)
23883 {
23884 /* A declaration that has been previously dumped needs no
23885 further annotations, since it doesn't need location info on
23886 the second pass. */
23887 return;
23888 }
23889 else if (decl_will_get_specification_p (old_die, decl, declaration)
23890 && !get_AT (old_die, DW_AT_specification))
23891 {
23892 /* Fall-thru so we can make a new variable die along with a
23893 DW_AT_specification. */
23894 }
23895 else if (origin && old_die->die_parent != context_die)
23896 {
23897 /* If we will be creating an inlined instance, we need a
23898 new DIE that will get annotated with
23899 DW_AT_abstract_origin. */
23900 gcc_assert (!DECL_ABSTRACT_P (decl));
23901 }
23902 else
23903 {
23904 /* If a DIE was dumped early, it still needs location info.
23905 Skip to where we fill the location bits. */
23906 var_die = old_die;
23907
23908 /* ??? In LTRANS we cannot annotate early created variably
23909 modified type DIEs without copying them and adjusting all
23910 references to them. Thus we dumped them again. Also add a
23911 reference to them but beware of -g0 compile and -g link
23912 in which case the reference will be already present. */
23913 tree type = TREE_TYPE (decl_or_origin);
23914 if (in_lto_p
23915 && ! get_AT (var_die, DW_AT_type)
23916 && variably_modified_type_p
23917 (type, decl_function_context (decl_or_origin)))
23918 {
23919 if (decl_by_reference_p (decl_or_origin))
23920 add_type_attribute (var_die, TREE_TYPE (type),
23921 TYPE_UNQUALIFIED, false, context_die);
23922 else
23923 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23924 false, context_die);
23925 }
23926
23927 goto gen_variable_die_location;
23928 }
23929 }
23930
23931 /* For static data members, the declaration in the class is supposed
23932 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23933 also in DWARF2; the specification should still be DW_TAG_variable
23934 referencing the DW_TAG_member DIE. */
23935 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23936 var_die = new_die (DW_TAG_member, context_die, decl);
23937 else
23938 var_die = new_die (DW_TAG_variable, context_die, decl);
23939
23940 if (origin != NULL)
23941 add_abstract_origin_attribute (var_die, origin);
23942
23943 /* Loop unrolling can create multiple blocks that refer to the same
23944 static variable, so we must test for the DW_AT_declaration flag.
23945
23946 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23947 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23948 sharing them.
23949
23950 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23951 else if (decl_will_get_specification_p (old_die, decl, declaration))
23952 {
23953 /* This is a definition of a C++ class level static. */
23954 add_AT_specification (var_die, old_die);
23955 specialization_p = true;
23956 if (DECL_NAME (decl))
23957 {
23958 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23959 struct dwarf_file_data * file_index = lookup_filename (s.file);
23960
23961 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23962 add_AT_file (var_die, DW_AT_decl_file, file_index);
23963
23964 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23965 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23966
23967 if (debug_column_info
23968 && s.column
23969 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23970 != (unsigned) s.column))
23971 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23972
23973 if (old_die->die_tag == DW_TAG_member)
23974 add_linkage_name (var_die, decl);
23975 }
23976 }
23977 else
23978 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23979
23980 if ((origin == NULL && !specialization_p)
23981 || (origin != NULL
23982 && !DECL_ABSTRACT_P (decl_or_origin)
23983 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23984 decl_function_context
23985 (decl_or_origin)))
23986 || (old_die && specialization_p
23987 && override_type_for_decl_p (decl_or_origin, old_die, context_die)))
23988 {
23989 tree type = TREE_TYPE (decl_or_origin);
23990
23991 if (decl_by_reference_p (decl_or_origin))
23992 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23993 context_die);
23994 else
23995 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23996 context_die);
23997 }
23998
23999 if (origin == NULL && !specialization_p)
24000 {
24001 if (TREE_PUBLIC (decl))
24002 add_AT_flag (var_die, DW_AT_external, 1);
24003
24004 if (DECL_ARTIFICIAL (decl))
24005 add_AT_flag (var_die, DW_AT_artificial, 1);
24006
24007 add_alignment_attribute (var_die, decl);
24008
24009 add_accessibility_attribute (var_die, decl);
24010 }
24011
24012 if (declaration)
24013 add_AT_flag (var_die, DW_AT_declaration, 1);
24014
24015 if (decl && (DECL_ABSTRACT_P (decl)
24016 || !old_die || is_declaration_die (old_die)))
24017 equate_decl_number_to_die (decl, var_die);
24018
24019 gen_variable_die_location:
24020 if (! declaration
24021 && (! DECL_ABSTRACT_P (decl_or_origin)
24022 /* Local static vars are shared between all clones/inlines,
24023 so emit DW_AT_location on the abstract DIE if DECL_RTL is
24024 already set. */
24025 || (VAR_P (decl_or_origin)
24026 && TREE_STATIC (decl_or_origin)
24027 && DECL_RTL_SET_P (decl_or_origin))))
24028 {
24029 if (early_dwarf)
24030 add_pubname (decl_or_origin, var_die);
24031 else
24032 add_location_or_const_value_attribute (var_die, decl_or_origin,
24033 decl == NULL);
24034 }
24035 else
24036 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
24037
24038 if ((dwarf_version >= 4 || !dwarf_strict)
24039 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
24040 DW_AT_const_expr) == 1
24041 && !get_AT (var_die, DW_AT_const_expr)
24042 && !specialization_p)
24043 add_AT_flag (var_die, DW_AT_const_expr, 1);
24044
24045 if (!dwarf_strict)
24046 {
24047 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
24048 DW_AT_inline);
24049 if (inl != -1
24050 && !get_AT (var_die, DW_AT_inline)
24051 && !specialization_p)
24052 add_AT_unsigned (var_die, DW_AT_inline, inl);
24053 }
24054 }
24055
24056 /* Generate a DIE to represent a named constant. */
24057
24058 static void
24059 gen_const_die (tree decl, dw_die_ref context_die)
24060 {
24061 dw_die_ref const_die;
24062 tree type = TREE_TYPE (decl);
24063
24064 const_die = lookup_decl_die (decl);
24065 if (const_die)
24066 return;
24067
24068 const_die = new_die (DW_TAG_constant, context_die, decl);
24069 equate_decl_number_to_die (decl, const_die);
24070 add_name_and_src_coords_attributes (const_die, decl);
24071 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
24072 if (TREE_PUBLIC (decl))
24073 add_AT_flag (const_die, DW_AT_external, 1);
24074 if (DECL_ARTIFICIAL (decl))
24075 add_AT_flag (const_die, DW_AT_artificial, 1);
24076 tree_add_const_value_attribute_for_decl (const_die, decl);
24077 }
24078
24079 /* Generate a DIE to represent a label identifier. */
24080
24081 static void
24082 gen_label_die (tree decl, dw_die_ref context_die)
24083 {
24084 tree origin = decl_ultimate_origin (decl);
24085 dw_die_ref lbl_die = lookup_decl_die (decl);
24086 rtx insn;
24087 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24088
24089 if (!lbl_die)
24090 {
24091 lbl_die = new_die (DW_TAG_label, context_die, decl);
24092 equate_decl_number_to_die (decl, lbl_die);
24093
24094 if (origin != NULL)
24095 add_abstract_origin_attribute (lbl_die, origin);
24096 else
24097 add_name_and_src_coords_attributes (lbl_die, decl);
24098 }
24099
24100 if (DECL_ABSTRACT_P (decl))
24101 equate_decl_number_to_die (decl, lbl_die);
24102 else if (! early_dwarf)
24103 {
24104 insn = DECL_RTL_IF_SET (decl);
24105
24106 /* Deleted labels are programmer specified labels which have been
24107 eliminated because of various optimizations. We still emit them
24108 here so that it is possible to put breakpoints on them. */
24109 if (insn
24110 && (LABEL_P (insn)
24111 || ((NOTE_P (insn)
24112 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
24113 {
24114 /* When optimization is enabled (via -O) some parts of the compiler
24115 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
24116 represent source-level labels which were explicitly declared by
24117 the user. This really shouldn't be happening though, so catch
24118 it if it ever does happen. */
24119 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
24120
24121 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
24122 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24123 }
24124 else if (insn
24125 && NOTE_P (insn)
24126 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
24127 && CODE_LABEL_NUMBER (insn) != -1)
24128 {
24129 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24130 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24131 }
24132 }
24133 }
24134
24135 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24136 attributes to the DIE for a block STMT, to describe where the inlined
24137 function was called from. This is similar to add_src_coords_attributes. */
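/* For example, for a call inlined from a hypothetical location foo.c:42:10,
   the DW_TAG_inlined_subroutine DIE gets DW_AT_call_file referring to foo.c,
   DW_AT_call_line 42 and, when column information is emitted
   (debug_column_info), DW_AT_call_column 10.  */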
24138
24139 static inline void
24140 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24141 {
24142 /* We can end up with BUILTINS_LOCATION here. */
24143 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24144 return;
24145
24146 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24147
24148 if (dwarf_version >= 3 || !dwarf_strict)
24149 {
24150 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24151 add_AT_unsigned (die, DW_AT_call_line, s.line);
24152 if (debug_column_info && s.column)
24153 add_AT_unsigned (die, DW_AT_call_column, s.column);
24154 }
24155 }
24156
24157
24158 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24159 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24160
24161 static inline void
24162 add_high_low_attributes (tree stmt, dw_die_ref die)
24163 {
24164 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24165
24166 if (inline_entry_data **iedp
24167 = !inline_entry_data_table ? NULL
24168 : inline_entry_data_table->find_slot_with_hash (stmt,
24169 htab_hash_pointer (stmt),
24170 NO_INSERT))
24171 {
24172 inline_entry_data *ied = *iedp;
24173 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24174 gcc_assert (debug_inline_points);
24175 gcc_assert (inlined_function_outer_scope_p (stmt));
24176
24177 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24178 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24179
24180 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24181 && !dwarf_strict)
24182 {
24183 if (!output_asm_line_debug_info ())
24184 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24185 else
24186 {
24187 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24188 /* FIXME: this will resolve to a small number. Could we
24189 possibly emit smaller data? Ideally we'd emit a
24190 uleb128, but that would make the size of DIEs
24191 impossible for the compiler to compute, since it's
24192 the assembler that computes the value of the view
24193 label in this case. Ideally, we'd have a single form
24194 encompassing both the address and the view, and
24195 indirecting them through a table might make things
24196 easier, but even that would be more wasteful,
24197 space-wise, than what we have now. */
24198 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24199 }
24200 }
24201
24202 inline_entry_data_table->clear_slot (iedp);
24203 }
24204
24205 if (BLOCK_FRAGMENT_CHAIN (stmt)
24206 && (dwarf_version >= 3 || !dwarf_strict))
24207 {
24208 tree chain, superblock = NULL_TREE;
24209 dw_die_ref pdie;
24210 dw_attr_node *attr = NULL;
24211
24212 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24213 {
24214 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24215 BLOCK_NUMBER (stmt));
24216 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24217 }
24218
24219 /* Optimize duplicate .debug_ranges lists or even tails of
24220 lists. If this BLOCK has the same ranges as its supercontext,
24221 look up the DW_AT_ranges attribute in the supercontext (and
24222 recursively so), verify that the ranges_table contains the
24223 right values and use it instead of adding a new .debug_ranges entry. */
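      /* For illustration, under a hypothetical layout where the
	 supercontext's range list covers fragments F1 F2 F3 F4 and this
	 BLOCK's fragment chain is exactly F3 F4, DW_AT_ranges for this
	 BLOCK can point at the tail of the supercontext's list instead
	 of adding a new one.  */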
24224 for (chain = stmt, pdie = die;
24225 BLOCK_SAME_RANGE (chain);
24226 chain = BLOCK_SUPERCONTEXT (chain))
24227 {
24228 dw_attr_node *new_attr;
24229
24230 pdie = pdie->die_parent;
24231 if (pdie == NULL)
24232 break;
24233 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24234 break;
24235 new_attr = get_AT (pdie, DW_AT_ranges);
24236 if (new_attr == NULL
24237 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24238 break;
24239 attr = new_attr;
24240 superblock = BLOCK_SUPERCONTEXT (chain);
24241 }
24242 if (attr != NULL
24243 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24244 == (int)BLOCK_NUMBER (superblock))
24245 && BLOCK_FRAGMENT_CHAIN (superblock))
24246 {
24247 unsigned long off = attr->dw_attr_val.v.val_offset;
24248 unsigned long supercnt = 0, thiscnt = 0;
24249 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24250 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24251 {
24252 ++supercnt;
24253 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24254 == (int)BLOCK_NUMBER (chain));
24255 }
24256 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24257 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24258 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24259 ++thiscnt;
24260 gcc_assert (supercnt >= thiscnt);
24261 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24262 false);
24263 note_rnglist_head (off + supercnt - thiscnt);
24264 return;
24265 }
24266
24267 unsigned int offset = add_ranges (stmt, true);
24268 add_AT_range_list (die, DW_AT_ranges, offset, false);
24269 note_rnglist_head (offset);
24270
24271 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24272 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24273 do
24274 {
24275 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24276 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24277 chain = BLOCK_FRAGMENT_CHAIN (chain);
24278 }
24279 while (chain);
24280 add_ranges (NULL);
24281 }
24282 else
24283 {
24284 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24285 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24286 BLOCK_NUMBER (stmt));
24287 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24288 BLOCK_NUMBER (stmt));
24289 add_AT_low_high_pc (die, label, label_high, false);
24290 }
24291 }
24292
24293 /* Generate a DIE for a lexical block. */
24294
24295 static void
24296 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24297 {
24298 dw_die_ref old_die = lookup_block_die (stmt);
24299 dw_die_ref stmt_die = NULL;
24300 if (!old_die)
24301 {
24302 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24303 equate_block_to_die (stmt, stmt_die);
24304 }
24305
24306 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24307 {
24308 /* If this is an inlined or concrete instance, create a new lexical
24309 block DIE for anything below to attach DW_AT_abstract_origin to. */
24310 if (old_die)
24311 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24312
24313 tree origin = block_ultimate_origin (stmt);
24314 if (origin != NULL_TREE && (origin != stmt || old_die))
24315 add_abstract_origin_attribute (stmt_die, origin);
24316
24317 old_die = NULL;
24318 }
24319
24320 if (old_die)
24321 stmt_die = old_die;
24322
24323 /* A non-abstract block whose blocks have already been reordered
24324 should have the instruction range for this block. If so, set the
24325 high/low attributes. */
24326 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24327 {
24328 gcc_assert (stmt_die);
24329 add_high_low_attributes (stmt, stmt_die);
24330 }
24331
24332 decls_for_scope (stmt, stmt_die);
24333 }
24334
24335 /* Generate a DIE for an inlined subprogram. */
24336
24337 static void
24338 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24339 {
24340 tree decl = block_ultimate_origin (stmt);
24341
24342 /* Make sure any inlined functions are known to be inlineable. */
24343 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24344 || cgraph_function_possibly_inlined_p (decl));
24345
24346 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24347
24348 if (call_arg_locations || debug_inline_points)
24349 equate_block_to_die (stmt, subr_die);
24350 add_abstract_origin_attribute (subr_die, decl);
24351 if (TREE_ASM_WRITTEN (stmt))
24352 add_high_low_attributes (stmt, subr_die);
24353 add_call_src_coords_attributes (stmt, subr_die);
24354
24355 /* The inliner creates an extra BLOCK for the parameter setup;
24356 we want to merge that with the actual outermost BLOCK of the
24357 inlined function to avoid duplicate locals in consumers.
24358 Do that by doing the subblock recursion on the single subblock
24359 of STMT. */
24360 bool unwrap_one = false;
24361 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24362 {
24363 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24364 if (origin
24365 && TREE_CODE (origin) == BLOCK
24366 && BLOCK_SUPERCONTEXT (origin) == decl)
24367 unwrap_one = true;
24368 }
24369 decls_for_scope (stmt, subr_die, !unwrap_one);
24370 if (unwrap_one)
24371 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24372 }
24373
24374 /* Generate a DIE for a field in a record or structure. CTX is required: see
24375 the comment for VLR_CONTEXT. */
24376
24377 static void
24378 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24379 {
24380 dw_die_ref decl_die;
24381
24382 if (TREE_TYPE (decl) == error_mark_node)
24383 return;
24384
24385 decl_die = new_die (DW_TAG_member, context_die, decl);
24386 add_name_and_src_coords_attributes (decl_die, decl);
24387 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24388 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24389 context_die);
24390
24391 if (DECL_BIT_FIELD_TYPE (decl))
24392 {
24393 add_byte_size_attribute (decl_die, decl);
24394 add_bit_size_attribute (decl_die, decl);
24395 add_bit_offset_attribute (decl_die, decl);
24396 }
24397
24398 add_alignment_attribute (decl_die, decl);
24399
24400 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24401 add_data_member_location_attribute (decl_die, decl, ctx);
24402
24403 if (DECL_ARTIFICIAL (decl))
24404 add_AT_flag (decl_die, DW_AT_artificial, 1);
24405
24406 add_accessibility_attribute (decl_die, decl);
24407
24408 /* Equate decl number to die, so that we can look up this decl later on. */
24409 equate_decl_number_to_die (decl, decl_die);
24410 }
24411
24412 /* Generate a DIE for a pointer to a member type. TYPE can be an
24413 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24414 pointer to member function. */
24415
24416 static void
24417 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24418 {
24419 if (lookup_type_die (type))
24420 return;
24421
24422 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24423 scope_die_for (type, context_die), type);
24424
24425 equate_type_number_to_die (type, ptr_die);
24426 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24427 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24428 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24429 context_die);
24430 add_alignment_attribute (ptr_die, type);
24431
24432 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24433 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24434 {
24435 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24436 add_AT_loc (ptr_die, DW_AT_use_location, op);
24437 }
24438 }
24439
24440 static char *producer_string;
24441
24442 /* Given a C and/or C++ language/version string, return the "highest".
24443 C++ is assumed to be "higher" than C in this case. Used for merging
24444 LTO translation unit languages. */
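/* For example, given "GNU C11" and "GNU C++14" (in either order), this
   returns "GNU C++14".  */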
24445 static const char *
24446 highest_c_language (const char *lang1, const char *lang2)
24447 {
24448 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24449 return "GNU C++17";
24450 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24451 return "GNU C++14";
24452 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24453 return "GNU C++11";
24454 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24455 return "GNU C++98";
24456
24457 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24458 return "GNU C2X";
24459 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24460 return "GNU C17";
24461 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24462 return "GNU C11";
24463 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24464 return "GNU C99";
24465 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24466 return "GNU C89";
24467
24468 gcc_unreachable ();
24469 }
24470
24471
24472 /* Generate the DIE for the compilation unit. */
24473
24474 static dw_die_ref
24475 gen_compile_unit_die (const char *filename)
24476 {
24477 dw_die_ref die;
24478 const char *language_string = lang_hooks.name;
24479 int language;
24480
24481 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24482
24483 if (filename)
24484 {
24485 add_filename_attribute (die, filename);
24486 /* Don't add cwd for <built-in>. */
24487 if (filename[0] != '<')
24488 add_comp_dir_attribute (die);
24489 }
24490
24491 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24492
24493 /* If our producer is LTO, try to figure out a common language to use
24494 from the global list of translation units. */
24495 if (strcmp (language_string, "GNU GIMPLE") == 0)
24496 {
24497 unsigned i;
24498 tree t;
24499 const char *common_lang = NULL;
24500
24501 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24502 {
24503 if (!TRANSLATION_UNIT_LANGUAGE (t))
24504 continue;
24505 if (!common_lang)
24506 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24507 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24508 ;
24509 else if (strncmp (common_lang, "GNU C", 5) == 0
24510 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24511 /* Mixing C and C++ is ok, use C++ in that case. */
24512 common_lang = highest_c_language (common_lang,
24513 TRANSLATION_UNIT_LANGUAGE (t));
24514 else
24515 {
24516 /* Fall back to C. */
24517 common_lang = NULL;
24518 break;
24519 }
24520 }
24521
24522 if (common_lang)
24523 language_string = common_lang;
24524 }
24525
24526 language = DW_LANG_C;
24527 if (strncmp (language_string, "GNU C", 5) == 0
24528 && ISDIGIT (language_string[5]))
24529 {
24530 language = DW_LANG_C89;
24531 if (dwarf_version >= 3 || !dwarf_strict)
24532 {
24533 if (strcmp (language_string, "GNU C89") != 0)
24534 language = DW_LANG_C99;
24535
24536 if (dwarf_version >= 5 /* || !dwarf_strict */)
24537 if (strcmp (language_string, "GNU C11") == 0
24538 || strcmp (language_string, "GNU C17") == 0
24539 || strcmp (language_string, "GNU C2X") == 0)
24540 language = DW_LANG_C11;
24541 }
24542 }
24543 else if (strncmp (language_string, "GNU C++", 7) == 0)
24544 {
24545 language = DW_LANG_C_plus_plus;
24546 if (dwarf_version >= 5 /* || !dwarf_strict */)
24547 {
24548 if (strcmp (language_string, "GNU C++11") == 0)
24549 language = DW_LANG_C_plus_plus_11;
24550 else if (strcmp (language_string, "GNU C++14") == 0)
24551 language = DW_LANG_C_plus_plus_14;
24552 else if (strcmp (language_string, "GNU C++17") == 0
24553 || strcmp (language_string, "GNU C++20") == 0)
24554 /* For now. */
24555 language = DW_LANG_C_plus_plus_14;
24556 }
24557 }
24558 else if (strcmp (language_string, "GNU F77") == 0)
24559 language = DW_LANG_Fortran77;
24560 else if (dwarf_version >= 3 || !dwarf_strict)
24561 {
24562 if (strcmp (language_string, "GNU Ada") == 0)
24563 language = DW_LANG_Ada95;
24564 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24565 {
24566 language = DW_LANG_Fortran95;
24567 if (dwarf_version >= 5 /* || !dwarf_strict */)
24568 {
24569 if (strcmp (language_string, "GNU Fortran2003") == 0)
24570 language = DW_LANG_Fortran03;
24571 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24572 language = DW_LANG_Fortran08;
24573 }
24574 }
24575 else if (strcmp (language_string, "GNU Objective-C") == 0)
24576 language = DW_LANG_ObjC;
24577 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24578 language = DW_LANG_ObjC_plus_plus;
24579 else if (strcmp (language_string, "GNU D") == 0)
24580 language = DW_LANG_D;
24581 else if (dwarf_version >= 5 || !dwarf_strict)
24582 {
24583 if (strcmp (language_string, "GNU Go") == 0)
24584 language = DW_LANG_Go;
24585 }
24586 }
24587 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24588 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24589 language = DW_LANG_Fortran90;
24590 /* Likewise for Ada. */
24591 else if (strcmp (language_string, "GNU Ada") == 0)
24592 language = DW_LANG_Ada83;
24593
24594 add_AT_unsigned (die, DW_AT_language, language);
24595
24596 switch (language)
24597 {
24598 case DW_LANG_Fortran77:
24599 case DW_LANG_Fortran90:
24600 case DW_LANG_Fortran95:
24601 case DW_LANG_Fortran03:
24602 case DW_LANG_Fortran08:
24603 /* Fortran has case-insensitive identifiers and the front-end
24604 lowercases everything. */
24605 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24606 break;
24607 default:
24608 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24609 break;
24610 }
24611 return die;
24612 }
24613
24614 /* Generate the DIE for a base class. */
24615
24616 static void
24617 gen_inheritance_die (tree binfo, tree access, tree type,
24618 dw_die_ref context_die)
24619 {
24620 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24621 struct vlr_context ctx = { type, NULL };
24622
24623 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24624 context_die);
24625 add_data_member_location_attribute (die, binfo, &ctx);
24626
24627 if (BINFO_VIRTUAL_P (binfo))
24628 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24629
24630 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24631 children; otherwise the default is DW_ACCESS_public. In DWARF2
24632 the default has always been DW_ACCESS_private. */
24633 if (access == access_public_node)
24634 {
24635 if (dwarf_version == 2
24636 || context_die->die_tag == DW_TAG_class_type)
24637 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24638 }
24639 else if (access == access_protected_node)
24640 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24641 else if (dwarf_version > 2
24642 && context_die->die_tag != DW_TAG_class_type)
24643 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24644 }
24645
24646 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24647 structure. */
24648
24649 static bool
24650 is_variant_part (tree decl)
24651 {
24652 return (TREE_CODE (decl) == FIELD_DECL
24653 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24654 }
24655
24656 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24657 return the FIELD_DECL. Return NULL_TREE otherwise. */
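/* For instance, in a qualifier predicate comparing a discriminant D of
   STRUCT_TYPE against a constant, the reference to D reaches us as a
   COMPONENT_REF (possibly wrapped in conversions, which are stripped)
   whose first operand is a PLACEHOLDER_EXPR for STRUCT_TYPE and whose
   second operand is the FIELD_DECL for D; that FIELD_DECL is what gets
   returned.  */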
24658
24659 static tree
24660 analyze_discr_in_predicate (tree operand, tree struct_type)
24661 {
24662 while (CONVERT_EXPR_P (operand))
24663 operand = TREE_OPERAND (operand, 0);
24664
24665 /* Match field access to members of struct_type only. */
24666 if (TREE_CODE (operand) == COMPONENT_REF
24667 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24668 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24669 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24670 return TREE_OPERAND (operand, 1);
24671 else
24672 return NULL_TREE;
24673 }
24674
24675 /* Check that SRC is a constant integer that can be represented as a native
24676 integer constant (either signed or unsigned). If so, store it into DEST and
24677 return true. Return false otherwise. */
24678
24679 static bool
24680 get_discr_value (tree src, dw_discr_value *dest)
24681 {
24682 tree discr_type = TREE_TYPE (src);
24683
24684 if (lang_hooks.types.get_debug_type)
24685 {
24686 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24687 if (debug_type != NULL)
24688 discr_type = debug_type;
24689 }
24690
24691 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24692 return false;
24693
24694 /* Signedness can vary between the original type and the debug type. This
24695 can happen for character types in Ada for instance: the character type
24696 used for code generation can be signed, to be compatible with the C one,
24697 but from a debugger point of view, it must be unsigned. */
24698 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24699 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24700
24701 if (is_orig_unsigned != is_debug_unsigned)
24702 src = fold_convert (discr_type, src);
24703
24704 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24705 return false;
24706
24707 dest->pos = is_debug_unsigned;
24708 if (is_debug_unsigned)
24709 dest->v.uval = tree_to_uhwi (src);
24710 else
24711 dest->v.sval = tree_to_shwi (src);
24712
24713 return true;
24714 }
24715
24716 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24717 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24718 store NULL_TREE in DISCR_DECL. Otherwise:
24719
24720 - store the discriminant field in STRUCT_TYPE that controls the variant
24721 part to *DISCR_DECL
24722
24723 - put in *DISCR_LISTS_P an array where for each variant, the item
24724 represents the corresponding matching list of discriminant values.
24725
24726 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24727 the above array.
24728
24729 Note that when the array is allocated (i.e. when the analysis is
24730 successful), it is up to the caller to free the array. */
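/* As an illustration, a variant's DECL_QUALIFIER is typically a predicate
   over a single discriminant field D of STRUCT_TYPE, e.g. a hypothetical

     D == 1 || (D >= 4 && D <= 7)

   where nested TRUTH_ORIF_EXPRs chain the alternatives, an EQ_EXPR matches
   a single value, and a TRUTH_ANDIF_EXPR of GE/GT and LE/LT comparisons
   matches a range.  A boolean discriminant may also appear on its own,
   matching "true".  The default variant has boolean_true_node as its
   qualifier and gets no matching list.  */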
24731
24732 static void
24733 analyze_variants_discr (tree variant_part_decl,
24734 tree struct_type,
24735 tree *discr_decl,
24736 dw_discr_list_ref **discr_lists_p,
24737 unsigned *discr_lists_length)
24738 {
24739 tree variant_part_type = TREE_TYPE (variant_part_decl);
24740 tree variant;
24741 dw_discr_list_ref *discr_lists;
24742 unsigned i;
24743
24744 /* Compute how many variants there are in this variant part. */
24745 *discr_lists_length = 0;
24746 for (variant = TYPE_FIELDS (variant_part_type);
24747 variant != NULL_TREE;
24748 variant = DECL_CHAIN (variant))
24749 ++*discr_lists_length;
24750
24751 *discr_decl = NULL_TREE;
24752 *discr_lists_p
24753 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24754 sizeof (**discr_lists_p));
24755 discr_lists = *discr_lists_p;
24756
24757 /* And then analyze all variants to extract discriminant information for all
24758 of them. This analysis is conservative: as soon as we detect something we
24759 do not support, abort everything and pretend we found nothing. */
24760 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24761 variant != NULL_TREE;
24762 variant = DECL_CHAIN (variant), ++i)
24763 {
24764 tree match_expr = DECL_QUALIFIER (variant);
24765
24766 /* Now, try to analyze the predicate and deduce a discriminant for
24767 it. */
24768 if (match_expr == boolean_true_node)
24769 /* Typically happens for the default variant: it matches all cases that
24770 previous variants rejected. Don't output any matching value for
24771 this one. */
24772 continue;
24773
24774 /* The following loop tries to iterate over each discriminant
24775 possibility: single values or ranges. */
24776 while (match_expr != NULL_TREE)
24777 {
24778 tree next_round_match_expr;
24779 tree candidate_discr = NULL_TREE;
24780 dw_discr_list_ref new_node = NULL;
24781
24782 /* Possibilities are matched one after the other by nested
24783 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24784 continue with the rest at the next iteration. */
24785 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24786 {
24787 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24788 match_expr = TREE_OPERAND (match_expr, 1);
24789 }
24790 else
24791 next_round_match_expr = NULL_TREE;
24792
24793 if (match_expr == boolean_false_node)
24794 /* This sub-expression matches nothing: just wait for the next
24795 one. */
24796 ;
24797
24798 else if (TREE_CODE (match_expr) == EQ_EXPR)
24799 {
24800 /* We are matching: <discr_field> == <integer_cst>
24801 This sub-expression matches a single value. */
24802 tree integer_cst = TREE_OPERAND (match_expr, 1);
24803
24804 candidate_discr
24805 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24806 struct_type);
24807
24808 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24809 if (!get_discr_value (integer_cst,
24810 &new_node->dw_discr_lower_bound))
24811 goto abort;
24812 new_node->dw_discr_range = false;
24813 }
24814
24815 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24816 {
24817 /* We are matching:
24818 <discr_field> > <integer_cst>
24819 && <discr_field> < <integer_cst>.
24820 This sub-expression matches the range of values between the
24821 two matched integer constants. Note that comparisons can be
24822 inclusive or exclusive. */
24823 tree candidate_discr_1, candidate_discr_2;
24824 tree lower_cst, upper_cst;
24825 bool lower_cst_included, upper_cst_included;
24826 tree lower_op = TREE_OPERAND (match_expr, 0);
24827 tree upper_op = TREE_OPERAND (match_expr, 1);
24828
24829 /* When the comparison is exclusive, the integer constant is not
24830 the discriminant range bound we are looking for: we will have
24831 to increment or decrement it. */
24832 if (TREE_CODE (lower_op) == GE_EXPR)
24833 lower_cst_included = true;
24834 else if (TREE_CODE (lower_op) == GT_EXPR)
24835 lower_cst_included = false;
24836 else
24837 goto abort;
24838
24839 if (TREE_CODE (upper_op) == LE_EXPR)
24840 upper_cst_included = true;
24841 else if (TREE_CODE (upper_op) == LT_EXPR)
24842 upper_cst_included = false;
24843 else
24844 goto abort;
24845
24846 /* Extract the discriminant from the first operand and check it
24847 is consistent with the same analysis in the second
24848 operand. */
24849 candidate_discr_1
24850 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24851 struct_type);
24852 candidate_discr_2
24853 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24854 struct_type);
24855 if (candidate_discr_1 == candidate_discr_2)
24856 candidate_discr = candidate_discr_1;
24857 else
24858 goto abort;
24859
24860 /* Extract bounds from both. */
24861 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24862 lower_cst = TREE_OPERAND (lower_op, 1);
24863 upper_cst = TREE_OPERAND (upper_op, 1);
24864
24865 if (!lower_cst_included)
24866 lower_cst
24867 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24868 build_int_cst (TREE_TYPE (lower_cst), 1));
24869 if (!upper_cst_included)
24870 upper_cst
24871 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24872 build_int_cst (TREE_TYPE (upper_cst), 1));
24873
24874 if (!get_discr_value (lower_cst,
24875 &new_node->dw_discr_lower_bound)
24876 || !get_discr_value (upper_cst,
24877 &new_node->dw_discr_upper_bound))
24878 goto abort;
24879
24880 new_node->dw_discr_range = true;
24881 }
24882
24883 else if ((candidate_discr
24884 = analyze_discr_in_predicate (match_expr, struct_type))
24885 && (TREE_TYPE (candidate_discr) == boolean_type_node
24886 || TREE_TYPE (TREE_TYPE (candidate_discr))
24887 == boolean_type_node))
24888 {
24889 /* We are matching: <discr_field> for a boolean discriminant.
24890 This sub-expression matches boolean_true_node. */
24891 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24892 if (!get_discr_value (boolean_true_node,
24893 &new_node->dw_discr_lower_bound))
24894 goto abort;
24895 new_node->dw_discr_range = false;
24896 }
24897
24898 else
24899 /* Unsupported sub-expression: we cannot determine the set of
24900 matching discriminant values. Abort everything. */
24901 goto abort;
24902
24903 /* If the discriminant info is not consistent with what we saw so
24904 far, consider the analysis failed and abort everything. */
24905 if (candidate_discr == NULL_TREE
24906 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24907 goto abort;
24908 else
24909 *discr_decl = candidate_discr;
24910
24911 if (new_node != NULL)
24912 {
24913 new_node->dw_discr_next = discr_lists[i];
24914 discr_lists[i] = new_node;
24915 }
24916 match_expr = next_round_match_expr;
24917 }
24918 }
24919
24920 /* If we reach this point, we could match everything we were interested
24921 in. */
24922 return;
24923
24924 abort:
24925 /* Clean up all data structures and return no result. */
24926 free (*discr_lists_p);
24927 *discr_lists_p = NULL;
24928 *discr_decl = NULL_TREE;
24929 }
24930
24931 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24932 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24933 under CONTEXT_DIE.
24934
24935 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24936 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24937 this type, which are record types, represent the available variants and each
24938 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24939 values are inferred from these attributes.
24940
24941 In trees, the offsets for the fields inside these sub-records are relative
24942 to the variant part itself, whereas the corresponding DIEs should have
24943 offset attributes that are relative to the embedding record base address.
24944 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24945 must be an expression that computes the offset of the variant part to
24946 describe in DWARF. */
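/* As a sketch of the output shape, a hypothetical Ada-like discriminated
   record

     type Rec (D : Natural) is record
       case D is
	 when 0      => A : Integer;
	 when others => B : Float;
       end case;
     end record;

   yields a single DW_TAG_variant_part DIE whose DW_AT_discr refers to the
   DIE for D, with one DW_TAG_variant child per variant; each non-default
   variant carries DW_AT_discr_value or DW_AT_discr_list, and the members
   of a variant are emitted as children of its DW_TAG_variant DIE.  */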
24947
24948 static void
24949 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24950 dw_die_ref context_die)
24951 {
24952 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24953 tree variant_part_offset = vlr_ctx->variant_part_offset;
24954 struct loc_descr_context ctx = {
24955 vlr_ctx->struct_type, /* context_type */
24956 NULL_TREE, /* base_decl */
24957 NULL, /* dpi */
24958 false, /* placeholder_arg */
24959 false /* placeholder_seen */
24960 };
24961
24962 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24963 NULL_TREE if there is no such field. */
24964 tree discr_decl = NULL_TREE;
24965 dw_discr_list_ref *discr_lists;
24966 unsigned discr_lists_length = 0;
24967 unsigned i;
24968
24969 dw_die_ref dwarf_proc_die = NULL;
24970 dw_die_ref variant_part_die
24971 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24972
24973 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24974
24975 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24976 &discr_decl, &discr_lists, &discr_lists_length);
24977
24978 if (discr_decl != NULL_TREE)
24979 {
24980 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24981
24982 if (discr_die)
24983 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24984 else
24985 /* We have no DIE for the discriminant, so just discard all
24986 discriminant information in the output. */
24987 discr_decl = NULL_TREE;
24988 }
24989
24990 /* If the offset for this variant part is more complex than a constant,
24991 create a DWARF procedure for it so that we will not have to generate DWARF
24992 expressions for it for each member. */
24993 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24994 && (dwarf_version >= 3 || !dwarf_strict))
24995 {
24996 const tree dwarf_proc_fndecl
24997 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24998 build_function_type (TREE_TYPE (variant_part_offset),
24999 NULL_TREE));
25000 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
25001 const dw_loc_descr_ref dwarf_proc_body
25002 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
25003
25004 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
25005 dwarf_proc_fndecl, context_die);
25006 if (dwarf_proc_die != NULL)
25007 variant_part_offset = dwarf_proc_call;
25008 }
25009
25010 /* Output DIEs for all variants. */
25011 i = 0;
25012 for (tree variant = TYPE_FIELDS (variant_part_type);
25013 variant != NULL_TREE;
25014 variant = DECL_CHAIN (variant), ++i)
25015 {
25016 tree variant_type = TREE_TYPE (variant);
25017 dw_die_ref variant_die;
25018
25019 /* All variants (i.e. members of a variant part) are supposed to be
25020 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25021 under these records. */
25022 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25023
25024 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25025 equate_decl_number_to_die (variant, variant_die);
25026
25027 /* Output discriminant values this variant matches, if any. */
25028 if (discr_decl == NULL || discr_lists[i] == NULL)
25029 /* In case we have no discriminant information for this variant,
25030 it is probably the default variant: as the standard says, don't
25031 output any discriminant value/list attribute. */
25032 ;
25033 else if (discr_lists[i]->dw_discr_next == NULL
25034 && !discr_lists[i]->dw_discr_range)
25035 /* If there is only one accepted value, don't bother outputting a
25036 list. */
25037 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25038 else
25039 add_discr_list (variant_die, discr_lists[i]);
25040
25041 for (tree member = TYPE_FIELDS (variant_type);
25042 member != NULL_TREE;
25043 member = DECL_CHAIN (member))
25044 {
25045 struct vlr_context vlr_sub_ctx = {
25046 vlr_ctx->struct_type, /* struct_type */
25047 NULL /* variant_part_offset */
25048 };
25049 if (is_variant_part (member))
25050 {
25051 /* All offsets for fields inside variant parts are relative to
25052 the top-level embedding RECORD_TYPE's base address. On the
25053 other hand, offsets in GCC's types are relative to the
25054 nested-most variant part. So we have to sum offsets each time
25055 we recurse. */
25056
25057 vlr_sub_ctx.variant_part_offset
25058 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25059 variant_part_offset, byte_position (member));
25060 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25061 }
25062 else
25063 {
25064 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25065 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25066 }
25067 }
25068 }
25069
25070 free (discr_lists);
25071 }
25072
25073 /* Generate a DIE for a class member. */
25074
25075 static void
25076 gen_member_die (tree type, dw_die_ref context_die)
25077 {
25078 tree member;
25079 tree binfo = TYPE_BINFO (type);
25080
25081 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25082
25083 /* If this is not an incomplete type, output descriptions of each of its
25084 members. Note that as we output the DIEs necessary to represent the
25085 members of this record or union type, we will also be trying to output
25086 DIEs to represent the *types* of those members. However the `type'
25087 function (above) will specifically avoid generating type DIEs for member
25088 types *within* the list of member DIEs for this (containing) type except
25089 for those types (of members) which are explicitly marked as also being
25090 members of this (containing) type themselves. The g++ front end can
25091 force any given type to be treated as a member of some other (containing)
25092 type by setting the TYPE_CONTEXT of the given (member) type to point to
25093 the TREE node representing the appropriate (containing) type. */
25094
25095 /* First output info about the base classes. */
25096 if (binfo && early_dwarf)
25097 {
25098 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25099 int i;
25100 tree base;
25101
25102 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25103 gen_inheritance_die (base,
25104 (accesses ? (*accesses)[i] : access_public_node),
25105 type,
25106 context_die);
25107 }
25108
25109 /* Now output info about the members. */
25110 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25111 {
25112 /* Ignore clones. */
25113 if (DECL_ABSTRACT_ORIGIN (member))
25114 continue;
25115
25116 struct vlr_context vlr_ctx = { type, NULL_TREE };
25117 bool static_inline_p
25118 = (VAR_P (member)
25119 && TREE_STATIC (member)
25120 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25121 != -1));
25122
25123 /* If we thought we were generating minimal debug info for TYPE
25124 and then changed our minds, some of the member declarations
25125 may have already been defined. Don't define them again, but
25126 do put them in the right order. */
25127
25128 if (dw_die_ref child = lookup_decl_die (member))
25129 {
25130 /* Handle inline static data members, which only have in-class
25131 declarations. */
25132 bool splice = true;
25133
25134 dw_die_ref ref = NULL;
25135 if (child->die_tag == DW_TAG_variable
25136 && child->die_parent == comp_unit_die ())
25137 {
25138 ref = get_AT_ref (child, DW_AT_specification);
25139
25140 /* For a C++17 inline static data member followed by a redundant
25141 out-of-class redeclaration, we might get here with
25142 child being the DIE created for the out-of-class
25143 redeclaration and with its DW_AT_specification being
25144 the DIE created for the in-class definition. We want to
25145 reparent the latter, and don't want to create another
25146 DIE with DW_AT_specification in that case, because
25147 we already have one. */
25148 if (ref
25149 && static_inline_p
25150 && ref->die_tag == DW_TAG_variable
25151 && ref->die_parent == comp_unit_die ()
25152 && get_AT (ref, DW_AT_specification) == NULL)
25153 {
25154 child = ref;
25155 ref = NULL;
25156 static_inline_p = false;
25157 }
25158
25159 if (!ref)
25160 {
25161 reparent_child (child, context_die);
25162 if (dwarf_version < 5)
25163 child->die_tag = DW_TAG_member;
25164 splice = false;
25165 }
25166 }
25167 else if (child->die_tag == DW_TAG_enumerator)
25168 /* Enumerators remain under their enumeration even if
25169 their names are introduced in the enclosing scope. */
25170 splice = false;
25171
25172 if (splice)
25173 splice_child_die (context_die, child);
25174 }
25175
25176 /* Do not generate standard DWARF for variant parts if we are generating
25177 the corresponding GNAT encodings: DIEs generated for both would
25178 conflict in our mappings. */
25179 else if (is_variant_part (member)
25180 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25181 {
25182 vlr_ctx.variant_part_offset = byte_position (member);
25183 gen_variant_part (member, &vlr_ctx, context_die);
25184 }
25185 else
25186 {
25187 vlr_ctx.variant_part_offset = NULL_TREE;
25188 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25189 }
25190
25191 /* For C++ inline static data members, immediately emit a DW_TAG_variable
25192 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25193 DW_AT_specification. */
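      /* For instance, for a hypothetical
	   struct S { static inline int x = 0; };
	 the in-class DIE for S::x gets a companion DW_TAG_variable DIE at
	 the compile-unit level whose DW_AT_specification refers back to it,
	 so consumers see a definition for the inline member.  */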
25194 if (static_inline_p)
25195 {
25196 int old_extern = DECL_EXTERNAL (member);
25197 DECL_EXTERNAL (member) = 0;
25198 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25199 DECL_EXTERNAL (member) = old_extern;
25200 }
25201 }
25202 }
25203
25204 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25205 is set, we pretend that the type was never defined, so we only get the
25206 member DIEs needed by later specification DIEs. */
25207
25208 static void
25209 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25210 enum debug_info_usage usage)
25211 {
25212 if (TREE_ASM_WRITTEN (type))
25213 {
25214 /* Fill in the bounds of variable-length fields in late DWARF if
25215 still incomplete. */
25216 if (!early_dwarf && variably_modified_type_p (type, NULL))
25217 for (tree member = TYPE_FIELDS (type);
25218 member;
25219 member = DECL_CHAIN (member))
25220 fill_variable_array_bounds (TREE_TYPE (member));
25221 return;
25222 }
25223
25224 dw_die_ref type_die = lookup_type_die (type);
25225 dw_die_ref scope_die = 0;
25226 int nested = 0;
25227 int complete = (TYPE_SIZE (type)
25228 && (! TYPE_STUB_DECL (type)
25229 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25230 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25231 complete = complete && should_emit_struct_debug (type, usage);
25232
25233 if (type_die && ! complete)
25234 return;
25235
25236 if (TYPE_CONTEXT (type) != NULL_TREE
25237 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25238 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25239 nested = 1;
25240
25241 scope_die = scope_die_for (type, context_die);
25242
25243 /* Generate child DIEs for template parameters. */
25244 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25245 schedule_generic_params_dies_gen (type);
25246
25247 if (! type_die || (nested && is_cu_die (scope_die)))
25248 /* First occurrence of type or toplevel definition of nested class. */
25249 {
25250 dw_die_ref old_die = type_die;
25251
25252 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25253 ? record_type_tag (type) : DW_TAG_union_type,
25254 scope_die, type);
25255 equate_type_number_to_die (type, type_die);
25256 if (old_die)
25257 add_AT_specification (type_die, old_die);
25258 else
25259 add_name_attribute (type_die, type_tag (type));
25260 }
25261 else
25262 remove_AT (type_die, DW_AT_declaration);
25263
25264 /* If this type has been completed, then give it a byte_size attribute and
25265 then give a list of members. */
25266 if (complete && !ns_decl)
25267 {
25268 /* Prevent infinite recursion in cases where the type of some member of
25269 this type is expressed in terms of this type itself. */
25270 TREE_ASM_WRITTEN (type) = 1;
25271 add_byte_size_attribute (type_die, type);
25272 add_alignment_attribute (type_die, type);
25273 if (TYPE_STUB_DECL (type) != NULL_TREE)
25274 {
25275 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25276 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25277 }
25278
25279 /* If the first reference to this type was as the return type of an
25280 inline function, then it may not have a parent. Fix this now. */
25281 if (type_die->die_parent == NULL)
25282 add_child_die (scope_die, type_die);
25283
25284 gen_member_die (type, type_die);
25285
25286 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25287 if (TYPE_ARTIFICIAL (type))
25288 add_AT_flag (type_die, DW_AT_artificial, 1);
25289
25290 /* GNU extension: Record what type our vtable lives in. */
25291 if (TYPE_VFIELD (type))
25292 {
25293 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25294
25295 gen_type_die (vtype, context_die);
25296 add_AT_die_ref (type_die, DW_AT_containing_type,
25297 lookup_type_die (vtype));
25298 }
25299 }
25300 else
25301 {
25302 add_AT_flag (type_die, DW_AT_declaration, 1);
25303
25304 /* We don't need to do this for function-local types. */
25305 if (TYPE_STUB_DECL (type)
25306 && ! decl_function_context (TYPE_STUB_DECL (type)))
25307 vec_safe_push (incomplete_types, type);
25308 }
25309
25310 if (get_AT (type_die, DW_AT_name))
25311 add_pubtype (type, type_die);
25312 }
25313
25314 /* Generate a DIE for a subroutine _type_. */
25315
25316 static void
25317 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25318 {
25319 tree return_type = TREE_TYPE (type);
25320 dw_die_ref subr_die
25321 = new_die (DW_TAG_subroutine_type,
25322 scope_die_for (type, context_die), type);
25323
25324 equate_type_number_to_die (type, subr_die);
25325 add_prototyped_attribute (subr_die, type);
25326 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25327 context_die);
25328 add_alignment_attribute (subr_die, type);
25329 gen_formal_types_die (type, subr_die);
25330
25331 if (get_AT (subr_die, DW_AT_name))
25332 add_pubtype (type, subr_die);
25333 if ((dwarf_version >= 5 || !dwarf_strict)
25334 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25335 add_AT_flag (subr_die, DW_AT_reference, 1);
25336 if ((dwarf_version >= 5 || !dwarf_strict)
25337 && lang_hooks.types.type_dwarf_attribute (type,
25338 DW_AT_rvalue_reference) != -1)
25339 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25340 }
25341
25342 /* Generate a DIE for a type definition. */
25343
25344 static void
25345 gen_typedef_die (tree decl, dw_die_ref context_die)
25346 {
25347 dw_die_ref type_die;
25348 tree type;
25349
25350 if (TREE_ASM_WRITTEN (decl))
25351 {
25352 if (DECL_ORIGINAL_TYPE (decl))
25353 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25354 return;
25355 }
25356
25357 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25358 checks in process_scope_var and modified_type_die), this should be called
25359 only for original types. */
25360 gcc_assert (decl_ultimate_origin (decl) == NULL
25361 || decl_ultimate_origin (decl) == decl);
25362
25363 TREE_ASM_WRITTEN (decl) = 1;
25364 type_die = new_die (DW_TAG_typedef, context_die, decl);
25365
25366 add_name_and_src_coords_attributes (type_die, decl);
25367 if (DECL_ORIGINAL_TYPE (decl))
25368 {
25369 type = DECL_ORIGINAL_TYPE (decl);
25370 if (type == error_mark_node)
25371 return;
25372
25373 gcc_assert (type != TREE_TYPE (decl));
25374 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25375 }
25376 else
25377 {
25378 type = TREE_TYPE (decl);
25379 if (type == error_mark_node)
25380 return;
25381
25382 if (is_naming_typedef_decl (TYPE_NAME (type)))
25383 {
25384 /* Here, we are in the case of decl being a typedef naming
25385 an anonymous type, e.g:
25386 typedef struct {...} foo;
25387 In that case TREE_TYPE (decl) is not a typedef variant
25388 type and TYPE_NAME of the anonymous type is set to the
25389 TYPE_DECL of the typedef. This construct is emitted by
25390 the C++ FE.
25391
25392 TYPE is the anonymous struct named by the typedef
25393 DECL. As we need the DW_AT_type attribute of the
25394 DW_TAG_typedef to point to the DIE of TYPE, let's
25395 generate that DIE right away. add_type_attribute
25396 called below will then pick (via lookup_type_die) that
25397 anonymous struct DIE. */
25398 if (!TREE_ASM_WRITTEN (type))
25399 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25400
25401 /* This is a GNU Extension. We are adding a
25402 DW_AT_linkage_name attribute to the DIE of the
25403 anonymous struct TYPE. The value of that attribute
25404 is the name of the typedef decl naming the anonymous
25405 struct. This greatly eases the work of consumers of
25406 this debug info. */
25407 add_linkage_name_raw (lookup_type_die (type), decl);
25408 }
25409 }
25410
25411 add_type_attribute (type_die, type, decl_quals (decl), false,
25412 context_die);
25413
25414 if (is_naming_typedef_decl (decl))
25415 /* We want that all subsequent calls to lookup_type_die with
25416 TYPE in argument yield the DW_TAG_typedef we have just
25417 created. */
25418 equate_type_number_to_die (type, type_die);
25419
25420 add_alignment_attribute (type_die, TREE_TYPE (decl));
25421
25422 add_accessibility_attribute (type_die, decl);
25423
25424 if (DECL_ABSTRACT_P (decl))
25425 equate_decl_number_to_die (decl, type_die);
25426
25427 if (get_AT (type_die, DW_AT_name))
25428 add_pubtype (decl, type_die);
25429 }
25430
25431 /* Generate a DIE for a struct, class, enum or union type. */
25432
25433 static void
25434 gen_tagged_type_die (tree type,
25435 dw_die_ref context_die,
25436 enum debug_info_usage usage)
25437 {
25438 if (type == NULL_TREE
25439 || !is_tagged_type (type))
25440 return;
25441
25442 if (TREE_ASM_WRITTEN (type))
25443 ;
25444 /* If this is a nested type whose containing class hasn't been written
25445 out yet, writing it out will cover this one, too. This does not apply
25446 to instantiations of member class templates; they need to be added to
25447 the containing class as they are generated. FIXME: This hurts the
25448 idea of combining type decls from multiple TUs, since we can't predict
25449 what set of template instantiations we'll get. */
25450 else if (TYPE_CONTEXT (type)
25451 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25452 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25453 {
25454 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25455
25456 if (TREE_ASM_WRITTEN (type))
25457 return;
25458
25459 /* If that failed, attach ourselves to the stub. */
25460 context_die = lookup_type_die (TYPE_CONTEXT (type));
25461 }
25462 else if (TYPE_CONTEXT (type) != NULL_TREE
25463 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25464 {
25465 /* If this type is local to a function that hasn't been written
25466 out yet, use a NULL context for now; it will be fixed up in
25467 decls_for_scope. */
25468 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25469 /* A declaration DIE doesn't count; nested types need to go in the
25470 specification. */
25471 if (context_die && is_declaration_die (context_die))
25472 context_die = NULL;
25473 }
25474 else
25475 context_die = declare_in_namespace (type, context_die);
25476
25477 if (TREE_CODE (type) == ENUMERAL_TYPE)
25478 {
25479 /* This might have been written out by the call to
25480 declare_in_namespace. */
25481 if (!TREE_ASM_WRITTEN (type))
25482 gen_enumeration_type_die (type, context_die);
25483 }
25484 else
25485 gen_struct_or_union_type_die (type, context_die, usage);
25486
25487 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25488 it up if it is ever completed. gen_*_type_die will set it for us
25489 when appropriate. */
25490 }
25491
25492 /* Generate a type description DIE. */
25493
25494 static void
25495 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25496 enum debug_info_usage usage)
25497 {
25498 struct array_descr_info info;
25499
25500 if (type == NULL_TREE || type == error_mark_node)
25501 return;
25502
25503 if (flag_checking && type)
25504 verify_type (type);
25505
25506 if (TYPE_NAME (type) != NULL_TREE
25507 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25508 && is_redundant_typedef (TYPE_NAME (type))
25509 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25510 /* The DECL of this type is a typedef we don't want to emit debug
25511 info for but we want debug info for its underlying typedef.
25512 This can happen, e.g., for the injected-class-name of a C++
25513 type. */
25514 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25515
25516 /* If TYPE is a typedef type variant, let's generate debug info
25517 for the parent typedef which TYPE is a type of. */
25518 if (typedef_variant_p (type))
25519 {
25520 if (TREE_ASM_WRITTEN (type))
25521 return;
25522
25523 tree name = TYPE_NAME (type);
25524 tree origin = decl_ultimate_origin (name);
25525 if (origin != NULL && origin != name)
25526 {
25527 gen_decl_die (origin, NULL, NULL, context_die);
25528 return;
25529 }
25530
25531 /* Prevent broken recursion; we can't hand off to the same type. */
25532 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25533
25534 /* Give typedefs the right scope. */
25535 context_die = scope_die_for (type, context_die);
25536
25537 TREE_ASM_WRITTEN (type) = 1;
25538
25539 gen_decl_die (name, NULL, NULL, context_die);
25540 return;
25541 }
25542
25543 /* If type is an anonymous tagged type named by a typedef, let's
25544 generate debug info for the typedef. */
25545 if (is_naming_typedef_decl (TYPE_NAME (type)))
25546 {
25547 /* Give typedefs the right scope. */
25548 context_die = scope_die_for (type, context_die);
25549
25550 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25551 return;
25552 }
25553
25554 if (lang_hooks.types.get_debug_type)
25555 {
25556 tree debug_type = lang_hooks.types.get_debug_type (type);
25557
25558 if (debug_type != NULL_TREE && debug_type != type)
25559 {
25560 gen_type_die_with_usage (debug_type, context_die, usage);
25561 return;
25562 }
25563 }
25564
25565 /* We are going to output a DIE to represent the unqualified version
25566 of this type (i.e. without any const or volatile qualifiers) so
25567 get the main variant (i.e. the unqualified version) of this type
25568 now. (Vectors and arrays are special because the debugging info is in the
25569 cloned type itself. Similarly function/method types can contain extra
25570 ref-qualification). */
25571 if (TREE_CODE (type) == FUNCTION_TYPE
25572 || TREE_CODE (type) == METHOD_TYPE)
25573 {
25574 /* For function/method types, can't use type_main_variant here,
25575 because that can have different ref-qualifiers for C++,
25576 but try to canonicalize. */
25577 tree main = TYPE_MAIN_VARIANT (type);
25578 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25579 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25580 && check_base_type (t, main)
25581 && check_lang_type (t, type))
25582 {
25583 type = t;
25584 break;
25585 }
25586 }
25587 else if (TREE_CODE (type) != VECTOR_TYPE
25588 && TREE_CODE (type) != ARRAY_TYPE)
25589 type = type_main_variant (type);
25590
25591 /* If this is an array type with hidden descriptor, handle it first. */
25592 if (!TREE_ASM_WRITTEN (type)
25593 && lang_hooks.types.get_array_descr_info)
25594 {
25595 memset (&info, 0, sizeof (info));
25596 if (lang_hooks.types.get_array_descr_info (type, &info))
25597 {
25598 /* Fortran sometimes emits array types with no dimension. */
25599 gcc_assert (info.ndimensions >= 0
25600 && (info.ndimensions
25601 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25602 gen_descr_array_type_die (type, &info, context_die);
25603 TREE_ASM_WRITTEN (type) = 1;
25604 return;
25605 }
25606 }
25607
25608 if (TREE_ASM_WRITTEN (type))
25609 {
25610 /* Variable-length types may be incomplete even if
25611 TREE_ASM_WRITTEN. For such types, fall through to
25612 gen_array_type_die() and possibly fill in
25613 DW_AT_{upper,lower}_bound attributes. */
25614 if ((TREE_CODE (type) != ARRAY_TYPE
25615 && TREE_CODE (type) != RECORD_TYPE
25616 && TREE_CODE (type) != UNION_TYPE
25617 && TREE_CODE (type) != QUAL_UNION_TYPE)
25618 || !variably_modified_type_p (type, NULL))
25619 return;
25620 }
25621
25622 switch (TREE_CODE (type))
25623 {
25624 case ERROR_MARK:
25625 break;
25626
25627 case POINTER_TYPE:
25628 case REFERENCE_TYPE:
25629 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25630 ensures that the gen_type_die recursion will terminate even if the
25631 type is recursive. Recursive types are possible in Ada. */
25632 /* ??? We could perhaps do this for all types before the switch
25633 statement. */
25634 TREE_ASM_WRITTEN (type) = 1;
25635
25636 /* For these types, all that is required is that we output a DIE (or a
25637 set of DIEs) to represent the "basis" type. */
25638 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25639 DINFO_USAGE_IND_USE);
25640 break;
25641
25642 case OFFSET_TYPE:
25643 /* This code is used for C++ pointer-to-data-member types.
25644 Output a description of the relevant class type. */
25645 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25646 DINFO_USAGE_IND_USE);
25647
25648 /* Output a description of the type of the object pointed to. */
25649 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25650 DINFO_USAGE_IND_USE);
25651
25652 /* Now output a DIE to represent this pointer-to-data-member type
25653 itself. */
25654 gen_ptr_to_mbr_type_die (type, context_die);
25655 break;
25656
25657 case FUNCTION_TYPE:
25658 /* Force out return type (in case it wasn't forced out already). */
25659 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25660 DINFO_USAGE_DIR_USE);
25661 gen_subroutine_type_die (type, context_die);
25662 break;
25663
25664 case METHOD_TYPE:
25665 /* Force out return type (in case it wasn't forced out already). */
25666 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25667 DINFO_USAGE_DIR_USE);
25668 gen_subroutine_type_die (type, context_die);
25669 break;
25670
25671 case ARRAY_TYPE:
25672 case VECTOR_TYPE:
25673 gen_array_type_die (type, context_die);
25674 break;
25675
25676 case ENUMERAL_TYPE:
25677 case RECORD_TYPE:
25678 case UNION_TYPE:
25679 case QUAL_UNION_TYPE:
25680 gen_tagged_type_die (type, context_die, usage);
25681 return;
25682
25683 case VOID_TYPE:
25684 case OPAQUE_TYPE:
25685 case INTEGER_TYPE:
25686 case REAL_TYPE:
25687 case FIXED_POINT_TYPE:
25688 case COMPLEX_TYPE:
25689 case BOOLEAN_TYPE:
25690 /* No DIEs needed for fundamental types. */
25691 break;
25692
25693 case NULLPTR_TYPE:
25694 case LANG_TYPE:
25695 /* Just use DW_TAG_unspecified_type. */
25696 {
25697 dw_die_ref type_die = lookup_type_die (type);
25698 if (type_die == NULL)
25699 {
25700 tree name = TYPE_IDENTIFIER (type);
25701 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25702 type);
25703 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25704 equate_type_number_to_die (type, type_die);
25705 }
25706 }
25707 break;
25708
25709 default:
25710 if (is_cxx_auto (type))
25711 {
25712 tree name = TYPE_IDENTIFIER (type);
25713 dw_die_ref *die = (name == get_identifier ("auto")
25714 ? &auto_die : &decltype_auto_die);
25715 if (!*die)
25716 {
25717 *die = new_die (DW_TAG_unspecified_type,
25718 comp_unit_die (), NULL_TREE);
25719 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25720 }
25721 equate_type_number_to_die (type, *die);
25722 break;
25723 }
25724 gcc_unreachable ();
25725 }
25726
25727 TREE_ASM_WRITTEN (type) = 1;
25728 }
25729
25730 static void
25731 gen_type_die (tree type, dw_die_ref context_die)
25732 {
25733 if (type != error_mark_node)
25734 {
25735 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25736 if (flag_checking)
25737 {
25738 dw_die_ref die = lookup_type_die (type);
25739 if (die)
25740 check_die (die);
25741 }
25742 }
25743 }
25744
25745 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25746 things which are local to the given block. */
25747
25748 static void
25749 gen_block_die (tree stmt, dw_die_ref context_die)
25750 {
25751 int must_output_die = 0;
25752 bool inlined_func;
25753
25754 /* Ignore blocks that are NULL. */
25755 if (stmt == NULL_TREE)
25756 return;
25757
25758 inlined_func = inlined_function_outer_scope_p (stmt);
25759
25760 /* If the block is one fragment of a non-contiguous block, do not
25761 process the variables, since they will have been done by the
25762 origin block. Do process subblocks. */
25763 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25764 {
25765 tree sub;
25766
25767 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25768 gen_block_die (sub, context_die);
25769
25770 return;
25771 }
25772
25773 /* Determine if we need to output any Dwarf DIEs at all to represent this
25774 block. */
25775 if (inlined_func)
25776 /* The outer scopes for inlinings *must* always be represented. We
25777 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25778 must_output_die = 1;
25779 else if (lookup_block_die (stmt))
25780     /* If we already have a DIE then it was filled early.  Meanwhile
25781        we might have pruned all BLOCK_VARS as optimized out, but we
25782        still want to generate high/low PC attributes, so output it.  */
25783 must_output_die = 1;
25784 else if (TREE_USED (stmt)
25785 || TREE_ASM_WRITTEN (stmt))
25786 {
25787 /* Determine if this block directly contains any "significant"
25788 local declarations which we will need to output DIEs for. */
25789 if (debug_info_level > DINFO_LEVEL_TERSE)
25790 {
25791 /* We are not in terse mode so any local declaration that
25792 is not ignored for debug purposes counts as being a
25793 "significant" one. */
25794 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25795 must_output_die = 1;
25796 else
25797 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25798 if (!DECL_IGNORED_P (var))
25799 {
25800 must_output_die = 1;
25801 break;
25802 }
25803 }
25804 else if (!dwarf2out_ignore_block (stmt))
25805 must_output_die = 1;
25806 }
25807
25808 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25809 DIE for any block which contains no significant local declarations at
25810 all. Rather, in such cases we just call `decls_for_scope' so that any
25811 needed Dwarf info for any sub-blocks will get properly generated. Note
25812 that in terse mode, our definition of what constitutes a "significant"
25813 local declaration gets restricted to include only inlined function
25814 instances and local (nested) function definitions. */
25815 if (must_output_die)
25816 {
25817 if (inlined_func)
25818 gen_inlined_subroutine_die (stmt, context_die);
25819 else
25820 gen_lexical_block_die (stmt, context_die);
25821 }
25822 else
25823 decls_for_scope (stmt, context_die);
25824 }
25825
25826 /* Process variable DECL (or variable with origin ORIGIN) within
25827 block STMT and add it to CONTEXT_DIE. */
25828 static void
25829 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25830 {
25831 dw_die_ref die;
25832 tree decl_or_origin = decl ? decl : origin;
25833
25834 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25835 die = lookup_decl_die (decl_or_origin);
25836 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25837 {
25838 if (TYPE_DECL_IS_STUB (decl_or_origin))
25839 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25840 else
25841 die = lookup_decl_die (decl_or_origin);
25842 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25843 if (! die && ! early_dwarf)
25844 return;
25845 }
25846 else
25847 die = NULL;
25848
25849 /* Avoid creating DIEs for local typedefs and concrete static variables that
25850 will only be pruned later. */
25851 if ((origin || decl_ultimate_origin (decl))
25852 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25853 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25854 {
25855 origin = decl_ultimate_origin (decl_or_origin);
25856 if (decl && VAR_P (decl) && die != NULL)
25857 {
25858 die = lookup_decl_die (origin);
25859 if (die != NULL)
25860 equate_decl_number_to_die (decl, die);
25861 }
25862 return;
25863 }
25864
25865 if (die != NULL && die->die_parent == NULL)
25866 add_child_die (context_die, die);
25867 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25868 {
25869 if (early_dwarf)
25870 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25871 stmt, context_die);
25872 }
25873 else
25874 {
25875 if (decl && DECL_P (decl))
25876 {
25877 die = lookup_decl_die (decl);
25878
25879 /* Early created DIEs do not have a parent as the decls refer
25880 to the function as DECL_CONTEXT rather than the BLOCK. */
25881 if (die && die->die_parent == NULL)
25882 {
25883 gcc_assert (in_lto_p);
25884 add_child_die (context_die, die);
25885 }
25886 }
25887
25888 gen_decl_die (decl, origin, NULL, context_die);
25889 }
25890 }
25891
25892 /* Generate all of the decls declared within a given scope and (recursively)
25893 all of its sub-blocks. */
25894
25895 static void
25896 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25897 {
25898 tree decl;
25899 unsigned int i;
25900 tree subblocks;
25901
25902 /* Ignore NULL blocks. */
25903 if (stmt == NULL_TREE)
25904 return;
25905
25906 /* Output the DIEs to represent all of the data objects and typedefs
25907 declared directly within this block but not within any nested
25908 sub-blocks. Also, nested function and tag DIEs have been
25909 generated with a parent of NULL; fix that up now. We don't
25910 have to do this if we're at -g1. */
25911 if (debug_info_level > DINFO_LEVEL_TERSE)
25912 {
25913 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25914 process_scope_var (stmt, decl, NULL_TREE, context_die);
25915 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25916 	 origin; avoid doing this twice, as we have no good way to see
25917 if we've done it once already. */
25918 if (! early_dwarf)
25919 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25920 {
25921 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25922 if (decl == current_function_decl)
25923 	      /* Ignore declarations of the current function: although they
25924 		 are declarations, gen_subprogram_die would treat them as
25925 		 definitions again, because they are equal to
25926 		 current_function_decl, and would endlessly recurse.  */;
25927 else if (TREE_CODE (decl) == FUNCTION_DECL)
25928 process_scope_var (stmt, decl, NULL_TREE, context_die);
25929 else
25930 process_scope_var (stmt, NULL_TREE, decl, context_die);
25931 }
25932 }
25933
25934 /* Even if we're at -g1, we need to process the subblocks in order to get
25935 inlined call information. */
25936
25937 /* Output the DIEs to represent all sub-blocks (and the items declared
25938 therein) of this block. */
25939 if (recurse)
25940 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25941 subblocks != NULL;
25942 subblocks = BLOCK_CHAIN (subblocks))
25943 gen_block_die (subblocks, context_die);
25944 }
25945
25946 /* Is this a typedef we can avoid emitting? */
25947
25948 static bool
25949 is_redundant_typedef (const_tree decl)
25950 {
25951 if (TYPE_DECL_IS_STUB (decl))
25952 return true;
25953
25954 if (DECL_ARTIFICIAL (decl)
25955 && DECL_CONTEXT (decl)
25956 && is_tagged_type (DECL_CONTEXT (decl))
25957 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25958 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25959 /* Also ignore the artificial member typedef for the class name. */
25960 return true;
25961
25962 return false;
25963 }
25964
25965 /* Return TRUE if TYPE is a typedef that names a type for linkage
25966 purposes. This kind of typedefs is produced by the C++ FE for
25967 constructs like:
25968
25969 typedef struct {...} foo;
25970
25971 In that case, there is no typedef variant type produced for foo.
25972 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25973 struct type. */
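/* For contrast, an illustrative sketch (not a construct taken from this
   file): given

     typedef struct S {...} bar;

   the struct already carries the tag name S, so BAR is an ordinary typedef
   with DECL_ORIGINAL_TYPE set, and is_naming_typedef_decl returns false
   for it.  */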
25974
25975 static bool
25976 is_naming_typedef_decl (const_tree decl)
25977 {
25978 if (decl == NULL_TREE
25979 || TREE_CODE (decl) != TYPE_DECL
25980 || DECL_NAMELESS (decl)
25981 || !is_tagged_type (TREE_TYPE (decl))
25982 || DECL_IS_UNDECLARED_BUILTIN (decl)
25983 || is_redundant_typedef (decl)
25984 /* It looks like Ada produces TYPE_DECLs that are very similar
25985 to C++ naming typedefs but that have different
25986 semantics. Let's be specific to c++ for now. */
25987 || !is_cxx (decl))
25988 return FALSE;
25989
25990 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25991 && TYPE_NAME (TREE_TYPE (decl)) == decl
25992 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25993 != TYPE_NAME (TREE_TYPE (decl))));
25994 }
25995
25996 /* Looks up the DIE for a context. */
25997
25998 static inline dw_die_ref
25999 lookup_context_die (tree context)
26000 {
26001 if (context)
26002 {
26003 /* Find die that represents this context. */
26004 if (TYPE_P (context))
26005 {
26006 context = TYPE_MAIN_VARIANT (context);
26007 dw_die_ref ctx = lookup_type_die (context);
26008 if (!ctx)
26009 return NULL;
26010 return strip_naming_typedef (context, ctx);
26011 }
26012 else
26013 return lookup_decl_die (context);
26014 }
26015 return comp_unit_die ();
26016 }
26017
26018 /* Returns the DIE for a context. */
26019
26020 static inline dw_die_ref
26021 get_context_die (tree context)
26022 {
26023 if (context)
26024 {
26025 /* Find die that represents this context. */
26026 if (TYPE_P (context))
26027 {
26028 context = TYPE_MAIN_VARIANT (context);
26029 return strip_naming_typedef (context, force_type_die (context));
26030 }
26031 else
26032 return force_decl_die (context);
26033 }
26034 return comp_unit_die ();
26035 }
26036
26037 /* Returns the DIE for decl. A DIE will always be returned. */
26038
26039 static dw_die_ref
26040 force_decl_die (tree decl)
26041 {
26042 dw_die_ref decl_die;
26043 unsigned saved_external_flag;
26044 tree save_fn = NULL_TREE;
26045 decl_die = lookup_decl_die (decl);
26046 if (!decl_die)
26047 {
26048 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26049
26050 decl_die = lookup_decl_die (decl);
26051 if (decl_die)
26052 return decl_die;
26053
26054 switch (TREE_CODE (decl))
26055 {
26056 case FUNCTION_DECL:
26057 /* Clear current_function_decl, so that gen_subprogram_die thinks
26058 	     that this is a declaration.  At this point, we just want to
26059 	     force a declaration DIE.  */
26060 save_fn = current_function_decl;
26061 current_function_decl = NULL_TREE;
26062 gen_subprogram_die (decl, context_die);
26063 current_function_decl = save_fn;
26064 break;
26065
26066 case VAR_DECL:
26067 	  /* Set the external flag to force a declaration DIE.  Restore it
26068 	     after the gen_decl_die() call.  */
26069 saved_external_flag = DECL_EXTERNAL (decl);
26070 DECL_EXTERNAL (decl) = 1;
26071 gen_decl_die (decl, NULL, NULL, context_die);
26072 DECL_EXTERNAL (decl) = saved_external_flag;
26073 break;
26074
26075 case NAMESPACE_DECL:
26076 if (dwarf_version >= 3 || !dwarf_strict)
26077 dwarf2out_decl (decl);
26078 else
26079 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
26080 decl_die = comp_unit_die ();
26081 break;
26082
26083 case CONST_DECL:
26084 /* Enumerators shouldn't need force_decl_die. */
26085 gcc_assert (DECL_CONTEXT (decl) == NULL_TREE
26086 || TREE_CODE (DECL_CONTEXT (decl)) != ENUMERAL_TYPE);
26087 gen_decl_die (decl, NULL, NULL, context_die);
26088 break;
26089
26090 case TRANSLATION_UNIT_DECL:
26091 decl_die = comp_unit_die ();
26092 break;
26093
26094 default:
26095 gcc_unreachable ();
26096 }
26097
26098 /* We should be able to find the DIE now. */
26099 if (!decl_die)
26100 decl_die = lookup_decl_die (decl);
26101 gcc_assert (decl_die);
26102 }
26103
26104 return decl_die;
26105 }
26106
26107 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
26108 always returned. */
26109
26110 static dw_die_ref
26111 force_type_die (tree type)
26112 {
26113 dw_die_ref type_die;
26114
26115 type_die = lookup_type_die (type);
26116 if (!type_die)
26117 {
26118 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26119
26120 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26121 false, context_die);
26122 gcc_assert (type_die);
26123 }
26124 return type_die;
26125 }
26126
26127 /* Force out any required namespaces to be able to output DECL,
26128 and return the new context_die for it, if it's changed. */
26129
26130 static dw_die_ref
26131 setup_namespace_context (tree thing, dw_die_ref context_die)
26132 {
26133 tree context = (DECL_P (thing)
26134 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26135 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26136 /* Force out the namespace. */
26137 context_die = force_decl_die (context);
26138
26139 return context_die;
26140 }
26141
26142 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26143 type) within its namespace, if appropriate.
26144
26145 For compatibility with older debuggers, namespace DIEs only contain
26146 declarations; all definitions are emitted at CU scope, with
26147 DW_AT_specification pointing to the declaration (like with class
26148 members). */
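/* As a rough sketch of the resulting DIE shape (illustrative only, not
   literal compiler output), for

     namespace N { int f (); }
     int N::f () { return 0; }

   we get approximately:

     DW_TAG_namespace "N"
       DW_TAG_subprogram "f"        <-- declaration (DW_AT_declaration)
     DW_TAG_subprogram              <-- definition at CU scope
       DW_AT_specification -> the declaration of "f" above  */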
26149
26150 static dw_die_ref
26151 declare_in_namespace (tree thing, dw_die_ref context_die)
26152 {
26153 dw_die_ref ns_context;
26154
26155 if (debug_info_level <= DINFO_LEVEL_TERSE)
26156 return context_die;
26157
26158 /* External declarations in the local scope only need to be emitted
26159 once, not once in the namespace and once in the scope.
26160
26161 This avoids declaring the `extern' below in the
26162 namespace DIE as well as in the innermost scope:
26163
26164 namespace S
26165 {
26166 int i=5;
26167 int foo()
26168 {
26169 int i=8;
26170 extern int i;
26171 return i;
26172 }
26173 }
26174 */
26175 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26176 return context_die;
26177
26178 /* If this decl is from an inlined function, then don't try to emit it in its
26179 namespace, as we will get confused. It would have already been emitted
26180      when the abstract instance of the inline function was emitted anyway.  */
26181 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26182 return context_die;
26183
26184 ns_context = setup_namespace_context (thing, context_die);
26185
26186 if (ns_context != context_die)
26187 {
26188 if (is_fortran () || is_dlang ())
26189 return ns_context;
26190 if (DECL_P (thing))
26191 gen_decl_die (thing, NULL, NULL, ns_context);
26192 else
26193 gen_type_die (thing, ns_context);
26194 }
26195 return context_die;
26196 }
26197
26198 /* Generate a DIE for a namespace or namespace alias. */
26199
26200 static void
26201 gen_namespace_die (tree decl, dw_die_ref context_die)
26202 {
26203 dw_die_ref namespace_die;
26204
26205 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26206 they are an alias of. */
26207 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26208 {
26209 /* Output a real namespace or module. */
26210 context_die = setup_namespace_context (decl, comp_unit_die ());
26211 namespace_die = new_die (is_fortran () || is_dlang ()
26212 ? DW_TAG_module : DW_TAG_namespace,
26213 context_die, decl);
26214       /* For Fortran modules defined in a different CU, don't add source coordinates.  */
26215 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26216 {
26217 const char *name = dwarf2_name (decl, 0);
26218 if (name)
26219 add_name_attribute (namespace_die, name);
26220 }
26221 else
26222 add_name_and_src_coords_attributes (namespace_die, decl);
26223 if (DECL_EXTERNAL (decl))
26224 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26225 equate_decl_number_to_die (decl, namespace_die);
26226 }
26227 else
26228 {
26229 /* Output a namespace alias. */
26230
26231 /* Force out the namespace we are an alias of, if necessary. */
26232 dw_die_ref origin_die
26233 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26234
26235 if (DECL_FILE_SCOPE_P (decl)
26236 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26237 context_die = setup_namespace_context (decl, comp_unit_die ());
26238 /* Now create the namespace alias DIE. */
26239 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26240 add_name_and_src_coords_attributes (namespace_die, decl);
26241 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26242 equate_decl_number_to_die (decl, namespace_die);
26243 }
26244 if ((dwarf_version >= 5 || !dwarf_strict)
26245 && lang_hooks.decls.decl_dwarf_attribute (decl,
26246 DW_AT_export_symbols) == 1)
26247 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26248
26249 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26250 if (want_pubnames ())
26251 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26252 }
26253
26254 /* Generate Dwarf debug information for a decl described by DECL.
26255 The return value is currently only meaningful for PARM_DECLs,
26256 for all other decls it returns NULL.
26257
26258 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26259 It can be NULL otherwise. */
26260
26261 static dw_die_ref
26262 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26263 dw_die_ref context_die)
26264 {
26265 tree decl_or_origin = decl ? decl : origin;
26266 tree class_origin = NULL, ultimate_origin;
26267
26268 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26269 return NULL;
26270
26271 switch (TREE_CODE (decl_or_origin))
26272 {
26273 case ERROR_MARK:
26274 break;
26275
26276 case CONST_DECL:
26277 if (!is_fortran () && !is_ada () && !is_dlang ())
26278 {
26279 /* The individual enumerators of an enum type get output when we output
26280 the Dwarf representation of the relevant enum type itself. */
26281 break;
26282 }
26283
26284 /* Emit its type. */
26285 gen_type_die (TREE_TYPE (decl), context_die);
26286
26287 /* And its containing namespace. */
26288 context_die = declare_in_namespace (decl, context_die);
26289
26290 gen_const_die (decl, context_die);
26291 break;
26292
26293 case FUNCTION_DECL:
26294 #if 0
26295 /* FIXME */
26296 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26297 on local redeclarations of global functions. That seems broken. */
26298 if (current_function_decl != decl)
26299 /* This is only a declaration. */;
26300 #endif
26301
26302 /* We should have abstract copies already and should not generate
26303 stray type DIEs in late LTO dumping. */
26304 if (! early_dwarf)
26305 ;
26306
26307 /* If we're emitting a clone, emit info for the abstract instance. */
26308 else if (origin || DECL_ORIGIN (decl) != decl)
26309 dwarf2out_abstract_function (origin
26310 ? DECL_ORIGIN (origin)
26311 : DECL_ABSTRACT_ORIGIN (decl));
26312
26313 /* If we're emitting a possibly inlined function emit it as
26314 abstract instance. */
26315 else if (cgraph_function_possibly_inlined_p (decl)
26316 && ! DECL_ABSTRACT_P (decl)
26317 && ! class_or_namespace_scope_p (context_die)
26318 /* dwarf2out_abstract_function won't emit a die if this is just
26319 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26320 that case, because that works only if we have a die. */
26321 && DECL_INITIAL (decl) != NULL_TREE)
26322 dwarf2out_abstract_function (decl);
26323
26324 /* Otherwise we're emitting the primary DIE for this decl. */
26325 else if (debug_info_level > DINFO_LEVEL_TERSE)
26326 {
26327 /* Before we describe the FUNCTION_DECL itself, make sure that we
26328 have its containing type. */
26329 if (!origin)
26330 origin = decl_class_context (decl);
26331 if (origin != NULL_TREE)
26332 gen_type_die (origin, context_die);
26333
26334 /* And its return type. */
26335 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26336
26337 /* And its virtual context. */
26338 if (DECL_VINDEX (decl) != NULL_TREE)
26339 gen_type_die (DECL_CONTEXT (decl), context_die);
26340
26341 /* Make sure we have a member DIE for decl. */
26342 if (origin != NULL_TREE)
26343 gen_type_die_for_member (origin, decl, context_die);
26344
26345 /* And its containing namespace. */
26346 context_die = declare_in_namespace (decl, context_die);
26347 }
26348
26349 /* Now output a DIE to represent the function itself. */
26350 if (decl)
26351 gen_subprogram_die (decl, context_die);
26352 break;
26353
26354 case TYPE_DECL:
26355 /* If we are in terse mode, don't generate any DIEs to represent any
26356 actual typedefs. */
26357 if (debug_info_level <= DINFO_LEVEL_TERSE)
26358 break;
26359
26360 /* In the special case of a TYPE_DECL node representing the declaration
26361 of some type tag, if the given TYPE_DECL is marked as having been
26362 instantiated from some other (original) TYPE_DECL node (e.g. one which
26363 was generated within the original definition of an inline function) we
26364 used to generate a special (abbreviated) DW_TAG_structure_type,
26365 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26366        should actually be referencing those DIEs, as variable DIEs with that
26367        type would already be emitted in the abstract origin, so such a DIE was
26368        always removed during unused-type pruning.  Don't add anything in this
26369 case. */
26370 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26371 break;
26372
26373 if (is_redundant_typedef (decl))
26374 gen_type_die (TREE_TYPE (decl), context_die);
26375 else
26376 /* Output a DIE to represent the typedef itself. */
26377 gen_typedef_die (decl, context_die);
26378 break;
26379
26380 case LABEL_DECL:
26381 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26382 gen_label_die (decl, context_die);
26383 break;
26384
26385 case VAR_DECL:
26386 case RESULT_DECL:
26387 /* If we are in terse mode, don't generate any DIEs to represent any
26388 variable declarations or definitions unless it is external. */
26389 if (debug_info_level < DINFO_LEVEL_TERSE
26390 || (debug_info_level == DINFO_LEVEL_TERSE
26391 && !TREE_PUBLIC (decl_or_origin)))
26392 break;
26393
26394 if (debug_info_level > DINFO_LEVEL_TERSE)
26395 {
26396 /* Avoid generating stray type DIEs during late dwarf dumping.
26397 All types have been dumped early. */
26398 if (early_dwarf
26399 /* ??? But in LTRANS we cannot annotate early created variably
26400 modified type DIEs without copying them and adjusting all
26401 references to them. Dump them again as happens for inlining
26402 which copies both the decl and the types. */
26403 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26404 in VLA bound information for example. */
26405 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26406 current_function_decl)))
26407 {
26408 /* Output any DIEs that are needed to specify the type of this data
26409 object. */
26410 if (decl_by_reference_p (decl_or_origin))
26411 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26412 else
26413 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26414 }
26415
26416 if (early_dwarf)
26417 {
26418 /* And its containing type. */
26419 class_origin = decl_class_context (decl_or_origin);
26420 if (class_origin != NULL_TREE)
26421 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26422
26423 /* And its containing namespace. */
26424 context_die = declare_in_namespace (decl_or_origin, context_die);
26425 }
26426 }
26427
26428 /* Now output the DIE to represent the data object itself. This gets
26429 complicated because of the possibility that the VAR_DECL really
26430 represents an inlined instance of a formal parameter for an inline
26431 function. */
26432 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26433 if (ultimate_origin != NULL_TREE
26434 && TREE_CODE (ultimate_origin) == PARM_DECL)
26435 gen_formal_parameter_die (decl, origin,
26436 true /* Emit name attribute. */,
26437 context_die);
26438 else
26439 gen_variable_die (decl, origin, context_die);
26440 break;
26441
26442 case FIELD_DECL:
26443 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26444       /* Ignore the nameless fields that are used to skip bits, but handle C++
26445 anonymous unions and structs. */
26446 if (DECL_NAME (decl) != NULL_TREE
26447 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26448 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26449 {
26450 gen_type_die (member_declared_type (decl), context_die);
26451 gen_field_die (decl, ctx, context_die);
26452 }
26453 break;
26454
26455 case PARM_DECL:
26456 /* Avoid generating stray type DIEs during late dwarf dumping.
26457 All types have been dumped early. */
26458 if (early_dwarf
26459 /* ??? But in LTRANS we cannot annotate early created variably
26460 modified type DIEs without copying them and adjusting all
26461 references to them. Dump them again as happens for inlining
26462 which copies both the decl and the types. */
26463 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26464 in VLA bound information for example. */
26465 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26466 current_function_decl)))
26467 {
26468 if (DECL_BY_REFERENCE (decl_or_origin))
26469 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26470 else
26471 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26472 }
26473 return gen_formal_parameter_die (decl, origin,
26474 true /* Emit name attribute. */,
26475 context_die);
26476
26477 case NAMESPACE_DECL:
26478 if (dwarf_version >= 3 || !dwarf_strict)
26479 gen_namespace_die (decl, context_die);
26480 break;
26481
26482 case IMPORTED_DECL:
26483 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26484 DECL_CONTEXT (decl), context_die);
26485 break;
26486
26487 case NAMELIST_DECL:
26488 gen_namelist_decl (DECL_NAME (decl), context_die,
26489 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26490 break;
26491
26492 default:
26493 /* Probably some frontend-internal decl. Assume we don't care. */
26494 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26495 break;
26496 }
26497
26498 return NULL;
26499 }
26500 \f
26501 /* Output initial debug information for global DECL. Called at the
26502 end of the parsing process.
26503
26504 This is the initial debug generation process. As such, the DIEs
26505 generated may be incomplete. A later debug generation pass
26506 (dwarf2out_late_global_decl) will augment the information generated
26507 in this pass (e.g., with complete location info). */
26508
26509 static void
26510 dwarf2out_early_global_decl (tree decl)
26511 {
26512 set_early_dwarf s;
26513
26514 /* gen_decl_die() will set DECL_ABSTRACT because
26515      cgraph_function_possibly_inlined_p() returns true.  This in
26516      turn will cause DW_AT_inline attributes to be set.
26517
26518 This happens because at early dwarf generation, there is no
26519 cgraph information, causing cgraph_function_possibly_inlined_p()
26520 to return true. Trick cgraph_function_possibly_inlined_p()
26521 while we generate dwarf early. */
26522 bool save = symtab->global_info_ready;
26523 symtab->global_info_ready = true;
26524
26525 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26526 other DECLs and they can point to template types or other things
26527 that dwarf2out can't handle when done via dwarf2out_decl. */
26528 if (TREE_CODE (decl) != TYPE_DECL
26529 && TREE_CODE (decl) != PARM_DECL)
26530 {
26531 if (TREE_CODE (decl) == FUNCTION_DECL)
26532 {
26533 tree save_fndecl = current_function_decl;
26534
26535 /* For nested functions, make sure we have DIEs for the parents first
26536 so that all nested DIEs are generated at the proper scope in the
26537 first shot. */
26538 tree context = decl_function_context (decl);
26539 if (context != NULL)
26540 {
26541 dw_die_ref context_die = lookup_decl_die (context);
26542 current_function_decl = context;
26543
26544 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26545 enough so that it lands in its own context. This avoids type
26546 pruning issues later on. */
26547 if (context_die == NULL || is_declaration_die (context_die))
26548 dwarf2out_early_global_decl (context);
26549 }
26550
26551 /* Emit an abstract origin of a function first. This happens
26552 with C++ constructor clones for example and makes
26553 dwarf2out_abstract_function happy which requires the early
26554 DIE of the abstract instance to be present. */
26555 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26556 dw_die_ref origin_die;
26557 if (origin != NULL
26558 /* Do not emit the DIE multiple times but make sure to
26559 process it fully here in case we just saw a declaration. */
26560 && ((origin_die = lookup_decl_die (origin)) == NULL
26561 || is_declaration_die (origin_die)))
26562 {
26563 current_function_decl = origin;
26564 dwarf2out_decl (origin);
26565 }
26566
26567 /* Emit the DIE for decl but avoid doing that multiple times. */
26568 dw_die_ref old_die;
26569 if ((old_die = lookup_decl_die (decl)) == NULL
26570 || is_declaration_die (old_die))
26571 {
26572 current_function_decl = decl;
26573 dwarf2out_decl (decl);
26574 }
26575
26576 current_function_decl = save_fndecl;
26577 }
26578 else
26579 dwarf2out_decl (decl);
26580 }
26581 symtab->global_info_ready = save;
26582 }
26583
26584 /* Return whether EXPR is an expression with the following pattern:
26585 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
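/* Informally, and only as an illustrative assumption: that is the tree shape
   of a dereference of a constant address, e.g. "*(int *) 0x1000", such as the
   DECL_VALUE_EXPR of an object placed at a fixed address.  */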
26586
26587 static bool
26588 is_trivial_indirect_ref (tree expr)
26589 {
26590 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26591 return false;
26592
26593 tree nop = TREE_OPERAND (expr, 0);
26594 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26595 return false;
26596
26597 tree int_cst = TREE_OPERAND (nop, 0);
26598 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26599 }
26600
26601 /* Output debug information for global decl DECL. Called from
26602 toplev.c after compilation proper has finished. */
26603
26604 static void
26605 dwarf2out_late_global_decl (tree decl)
26606 {
26607   /* Fill in any location information we were unable to determine
26608 on the first pass. */
26609 if (VAR_P (decl))
26610 {
26611 dw_die_ref die = lookup_decl_die (decl);
26612
26613 /* We may have to generate full debug late for LTO in case debug
26614 was not enabled at compile-time or the target doesn't support
26615 the LTO early debug scheme. */
26616 if (! die && in_lto_p)
26617 dwarf2out_decl (decl);
26618 else if (die)
26619 {
26620 /* We get called via the symtab code invoking late_global_decl
26621 for symbols that are optimized out.
26622
26623 Do not add locations for those, except if they have a
26624 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26625 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26626 INDIRECT_REF expression, as this could generate relocations to
26627 text symbols in LTO object files, which is invalid. */
26628 varpool_node *node = varpool_node::get (decl);
26629 if ((! node || ! node->definition)
26630 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26631 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26632 tree_add_const_value_attribute_for_decl (die, decl);
26633 else
26634 add_location_or_const_value_attribute (die, decl, false);
26635 }
26636 }
26637 }
26638
26639 /* Output debug information for type decl DECL. Called from toplev.c
26640 and from language front ends (to record built-in types). */
26641 static void
26642 dwarf2out_type_decl (tree decl, int local)
26643 {
26644 if (!local)
26645 {
26646 set_early_dwarf s;
26647 dwarf2out_decl (decl);
26648 }
26649 }
26650
26651 /* Output debug information for imported module or decl DECL.
26652    NAME is the non-NULL name in the lexical block if the decl has been renamed.
26653    LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26654 that DECL belongs to.
26655 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
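/* For instance (a sketch of the usual C++ mapping, not literal output):
   "using namespace N;" yields a DW_TAG_imported_module whose DW_AT_import
   refers to the DIE for N, while "using N::T;" yields a
   DW_TAG_imported_declaration whose DW_AT_import refers to the DIE for T,
   both placed under LEXICAL_BLOCK_DIE.  */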
26656 static void
26657 dwarf2out_imported_module_or_decl_1 (tree decl,
26658 tree name,
26659 tree lexical_block,
26660 dw_die_ref lexical_block_die)
26661 {
26662 expanded_location xloc;
26663 dw_die_ref imported_die = NULL;
26664 dw_die_ref at_import_die;
26665
26666 if (TREE_CODE (decl) == IMPORTED_DECL)
26667 {
26668 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26669 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26670 gcc_assert (decl);
26671 }
26672 else
26673 xloc = expand_location (input_location);
26674
26675 if (TREE_CODE (decl) == TYPE_DECL)
26676 {
26677 at_import_die = force_type_die (TREE_TYPE (decl));
26678 /* For namespace N { typedef void T; } using N::T; base_type_die
26679 	 returns NULL, but DW_TAG_imported_declaration requires
26680 	 the DW_AT_import attribute.  Force creation of DW_TAG_typedef.  */
26681 if (!at_import_die)
26682 {
26683 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26684 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26685 at_import_die = lookup_type_die (TREE_TYPE (decl));
26686 gcc_assert (at_import_die);
26687 }
26688 }
26689 else
26690 {
26691 at_import_die = lookup_decl_die (decl);
26692 if (!at_import_die)
26693 {
26694 /* If we're trying to avoid duplicate debug info, we may not have
26695 emitted the member decl for this field. Emit it now. */
26696 if (TREE_CODE (decl) == FIELD_DECL)
26697 {
26698 tree type = DECL_CONTEXT (decl);
26699
26700 if (TYPE_CONTEXT (type)
26701 && TYPE_P (TYPE_CONTEXT (type))
26702 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26703 DINFO_USAGE_DIR_USE))
26704 return;
26705 gen_type_die_for_member (type, decl,
26706 get_context_die (TYPE_CONTEXT (type)));
26707 }
26708 if (TREE_CODE (decl) == CONST_DECL)
26709 {
26710 /* Individual enumerators of an enum type do not get output here
26711 (see gen_decl_die), so we cannot call force_decl_die. */
26712 if (!is_fortran () && !is_ada () && !is_dlang ())
26713 return;
26714 }
26715 if (TREE_CODE (decl) == NAMELIST_DECL)
26716 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26717 get_context_die (DECL_CONTEXT (decl)),
26718 NULL_TREE);
26719 else
26720 at_import_die = force_decl_die (decl);
26721 }
26722 }
26723
26724 if (TREE_CODE (decl) == NAMESPACE_DECL)
26725 {
26726 if (dwarf_version >= 3 || !dwarf_strict)
26727 imported_die = new_die (DW_TAG_imported_module,
26728 lexical_block_die,
26729 lexical_block);
26730 else
26731 return;
26732 }
26733 else
26734 imported_die = new_die (DW_TAG_imported_declaration,
26735 lexical_block_die,
26736 lexical_block);
26737
26738 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26739 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26740 if (debug_column_info && xloc.column)
26741 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26742 if (name)
26743 add_AT_string (imported_die, DW_AT_name,
26744 IDENTIFIER_POINTER (name));
26745 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26746 }
26747
26748 /* Output debug information for imported module or decl DECL.
26749    NAME is the non-NULL name in context if the decl has been renamed.
26750 CHILD is true if decl is one of the renamed decls as part of
26751 importing whole module.
26752 IMPLICIT is set if this hook is called for an implicit import
26753 such as inline namespace. */
26754
26755 static void
26756 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26757 bool child, bool implicit)
26758 {
26759 /* dw_die_ref at_import_die; */
26760 dw_die_ref scope_die;
26761
26762 if (debug_info_level <= DINFO_LEVEL_TERSE)
26763 return;
26764
26765 gcc_assert (decl);
26766
26767   /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace should
26768      be enough.  For DWARF4 and older, even if we emit DW_AT_export_symbols
26769      as an extension, add the implicit DW_TAG_imported_module anyway, for
26770      the benefit of consumers unaware of DW_AT_export_symbols.  */
26771 if (implicit
26772 && dwarf_version >= 5
26773 && lang_hooks.decls.decl_dwarf_attribute (decl,
26774 DW_AT_export_symbols) == 1)
26775 return;
26776
26777 set_early_dwarf s;
26778
26779   /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26780      two DIEs: the decl DIE for the reference and the scope DIE.  First, get
26781      the DIE for the decl itself.  */
26782
26783   /* Get the scope DIE for the decl's context.  Use comp_unit_die for a global
26784      module or decl.  If no DIE is found for non-globals, force a new one.  */
26785 if (context
26786 && TYPE_P (context)
26787 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26788 return;
26789
26790 scope_die = get_context_die (context);
26791
26792 if (child)
26793 {
26794 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26795 there is nothing we can do, here. */
26796 if (dwarf_version < 3 && dwarf_strict)
26797 return;
26798
26799 gcc_assert (scope_die->die_child);
26800 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26801 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26802 scope_die = scope_die->die_child;
26803 }
26804
26805 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26806 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26807 }
26808
26809 /* Output debug information for namelists. */
26810
26811 static dw_die_ref
26812 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26813 {
26814 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26815 tree value;
26816 unsigned i;
26817
26818 if (debug_info_level <= DINFO_LEVEL_TERSE)
26819 return NULL;
26820
26821 gcc_assert (scope_die != NULL);
26822 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26823 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26824
26825 /* If there are no item_decls, we have a nondefining namelist, e.g.
26826 with USE association; hence, set DW_AT_declaration. */
26827 if (item_decls == NULL_TREE)
26828 {
26829 add_AT_flag (nml_die, DW_AT_declaration, 1);
26830 return nml_die;
26831 }
26832
26833 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26834 {
26835 nml_item_ref_die = lookup_decl_die (value);
26836 if (!nml_item_ref_die)
26837 nml_item_ref_die = force_decl_die (value);
26838
26839 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26840 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26841 }
26842 return nml_die;
26843 }
26844
26845
26846 /* Write the debugging output for DECL.  */
26847
26848 static void
26849 dwarf2out_decl (tree decl)
26850 {
26851 dw_die_ref context_die = comp_unit_die ();
26852
26853 switch (TREE_CODE (decl))
26854 {
26855 case ERROR_MARK:
26856 return;
26857
26858 case FUNCTION_DECL:
26859 /* If we're a nested function, initially use a parent of NULL; if we're
26860 a plain function, this will be fixed up in decls_for_scope. If
26861 we're a method, it will be ignored, since we already have a DIE.
26862 Avoid doing this late though since clones of class methods may
26863 otherwise end up in limbo and create type DIEs late. */
26864 if (early_dwarf
26865 && decl_function_context (decl)
26866 /* But if we're in terse mode, we don't care about scope. */
26867 && debug_info_level > DINFO_LEVEL_TERSE)
26868 context_die = NULL;
26869 break;
26870
26871 case VAR_DECL:
26872       /* For local statics, look up the proper context DIE.  */
26873 if (local_function_static (decl))
26874 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26875
26876 /* If we are in terse mode, don't generate any DIEs to represent any
26877 variable declarations or definitions unless it is external. */
26878 if (debug_info_level < DINFO_LEVEL_TERSE
26879 || (debug_info_level == DINFO_LEVEL_TERSE
26880 && !TREE_PUBLIC (decl)))
26881 return;
26882 break;
26883
26884 case CONST_DECL:
26885 if (debug_info_level <= DINFO_LEVEL_TERSE)
26886 return;
26887 if (!is_fortran () && !is_ada () && !is_dlang ())
26888 return;
26889 if (TREE_STATIC (decl) && decl_function_context (decl))
26890 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26891 break;
26892
26893 case NAMESPACE_DECL:
26894 case IMPORTED_DECL:
26895 if (debug_info_level <= DINFO_LEVEL_TERSE)
26896 return;
26897 if (lookup_decl_die (decl) != NULL)
26898 return;
26899 break;
26900
26901 case TYPE_DECL:
26902 /* Don't emit stubs for types unless they are needed by other DIEs. */
26903 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26904 return;
26905
26906 /* Don't bother trying to generate any DIEs to represent any of the
26907 normal built-in types for the language we are compiling. */
26908 if (DECL_IS_UNDECLARED_BUILTIN (decl))
26909 return;
26910
26911 /* If we are in terse mode, don't generate any DIEs for types. */
26912 if (debug_info_level <= DINFO_LEVEL_TERSE)
26913 return;
26914
26915 /* If we're a function-scope tag, initially use a parent of NULL;
26916 this will be fixed up in decls_for_scope. */
26917 if (decl_function_context (decl))
26918 context_die = NULL;
26919
26920 break;
26921
26922 case NAMELIST_DECL:
26923 break;
26924
26925 default:
26926 return;
26927 }
26928
26929 gen_decl_die (decl, NULL, NULL, context_die);
26930
26931 if (flag_checking)
26932 {
26933 dw_die_ref die = lookup_decl_die (decl);
26934 if (die)
26935 check_die (die);
26936 }
26937 }
26938
26939 /* Write the debugging output for DECL and reset per-function state.  */
26940
26941 static void
26942 dwarf2out_function_decl (tree decl)
26943 {
26944 dwarf2out_decl (decl);
26945 call_arg_locations = NULL;
26946 call_arg_loc_last = NULL;
26947 call_site_count = -1;
26948 tail_call_site_count = -1;
26949 decl_loc_table->empty ();
26950 cached_dw_loc_list_table->empty ();
26951 }
26952
26953 /* Output a marker (i.e. a label) for the beginning of the generated code for
26954 a lexical block. */
26955
26956 static void
26957 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26958 unsigned int blocknum)
26959 {
26960 switch_to_section (current_function_section ());
26961 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26962 }
26963
26964 /* Output a marker (i.e. a label) for the end of the generated code for a
26965 lexical block. */
26966
26967 static void
26968 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26969 {
26970 switch_to_section (current_function_section ());
26971 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26972 }
26973
26974 /* Returns true if it is appropriate not to emit any debugging
26975 information for BLOCK, because it doesn't contain any instructions.
26976
26977 Don't allow this for blocks with nested functions or local classes
26978 as we would end up with orphans, and in the presence of scheduling
26979 we may end up calling them anyway. */
26980
26981 static bool
26982 dwarf2out_ignore_block (const_tree block)
26983 {
26984 tree decl;
26985 unsigned int i;
26986
26987 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26988 if (TREE_CODE (decl) == FUNCTION_DECL
26989 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26990 	return false;
26991 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26992 {
26993 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26994 if (TREE_CODE (decl) == FUNCTION_DECL
26995 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26996 	  return false;
26997 }
26998
26999   return true;
27000 }
27001
27002 /* Hash table routines for file_hash. */
27003
27004 bool
27005 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
27006 {
27007 return filename_cmp (p1->filename, p2) == 0;
27008 }
27009
27010 hashval_t
27011 dwarf_file_hasher::hash (dwarf_file_data *p)
27012 {
27013 return htab_hash_string (p->filename);
27014 }
27015
27016 /* Lookup FILE_NAME (in the list of filenames that we know about here in
27017 dwarf2out.c) and return its "index". The index of each (known) filename is
27018 just a unique number which is associated with only that one filename. We
27019 need such numbers for the sake of generating labels (in the .debug_sfnames
27020 section) and references to those files numbers (in the .debug_srcinfo
27021 and .debug_macinfo sections). If the filename given as an argument is not
27022 found in our current list, add it to the list and assign it the next
27023 available unique index number. */
27024
27025 static struct dwarf_file_data *
27026 lookup_filename (const char *file_name)
27027 {
27028 struct dwarf_file_data * created;
27029
27030 if (!file_name)
27031 return NULL;
27032
27033 if (!file_name[0])
27034 file_name = "<stdin>";
27035
27036 dwarf_file_data **slot
27037 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27038 INSERT);
27039 if (*slot)
27040 return *slot;
27041
27042 created = ggc_alloc<dwarf_file_data> ();
27043 created->filename = file_name;
27044 created->emitted_number = 0;
27045 *slot = created;
27046 return created;
27047 }
27048
27049 /* If the assembler will construct the file table, then translate the compiler
27050 internal file table number into the assembler file table number, and emit
27051 a .file directive if we haven't already emitted one yet. The file table
27052 numbers are different because we prune debug info for unused variables and
27053 types, which may include filenames. */
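/* For example (illustrative, with a made-up file name), the first such file
   results in a directive along the lines of:

	.file 1 "foo.c"  */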
27054
27055 static int
27056 maybe_emit_file (struct dwarf_file_data * fd)
27057 {
27058 if (! fd->emitted_number)
27059 {
27060 if (last_emitted_file)
27061 fd->emitted_number = last_emitted_file->emitted_number + 1;
27062 else
27063 fd->emitted_number = 1;
27064 last_emitted_file = fd;
27065
27066 if (output_asm_line_debug_info ())
27067 {
27068 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27069 output_quoted_string (asm_out_file,
27070 remap_debug_filename (fd->filename));
27071 fputc ('\n', asm_out_file);
27072 }
27073 }
27074
27075 return fd->emitted_number;
27076 }
27077
27078 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27079 That generation should happen after function debug info has been
27080 generated. The value of the attribute is the constant value of ARG. */
27081
27082 static void
27083 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27084 {
27085 die_arg_entry entry;
27086
27087 if (!die || !arg)
27088 return;
27089
27090 gcc_assert (early_dwarf);
27091
27092 if (!tmpl_value_parm_die_table)
27093 vec_alloc (tmpl_value_parm_die_table, 32);
27094
27095 entry.die = die;
27096 entry.arg = arg;
27097 vec_safe_push (tmpl_value_parm_die_table, entry);
27098 }
27099
27100 /* Return TRUE if T is an instance of a generic type, FALSE
27101 otherwise. */
27102
27103 static bool
27104 generic_type_p (tree t)
27105 {
27106 if (t == NULL_TREE || !TYPE_P (t))
27107 return false;
27108 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27109 }
27110
27111 /* Schedule the generation of the generic parameter dies for the
27112 instance of generic type T. The proper generation itself is later
27113 done by gen_scheduled_generic_parms_dies. */
27114
27115 static void
27116 schedule_generic_params_dies_gen (tree t)
27117 {
27118 if (!generic_type_p (t))
27119 return;
27120
27121 gcc_assert (early_dwarf);
27122
27123 if (!generic_type_instances)
27124 vec_alloc (generic_type_instances, 256);
27125
27126 vec_safe_push (generic_type_instances, t);
27127 }
27128
27129 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27130 by append_entry_to_tmpl_value_parm_die_table. This function must
27131 be called after function DIEs have been generated. */
27132
27133 static void
27134 gen_remaining_tmpl_value_param_die_attribute (void)
27135 {
27136 if (tmpl_value_parm_die_table)
27137 {
27138 unsigned i, j;
27139 die_arg_entry *e;
27140
27141 /* We do this in two phases - first get the cases we can
27142 handle during early-finish, preserving those we cannot
27143 (containing symbolic constants where we don't yet know
27144 whether we are going to output the referenced symbols).
27145 For those we try again at late-finish. */
27146 j = 0;
27147 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27148 {
27149 if (!e->die->removed
27150 && !tree_add_const_value_attribute (e->die, e->arg))
27151 {
27152 dw_loc_descr_ref loc = NULL;
27153 if (! early_dwarf
27154 && (dwarf_version >= 5 || !dwarf_strict))
27155 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27156 if (loc)
27157 add_AT_loc (e->die, DW_AT_location, loc);
27158 else
27159 (*tmpl_value_parm_die_table)[j++] = *e;
27160 }
27161 }
27162 tmpl_value_parm_die_table->truncate (j);
27163 }
27164 }
27165
27166 /* Generate generic parameters DIEs for instances of generic types
27167 that have been previously scheduled by
27168 schedule_generic_params_dies_gen. This function must be called
27169 after all the types of the CU have been laid out. */
27170
27171 static void
27172 gen_scheduled_generic_parms_dies (void)
27173 {
27174 unsigned i;
27175 tree t;
27176
27177 if (!generic_type_instances)
27178 return;
27179
27180 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27181 if (COMPLETE_TYPE_P (t))
27182 gen_generic_params_dies (t);
27183
27184 generic_type_instances = NULL;
27185 }
27186
27187
27188 /* Replace DW_AT_name for DECL with NAME.  */
27189
27190 static void
27191 dwarf2out_set_name (tree decl, tree name)
27192 {
27193 dw_die_ref die;
27194 dw_attr_node *attr;
27195 const char *dname;
27196
27197 die = TYPE_SYMTAB_DIE (decl);
27198 if (!die)
27199 return;
27200
27201 dname = dwarf2_name (name, 0);
27202 if (!dname)
27203 return;
27204
27205 attr = get_AT (die, DW_AT_name);
27206 if (attr)
27207 {
27208 struct indirect_string_node *node;
27209
27210 node = find_AT_string (dname);
27211       /* Replace the string.  */
27212 attr->dw_attr_val.v.val_str = node;
27213 }
27214
27215 else
27216 add_name_attribute (die, dname);
27217 }
27218
27219 /* True if before or during processing of the first function being emitted. */
27220 static bool in_first_function_p = true;
27221 /* True if loc_note during dwarf2out_var_location call might still be
27222 before first real instruction at address equal to .Ltext0. */
27223 static bool maybe_at_text_label_p = true;
27224 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27225 static unsigned int first_loclabel_num_not_at_text_label;
27226
27227 /* Look ahead for a real insn. */
27228
27229 static rtx_insn *
27230 dwarf2out_next_real_insn (rtx_insn *loc_note)
27231 {
27232 rtx_insn *next_real = NEXT_INSN (loc_note);
27233
27234 while (next_real)
27235 if (INSN_P (next_real))
27236 break;
27237 else
27238 next_real = NEXT_INSN (next_real);
27239
27240 return next_real;
27241 }
27242
27243 /* Called by the final INSN scan whenever we see a var location. We
27244 use it to drop labels in the right places, and throw the location in
27245 our lookup table. */
27246
27247 static void
27248 dwarf2out_var_location (rtx_insn *loc_note)
27249 {
27250 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27251 struct var_loc_node *newloc;
27252 rtx_insn *next_real;
27253 rtx_insn *call_insn = NULL;
27254 static const char *last_label;
27255 static const char *last_postcall_label;
27256 static bool last_in_cold_section_p;
27257 static rtx_insn *expected_next_loc_note;
27258 tree decl;
27259 bool var_loc_p;
27260 var_loc_view view = 0;
27261
27262 if (!NOTE_P (loc_note))
27263 {
27264 if (CALL_P (loc_note))
27265 {
27266 maybe_reset_location_view (loc_note, cur_line_info_table);
27267 call_site_count++;
27268 if (SIBLING_CALL_P (loc_note))
27269 tail_call_site_count++;
27270 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27271 {
27272 call_insn = loc_note;
27273 loc_note = NULL;
27274 var_loc_p = false;
27275
27276 next_real = dwarf2out_next_real_insn (call_insn);
27277 cached_next_real_insn = NULL;
27278 goto create_label;
27279 }
27280 if (optimize == 0 && !flag_var_tracking)
27281 {
27282 /* When the var-tracking pass is not running, there is no note
27283 for indirect calls whose target is compile-time known. In this
27284 case, process such calls specifically so that we generate call
27285 sites for them anyway. */
27286 rtx x = PATTERN (loc_note);
27287 if (GET_CODE (x) == PARALLEL)
27288 x = XVECEXP (x, 0, 0);
27289 if (GET_CODE (x) == SET)
27290 x = SET_SRC (x);
27291 if (GET_CODE (x) == CALL)
27292 x = XEXP (x, 0);
27293 if (!MEM_P (x)
27294 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27295 || !SYMBOL_REF_DECL (XEXP (x, 0))
27296 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27297 != FUNCTION_DECL))
27298 {
27299 call_insn = loc_note;
27300 loc_note = NULL;
27301 var_loc_p = false;
27302
27303 next_real = dwarf2out_next_real_insn (call_insn);
27304 cached_next_real_insn = NULL;
27305 goto create_label;
27306 }
27307 }
27308 }
27309 else if (!debug_variable_location_views)
27310 gcc_unreachable ();
27311 else
27312 maybe_reset_location_view (loc_note, cur_line_info_table);
27313
27314 return;
27315 }
27316
27317 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27318 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27319 return;
27320
27321 /* Optimize processing a large consecutive sequence of location
27322 notes so we don't spend too much time in next_real_insn. If the
27323 next insn is another location note, remember the next_real_insn
27324 calculation for next time. */
27325 next_real = cached_next_real_insn;
27326 if (next_real)
27327 {
27328 if (expected_next_loc_note != loc_note)
27329 next_real = NULL;
27330 }
27331
27332 if (! next_real)
27333 next_real = dwarf2out_next_real_insn (loc_note);
27334
27335 if (next_real)
27336 {
27337 rtx_insn *next_note = NEXT_INSN (loc_note);
27338 while (next_note != next_real)
27339 {
27340 if (! next_note->deleted ()
27341 && NOTE_P (next_note)
27342 && NOTE_KIND (next_note) == NOTE_INSN_VAR_LOCATION)
27343 break;
27344 next_note = NEXT_INSN (next_note);
27345 }
27346
27347 if (next_note == next_real)
27348 cached_next_real_insn = NULL;
27349 else
27350 {
27351 expected_next_loc_note = next_note;
27352 cached_next_real_insn = next_real;
27353 }
27354 }
27355 else
27356 cached_next_real_insn = NULL;
27357
27358 /* If there are no instructions which would be affected by this note,
27359 don't do anything. */
27360 if (var_loc_p
27361 && next_real == NULL_RTX
27362 && !NOTE_DURING_CALL_P (loc_note))
27363 return;
27364
27365 create_label:
27366
27367 if (next_real == NULL_RTX)
27368 next_real = get_last_insn ();
27369
27370   /* If there were any real insns between the note we processed last time
27371 and this note (or if it is the first note), clear
27372 last_{,postcall_}label so that they are not reused this time. */
27373 if (last_var_location_insn == NULL_RTX
27374 || last_var_location_insn != next_real
27375 || last_in_cold_section_p != in_cold_section_p)
27376 {
27377 last_label = NULL;
27378 last_postcall_label = NULL;
27379 }
27380
27381 if (var_loc_p)
27382 {
27383 const char *label
27384 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27385 view = cur_line_info_table->view;
27386 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27387 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27388 if (newloc == NULL)
27389 return;
27390 }
27391 else
27392 {
27393 decl = NULL_TREE;
27394 newloc = NULL;
27395 }
27396
27397 /* If there were no real insns between the note we processed last time
27398 and this note, use the label we emitted last time. Otherwise
27399 create a new label and emit it. */
27400 if (last_label == NULL)
27401 {
27402 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27403 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27404 loclabel_num++;
27405 last_label = ggc_strdup (loclabel);
27406 /* See if loclabel might be equal to .Ltext0. If yes,
27407 bump first_loclabel_num_not_at_text_label. */
27408 if (!have_multiple_function_sections
27409 && in_first_function_p
27410 && maybe_at_text_label_p)
27411 {
27412 static rtx_insn *last_start;
27413 rtx_insn *insn;
27414 for (insn = loc_note; insn; insn = previous_insn (insn))
27415 if (insn == last_start)
27416 break;
27417 else if (!NONDEBUG_INSN_P (insn))
27418 continue;
27419 else
27420 {
27421 rtx body = PATTERN (insn);
27422 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27423 continue;
27424 /* Inline asm could occupy zero bytes. */
27425 else if (GET_CODE (body) == ASM_INPUT
27426 || asm_noperands (body) >= 0)
27427 continue;
27428 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27429 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27430 continue;
27431 #endif
27432 else
27433 {
27434 /* Assume insn has non-zero length. */
27435 maybe_at_text_label_p = false;
27436 break;
27437 }
27438 }
27439 if (maybe_at_text_label_p)
27440 {
27441 last_start = loc_note;
27442 first_loclabel_num_not_at_text_label = loclabel_num;
27443 }
27444 }
27445 }
27446
27447 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27448 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27449
27450 if (!var_loc_p)
27451 {
27452 struct call_arg_loc_node *ca_loc
27453 = ggc_cleared_alloc<call_arg_loc_node> ();
27454 rtx_insn *prev = call_insn;
27455
27456 ca_loc->call_arg_loc_note
27457 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27458 ca_loc->next = NULL;
27459 ca_loc->label = last_label;
27460 gcc_assert (prev
27461 && (CALL_P (prev)
27462 || (NONJUMP_INSN_P (prev)
27463 && GET_CODE (PATTERN (prev)) == SEQUENCE
27464 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27465 if (!CALL_P (prev))
27466 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27467 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27468
27469 /* Look for a SYMBOL_REF in the "prev" instruction. */
27470 rtx x = get_call_rtx_from (prev);
27471 if (x)
27472 {
27473 /* Try to get the call symbol, if any. */
27474 if (MEM_P (XEXP (x, 0)))
27475 x = XEXP (x, 0);
27476 /* First, look for a memory access to a symbol_ref. */
27477 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27478 && SYMBOL_REF_DECL (XEXP (x, 0))
27479 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27480 ca_loc->symbol_ref = XEXP (x, 0);
27481 /* Otherwise, look at a compile-time known user-level function
27482 declaration. */
27483 else if (MEM_P (x)
27484 && MEM_EXPR (x)
27485 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27486 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27487 }
27488
27489 ca_loc->block = insn_scope (prev);
27490 if (call_arg_locations)
27491 call_arg_loc_last->next = ca_loc;
27492 else
27493 call_arg_locations = ca_loc;
27494 call_arg_loc_last = ca_loc;
27495 }
27496 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27497 {
27498 newloc->label = last_label;
27499 newloc->view = view;
27500 }
27501 else
27502 {
27503 if (!last_postcall_label)
27504 {
27505 sprintf (loclabel, "%s-1", last_label);
27506 last_postcall_label = ggc_strdup (loclabel);
27507 }
27508 newloc->label = last_postcall_label;
27509 /* ??? This view is at last_label, not last_label-1, but we
27510 could only assume view at last_label-1 is zero if we could
27511 assume calls always have length greater than one. This is
27512 probably true in general, though there might be a rare
27513 exception to this rule, e.g. if a call insn is optimized out
27514 by target magic. Then, even the -1 in the label will be
27515 wrong, which might invalidate the range. Anyway, using view,
27516 though technically possibly incorrect, will work as far as
27517 ranges go: since L-1 is in the middle of the call insn,
27518 (L-1).0 and (L-1).V shouldn't make any difference, and having
27519 the loclist entry refer to the .loc entry might be useful, so
27520 leave it like this. */
27521 newloc->view = view;
27522 }
27523
27524 if (var_loc_p && flag_debug_asm)
27525 {
27526 const char *name, *sep, *patstr;
27527 if (decl && DECL_NAME (decl))
27528 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27529 else
27530 name = "";
27531 if (NOTE_VAR_LOCATION_LOC (loc_note))
27532 {
27533 sep = " => ";
27534 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27535 }
27536 else
27537 {
27538 sep = " ";
27539 patstr = "RESET";
27540 }
27541 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27542 name, sep, patstr);
27543 }
27544
27545 last_var_location_insn = next_real;
27546 last_in_cold_section_p = in_cold_section_p;
27547 }
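
/* Illustrative sketch (not generated code): with loclabel_num == 5 the
label emitted above via ASM_GENERATE_INTERNAL_LABEL and
ASM_OUTPUT_DEBUG_LABEL is typically spelled ".LVL5" on ELF targets, and
a binding flagged NOTE_DURING_CALL_P is given the string ".LVL5-1",
i.e. the address one byte before .LVL5, which the view comment above
notes lands inside the call insn. The exact spelling depends on the
target's internal-label macros. */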
27548
27549 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27550 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27551 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27552 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27553 BLOCK_FRAGMENT_ORIGIN links. */
27554 static bool
27555 block_within_block_p (tree block, tree outer, bool bothways)
27556 {
27557 if (block == outer)
27558 return true;
27559
27560 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27561 for (tree context = BLOCK_SUPERCONTEXT (block);
27562 context != outer;
27563 context = BLOCK_SUPERCONTEXT (context))
27564 if (!context || TREE_CODE (context) != BLOCK)
27565 return false;
27566
27567 if (!bothways)
27568 return true;
27569
27570 /* Now check that each block is actually referenced by its
27571 parent. */
27572 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27573 context = BLOCK_SUPERCONTEXT (context))
27574 {
27575 if (BLOCK_FRAGMENT_ORIGIN (context))
27576 {
27577 gcc_assert (!BLOCK_SUBBLOCKS (context));
27578 context = BLOCK_FRAGMENT_ORIGIN (context);
27579 }
27580 for (tree sub = BLOCK_SUBBLOCKS (context);
27581 sub != block;
27582 sub = BLOCK_CHAIN (sub))
27583 if (!sub)
27584 return false;
27585 if (context == outer)
27586 return true;
27587 else
27588 block = context;
27589 }
27590 }
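
/* Illustrative example: if a BLOCK B was dropped from its parent P's
BLOCK_SUBBLOCKS chain but still has BLOCK_SUPERCONTEXT (B) == P, the
one-way walk above succeeds while the BOTHWAYS walk fails in the
"sub != block" loop, because B is never found among P's subblocks.
That is exactly the inconsistency the flag_checking assertion in
dwarf2out_inline_entry below is meant to catch. */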
27591
27592 /* Called during final while assembling the marker of the entry point
27593 for an inlined function. */
27594
27595 static void
27596 dwarf2out_inline_entry (tree block)
27597 {
27598 gcc_assert (debug_inline_points);
27599
27600 /* If we can't represent it, don't bother. */
27601 if (!(dwarf_version >= 3 || !dwarf_strict))
27602 return;
27603
27604 gcc_assert (DECL_P (block_ultimate_origin (block)));
27605
27606 /* Sanity check the block tree. This would catch a case in which
27607 BLOCK got removed from the tree reachable from the outermost
27608 lexical block, but got retained in markers. It would still link
27609 back to its parents, but some ancestor would be missing a link
27610 down the path to the sub BLOCK. If the block got removed, its
27611 BLOCK_NUMBER will not be a usable value. */
27612 if (flag_checking)
27613 gcc_assert (block_within_block_p (block,
27614 DECL_INITIAL (current_function_decl),
27615 true));
27616
27617 gcc_assert (inlined_function_outer_scope_p (block));
27618 gcc_assert (!lookup_block_die (block));
27619
27620 if (BLOCK_FRAGMENT_ORIGIN (block))
27621 block = BLOCK_FRAGMENT_ORIGIN (block);
27622 /* Can the entry point ever not be at the beginning of an
27623 unfragmented lexical block? */
27624 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27625 || (cur_line_info_table
27626 && !ZERO_VIEW_P (cur_line_info_table->view))))
27627 return;
27628
27629 if (!inline_entry_data_table)
27630 inline_entry_data_table
27631 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27632
27634 inline_entry_data **iedp
27635 = inline_entry_data_table->find_slot_with_hash (block,
27636 htab_hash_pointer (block),
27637 INSERT);
27638 if (*iedp)
27639 /* ??? Ideally, we'd record all entry points for the same inlined
27640 function (some may have been duplicated by e.g. unrolling), but
27641 we have no way to represent that ATM. */
27642 return;
27643
27644 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27645 ied->block = block;
27646 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27647 ied->label_num = BLOCK_NUMBER (block);
27648 if (cur_line_info_table)
27649 ied->view = cur_line_info_table->view;
27650
27651 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
27652 BLOCK_NUMBER (block));
27653 }
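
/* Illustrative example (label prefix is target/configuration
dependent): for a block numbered 3 the call above emits an
inline-entry marker such as ".LBI3:" in the text section; the recorded
inline_entry_data is later used for the inlined subroutine's
entry-point attributes (DW_AT_entry_pc and, when location views are
enabled, the GNU entry-view extension). */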
27654
27655 /* Called from finalize_size_functions for size functions so that their body
27656 can be encoded in the debug info to describe the layout of variable-length
27657 structures. */
27658
27659 static void
27660 dwarf2out_size_function (tree decl)
27661 {
27662 set_early_dwarf s;
27663 function_to_dwarf_procedure (decl);
27664 }
27665
27666 /* Note in one location list that the text section has changed. */
27667
27668 int
27669 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27670 {
27671 var_loc_list *list = *slot;
27672 if (list->first)
27673 list->last_before_switch
27674 = list->last->next ? list->last->next : list->last;
27675 return 1;
27676 }
27677
27678 /* Note in all location lists that the text section has changed. */
27679
27680 static void
27681 var_location_switch_text_section (void)
27682 {
27683 if (decl_loc_table == NULL)
27684 return;
27685
27686 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27687 }
27688
27689 /* Create a new line number table. */
27690
27691 static dw_line_info_table *
27692 new_line_info_table (void)
27693 {
27694 dw_line_info_table *table;
27695
27696 table = ggc_cleared_alloc<dw_line_info_table> ();
27697 table->file_num = 1;
27698 table->line_num = 1;
27699 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27700 FORCE_RESET_NEXT_VIEW (table->view);
27701 table->symviews_since_reset = 0;
27702
27703 return table;
27704 }
27705
27706 /* Look up the "current" table into which we emit line info, so
27707 that we don't have to do it for every source line. */
27708
27709 static void
27710 set_cur_line_info_table (section *sec)
27711 {
27712 dw_line_info_table *table;
27713
27714 if (sec == text_section)
27715 table = text_section_line_info;
27716 else if (sec == cold_text_section)
27717 {
27718 table = cold_text_section_line_info;
27719 if (!table)
27720 {
27721 cold_text_section_line_info = table = new_line_info_table ();
27722 table->end_label = cold_end_label;
27723 }
27724 }
27725 else
27726 {
27727 const char *end_label;
27728
27729 if (crtl->has_bb_partition)
27730 {
27731 if (in_cold_section_p)
27732 end_label = crtl->subsections.cold_section_end_label;
27733 else
27734 end_label = crtl->subsections.hot_section_end_label;
27735 }
27736 else
27737 {
27738 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27739 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27740 current_function_funcdef_no);
27741 end_label = ggc_strdup (label);
27742 }
27743
27744 table = new_line_info_table ();
27745 table->end_label = end_label;
27746
27747 vec_safe_push (separate_line_info, table);
27748 }
27749
27750 if (output_asm_line_debug_info ())
27751 table->is_stmt = (cur_line_info_table
27752 ? cur_line_info_table->is_stmt
27753 : DWARF_LINE_DEFAULT_IS_STMT_START);
27754 cur_line_info_table = table;
27755 }
27756
27757
27758 /* We need to reset the locations at the beginning of each
27759 function. We can't do this in the end_function hook, because the
27760 declarations that use the locations won't have been output when
27761 that hook is called. Also compute have_multiple_function_sections here. */
27762
27763 static void
27764 dwarf2out_begin_function (tree fun)
27765 {
27766 section *sec = function_section (fun);
27767
27768 if (sec != text_section)
27769 have_multiple_function_sections = true;
27770
27771 if (crtl->has_bb_partition && !cold_text_section)
27772 {
27773 gcc_assert (current_function_decl == fun);
27774 cold_text_section = unlikely_text_section ();
27775 switch_to_section (cold_text_section);
27776 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27777 switch_to_section (sec);
27778 }
27779
27780 dwarf2out_note_section_used ();
27781 call_site_count = 0;
27782 tail_call_site_count = 0;
27783
27784 set_cur_line_info_table (sec);
27785 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27786 }
27787
27788 /* Helper function of dwarf2out_end_function, called only after emitting
27789 the very first function into assembly. Check if some .debug_loc range
27790 might end with a .LVL* label that could be equal to .Ltext0.
27791 In that case we must force using absolute addresses in .debug_loc ranges,
27792 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27793 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27794 list terminator.
27795 Set have_multiple_function_sections to true in that case and
27796 terminate htab traversal. */
27797
27798 int
27799 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27800 {
27801 var_loc_list *entry = *slot;
27802 struct var_loc_node *node;
27803
27804 node = entry->first;
27805 if (node && node->next && node->next->label)
27806 {
27807 unsigned int i;
27808 const char *label = node->next->label;
27809 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27810
27811 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27812 {
27813 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27814 if (strcmp (label, loclabel) == 0)
27815 {
27816 have_multiple_function_sections = true;
27817 return 0;
27818 }
27819 }
27820 }
27821 return 1;
27822 }
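
/* Illustrative example of the degenerate case described above: if
.LVL0 is emitted before any code, so that .LVL0 == .Ltext0, a
text-relative .debug_loc range such as

.LVL0-.Ltext0 .. .LVL1-.Ltext0

starts at offset 0, and if .LVL1 also equals .Ltext0 the pair becomes
0 .. 0, which readers treat as the end-of-list terminator. Forcing
have_multiple_function_sections makes the ranges use absolute
addresses instead. */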
27823
27824 /* Hook called after emitting a function into assembly.
27825 This does something only for the very first function emitted. */
27826
27827 static void
27828 dwarf2out_end_function (unsigned int)
27829 {
27830 if (in_first_function_p
27831 && !have_multiple_function_sections
27832 && first_loclabel_num_not_at_text_label
27833 && decl_loc_table)
27834 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27835 in_first_function_p = false;
27836 maybe_at_text_label_p = false;
27837 }
27838
27839 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27840 front-ends register a translation unit even before dwarf2out_init is
27841 called. */
27842 static tree main_translation_unit = NULL_TREE;
27843
27844 /* Hook called by front-ends after they built their main translation unit.
27845 Associate comp_unit_die to UNIT. */
27846
27847 static void
27848 dwarf2out_register_main_translation_unit (tree unit)
27849 {
27850 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27851 && main_translation_unit == NULL_TREE);
27852 main_translation_unit = unit;
27853 /* If dwarf2out_init has not been called yet, it will perform the association
27854 itself looking at main_translation_unit. */
27855 if (decl_die_table != NULL)
27856 equate_decl_number_to_die (unit, comp_unit_die ());
27857 }
27858
27859 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27860
27861 static void
27862 push_dw_line_info_entry (dw_line_info_table *table,
27863 enum dw_line_info_opcode opcode, unsigned int val)
27864 {
27865 dw_line_info_entry e;
27866 e.opcode = opcode;
27867 e.val = val;
27868 vec_safe_push (table->entries, e);
27869 }
27870
27871 /* Output a label to mark the beginning of a source code line entry
27872 and record information relating to this source line, in
27873 'line_info_table' for later output of the .debug_line section. */
27874 /* ??? The discriminator parameter ought to be unsigned. */
27875
27876 static void
27877 dwarf2out_source_line (unsigned int line, unsigned int column,
27878 const char *filename,
27879 int discriminator, bool is_stmt)
27880 {
27881 unsigned int file_num;
27882 dw_line_info_table *table;
27883 static var_loc_view lvugid;
27884
27885 if (debug_info_level < DINFO_LEVEL_TERSE)
27886 return;
27887
27888 table = cur_line_info_table;
27889
27890 if (line == 0)
27891 {
27892 if (debug_variable_location_views
27893 && output_asm_line_debug_info ()
27894 && table && !RESETTING_VIEW_P (table->view))
27895 {
27896 /* If we're using the assembler to compute view numbers, we
27897 can't issue a .loc directive for line zero, so we can't
27898 get a view number at this point. We might attempt to
27899 compute it from the previous view, or equate it to a
27900 subsequent view (though it might not be there!), but
27901 since we're omitting the line number entry, we might as
27902 well omit the view number as well. That means pretending
27903 it's a view number zero, which might very well turn out
27904 to be correct. ??? Extend the assembler so that the
27905 compiler could emit e.g. ".locview .LVU#", to output a
27906 view without changing line number information. We'd then
27907 have to count it in symviews_since_reset; when it's omitted,
27908 it doesn't count. */
27909 if (!zero_view_p)
27910 zero_view_p = BITMAP_GGC_ALLOC ();
27911 bitmap_set_bit (zero_view_p, table->view);
27912 if (flag_debug_asm)
27913 {
27914 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27915 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27916 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27917 ASM_COMMENT_START);
27918 assemble_name (asm_out_file, label);
27919 putc ('\n', asm_out_file);
27920 }
27921 table->view = ++lvugid;
27922 }
27923 return;
27924 }
27925
27926 /* The discriminator column was added in dwarf4. Simplify the below
27927 by simply removing it if we're not supposed to output it. */
27928 if (dwarf_version < 4 && dwarf_strict)
27929 discriminator = 0;
27930
27931 if (!debug_column_info)
27932 column = 0;
27933
27934 file_num = maybe_emit_file (lookup_filename (filename));
27935
27936 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27937 the debugger has used the second (possibly duplicate) line number
27938 at the beginning of the function to mark the end of the prologue.
27939 We could eliminate any other duplicates within the function. For
27940 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27941 that second line number entry. */
27942 /* Recall that this end-of-prologue indication is *not* the same thing
27943 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27944 to which the hook corresponds, follows the last insn that was
27945 emitted by gen_prologue. What we need is to precede the first insn
27946 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27947 insn that corresponds to something the user wrote. These may be
27948 very different locations once scheduling is enabled. */
27949
27950 if (0 && file_num == table->file_num
27951 && line == table->line_num
27952 && column == table->column_num
27953 && discriminator == table->discrim_num
27954 && is_stmt == table->is_stmt)
27955 return;
27956
27957 switch_to_section (current_function_section ());
27958
27959 /* If requested, emit something human-readable. */
27960 if (flag_debug_asm)
27961 {
27962 if (debug_column_info)
27963 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27964 filename, line, column);
27965 else
27966 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27967 filename, line);
27968 }
27969
27970 if (output_asm_line_debug_info ())
27971 {
27972 /* Emit the .loc directive understood by GNU as. */
27973 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27974 file_num, line, is_stmt, discriminator */
27975 fputs ("\t.loc ", asm_out_file);
27976 fprint_ul (asm_out_file, file_num);
27977 putc (' ', asm_out_file);
27978 fprint_ul (asm_out_file, line);
27979 putc (' ', asm_out_file);
27980 fprint_ul (asm_out_file, column);
27981
27982 if (is_stmt != table->is_stmt)
27983 {
27984 #if HAVE_GAS_LOC_STMT
27985 fputs (" is_stmt ", asm_out_file);
27986 putc (is_stmt ? '1' : '0', asm_out_file);
27987 #endif
27988 }
27989 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27990 {
27991 gcc_assert (discriminator > 0);
27992 fputs (" discriminator ", asm_out_file);
27993 fprint_ul (asm_out_file, (unsigned long) discriminator);
27994 }
27995 if (debug_variable_location_views)
27996 {
27997 if (!RESETTING_VIEW_P (table->view))
27998 {
27999 table->symviews_since_reset++;
28000 if (table->symviews_since_reset > symview_upper_bound)
28001 symview_upper_bound = table->symviews_since_reset;
28002 /* When we're using the assembler to compute view
28003 numbers, we output symbolic labels after "view" in
28004 .loc directives, and the assembler will set them for
28005 us, so that we can refer to the view numbers in
28006 location lists. The only exceptions are when we know
28007 a view will be zero: "-0" is a forced reset, used
28008 e.g. in the beginning of functions, whereas "0" tells
28009 the assembler to check that there was a PC change
28010 since the previous view, in a way that implicitly
28011 resets the next view. */
28012 fputs (" view ", asm_out_file);
28013 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28014 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
28015 assemble_name (asm_out_file, label);
28016 table->view = ++lvugid;
28017 }
28018 else
28019 {
28020 table->symviews_since_reset = 0;
28021 if (FORCE_RESETTING_VIEW_P (table->view))
28022 fputs (" view -0", asm_out_file);
28023 else
28024 fputs (" view 0", asm_out_file);
28025 /* Mark the present view as a zero view. Earlier debug
28026 binds may have already added its id to loclists to be
28027 emitted later, so we can't reuse the id for something
28028 else. However, it's good to know whether a view is
28029 known to be zero, because then we may be able to
28030 optimize out locviews that are all zeros, so take
28031 note of it in zero_view_p. */
28032 if (!zero_view_p)
28033 zero_view_p = BITMAP_GGC_ALLOC ();
28034 bitmap_set_bit (zero_view_p, lvugid);
28035 table->view = ++lvugid;
28036 }
28037 }
28038 putc ('\n', asm_out_file);
28039 }
28040 else
28041 {
28042 unsigned int label_num = ++line_info_label_num;
28043
28044 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28045
28046 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28047 push_dw_line_info_entry (table, LI_adv_address, label_num);
28048 else
28049 push_dw_line_info_entry (table, LI_set_address, label_num);
28050 if (debug_variable_location_views)
28051 {
28052 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28053 if (resetting)
28054 table->view = 0;
28055
28056 if (flag_debug_asm)
28057 fprintf (asm_out_file, "\t%s view %s%d\n",
28058 ASM_COMMENT_START,
28059 resetting ? "-" : "",
28060 table->view);
28061
28062 table->view++;
28063 }
28064 if (file_num != table->file_num)
28065 push_dw_line_info_entry (table, LI_set_file, file_num);
28066 if (discriminator != table->discrim_num)
28067 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28068 if (is_stmt != table->is_stmt)
28069 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28070 push_dw_line_info_entry (table, LI_set_line, line);
28071 if (debug_column_info)
28072 push_dw_line_info_entry (table, LI_set_column, column);
28073 }
28074
28075 table->file_num = file_num;
28076 table->line_num = line;
28077 table->column_num = column;
28078 table->discrim_num = discriminator;
28079 table->is_stmt = is_stmt;
28080 table->in_use = true;
28081 }
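
/* Illustrative example (hypothetical numbers): when the assembler
computes the line table, the code above emits directives such as

.loc 1 42 9 is_stmt 0 discriminator 2 view .LVU3

where 1 is the file number, 42 the line, 9 the column and .LVU3 a
symbolic view label resolved by the assembler; the is_stmt,
discriminator and view operands only appear when they change, are
nonzero, or location views are enabled, respectively. */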
28082
28083 /* Record the beginning of a new source file. */
28084
28085 static void
28086 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28087 {
28088 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28089 {
28090 macinfo_entry e;
28091 e.code = DW_MACINFO_start_file;
28092 e.lineno = lineno;
28093 e.info = ggc_strdup (filename);
28094 vec_safe_push (macinfo_table, e);
28095 }
28096 }
28097
28098 /* Record the end of a source file. */
28099
28100 static void
28101 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28102 {
28103 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28104 {
28105 macinfo_entry e;
28106 e.code = DW_MACINFO_end_file;
28107 e.lineno = lineno;
28108 e.info = NULL;
28109 vec_safe_push (macinfo_table, e);
28110 }
28111 }
28112
28113 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28114 the tail part of the directive line, i.e. the part which is past the
28115 initial whitespace, #, whitespace, directive-name, and whitespace. */
28116
28117 static void
28118 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28119 const char *buffer ATTRIBUTE_UNUSED)
28120 {
28121 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28122 {
28123 macinfo_entry e;
28124 /* Insert a dummy first entry to be able to optimize the whole
28125 predefined macro block using DW_MACRO_import. */
28126 if (macinfo_table->is_empty () && lineno <= 1)
28127 {
28128 e.code = 0;
28129 e.lineno = 0;
28130 e.info = NULL;
28131 vec_safe_push (macinfo_table, e);
28132 }
28133 e.code = DW_MACINFO_define;
28134 e.lineno = lineno;
28135 e.info = ggc_strdup (buffer);
28136 vec_safe_push (macinfo_table, e);
28137 }
28138 }
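
/* Illustrative example (hypothetical input): compiling with -g3, a
command-line "-DFOO=1" reaches this hook with a line number of 0 or 1
and buffer "FOO 1", so macinfo_table ends up holding

{ 0, 0, NULL } (the dummy entry pushed above)
{ DW_MACINFO_define, lineno, "FOO 1" }

and the dummy slot is what later lets output_macinfo replace the whole
predefined-macro block with a single DW_MACRO_import. */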
28139
28140 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28141 the tail part of the directive line, i.e. the part which is past the
28142 initial whitespace, #, whitespace, directive-name, and whitespace. */
28143
28144 static void
28145 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28146 const char *buffer ATTRIBUTE_UNUSED)
28147 {
28148 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28149 {
28150 macinfo_entry e;
28151 /* Insert a dummy first entry to be able to optimize the whole
28152 predefined macro block using DW_MACRO_import. */
28153 if (macinfo_table->is_empty () && lineno <= 1)
28154 {
28155 e.code = 0;
28156 e.lineno = 0;
28157 e.info = NULL;
28158 vec_safe_push (macinfo_table, e);
28159 }
28160 e.code = DW_MACINFO_undef;
28161 e.lineno = lineno;
28162 e.info = ggc_strdup (buffer);
28163 vec_safe_push (macinfo_table, e);
28164 }
28165 }
28166
28167 /* Helpers to manipulate the hash table of macinfo entries. */
28168
28169 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28170 {
28171 static inline hashval_t hash (const macinfo_entry *);
28172 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28173 };
28174
28175 inline hashval_t
28176 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28177 {
28178 return htab_hash_string (entry->info);
28179 }
28180
28181 inline bool
28182 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28183 const macinfo_entry *entry2)
28184 {
28185 return !strcmp (entry1->info, entry2->info);
28186 }
28187
28188 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28189
28190 /* Output a single .debug_macinfo entry. */
28191
28192 static void
28193 output_macinfo_op (macinfo_entry *ref)
28194 {
28195 int file_num;
28196 size_t len;
28197 struct indirect_string_node *node;
28198 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28199 struct dwarf_file_data *fd;
28200
28201 switch (ref->code)
28202 {
28203 case DW_MACINFO_start_file:
28204 fd = lookup_filename (ref->info);
28205 file_num = maybe_emit_file (fd);
28206 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28207 dw2_asm_output_data_uleb128 (ref->lineno,
28208 "Included from line number %lu",
28209 (unsigned long) ref->lineno);
28210 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28211 break;
28212 case DW_MACINFO_end_file:
28213 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28214 break;
28215 case DW_MACINFO_define:
28216 case DW_MACINFO_undef:
28217 len = strlen (ref->info) + 1;
28218 if (!dwarf_strict
28219 && len > (size_t) dwarf_offset_size
28220 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28221 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28222 {
28223 ref->code = ref->code == DW_MACINFO_define
28224 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28225 output_macinfo_op (ref);
28226 return;
28227 }
28228 dw2_asm_output_data (1, ref->code,
28229 ref->code == DW_MACINFO_define
28230 ? "Define macro" : "Undefine macro");
28231 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28232 (unsigned long) ref->lineno);
28233 dw2_asm_output_nstring (ref->info, -1, "The macro");
28234 break;
28235 case DW_MACRO_define_strp:
28236 case DW_MACRO_undef_strp:
28237 /* NB: dwarf2out_finish performs:
28238 1. save_macinfo_strings
28239 2. hash table traverse of index_string
28240 3. output_macinfo -> output_macinfo_op
28241 4. output_indirect_strings
28242 -> hash table traverse of output_index_string
28243
28244 When output_macinfo_op is called, all index strings have already
28245 been added to the hash table by save_macinfo_strings, so we must not
28246 pass INSERT to find_slot_with_hash: that could expand the hash table
28247 even when no insertion is needed and change the hash table traversal
28248 order between index_string and output_index_string. */
28249 node = find_AT_string (ref->info, NO_INSERT);
28250 gcc_assert (node
28251 && (node->form == DW_FORM_strp
28252 || node->form == dwarf_FORM (DW_FORM_strx)));
28253 dw2_asm_output_data (1, ref->code,
28254 ref->code == DW_MACRO_define_strp
28255 ? "Define macro strp"
28256 : "Undefine macro strp");
28257 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28258 (unsigned long) ref->lineno);
28259 if (node->form == DW_FORM_strp)
28260 dw2_asm_output_offset (dwarf_offset_size, node->label,
28261 debug_str_section, "The macro: \"%s\"",
28262 ref->info);
28263 else
28264 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28265 ref->info);
28266 break;
28267 case DW_MACRO_import:
28268 dw2_asm_output_data (1, ref->code, "Import");
28269 ASM_GENERATE_INTERNAL_LABEL (label,
28270 DEBUG_MACRO_SECTION_LABEL,
28271 ref->lineno + macinfo_label_base);
28272 dw2_asm_output_offset (dwarf_offset_size, label, NULL, NULL);
28273 break;
28274 default:
28275 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28276 ASM_COMMENT_START, (unsigned long) ref->code);
28277 break;
28278 }
28279 }
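
/* Illustrative example (hand-assembled, hypothetical input): a
"#define PI 3.14" seen at line 7 is emitted by the DW_MACINFO_define
case above roughly as

.byte 0x1 (DW_MACINFO_define)
.uleb128 0x7 (At line number 7)
.ascii "PI 3.14\0" (The macro)

with the exact directives depending on the assembler, while the _strp
variants replace the inline string with a .debug_str offset or, for
DW_FORM_strx, a string index. */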
28280
28281 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28282 other compilation units' .debug_macinfo sections. IDX is the first
28283 index of a define/undef; return the number of ops that should be
28284 emitted in a comdat .debug_macinfo section and emit
28285 a DW_MACRO_import entry referencing it.
28286 If the define/undef entry should be emitted normally, return 0. */
28287
28288 static unsigned
28289 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28290 macinfo_hash_type **macinfo_htab)
28291 {
28292 macinfo_entry *first, *second, *cur, *inc;
28293 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28294 unsigned char checksum[16];
28295 struct md5_ctx ctx;
28296 char *grp_name, *tail;
28297 const char *base;
28298 unsigned int i, count, encoded_filename_len, linebuf_len;
28299 macinfo_entry **slot;
28300
28301 first = &(*macinfo_table)[idx];
28302 second = &(*macinfo_table)[idx + 1];
28303
28304 /* Optimize only if there are at least two consecutive define/undef ops,
28305 and either all of them are before first DW_MACINFO_start_file
28306 with lineno {0,1} (i.e. predefined macro block), or all of them are
28307 in some included header file. */
28308 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28309 return 0;
28310 if (vec_safe_is_empty (files))
28311 {
28312 if (first->lineno > 1 || second->lineno > 1)
28313 return 0;
28314 }
28315 else if (first->lineno == 0)
28316 return 0;
28317
28318 /* Find the last define/undef entry that can be grouped together
28319 with first and at the same time compute md5 checksum of their
28320 codes, linenumbers and strings. */
28321 md5_init_ctx (&ctx);
28322 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28323 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28324 break;
28325 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28326 break;
28327 else
28328 {
28329 unsigned char code = cur->code;
28330 md5_process_bytes (&code, 1, &ctx);
28331 checksum_uleb128 (cur->lineno, &ctx);
28332 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28333 }
28334 md5_finish_ctx (&ctx, checksum);
28335 count = i - idx;
28336
28337 /* From the containing include filename (if any) pick up just
28338 usable characters from its basename. */
28339 if (vec_safe_is_empty (files))
28340 base = "";
28341 else
28342 base = lbasename (files->last ().info);
28343 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28344 if (ISIDNUM (base[i]) || base[i] == '.')
28345 encoded_filename_len++;
28346 /* Account for the '.' separator appended after the filename below. */
28347 if (encoded_filename_len)
28348 encoded_filename_len++;
28349
28350 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28351 linebuf_len = strlen (linebuf);
28352
28353 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
28354 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28355 + 16 * 2 + 1);
28356 memcpy (grp_name, dwarf_offset_size == 4 ? "wm4." : "wm8.", 4);
28357 tail = grp_name + 4;
28358 if (encoded_filename_len)
28359 {
28360 for (i = 0; base[i]; i++)
28361 if (ISIDNUM (base[i]) || base[i] == '.')
28362 *tail++ = base[i];
28363 *tail++ = '.';
28364 }
28365 memcpy (tail, linebuf, linebuf_len);
28366 tail += linebuf_len;
28367 *tail++ = '.';
28368 for (i = 0; i < 16; i++)
28369 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28370
28371 /* Construct a macinfo_entry for DW_MACRO_import
28372 in the empty vector entry before the first define/undef. */
28373 inc = &(*macinfo_table)[idx - 1];
28374 inc->code = DW_MACRO_import;
28375 inc->lineno = 0;
28376 inc->info = ggc_strdup (grp_name);
28377 if (!*macinfo_htab)
28378 *macinfo_htab = new macinfo_hash_type (10);
28379 /* Avoid emitting duplicates. */
28380 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28381 if (*slot != NULL)
28382 {
28383 inc->code = 0;
28384 inc->info = NULL;
28385 /* If such an entry has been used before, just emit
28386 a DW_MACRO_import op. */
28387 inc = *slot;
28388 output_macinfo_op (inc);
28389 /* And clear all macinfo_entry in the range to avoid emitting them
28390 in the second pass. */
28391 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28392 {
28393 cur->code = 0;
28394 cur->info = NULL;
28395 }
28396 }
28397 else
28398 {
28399 *slot = inc;
28400 inc->lineno = (*macinfo_htab)->elements ();
28401 output_macinfo_op (inc);
28402 }
28403 return count;
28404 }
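
/* Illustrative example (hypothetical values): for a run of defines
whose first entry sits at line 10 of an included header "config.h",
with 32-bit DWARF offsets, the group name built above looks like

wm4.config.h.10.<32 hex md5 digits>

so identical macro blocks from different translation units end up in a
single shared comdat .debug_macinfo section. */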
28405
28406 /* Save any strings needed by the macinfo table in the debug str
28407 table. All strings must be collected into the table by the time
28408 index_string is called. */
28409
28410 static void
28411 save_macinfo_strings (void)
28412 {
28413 unsigned len;
28414 unsigned i;
28415 macinfo_entry *ref;
28416
28417 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28418 {
28419 switch (ref->code)
28420 {
28421 /* Match the logic in output_macinfo_op to decide on
28422 indirect strings. */
28423 case DW_MACINFO_define:
28424 case DW_MACINFO_undef:
28425 len = strlen (ref->info) + 1;
28426 if (!dwarf_strict
28427 && len > (unsigned) dwarf_offset_size
28428 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28429 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28430 set_indirect_string (find_AT_string (ref->info));
28431 break;
28432 case DW_MACINFO_start_file:
28433 /* -gsplit-dwarf -g3 will also output filename as indirect
28434 string. */
28435 if (!dwarf_split_debug_info)
28436 break;
28437 /* Fall through. */
28438 case DW_MACRO_define_strp:
28439 case DW_MACRO_undef_strp:
28440 set_indirect_string (find_AT_string (ref->info));
28441 break;
28442 default:
28443 break;
28444 }
28445 }
28446 }
28447
28448 /* Output macinfo section(s). */
28449
28450 static void
28451 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28452 {
28453 unsigned i;
28454 unsigned long length = vec_safe_length (macinfo_table);
28455 macinfo_entry *ref;
28456 vec<macinfo_entry, va_gc> *files = NULL;
28457 macinfo_hash_type *macinfo_htab = NULL;
28458 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28459
28460 if (! length)
28461 return;
28462
28463 /* output_macinfo* uses these interchangeably. */
28464 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28465 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28466 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28467 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28468
28469 /* AIX Assembler inserts the length, so adjust the reference to match the
28470 offset expected by debuggers. */
28471 strcpy (dl_section_ref, debug_line_label);
28472 if (XCOFF_DEBUGGING_INFO)
28473 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28474
28475 /* For .debug_macro emit the section header. */
28476 if (!dwarf_strict || dwarf_version >= 5)
28477 {
28478 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28479 "DWARF macro version number");
28480 if (dwarf_offset_size == 8)
28481 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28482 else
28483 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28484 dw2_asm_output_offset (dwarf_offset_size, debug_line_label,
28485 debug_line_section, NULL);
28486 }
28487
28488 /* The first loop emits the primary .debug_macinfo section and clears
28489 each macinfo_entry once its op has been emitted.
28490 If a longer range of define/undef ops can be optimized using
28491 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28492 the vector slot before the first define/undef in the range, while
28493 the whole range of define/undef ops is kept but not emitted here. */
28494 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28495 {
28496 switch (ref->code)
28497 {
28498 case DW_MACINFO_start_file:
28499 vec_safe_push (files, *ref);
28500 break;
28501 case DW_MACINFO_end_file:
28502 if (!vec_safe_is_empty (files))
28503 files->pop ();
28504 break;
28505 case DW_MACINFO_define:
28506 case DW_MACINFO_undef:
28507 if ((!dwarf_strict || dwarf_version >= 5)
28508 && HAVE_COMDAT_GROUP
28509 && vec_safe_length (files) != 1
28510 && i > 0
28511 && i + 1 < length
28512 && (*macinfo_table)[i - 1].code == 0)
28513 {
28514 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28515 if (count)
28516 {
28517 i += count - 1;
28518 continue;
28519 }
28520 }
28521 break;
28522 case 0:
28523 /* A dummy entry may be inserted at the beginning to be able
28524 to optimize the whole block of predefined macros. */
28525 if (i == 0)
28526 continue;
28527 default:
28528 break;
28529 }
28530 output_macinfo_op (ref);
28531 ref->info = NULL;
28532 ref->code = 0;
28533 }
28534
28535 if (!macinfo_htab)
28536 return;
28537
28538 /* Save the number of transparent includes so we can adjust the
28539 label number for the fat LTO object DWARF. */
28540 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28541
28542 delete macinfo_htab;
28543 macinfo_htab = NULL;
28544
28545 /* If any DW_MACRO_import ops were used, then at each DW_MACRO_import
28546 entry terminate the current chain, switch to a new comdat
28547 .debug_macinfo section, and emit the define/undef entries within it. */
28548 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28549 switch (ref->code)
28550 {
28551 case 0:
28552 continue;
28553 case DW_MACRO_import:
28554 {
28555 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28556 tree comdat_key = get_identifier (ref->info);
28557 /* Terminate the previous .debug_macinfo section. */
28558 dw2_asm_output_data (1, 0, "End compilation unit");
28559 targetm.asm_out.named_section (debug_macinfo_section_name,
28560 SECTION_DEBUG
28561 | SECTION_LINKONCE
28562 | (early_lto_debug
28563 ? SECTION_EXCLUDE : 0),
28564 comdat_key);
28565 ASM_GENERATE_INTERNAL_LABEL (label,
28566 DEBUG_MACRO_SECTION_LABEL,
28567 ref->lineno + macinfo_label_base);
28568 ASM_OUTPUT_LABEL (asm_out_file, label);
28569 ref->code = 0;
28570 ref->info = NULL;
28571 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28572 "DWARF macro version number");
28573 if (dwarf_offset_size == 8)
28574 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28575 else
28576 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28577 }
28578 break;
28579 case DW_MACINFO_define:
28580 case DW_MACINFO_undef:
28581 output_macinfo_op (ref);
28582 ref->code = 0;
28583 ref->info = NULL;
28584 break;
28585 default:
28586 gcc_unreachable ();
28587 }
28588
28589 macinfo_label_base += macinfo_label_base_adj;
28590 }
28591
28592 /* Initialize the various sections and labels for dwarf output; when
28593 EARLY_LTO_DEBUG is set, use the early LTO debug sections. Returns the
28594 generation (zero-based count of times the function was called). */
28595
28596 static unsigned
28597 init_sections_and_labels (bool early_lto_debug)
28598 {
28599 /* As we may get called multiple times have a generation count for
28600 labels. */
28601 static unsigned generation = 0;
28602
28603 if (early_lto_debug)
28604 {
28605 if (!dwarf_split_debug_info)
28606 {
28607 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28608 SECTION_DEBUG | SECTION_EXCLUDE,
28609 NULL);
28610 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28611 SECTION_DEBUG | SECTION_EXCLUDE,
28612 NULL);
28613 debug_macinfo_section_name
28614 = ((dwarf_strict && dwarf_version < 5)
28615 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28616 debug_macinfo_section = get_section (debug_macinfo_section_name,
28617 SECTION_DEBUG
28618 | SECTION_EXCLUDE, NULL);
28619 }
28620 else
28621 {
28622 /* ??? Which of the following do we need early? */
28623 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28624 SECTION_DEBUG | SECTION_EXCLUDE,
28625 NULL);
28626 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28627 SECTION_DEBUG | SECTION_EXCLUDE,
28628 NULL);
28629 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28630 SECTION_DEBUG
28631 | SECTION_EXCLUDE, NULL);
28632 debug_skeleton_abbrev_section
28633 = get_section (DEBUG_LTO_ABBREV_SECTION,
28634 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28635 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28636 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28637 generation);
28638
28639 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28640 stay in the main .o, but the skeleton_line goes into the split
28641 off dwo. */
28642 debug_skeleton_line_section
28643 = get_section (DEBUG_LTO_LINE_SECTION,
28644 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28645 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28646 DEBUG_SKELETON_LINE_SECTION_LABEL,
28647 generation);
28648 debug_str_offsets_section
28649 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28650 SECTION_DEBUG | SECTION_EXCLUDE,
28651 NULL);
28652 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28653 DEBUG_SKELETON_INFO_SECTION_LABEL,
28654 generation);
28655 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28656 DEBUG_STR_DWO_SECTION_FLAGS,
28657 NULL);
28658 debug_macinfo_section_name
28659 = ((dwarf_strict && dwarf_version < 5)
28660 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28661 debug_macinfo_section = get_section (debug_macinfo_section_name,
28662 SECTION_DEBUG | SECTION_EXCLUDE,
28663 NULL);
28664 }
28665 /* For macro info and the file table we have to refer to a
28666 debug_line section. */
28667 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28668 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28669 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28670 DEBUG_LINE_SECTION_LABEL, generation);
28671
28672 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28673 DEBUG_STR_SECTION_FLAGS
28674 | SECTION_EXCLUDE, NULL);
28675 if (!dwarf_split_debug_info)
28676 debug_line_str_section
28677 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28678 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28679 }
28680 else
28681 {
28682 if (!dwarf_split_debug_info)
28683 {
28684 debug_info_section = get_section (DEBUG_INFO_SECTION,
28685 SECTION_DEBUG, NULL);
28686 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28687 SECTION_DEBUG, NULL);
28688 debug_loc_section = get_section (dwarf_version >= 5
28689 ? DEBUG_LOCLISTS_SECTION
28690 : DEBUG_LOC_SECTION,
28691 SECTION_DEBUG, NULL);
28692 debug_macinfo_section_name
28693 = ((dwarf_strict && dwarf_version < 5)
28694 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28695 debug_macinfo_section = get_section (debug_macinfo_section_name,
28696 SECTION_DEBUG, NULL);
28697 }
28698 else
28699 {
28700 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28701 SECTION_DEBUG | SECTION_EXCLUDE,
28702 NULL);
28703 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28704 SECTION_DEBUG | SECTION_EXCLUDE,
28705 NULL);
28706 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28707 SECTION_DEBUG, NULL);
28708 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28709 SECTION_DEBUG, NULL);
28710 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28711 SECTION_DEBUG, NULL);
28712 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28713 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28714 generation);
28715
28716 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28717 stay in the main .o, but the skeleton_line goes into the
28718 split off dwo. */
28719 debug_skeleton_line_section
28720 = get_section (DEBUG_DWO_LINE_SECTION,
28721 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28722 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28723 DEBUG_SKELETON_LINE_SECTION_LABEL,
28724 generation);
28725 debug_str_offsets_section
28726 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28727 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28728 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28729 DEBUG_SKELETON_INFO_SECTION_LABEL,
28730 generation);
28731 debug_loc_section = get_section (dwarf_version >= 5
28732 ? DEBUG_DWO_LOCLISTS_SECTION
28733 : DEBUG_DWO_LOC_SECTION,
28734 SECTION_DEBUG | SECTION_EXCLUDE,
28735 NULL);
28736 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28737 DEBUG_STR_DWO_SECTION_FLAGS,
28738 NULL);
28739 debug_macinfo_section_name
28740 = ((dwarf_strict && dwarf_version < 5)
28741 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28742 debug_macinfo_section = get_section (debug_macinfo_section_name,
28743 SECTION_DEBUG | SECTION_EXCLUDE,
28744 NULL);
28745 }
28746 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28747 SECTION_DEBUG, NULL);
28748 debug_line_section = get_section (DEBUG_LINE_SECTION,
28749 SECTION_DEBUG, NULL);
28750 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28751 SECTION_DEBUG, NULL);
28752 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28753 SECTION_DEBUG, NULL);
28754 debug_str_section = get_section (DEBUG_STR_SECTION,
28755 DEBUG_STR_SECTION_FLAGS, NULL);
28756 if ((!dwarf_split_debug_info && !output_asm_line_debug_info ())
28757 || asm_outputs_debug_line_str ())
28758 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28759 DEBUG_STR_SECTION_FLAGS, NULL);
28760
28761 debug_ranges_section = get_section (dwarf_version >= 5
28762 ? DEBUG_RNGLISTS_SECTION
28763 : DEBUG_RANGES_SECTION,
28764 SECTION_DEBUG, NULL);
28765 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28766 SECTION_DEBUG, NULL);
28767 }
28768
28769 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28770 DEBUG_ABBREV_SECTION_LABEL, generation);
28771 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28772 DEBUG_INFO_SECTION_LABEL, generation);
28773 info_section_emitted = false;
28774 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28775 DEBUG_LINE_SECTION_LABEL, generation);
28776 /* There are up to 4 unique ranges labels per generation.
28777 See also output_rnglists. */
28778 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28779 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28780 if (dwarf_version >= 5 && dwarf_split_debug_info)
28781 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28782 DEBUG_RANGES_SECTION_LABEL,
28783 1 + generation * 4);
28784 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28785 DEBUG_ADDR_SECTION_LABEL, generation);
28786 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28787 (dwarf_strict && dwarf_version < 5)
28788 ? DEBUG_MACINFO_SECTION_LABEL
28789 : DEBUG_MACRO_SECTION_LABEL, generation);
28790 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28791 generation);
28792
28793 ++generation;
28794 return generation - 1;
28795 }
28796
28797 /* Set up for Dwarf output at the start of compilation. */
28798
28799 static void
28800 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28801 {
28802 /* Allocate the file_table. */
28803 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28804
28805 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28806 /* Allocate the decl_die_table. */
28807 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28808
28809 /* Allocate the decl_loc_table. */
28810 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28811
28812 /* Allocate the cached_dw_loc_list_table. */
28813 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28814
28815 /* Allocate the initial hunk of the abbrev_die_table. */
28816 vec_alloc (abbrev_die_table, 256);
28817 /* Zero-th entry is allocated, but unused. */
28818 abbrev_die_table->quick_push (NULL);
28819
28820 /* Allocate the dwarf_proc_stack_usage_map. */
28821 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28822
28823 /* Allocate the pubtypes and pubnames vectors. */
28824 vec_alloc (pubname_table, 32);
28825 vec_alloc (pubtype_table, 32);
28826
28827 vec_alloc (incomplete_types, 64);
28828
28829 vec_alloc (used_rtx_array, 32);
28830
28831 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28832 vec_alloc (macinfo_table, 64);
28833 #endif
28834
28835 /* If front-ends already registered a main translation unit but we were not
28836 ready to perform the association, do this now. */
28837 if (main_translation_unit != NULL_TREE)
28838 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28839 }
28840
28841 /* Called before compile () starts outputting functions, variables
28842 and toplevel asms into assembly. */
28843
28844 static void
28845 dwarf2out_assembly_start (void)
28846 {
28847 if (text_section_line_info)
28848 return;
28849
28850 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28851 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28852 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28853 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28854 COLD_TEXT_SECTION_LABEL, 0);
28855 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28856
28857 switch_to_section (text_section);
28858 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28859 #endif
28860
28861 /* Make sure the line number table for .text always exists. */
28862 text_section_line_info = new_line_info_table ();
28863 text_section_line_info->end_label = text_end_label;
28864
28865 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28866 cur_line_info_table = text_section_line_info;
28867 #endif
28868
28869 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28870 && dwarf2out_do_cfi_asm ()
28871 && !dwarf2out_do_eh_frame ())
28872 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28873 }
28874
28875 /* A helper function for dwarf2out_finish called through
28876 htab_traverse. Assign a string its index. All strings must be
28877 collected into the table by the time index_string is called,
28878 because the indexing code relies on htab_traverse to traverse nodes
28879 in the same order for each run. */
28880
28881 int
28882 index_string (indirect_string_node **h, unsigned int *index)
28883 {
28884 indirect_string_node *node = *h;
28885
28886 find_string_form (node);
28887 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28888 {
28889 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28890 node->index = *index;
28891 *index += 1;
28892 }
28893 return 1;
28894 }
28895
28896 /* A helper function for output_indirect_strings called through
28897 htab_traverse. Output the offset to a string and update the
28898 current offset. */
28899
28900 int
28901 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28902 {
28903 indirect_string_node *node = *h;
28904
28905 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28906 {
28907 /* Assert that this node has been assigned an index. */
28908 gcc_assert (node->index != NO_INDEX_ASSIGNED
28909 && node->index != NOT_INDEXED);
28910 dw2_asm_output_data (dwarf_offset_size, *offset,
28911 "indexed string 0x%x: %s", node->index, node->str);
28912 *offset += strlen (node->str) + 1;
28913 }
28914 return 1;
28915 }
28916
28917 /* A helper function for dwarf2out_finish called through
28918 htab_traverse. Output the indexed string. */
28919
28920 int
28921 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28922 {
28923 struct indirect_string_node *node = *h;
28924
28925 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28926 {
28927 /* Assert that the strings are output in the same order as their
28928 indexes were assigned. */
28929 gcc_assert (*cur_idx == node->index);
28930 assemble_string (node->str, strlen (node->str) + 1);
28931 *cur_idx += 1;
28932 }
28933 return 1;
28934 }
28935
28936 /* A helper function for output_indirect_strings. Counts the number
28937 of indexed string offsets. Must match the logic of the functions
28938 output_index_string[_offsets] above. */
28939 int
28940 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28941 {
28942 struct indirect_string_node *node = *h;
28943
28944 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28945 *last_idx += 1;
28946 return 1;
28947 }
28948
28949 /* A helper function for dwarf2out_finish called through
28950 htab_traverse. Emit one queued .debug_str string. */
28951
28952 int
28953 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28954 {
28955 struct indirect_string_node *node = *h;
28956
28957 node->form = find_string_form (node);
28958 if (node->form == form && node->refcount > 0)
28959 {
28960 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28961 assemble_string (node->str, strlen (node->str) + 1);
28962 }
28963
28964 return 1;
28965 }
28966
28967 /* Output the indexed string table. */
28968
28969 static void
28970 output_indirect_strings (void)
28971 {
28972 switch_to_section (debug_str_section);
28973 if (!dwarf_split_debug_info)
28974 debug_str_hash->traverse<enum dwarf_form,
28975 output_indirect_string> (DW_FORM_strp);
28976 else
28977 {
28978 unsigned int offset = 0;
28979 unsigned int cur_idx = 0;
28980
28981 if (skeleton_debug_str_hash)
28982 skeleton_debug_str_hash->traverse<enum dwarf_form,
28983 output_indirect_string> (DW_FORM_strp);
28984
28985 switch_to_section (debug_str_offsets_section);
28986 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28987 header. Note that we don't need to generate a label for the
28988 actual index table following the header here, because this is
28989 for the split dwarf case only. In a .dwo file there is only
28990 one string offsets table (and one debug info section). But
28991 if we were to start using string offset tables for the main (or
28992 skeleton) unit, we would have to add a DW_AT_str_offsets_base
28993 pointing to the actual index after the header. Split dwarf
28994 units will never have a string offsets base attribute. When
28995 a split unit is moved into a .dwp file the string offsets can
28996 be found through the .debug_cu_index section table. */
28997 if (dwarf_version >= 5)
28998 {
28999 unsigned int last_idx = 0;
29000 unsigned long str_offsets_length;
29001
29002 debug_str_hash->traverse_noresize
29003 <unsigned int *, count_index_strings> (&last_idx);
29004 str_offsets_length = last_idx * dwarf_offset_size + 4;
29005 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
29006 dw2_asm_output_data (4, 0xffffffff,
29007 "Escape value for 64-bit DWARF extension");
29008 dw2_asm_output_data (dwarf_offset_size, str_offsets_length,
29009 "Length of string offsets unit");
29010 dw2_asm_output_data (2, 5, "DWARF string offsets version");
29011 dw2_asm_output_data (2, 0, "Header zero padding");
29012 }
29013 debug_str_hash->traverse_noresize
29014 <unsigned int *, output_index_string_offset> (&offset);
29015 switch_to_section (debug_str_dwo_section);
29016 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
29017 (&cur_idx);
29018 }
29019 }
29020
29021 /* Callback for htab_traverse to assign an index to an entry in the
29022 table, and to write that entry to the .debug_addr section. */
29023
29024 int
29025 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
29026 {
29027 addr_table_entry *entry = *slot;
29028
29029 if (entry->refcount == 0)
29030 {
29031 gcc_assert (entry->index == NO_INDEX_ASSIGNED
29032 || entry->index == NOT_INDEXED);
29033 return 1;
29034 }
29035
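  /* Entries are emitted in exactly the order in which their indexes were
     assigned; DW_OP_addrx / DW_FORM_addrx style references encode these
     indexes.  */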
29036 gcc_assert (entry->index == *cur_index);
29037 (*cur_index)++;
29038
29039 switch (entry->kind)
29040 {
29041 case ate_kind_rtx:
29042 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
29043 "0x%x", entry->index);
29044 break;
29045 case ate_kind_rtx_dtprel:
29046 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
29047 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
29048 DWARF2_ADDR_SIZE,
29049 entry->addr.rtl);
29050 fputc ('\n', asm_out_file);
29051 break;
29052 case ate_kind_label:
29053 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29054 "0x%x", entry->index);
29055 break;
29056 default:
29057 gcc_unreachable ();
29058 }
29059 return 1;
29060 }
29061
29062 /* A helper function for dwarf2out_finish. Counts the number
29063    of indexed addresses.  Must match the logic of the function
29064 output_addr_table_entry above. */
29065 int
29066 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29067 {
29068 addr_table_entry *entry = *slot;
29069
29070 if (entry->refcount > 0)
29071 *last_idx += 1;
29072 return 1;
29073 }
29074
29075 /* Produce the .debug_addr section. */
29076
29077 static void
29078 output_addr_table (void)
29079 {
29080 unsigned int index = 0;
29081 if (addr_index_table == NULL || addr_index_table->size () == 0)
29082 return;
29083
29084 switch_to_section (debug_addr_section);
29085   /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
29086      which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
29087 before DWARF5, didn't have a header for .debug_addr units.
29088 DWARF5 specifies a small header when address tables are used. */
29089 if (dwarf_version >= 5)
29090 {
29091 unsigned int last_idx = 0;
29092 unsigned long addrs_length;
29093
29094 addr_index_table->traverse_noresize
29095 <unsigned int *, count_index_addrs> (&last_idx);
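      /* The unit length excludes the initial length field itself; it covers
	 the 2-byte version, the 1-byte address size, the 1-byte segment
	 selector size and the address entries themselves.  */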
29096 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
29097
29098 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
29099 dw2_asm_output_data (4, 0xffffffff,
29100 "Escape value for 64-bit DWARF extension");
29101 dw2_asm_output_data (dwarf_offset_size, addrs_length,
29102 "Length of Address Unit");
29103 dw2_asm_output_data (2, 5, "DWARF addr version");
29104 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
29105 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
29106 }
29107 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
29108
29109 addr_index_table
29110 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29111 }
29112
29113 #if ENABLE_ASSERT_CHECKING
29114 /* Verify that all marks are clear. */
29115
29116 static void
29117 verify_marks_clear (dw_die_ref die)
29118 {
29119 dw_die_ref c;
29120
29121 gcc_assert (! die->die_mark);
29122 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29123 }
29124 #endif /* ENABLE_ASSERT_CHECKING */
29125
29126 /* Clear the marks for a die and its children.
29127    It is harmless if the mark isn't already set.  */
29128
29129 static void
29130 prune_unmark_dies (dw_die_ref die)
29131 {
29132 dw_die_ref c;
29133
29134 if (die->die_mark)
29135 die->die_mark = 0;
29136 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29137 }
29138
29139 /* Given LOC that is referenced by a DIE we're marking as used, find all
29140    DWARF procedures it references and mark them as used.  */
29141
29142 static void
29143 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29144 {
29145 for (; loc != NULL; loc = loc->dw_loc_next)
29146 switch (loc->dw_loc_opc)
29147 {
29148 case DW_OP_implicit_pointer:
29149 case DW_OP_convert:
29150 case DW_OP_reinterpret:
29151 case DW_OP_GNU_implicit_pointer:
29152 case DW_OP_GNU_convert:
29153 case DW_OP_GNU_reinterpret:
29154 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29155 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29156 break;
29157 case DW_OP_GNU_variable_value:
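	/* If the operand is still a decl reference, resolve it to the
	   corresponding DIE now so the fall-through code below can mark it.  */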
29158 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29159 {
29160 dw_die_ref ref
29161 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29162 if (ref == NULL)
29163 break;
29164 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29165 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29166 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29167 }
29168 /* FALLTHRU */
29169 case DW_OP_call2:
29170 case DW_OP_call4:
29171 case DW_OP_call_ref:
29172 case DW_OP_const_type:
29173 case DW_OP_GNU_const_type:
29174 case DW_OP_GNU_parameter_ref:
29175 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29176 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29177 break;
29178 case DW_OP_regval_type:
29179 case DW_OP_deref_type:
29180 case DW_OP_GNU_regval_type:
29181 case DW_OP_GNU_deref_type:
29182 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29183 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29184 break;
29185 case DW_OP_entry_value:
29186 case DW_OP_GNU_entry_value:
29187 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29188 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29189 break;
29190 default:
29191 break;
29192 }
29193 }
29194
29195 /* Given DIE that we're marking as used, find any other dies
29196 it references as attributes and mark them as used. */
29197
29198 static void
29199 prune_unused_types_walk_attribs (dw_die_ref die)
29200 {
29201 dw_attr_node *a;
29202 unsigned ix;
29203
29204 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29205 {
29206 switch (AT_class (a))
29207 {
29208 /* Make sure DWARF procedures referenced by location descriptions will
29209 get emitted. */
29210 case dw_val_class_loc:
29211 prune_unused_types_walk_loc_descr (AT_loc (a));
29212 break;
29213 case dw_val_class_loc_list:
29214 for (dw_loc_list_ref list = AT_loc_list (a);
29215 list != NULL;
29216 list = list->dw_loc_next)
29217 prune_unused_types_walk_loc_descr (list->expr);
29218 break;
29219
29220 case dw_val_class_view_list:
29221 /* This points to a loc_list in another attribute, so it's
29222 already covered. */
29223 break;
29224
29225 case dw_val_class_die_ref:
29226 /* A reference to another DIE.
29227 Make sure that it will get emitted.
29228 If it was broken out into a comdat group, don't follow it. */
29229 if (! AT_ref (a)->comdat_type_p
29230 || a->dw_attr == DW_AT_specification)
29231 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29232 break;
29233
29234 case dw_val_class_str:
29235 /* Set the string's refcount to 0 so that prune_unused_types_mark
29236 accounts properly for it. */
29237 a->dw_attr_val.v.val_str->refcount = 0;
29238 break;
29239
29240 default:
29241 break;
29242 }
29243 }
29244 }
29245
29246 /* Mark the child DIEs of DIE that describe its generic parameters
   and arguments.  */
29247
29248 static void
29249 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29250 {
29251 dw_die_ref c;
29252
29253 if (die == NULL || die->die_child == NULL)
29254 return;
29255 c = die->die_child;
29256 do
29257 {
29258 if (is_template_parameter (c))
29259 prune_unused_types_mark (c, 1);
29260 c = c->die_sib;
29261 } while (c && c != die->die_child);
29262 }
29263
29264 /* Mark DIE as being used. If DOKIDS is true, then walk down
29265 to DIE's children. */
29266
29267 static void
29268 prune_unused_types_mark (dw_die_ref die, int dokids)
29269 {
29270 dw_die_ref c;
29271
29272 if (die->die_mark == 0)
29273 {
29274 /* We haven't done this node yet. Mark it as used. */
29275 die->die_mark = 1;
29276 /* If this is the DIE of a generic type instantiation,
29277 mark the children DIEs that describe its generic parms and
29278 args. */
29279 prune_unused_types_mark_generic_parms_dies (die);
29280
29281 /* We also have to mark its parents as used.
29282 (But we don't want to mark our parent's kids due to this,
29283 unless it is a class.) */
29284 if (die->die_parent)
29285 prune_unused_types_mark (die->die_parent,
29286 class_scope_p (die->die_parent));
29287
29288 /* Mark any referenced nodes. */
29289 prune_unused_types_walk_attribs (die);
29290
29291 /* If this node is a specification,
29292 also mark the definition, if it exists. */
29293 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29294 prune_unused_types_mark (die->die_definition, 1);
29295 }
29296
29297 if (dokids && die->die_mark != 2)
29298 {
29299 /* We need to walk the children, but haven't done so yet.
29300 Remember that we've walked the kids. */
29301 die->die_mark = 2;
29302
29303 /* If this is an array type, we need to make sure our
29304 kids get marked, even if they're types. If we're
29305 breaking out types into comdat sections, do this
29306 for all type definitions. */
29307 if (die->die_tag == DW_TAG_array_type
29308 || (use_debug_types
29309 && is_type_die (die) && ! is_declaration_die (die)))
29310 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29311 else
29312 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29313 }
29314 }
29315
29316 /* For local classes, check whether any static member functions were
29317    emitted and, if so, mark them.  */
29318
29319 static void
29320 prune_unused_types_walk_local_classes (dw_die_ref die)
29321 {
29322 dw_die_ref c;
29323
29324 if (die->die_mark == 2)
29325 return;
29326
29327 switch (die->die_tag)
29328 {
29329 case DW_TAG_structure_type:
29330 case DW_TAG_union_type:
29331 case DW_TAG_class_type:
29332 case DW_TAG_interface_type:
29333 break;
29334
29335 case DW_TAG_subprogram:
29336 if (!get_AT_flag (die, DW_AT_declaration)
29337 || die->die_definition != NULL)
29338 prune_unused_types_mark (die, 1);
29339 return;
29340
29341 default:
29342 return;
29343 }
29344
29345 /* Mark children. */
29346 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29347 }
29348
29349 /* Walk the tree DIE and mark types that we actually use. */
29350
29351 static void
29352 prune_unused_types_walk (dw_die_ref die)
29353 {
29354 dw_die_ref c;
29355
29356 /* Don't do anything if this node is already marked and
29357 children have been marked as well. */
29358 if (die->die_mark == 2)
29359 return;
29360
29361 switch (die->die_tag)
29362 {
29363 case DW_TAG_structure_type:
29364 case DW_TAG_union_type:
29365 case DW_TAG_class_type:
29366 case DW_TAG_interface_type:
29367 if (die->die_perennial_p)
29368 break;
29369
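      /* See whether this type is nested inside a function, i.e. whether it
	 is a local class.  */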
29370 for (c = die->die_parent; c; c = c->die_parent)
29371 if (c->die_tag == DW_TAG_subprogram)
29372 break;
29373
29374 /* Finding used static member functions inside of classes
29375 is needed just for local classes, because for other classes
29376 static member function DIEs with DW_AT_specification
29377 are emitted outside of the DW_TAG_*_type. If we ever change
29378 it, we'd need to call this even for non-local classes. */
29379 if (c)
29380 prune_unused_types_walk_local_classes (die);
29381
29382 /* It's a type node --- don't mark it. */
29383 return;
29384
29385 case DW_TAG_const_type:
29386 case DW_TAG_packed_type:
29387 case DW_TAG_pointer_type:
29388 case DW_TAG_reference_type:
29389 case DW_TAG_rvalue_reference_type:
29390 case DW_TAG_volatile_type:
29391 case DW_TAG_typedef:
29392 case DW_TAG_array_type:
29393 case DW_TAG_friend:
29394 case DW_TAG_enumeration_type:
29395 case DW_TAG_subroutine_type:
29396 case DW_TAG_string_type:
29397 case DW_TAG_set_type:
29398 case DW_TAG_subrange_type:
29399 case DW_TAG_ptr_to_member_type:
29400 case DW_TAG_file_type:
29401 /* Type nodes are useful only when other DIEs reference them --- don't
29402 mark them. */
29403 /* FALLTHROUGH */
29404
29405 case DW_TAG_dwarf_procedure:
29406 /* Likewise for DWARF procedures. */
29407
29408 if (die->die_perennial_p)
29409 break;
29410
29411 return;
29412
29413 case DW_TAG_variable:
29414 if (flag_debug_only_used_symbols)
29415 {
29416 if (die->die_perennial_p)
29417 break;
29418
29419 /* premark_used_variables marks external variables --- don't mark
29420 them here. But function-local externals are always considered
29421 used. */
29422 if (get_AT (die, DW_AT_external))
29423 {
29424 for (c = die->die_parent; c; c = c->die_parent)
29425 if (c->die_tag == DW_TAG_subprogram)
29426 break;
29427 if (!c)
29428 return;
29429 }
29430 }
29431 /* FALLTHROUGH */
29432
29433 default:
29434 /* Mark everything else. */
29435 break;
29436 }
29437
29438 if (die->die_mark == 0)
29439 {
29440 die->die_mark = 1;
29441
29442 /* Now, mark any dies referenced from here. */
29443 prune_unused_types_walk_attribs (die);
29444 }
29445
29446 die->die_mark = 2;
29447
29448 /* Mark children. */
29449 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29450 }
29451
29452 /* Increment the string counts on strings referred to from DIE's
29453 attributes. */
29454
29455 static void
29456 prune_unused_types_update_strings (dw_die_ref die)
29457 {
29458 dw_attr_node *a;
29459 unsigned ix;
29460
29461 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29462 if (AT_class (a) == dw_val_class_str)
29463 {
29464 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29465 s->refcount++;
29466 /* Avoid unnecessarily putting strings that are used less than
29467 twice in the hash table. */
29468 if (s->refcount
29469 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29470 {
29471 indirect_string_node **slot
29472 = debug_str_hash->find_slot_with_hash (s->str,
29473 htab_hash_string (s->str),
29474 INSERT);
29475 gcc_assert (*slot == NULL);
29476 *slot = s;
29477 }
29478 }
29479 }
29480
29481 /* Mark DIE and its children as removed. */
29482
29483 static void
29484 mark_removed (dw_die_ref die)
29485 {
29486 dw_die_ref c;
29487 die->removed = true;
29488 FOR_EACH_CHILD (die, c, mark_removed (c));
29489 }
29490
29491 /* Remove from the tree DIE any dies that aren't marked. */
29492
29493 static void
29494 prune_unused_types_prune (dw_die_ref die)
29495 {
29496 dw_die_ref c;
29497
29498 gcc_assert (die->die_mark);
29499 prune_unused_types_update_strings (die);
29500
29501 if (! die->die_child)
29502 return;
29503
29504 c = die->die_child;
29505 do {
29506 dw_die_ref prev = c, next;
29507 for (c = c->die_sib; ! c->die_mark; c = next)
29508 if (c == die->die_child)
29509 {
29510 /* No marked children between 'prev' and the end of the list. */
29511 if (prev == c)
29512 /* No marked children at all. */
29513 die->die_child = NULL;
29514 else
29515 {
29516 prev->die_sib = c->die_sib;
29517 die->die_child = prev;
29518 }
29519 c->die_sib = NULL;
29520 mark_removed (c);
29521 return;
29522 }
29523 else
29524 {
29525 next = c->die_sib;
29526 c->die_sib = NULL;
29527 mark_removed (c);
29528 }
29529
29530 if (c != prev->die_sib)
29531 prev->die_sib = c;
29532 prune_unused_types_prune (c);
29533 } while (c != die->die_child);
29534 }
29535
29536 /* Remove dies representing declarations that we never use. */
29537
29538 static void
29539 prune_unused_types (void)
29540 {
29541 unsigned int i;
29542 limbo_die_node *node;
29543 comdat_type_node *ctnode;
29544 pubname_entry *pub;
29545 dw_die_ref base_type;
29546
29547 #if ENABLE_ASSERT_CHECKING
29548 /* All the marks should already be clear. */
29549 verify_marks_clear (comp_unit_die ());
29550 for (node = limbo_die_list; node; node = node->next)
29551 verify_marks_clear (node->die);
29552 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29553 verify_marks_clear (ctnode->root_die);
29554 #endif /* ENABLE_ASSERT_CHECKING */
29555
29556 /* Mark types that are used in global variables. */
29557 premark_types_used_by_global_vars ();
29558
29559 /* Mark variables used in the symtab. */
29560 if (flag_debug_only_used_symbols)
29561 premark_used_variables ();
29562
29563 /* Set the mark on nodes that are actually used. */
29564 prune_unused_types_walk (comp_unit_die ());
29565 for (node = limbo_die_list; node; node = node->next)
29566 prune_unused_types_walk (node->die);
29567 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29568 {
29569 prune_unused_types_walk (ctnode->root_die);
29570 prune_unused_types_mark (ctnode->type_die, 1);
29571 }
29572
29573 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29574 are unusual in that they are pubnames that are the children of pubtypes.
29575 They should only be marked via their parent DW_TAG_enumeration_type die,
29576 not as roots in themselves. */
29577 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29578 if (pub->die->die_tag != DW_TAG_enumerator)
29579 prune_unused_types_mark (pub->die, 1);
29580 for (i = 0; base_types.iterate (i, &base_type); i++)
29581 prune_unused_types_mark (base_type, 1);
29582
29583 /* Also set the mark on nodes that could be referenced by
29584 DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
29585 by DW_TAG_inlined_subroutine origins. */
29586 cgraph_node *cnode;
29587 FOR_EACH_FUNCTION (cnode)
29588 if (cnode->referred_to_p (false))
29589 {
29590 dw_die_ref die = lookup_decl_die (cnode->decl);
29591 if (die == NULL || die->die_mark)
29592 continue;
29593 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29594 if (e->caller != cnode)
29595 {
29596 prune_unused_types_mark (die, 1);
29597 break;
29598 }
29599 }
29600
29601 if (debug_str_hash)
29602 debug_str_hash->empty ();
29603 if (skeleton_debug_str_hash)
29604 skeleton_debug_str_hash->empty ();
29605 prune_unused_types_prune (comp_unit_die ());
29606 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29607 {
29608 node = *pnode;
29609 if (!node->die->die_mark)
29610 *pnode = node->next;
29611 else
29612 {
29613 prune_unused_types_prune (node->die);
29614 pnode = &node->next;
29615 }
29616 }
29617 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29618 prune_unused_types_prune (ctnode->root_die);
29619
29620 /* Leave the marks clear. */
29621 prune_unmark_dies (comp_unit_die ());
29622 for (node = limbo_die_list; node; node = node->next)
29623 prune_unmark_dies (node->die);
29624 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29625 prune_unmark_dies (ctnode->root_die);
29626 }
29627
29628 /* Helpers to manipulate hash table of comdat type units. */
29629
29630 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29631 {
29632 static inline hashval_t hash (const comdat_type_node *);
29633 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29634 };
29635
29636 inline hashval_t
29637 comdat_type_hasher::hash (const comdat_type_node *type_node)
29638 {
29639 hashval_t h;
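  /* The type signature is itself an 8-byte hash, so the first
     sizeof (hashval_t) bytes of it already make a usable hash value.  */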
29640 memcpy (&h, type_node->signature, sizeof (h));
29641 return h;
29642 }
29643
29644 inline bool
29645 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29646 const comdat_type_node *type_node_2)
29647 {
29648 return (! memcmp (type_node_1->signature, type_node_2->signature,
29649 DWARF_TYPE_SIGNATURE_SIZE));
29650 }
29651
29652 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29653    to the location where it would have been added had we known its
29654    DECL_ASSEMBLER_NAME when we added the other attributes.  This will
29655    probably improve the compactness of the debug info by removing
29656    equivalent abbrevs, and hide any differences caused by deferring the
29657    computation of the assembler name, as triggered by e.g. PCH.  */
29658
29659 static inline void
29660 move_linkage_attr (dw_die_ref die)
29661 {
29662 unsigned ix = vec_safe_length (die->die_attr);
29663 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29664
29665 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29666 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29667
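  /* Search backwards for the name/source-coordinate attribute after which
     the linkage name would have been added, and insert it there.  */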
29668 while (--ix > 0)
29669 {
29670 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29671
29672 if (prev->dw_attr == DW_AT_decl_line
29673 || prev->dw_attr == DW_AT_decl_column
29674 || prev->dw_attr == DW_AT_name)
29675 break;
29676 }
29677
29678 if (ix != vec_safe_length (die->die_attr) - 1)
29679 {
29680 die->die_attr->pop ();
29681 die->die_attr->quick_insert (ix, linkage);
29682 }
29683 }
29684
29685 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29686 referenced from typed stack ops and count how often they are used. */
29687
29688 static void
29689 mark_base_types (dw_loc_descr_ref loc)
29690 {
29691 dw_die_ref base_type = NULL;
29692
29693 for (; loc; loc = loc->dw_loc_next)
29694 {
29695 switch (loc->dw_loc_opc)
29696 {
29697 case DW_OP_regval_type:
29698 case DW_OP_deref_type:
29699 case DW_OP_GNU_regval_type:
29700 case DW_OP_GNU_deref_type:
29701 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29702 break;
29703 case DW_OP_convert:
29704 case DW_OP_reinterpret:
29705 case DW_OP_GNU_convert:
29706 case DW_OP_GNU_reinterpret:
29707 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29708 continue;
29709 /* FALLTHRU */
29710 case DW_OP_const_type:
29711 case DW_OP_GNU_const_type:
29712 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29713 break;
29714 case DW_OP_entry_value:
29715 case DW_OP_GNU_entry_value:
29716 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29717 continue;
29718 default:
29719 continue;
29720 }
29721 gcc_assert (base_type->die_parent == comp_unit_die ());
29722 if (base_type->die_mark)
29723 base_type->die_mark++;
29724 else
29725 {
29726 base_types.safe_push (base_type);
29727 base_type->die_mark = 1;
29728 }
29729 }
29730 }
29731
29732 /* Comparison function for sorting marked base types. */
29733
29734 static int
29735 base_type_cmp (const void *x, const void *y)
29736 {
29737 dw_die_ref dx = *(const dw_die_ref *) x;
29738 dw_die_ref dy = *(const dw_die_ref *) y;
29739 unsigned int byte_size1, byte_size2;
29740 unsigned int encoding1, encoding2;
29741 unsigned int align1, align2;
29742 if (dx->die_mark > dy->die_mark)
29743 return -1;
29744 if (dx->die_mark < dy->die_mark)
29745 return 1;
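  /* Tie-break equal usage counts on byte size, encoding and alignment,
     larger values first.  */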
29746 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29747 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29748 if (byte_size1 < byte_size2)
29749 return 1;
29750 if (byte_size1 > byte_size2)
29751 return -1;
29752 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29753 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29754 if (encoding1 < encoding2)
29755 return 1;
29756 if (encoding1 > encoding2)
29757 return -1;
29758 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29759 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29760 if (align1 < align2)
29761 return 1;
29762 if (align1 > align2)
29763 return -1;
29764 return 0;
29765 }
29766
29767 /* Move base types marked by mark_base_types as early as possible
29768 in the CU, sorted by decreasing usage count both to make the
29769 uleb128 references as small as possible and to make sure they
29770 will have die_offset already computed by calc_die_sizes when
29771    the sizes of typed stack loc ops are computed.  */
29772
29773 static void
29774 move_marked_base_types (void)
29775 {
29776 unsigned int i;
29777 dw_die_ref base_type, die, c;
29778
29779 if (base_types.is_empty ())
29780 return;
29781
29782   /* Sort by decreasing usage count; they will be added again in that
29783 order later on. */
29784 base_types.qsort (base_type_cmp);
29785 die = comp_unit_die ();
29786 c = die->die_child;
29787 do
29788 {
29789 dw_die_ref prev = c;
29790 c = c->die_sib;
29791 while (c->die_mark)
29792 {
29793 remove_child_with_prev (c, prev);
29794 /* As base types got marked, there must be at least
29795 one node other than DW_TAG_base_type. */
29796 gcc_assert (die->die_child != NULL);
29797 c = prev->die_sib;
29798 }
29799 }
29800 while (c != die->die_child);
29801 gcc_assert (die->die_child);
29802 c = die->die_child;
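  /* Re-insert the sorted base types at the front of the CU's child list;
     die_child points to the last child, whose die_sib is the first one.  */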
29803 for (i = 0; base_types.iterate (i, &base_type); i++)
29804 {
29805 base_type->die_mark = 0;
29806 base_type->die_sib = c->die_sib;
29807 c->die_sib = base_type;
29808 c = base_type;
29809 }
29810 }
29811
29812 /* Helper function for resolve_addr, attempt to resolve
29813 one CONST_STRING, return true if successful. Similarly verify that
29814 SYMBOL_REFs refer to variables emitted in the current CU. */
29815
29816 static bool
29817 resolve_one_addr (rtx *addr)
29818 {
29819 rtx rtl = *addr;
29820
29821 if (GET_CODE (rtl) == CONST_STRING)
29822 {
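      /* Look up the constant pool entry that was (possibly) emitted for this
	 string literal; if there is none, or its decl was not actually
	 written out, the address cannot be resolved.  */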
29823 size_t len = strlen (XSTR (rtl, 0)) + 1;
29824 tree t = build_string (len, XSTR (rtl, 0));
29825 tree tlen = size_int (len - 1);
29826 TREE_TYPE (t)
29827 = build_array_type (char_type_node, build_index_type (tlen));
29828 rtl = lookup_constant_def (t);
29829 if (!rtl || !MEM_P (rtl))
29830 return false;
29831 rtl = XEXP (rtl, 0);
29832 if (GET_CODE (rtl) == SYMBOL_REF
29833 && SYMBOL_REF_DECL (rtl)
29834 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29835 return false;
29836 vec_safe_push (used_rtx_array, rtl);
29837 *addr = rtl;
29838 return true;
29839 }
29840
29841 if (GET_CODE (rtl) == SYMBOL_REF
29842 && SYMBOL_REF_DECL (rtl))
29843 {
29844 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29845 {
29846 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29847 return false;
29848 }
29849 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29850 return false;
29851 }
29852
29853 if (GET_CODE (rtl) == CONST)
29854 {
29855 subrtx_ptr_iterator::array_type array;
29856 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29857 if (!resolve_one_addr (*iter))
29858 return false;
29859 }
29860
29861 return true;
29862 }
29863
29864 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29865 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29866 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29867
29868 static rtx
29869 string_cst_pool_decl (tree t)
29870 {
29871 rtx rtl = output_constant_def (t, 1);
29872 unsigned char *array;
29873 dw_loc_descr_ref l;
29874 tree decl;
29875 size_t len;
29876 dw_die_ref ref;
29877
29878 if (!rtl || !MEM_P (rtl))
29879 return NULL_RTX;
29880 rtl = XEXP (rtl, 0);
29881 if (GET_CODE (rtl) != SYMBOL_REF
29882 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29883 return NULL_RTX;
29884
29885 decl = SYMBOL_REF_DECL (rtl);
29886 if (!lookup_decl_die (decl))
29887 {
29888 len = TREE_STRING_LENGTH (t);
29889 vec_safe_push (used_rtx_array, rtl);
29890 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29891 array = ggc_vec_alloc<unsigned char> (len);
29892 memcpy (array, TREE_STRING_POINTER (t), len);
29893 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29894 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29895 l->dw_loc_oprnd2.v.val_vec.length = len;
29896 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29897 l->dw_loc_oprnd2.v.val_vec.array = array;
29898 add_AT_loc (ref, DW_AT_location, l);
29899 equate_decl_number_to_die (decl, ref);
29900 }
29901 return rtl;
29902 }
29903
29904 /* Helper function of resolve_addr_in_expr. LOC is
29905 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29906 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29907 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29908 with DW_OP_implicit_pointer if possible
29909    and return true; if unsuccessful, return false.  */
29910
29911 static bool
29912 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29913 {
29914 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29915 HOST_WIDE_INT offset = 0;
29916 dw_die_ref ref = NULL;
29917 tree decl;
29918
29919 if (GET_CODE (rtl) == CONST
29920 && GET_CODE (XEXP (rtl, 0)) == PLUS
29921 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29922 {
29923 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29924 rtl = XEXP (XEXP (rtl, 0), 0);
29925 }
29926 if (GET_CODE (rtl) == CONST_STRING)
29927 {
29928 size_t len = strlen (XSTR (rtl, 0)) + 1;
29929 tree t = build_string (len, XSTR (rtl, 0));
29930 tree tlen = size_int (len - 1);
29931
29932 TREE_TYPE (t)
29933 = build_array_type (char_type_node, build_index_type (tlen));
29934 rtl = string_cst_pool_decl (t);
29935 if (!rtl)
29936 return false;
29937 }
29938 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29939 {
29940 decl = SYMBOL_REF_DECL (rtl);
29941 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29942 {
29943 ref = lookup_decl_die (decl);
29944 if (ref && (get_AT (ref, DW_AT_location)
29945 || get_AT (ref, DW_AT_const_value)))
29946 {
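	      /* Rewrite DW_OP_addr in place as DW_OP_implicit_pointer <ref>
		 <offset> and unlink the following DW_OP_stack_value, which
		 is now redundant.  */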
29947 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29948 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29949 loc->dw_loc_oprnd1.val_entry = NULL;
29950 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29951 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29952 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29953 loc->dw_loc_oprnd2.v.val_int = offset;
29954 return true;
29955 }
29956 }
29957 }
29958 return false;
29959 }
29960
29961 /* Helper function for resolve_addr; handle one location
29962    expression and return false if at least one CONST_STRING or SYMBOL_REF
29963    in it couldn't be resolved.  */
29964
29965 static bool
29966 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29967 {
29968 dw_loc_descr_ref keep = NULL;
29969 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29970 switch (loc->dw_loc_opc)
29971 {
29972 case DW_OP_addr:
29973 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29974 {
29975 if ((prev == NULL
29976 || prev->dw_loc_opc == DW_OP_piece
29977 || prev->dw_loc_opc == DW_OP_bit_piece)
29978 && loc->dw_loc_next
29979 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29980 && (!dwarf_strict || dwarf_version >= 5)
29981 && optimize_one_addr_into_implicit_ptr (loc))
29982 break;
29983 return false;
29984 }
29985 break;
29986 case DW_OP_GNU_addr_index:
29987 case DW_OP_addrx:
29988 case DW_OP_GNU_const_index:
29989 case DW_OP_constx:
29990 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29991 || loc->dw_loc_opc == DW_OP_addrx)
29992 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29993 || loc->dw_loc_opc == DW_OP_constx)
29994 && loc->dtprel))
29995 {
29996 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29997 if (!resolve_one_addr (&rtl))
29998 return false;
29999 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
30000 loc->dw_loc_oprnd1.val_entry
30001 = add_addr_table_entry (rtl, ate_kind_rtx);
30002 }
30003 break;
30004 case DW_OP_const4u:
30005 case DW_OP_const8u:
30006 if (loc->dtprel
30007 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30008 return false;
30009 break;
30010 case DW_OP_plus_uconst:
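      /* If encoding the operand as a separate constant followed by
	 DW_OP_plus is shorter than DW_OP_plus_uconst with its uleb128
	 operand, use that form instead.  */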
30011 if (size_of_loc_descr (loc)
30012 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
30013 + 1
30014 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
30015 {
30016 dw_loc_descr_ref repl
30017 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
30018 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
30019 add_loc_descr (&repl, loc->dw_loc_next);
30020 *loc = *repl;
30021 }
30022 break;
30023 case DW_OP_implicit_value:
30024 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
30025 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
30026 return false;
30027 break;
30028 case DW_OP_implicit_pointer:
30029 case DW_OP_GNU_implicit_pointer:
30030 case DW_OP_GNU_parameter_ref:
30031 case DW_OP_GNU_variable_value:
30032 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30033 {
30034 dw_die_ref ref
30035 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
30036 if (ref == NULL)
30037 return false;
30038 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30039 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30040 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30041 }
30042 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
30043 {
30044 if (prev == NULL
30045 && loc->dw_loc_next == NULL
30046 && AT_class (a) == dw_val_class_loc)
30047 switch (a->dw_attr)
30048 {
30049 /* The following attributes allow both exprloc and reference,
30050    so if the whole expression is DW_OP_GNU_variable_value
30051    alone we can transform it into a reference.  */
30052 case DW_AT_byte_size:
30053 case DW_AT_bit_size:
30054 case DW_AT_lower_bound:
30055 case DW_AT_upper_bound:
30056 case DW_AT_bit_stride:
30057 case DW_AT_count:
30058 case DW_AT_allocated:
30059 case DW_AT_associated:
30060 case DW_AT_byte_stride:
30061 a->dw_attr_val.val_class = dw_val_class_die_ref;
30062 a->dw_attr_val.val_entry = NULL;
30063 a->dw_attr_val.v.val_die_ref.die
30064 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30065 a->dw_attr_val.v.val_die_ref.external = 0;
30066 return true;
30067 default:
30068 break;
30069 }
30070 if (dwarf_strict)
30071 return false;
30072 }
30073 break;
30074 case DW_OP_const_type:
30075 case DW_OP_regval_type:
30076 case DW_OP_deref_type:
30077 case DW_OP_convert:
30078 case DW_OP_reinterpret:
30079 case DW_OP_GNU_const_type:
30080 case DW_OP_GNU_regval_type:
30081 case DW_OP_GNU_deref_type:
30082 case DW_OP_GNU_convert:
30083 case DW_OP_GNU_reinterpret:
30084 while (loc->dw_loc_next
30085 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30086 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30087 {
30088 dw_die_ref base1, base2;
30089 unsigned enc1, enc2, size1, size2;
30090 if (loc->dw_loc_opc == DW_OP_regval_type
30091 || loc->dw_loc_opc == DW_OP_deref_type
30092 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30093 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30094 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30095 else if (loc->dw_loc_oprnd1.val_class
30096 == dw_val_class_unsigned_const)
30097 break;
30098 else
30099 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30100 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30101 == dw_val_class_unsigned_const)
30102 break;
30103 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30104 gcc_assert (base1->die_tag == DW_TAG_base_type
30105 && base2->die_tag == DW_TAG_base_type);
30106 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30107 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30108 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30109 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30110 if (size1 == size2
30111 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30112 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30113 && loc != keep)
30114 || enc1 == enc2))
30115 {
30116 /* Optimize away next DW_OP_convert after
30117 adjusting LOC's base type die reference. */
30118 if (loc->dw_loc_opc == DW_OP_regval_type
30119 || loc->dw_loc_opc == DW_OP_deref_type
30120 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30121 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30122 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30123 else
30124 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30125 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30126 continue;
30127 }
30128 /* Don't change integer DW_OP_convert after e.g. floating
30129 point typed stack entry. */
30130 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30131 keep = loc->dw_loc_next;
30132 break;
30133 }
30134 break;
30135 default:
30136 break;
30137 }
30138 return true;
30139 }
30140
30141 /* Helper function of resolve_addr.  DIE had a DW_AT_location consisting
30142    of a single DW_OP_addr whose operand referred to DECL, and the
30143    DW_OP_addr couldn't be resolved.  resolve_addr has already
30144    removed the DW_AT_location attribute.  This function attempts to
30145    add a new DW_AT_location attribute with DW_OP_implicit_pointer,
30146    or a DW_AT_const_value attribute, if possible.  */
30147
30148 static void
30149 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30150 {
30151 if (!VAR_P (decl)
30152 || lookup_decl_die (decl) != die
30153 || DECL_EXTERNAL (decl)
30154 || !TREE_STATIC (decl)
30155 || DECL_INITIAL (decl) == NULL_TREE
30156 || DECL_P (DECL_INITIAL (decl))
30157 || get_AT (die, DW_AT_const_value))
30158 return;
30159
30160 tree init = DECL_INITIAL (decl);
30161 HOST_WIDE_INT offset = 0;
30162 /* For variables that have been optimized away and thus
30163 don't have a memory location, see if we can emit
30164 DW_AT_const_value instead. */
30165 if (tree_add_const_value_attribute (die, init))
30166 return;
30167 if (dwarf_strict && dwarf_version < 5)
30168 return;
30169 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30170 and ADDR_EXPR refers to a decl that has DW_AT_location or
30171 DW_AT_const_value (but isn't addressable, otherwise
30172 resolving the original DW_OP_addr wouldn't fail), see if
30173 we can add DW_OP_implicit_pointer. */
30174 STRIP_NOPS (init);
30175 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30176 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30177 {
30178 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30179 init = TREE_OPERAND (init, 0);
30180 STRIP_NOPS (init);
30181 }
30182 if (TREE_CODE (init) != ADDR_EXPR)
30183 return;
30184 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30185 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30186 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30187 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30188 && TREE_OPERAND (init, 0) != decl))
30189 {
30190 dw_die_ref ref;
30191 dw_loc_descr_ref l;
30192
30193 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30194 {
30195 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30196 if (!rtl)
30197 return;
30198 decl = SYMBOL_REF_DECL (rtl);
30199 }
30200 else
30201 decl = TREE_OPERAND (init, 0);
30202 ref = lookup_decl_die (decl);
30203 if (ref == NULL
30204 || (!get_AT (ref, DW_AT_location)
30205 && !get_AT (ref, DW_AT_const_value)))
30206 return;
30207 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30208 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30209 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30210 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30211 add_AT_loc (die, DW_AT_location, l);
30212 }
30213 }
30214
30215 /* Return NULL if L is a valid DWARF expression, otherwise return the
30216    first op that is not valid in a DWARF expression.  */
30217
30218 static dw_loc_descr_ref
30219 non_dwarf_expression (dw_loc_descr_ref l)
30220 {
30221 while (l)
30222 {
30223 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30224 return l;
30225 switch (l->dw_loc_opc)
30226 {
30227 case DW_OP_regx:
30228 case DW_OP_implicit_value:
30229 case DW_OP_stack_value:
30230 case DW_OP_implicit_pointer:
30231 case DW_OP_GNU_implicit_pointer:
30232 case DW_OP_GNU_parameter_ref:
30233 case DW_OP_piece:
30234 case DW_OP_bit_piece:
30235 return l;
30236 default:
30237 break;
30238 }
30239 l = l->dw_loc_next;
30240 }
30241 return NULL;
30242 }
30243
30244 /* Return an adjusted copy of EXPR:
30245    If it is an empty DWARF expression, return it.
30246    If it is a valid non-empty DWARF expression,
30247    return a copy of EXPR with DW_OP_deref appended to it.
30248    If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30249    copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30250    If it is a DWARF expression followed by DW_OP_stack_value, return a
30251    copy of the DWARF expression without anything appended.
30252    Otherwise, return NULL.  */
30253
30254 static dw_loc_descr_ref
30255 copy_deref_exprloc (dw_loc_descr_ref expr)
30256 {
30257 dw_loc_descr_ref tail = NULL;
30258
30259 if (expr == NULL)
30260 return NULL;
30261
30262 dw_loc_descr_ref l = non_dwarf_expression (expr);
30263 if (l && l->dw_loc_next)
30264 return NULL;
30265
30266 if (l)
30267 {
30268 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30269 tail = new_loc_descr ((enum dwarf_location_atom)
30270 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30271 0, 0);
30272 else
30273 switch (l->dw_loc_opc)
30274 {
30275 case DW_OP_regx:
30276 tail = new_loc_descr (DW_OP_bregx,
30277 l->dw_loc_oprnd1.v.val_unsigned, 0);
30278 break;
30279 case DW_OP_stack_value:
30280 break;
30281 default:
30282 return NULL;
30283 }
30284 }
30285 else
30286 tail = new_loc_descr (DW_OP_deref, 0, 0);
30287
30288 dw_loc_descr_ref ret = NULL, *p = &ret;
30289 while (expr != l)
30290 {
30291 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30292 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30293 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30294 p = &(*p)->dw_loc_next;
30295 expr = expr->dw_loc_next;
30296 }
30297 *p = tail;
30298 return ret;
30299 }
30300
30301 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30302    reference to a variable or argument, adjust it if needed and return:
30303    -1 if the DW_AT_string_length attribute and the (possibly present)
30304    DW_AT_{string_length_,}byte_size attribute should be removed,
30305    0 if the attribute should be kept, perhaps with minor modifications
30306    (no need to rescan), or 1 if the attribute has been successfully adjusted.  */
30307
30308 static int
30309 optimize_string_length (dw_attr_node *a)
30310 {
30311 dw_loc_descr_ref l = AT_loc (a), lv;
30312 dw_die_ref die;
30313 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30314 {
30315 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30316 die = lookup_decl_die (decl);
30317 if (die)
30318 {
30319 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30320 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30321 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30322 }
30323 else
30324 return -1;
30325 }
30326 else
30327 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30328
30329 /* DWARF5 allows reference class, so we can then reference the DIE.
30330 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30331 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30332 {
30333 a->dw_attr_val.val_class = dw_val_class_die_ref;
30334 a->dw_attr_val.val_entry = NULL;
30335 a->dw_attr_val.v.val_die_ref.die = die;
30336 a->dw_attr_val.v.val_die_ref.external = 0;
30337 return 0;
30338 }
30339
30340 dw_attr_node *av = get_AT (die, DW_AT_location);
30341 dw_loc_list_ref d;
30342 bool non_dwarf_expr = false;
30343
30344 if (av == NULL)
30345 return dwarf_strict ? -1 : 0;
30346 switch (AT_class (av))
30347 {
30348 case dw_val_class_loc_list:
30349 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30350 if (d->expr && non_dwarf_expression (d->expr))
30351 non_dwarf_expr = true;
30352 break;
30353 case dw_val_class_view_list:
30354 gcc_unreachable ();
30355 case dw_val_class_loc:
30356 lv = AT_loc (av);
30357 if (lv == NULL)
30358 return dwarf_strict ? -1 : 0;
30359 if (non_dwarf_expression (lv))
30360 non_dwarf_expr = true;
30361 break;
30362 default:
30363 return dwarf_strict ? -1 : 0;
30364 }
30365
30366 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30367 into DW_OP_call4 or DW_OP_GNU_variable_value into
30368 DW_OP_call4 DW_OP_deref, do so. */
30369 if (!non_dwarf_expr
30370 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30371 {
30372 l->dw_loc_opc = DW_OP_call4;
30373 if (l->dw_loc_next)
30374 l->dw_loc_next = NULL;
30375 else
30376 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30377 return 0;
30378 }
30379
30380 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30381 copy over the DW_AT_location attribute from die to a. */
30382 if (l->dw_loc_next != NULL)
30383 {
30384 a->dw_attr_val = av->dw_attr_val;
30385 return 1;
30386 }
30387
30388 dw_loc_list_ref list, *p;
30389 switch (AT_class (av))
30390 {
30391 case dw_val_class_loc_list:
30392 p = &list;
30393 list = NULL;
30394 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30395 {
30396 lv = copy_deref_exprloc (d->expr);
30397 if (lv)
30398 {
30399 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30400 p = &(*p)->dw_loc_next;
30401 }
30402 else if (!dwarf_strict && d->expr)
30403 return 0;
30404 }
30405 if (list == NULL)
30406 return dwarf_strict ? -1 : 0;
30407 a->dw_attr_val.val_class = dw_val_class_loc_list;
30408 gen_llsym (list);
30409 *AT_loc_list_ptr (a) = list;
30410 return 1;
30411 case dw_val_class_loc:
30412 lv = copy_deref_exprloc (AT_loc (av));
30413 if (lv == NULL)
30414 return dwarf_strict ? -1 : 0;
30415 a->dw_attr_val.v.val_loc = lv;
30416 return 1;
30417 default:
30418 gcc_unreachable ();
30419 }
30420 }
30421
30422 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30423    an address in the .rodata section if the string literal is emitted
30424    there; if it isn't found in .rodata, remove the containing location
30425    list or replace DW_AT_const_value with DW_AT_location and an empty
30426    location expression.  Similarly for SYMBOL_REFs, keep only those that
30427    refer to something that has been emitted in the current CU.  */
30428
30429 static void
30430 resolve_addr (dw_die_ref die)
30431 {
30432 dw_die_ref c;
30433 dw_attr_node *a;
30434 dw_loc_list_ref *curr, *start, loc;
30435 unsigned ix;
30436 bool remove_AT_byte_size = false;
30437
30438 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30439 switch (AT_class (a))
30440 {
30441 case dw_val_class_loc_list:
30442 start = curr = AT_loc_list_ptr (a);
30443 loc = *curr;
30444 gcc_assert (loc);
30445 /* The same list can be referenced more than once. See if we have
30446 already recorded the result from a previous pass. */
30447 if (loc->replaced)
30448 *curr = loc->dw_loc_next;
30449 else if (!loc->resolved_addr)
30450 {
30451 /* As things stand, we do not expect or allow one die to
30452 reference a suffix of another die's location list chain.
30453 References must be identical or completely separate.
30454 There is therefore no need to cache the result of this
30455 pass on any list other than the first; doing so
30456 would lead to unnecessary writes. */
30457 while (*curr)
30458 {
30459 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30460 if (!resolve_addr_in_expr (a, (*curr)->expr))
30461 {
30462 dw_loc_list_ref next = (*curr)->dw_loc_next;
30463 dw_loc_descr_ref l = (*curr)->expr;
30464
30465 if (next && (*curr)->ll_symbol)
30466 {
30467 gcc_assert (!next->ll_symbol);
30468 next->ll_symbol = (*curr)->ll_symbol;
30469 next->vl_symbol = (*curr)->vl_symbol;
30470 }
30471 if (dwarf_split_debug_info)
30472 remove_loc_list_addr_table_entries (l);
30473 *curr = next;
30474 }
30475 else
30476 {
30477 mark_base_types ((*curr)->expr);
30478 curr = &(*curr)->dw_loc_next;
30479 }
30480 }
30481 if (loc == *start)
30482 loc->resolved_addr = 1;
30483 else
30484 {
30485 loc->replaced = 1;
30486 loc->dw_loc_next = *start;
30487 }
30488 }
30489 if (!*start)
30490 {
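	      /* remove_AT shrinks the attribute vector in place, so step IX
		 back to avoid skipping the attribute that now occupies this
		 slot.  */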
30491 remove_AT (die, a->dw_attr);
30492 ix--;
30493 }
30494 break;
30495 case dw_val_class_view_list:
30496 {
30497 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30498 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30499 dw_val_node *llnode
30500 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30501 /* If we no longer have a loclist, or it no longer needs
30502 views, drop this attribute. */
30503 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30504 {
30505 remove_AT (die, a->dw_attr);
30506 ix--;
30507 }
30508 break;
30509 }
30510 case dw_val_class_loc:
30511 {
30512 dw_loc_descr_ref l = AT_loc (a);
30513 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30514 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30515 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30516 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30517 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30518 with DW_FORM_ref referencing the same DIE as
30519 DW_OP_GNU_variable_value used to reference. */
30520 if (a->dw_attr == DW_AT_string_length
30521 && l
30522 && l->dw_loc_opc == DW_OP_GNU_variable_value
30523 && (l->dw_loc_next == NULL
30524 || (l->dw_loc_next->dw_loc_next == NULL
30525 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30526 {
30527 switch (optimize_string_length (a))
30528 {
30529 case -1:
30530 remove_AT (die, a->dw_attr);
30531 ix--;
30532 /* If we drop DW_AT_string_length, we need to drop also
30533 DW_AT_{string_length_,}byte_size. */
30534 remove_AT_byte_size = true;
30535 continue;
30536 default:
30537 break;
30538 case 1:
30539 /* Even if we keep the optimized DW_AT_string_length,
30540 it might have changed AT_class, so process it again. */
30541 ix--;
30542 continue;
30543 }
30544 }
30545 /* For -gdwarf-2 don't attempt to optimize
30546 DW_AT_data_member_location containing
30547 DW_OP_plus_uconst - older consumers might
30548 rely on it being that op instead of a more complex,
30549 but shorter, location description. */
30550 if ((dwarf_version > 2
30551 || a->dw_attr != DW_AT_data_member_location
30552 || l == NULL
30553 || l->dw_loc_opc != DW_OP_plus_uconst
30554 || l->dw_loc_next != NULL)
30555 && !resolve_addr_in_expr (a, l))
30556 {
30557 if (dwarf_split_debug_info)
30558 remove_loc_list_addr_table_entries (l);
30559 if (l != NULL
30560 && l->dw_loc_next == NULL
30561 && l->dw_loc_opc == DW_OP_addr
30562 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30563 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30564 && a->dw_attr == DW_AT_location)
30565 {
30566 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30567 remove_AT (die, a->dw_attr);
30568 ix--;
30569 optimize_location_into_implicit_ptr (die, decl);
30570 break;
30571 }
30572 if (a->dw_attr == DW_AT_string_length)
30573 /* If we drop DW_AT_string_length, we need to drop also
30574 DW_AT_{string_length_,}byte_size. */
30575 remove_AT_byte_size = true;
30576 remove_AT (die, a->dw_attr);
30577 ix--;
30578 }
30579 else
30580 mark_base_types (l);
30581 }
30582 break;
30583 case dw_val_class_addr:
30584 if (a->dw_attr == DW_AT_const_value
30585 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30586 {
30587 if (AT_index (a) != NOT_INDEXED)
30588 remove_addr_table_entry (a->dw_attr_val.val_entry);
30589 remove_AT (die, a->dw_attr);
30590 ix--;
30591 }
30592 if ((die->die_tag == DW_TAG_call_site
30593 && a->dw_attr == DW_AT_call_origin)
30594 || (die->die_tag == DW_TAG_GNU_call_site
30595 && a->dw_attr == DW_AT_abstract_origin))
30596 {
30597 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30598 dw_die_ref tdie = lookup_decl_die (tdecl);
30599 dw_die_ref cdie;
30600 if (tdie == NULL
30601 && DECL_EXTERNAL (tdecl)
30602 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30603 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30604 {
30605 dw_die_ref pdie = cdie;
30606 /* Make sure we don't add these DIEs into type units.
30607 We could emit skeleton DIEs for context (namespaces,
30608 outer structs/classes) and a skeleton DIE for the
30609 innermost context with DW_AT_signature pointing to the
30610 type unit. See PR78835. */
30611 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30612 pdie = pdie->die_parent;
30613 if (pdie == NULL)
30614 {
30615 /* Creating a full DIE for tdecl is overly expensive and
30616 at this point even wrong when in the LTO phase
30617 as it can end up generating new type DIEs we didn't
30618 output and thus optimize_external_refs will crash. */
30619 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30620 add_AT_flag (tdie, DW_AT_external, 1);
30621 add_AT_flag (tdie, DW_AT_declaration, 1);
30622 add_linkage_attr (tdie, tdecl);
30623 add_name_and_src_coords_attributes (tdie, tdecl, true);
30624 equate_decl_number_to_die (tdecl, tdie);
30625 }
30626 }
30627 if (tdie)
30628 {
30629 a->dw_attr_val.val_class = dw_val_class_die_ref;
30630 a->dw_attr_val.v.val_die_ref.die = tdie;
30631 a->dw_attr_val.v.val_die_ref.external = 0;
30632 }
30633 else
30634 {
30635 if (AT_index (a) != NOT_INDEXED)
30636 remove_addr_table_entry (a->dw_attr_val.val_entry);
30637 remove_AT (die, a->dw_attr);
30638 ix--;
30639 }
30640 }
30641 break;
30642 default:
30643 break;
30644 }
30645
30646 if (remove_AT_byte_size)
30647 remove_AT (die, dwarf_version >= 5
30648 ? DW_AT_string_length_byte_size
30649 : DW_AT_byte_size);
30650
30651 FOR_EACH_CHILD (die, c, resolve_addr (c));
30652 }
30653 \f
30654 /* Helper routines for optimize_location_lists.
30655    This pass tries to share identical location lists in the .debug_loc
30656 section. */
30657
30658 /* Iteratively hash operands of LOC opcode into HSTATE. */
30659
30660 static void
30661 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30662 {
30663 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30664 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30665
30666 switch (loc->dw_loc_opc)
30667 {
30668 case DW_OP_const4u:
30669 case DW_OP_const8u:
30670 if (loc->dtprel)
30671 goto hash_addr;
30672 /* FALLTHRU */
30673 case DW_OP_const1u:
30674 case DW_OP_const1s:
30675 case DW_OP_const2u:
30676 case DW_OP_const2s:
30677 case DW_OP_const4s:
30678 case DW_OP_const8s:
30679 case DW_OP_constu:
30680 case DW_OP_consts:
30681 case DW_OP_pick:
30682 case DW_OP_plus_uconst:
30683 case DW_OP_breg0:
30684 case DW_OP_breg1:
30685 case DW_OP_breg2:
30686 case DW_OP_breg3:
30687 case DW_OP_breg4:
30688 case DW_OP_breg5:
30689 case DW_OP_breg6:
30690 case DW_OP_breg7:
30691 case DW_OP_breg8:
30692 case DW_OP_breg9:
30693 case DW_OP_breg10:
30694 case DW_OP_breg11:
30695 case DW_OP_breg12:
30696 case DW_OP_breg13:
30697 case DW_OP_breg14:
30698 case DW_OP_breg15:
30699 case DW_OP_breg16:
30700 case DW_OP_breg17:
30701 case DW_OP_breg18:
30702 case DW_OP_breg19:
30703 case DW_OP_breg20:
30704 case DW_OP_breg21:
30705 case DW_OP_breg22:
30706 case DW_OP_breg23:
30707 case DW_OP_breg24:
30708 case DW_OP_breg25:
30709 case DW_OP_breg26:
30710 case DW_OP_breg27:
30711 case DW_OP_breg28:
30712 case DW_OP_breg29:
30713 case DW_OP_breg30:
30714 case DW_OP_breg31:
30715 case DW_OP_regx:
30716 case DW_OP_fbreg:
30717 case DW_OP_piece:
30718 case DW_OP_deref_size:
30719 case DW_OP_xderef_size:
30720 hstate.add_object (val1->v.val_int);
30721 break;
30722 case DW_OP_skip:
30723 case DW_OP_bra:
30724 {
30725 int offset;
30726
30727 gcc_assert (val1->val_class == dw_val_class_loc);
30728 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30729 hstate.add_object (offset);
30730 }
30731 break;
30732 case DW_OP_implicit_value:
30733 hstate.add_object (val1->v.val_unsigned);
30734 switch (val2->val_class)
30735 {
30736 case dw_val_class_const:
30737 hstate.add_object (val2->v.val_int);
30738 break;
30739 case dw_val_class_vec:
30740 {
30741 unsigned int elt_size = val2->v.val_vec.elt_size;
30742 unsigned int len = val2->v.val_vec.length;
30743
30744 hstate.add_int (elt_size);
30745 hstate.add_int (len);
30746 hstate.add (val2->v.val_vec.array, len * elt_size);
30747 }
30748 break;
30749 case dw_val_class_const_double:
30750 hstate.add_object (val2->v.val_double.low);
30751 hstate.add_object (val2->v.val_double.high);
30752 break;
30753 case dw_val_class_wide_int:
30754 hstate.add (val2->v.val_wide->get_val (),
30755 get_full_len (*val2->v.val_wide)
30756 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30757 break;
30758 case dw_val_class_addr:
30759 inchash::add_rtx (val2->v.val_addr, hstate);
30760 break;
30761 default:
30762 gcc_unreachable ();
30763 }
30764 break;
30765 case DW_OP_bregx:
30766 case DW_OP_bit_piece:
30767 hstate.add_object (val1->v.val_int);
30768 hstate.add_object (val2->v.val_int);
30769 break;
30770 case DW_OP_addr:
30771 hash_addr:
30772 if (loc->dtprel)
30773 {
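	    /* Mix in an arbitrary marker byte so that a dtprel address hashes
	       differently from the equivalent non-dtprel address.  */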
30774 unsigned char dtprel = 0xd1;
30775 hstate.add_object (dtprel);
30776 }
30777 inchash::add_rtx (val1->v.val_addr, hstate);
30778 break;
30779 case DW_OP_GNU_addr_index:
30780 case DW_OP_addrx:
30781 case DW_OP_GNU_const_index:
30782 case DW_OP_constx:
30783 {
30784 if (loc->dtprel)
30785 {
30786 unsigned char dtprel = 0xd1;
30787 hstate.add_object (dtprel);
30788 }
30789 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30790 }
30791 break;
30792 case DW_OP_implicit_pointer:
30793 case DW_OP_GNU_implicit_pointer:
30794 hstate.add_int (val2->v.val_int);
30795 break;
30796 case DW_OP_entry_value:
30797 case DW_OP_GNU_entry_value:
30798 hstate.add_object (val1->v.val_loc);
30799 break;
30800 case DW_OP_regval_type:
30801 case DW_OP_deref_type:
30802 case DW_OP_GNU_regval_type:
30803 case DW_OP_GNU_deref_type:
30804 {
30805 unsigned int byte_size
30806 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30807 unsigned int encoding
30808 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30809 hstate.add_object (val1->v.val_int);
30810 hstate.add_object (byte_size);
30811 hstate.add_object (encoding);
30812 }
30813 break;
30814 case DW_OP_convert:
30815 case DW_OP_reinterpret:
30816 case DW_OP_GNU_convert:
30817 case DW_OP_GNU_reinterpret:
30818 if (val1->val_class == dw_val_class_unsigned_const)
30819 {
30820 hstate.add_object (val1->v.val_unsigned);
30821 break;
30822 }
30823 /* FALLTHRU */
30824 case DW_OP_const_type:
30825 case DW_OP_GNU_const_type:
30826 {
30827 unsigned int byte_size
30828 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30829 unsigned int encoding
30830 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30831 hstate.add_object (byte_size);
30832 hstate.add_object (encoding);
30833 if (loc->dw_loc_opc != DW_OP_const_type
30834 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30835 break;
30836 hstate.add_object (val2->val_class);
30837 switch (val2->val_class)
30838 {
30839 case dw_val_class_const:
30840 hstate.add_object (val2->v.val_int);
30841 break;
30842 case dw_val_class_vec:
30843 {
30844 unsigned int elt_size = val2->v.val_vec.elt_size;
30845 unsigned int len = val2->v.val_vec.length;
30846
30847 hstate.add_object (elt_size);
30848 hstate.add_object (len);
30849 hstate.add (val2->v.val_vec.array, len * elt_size);
30850 }
30851 break;
30852 case dw_val_class_const_double:
30853 hstate.add_object (val2->v.val_double.low);
30854 hstate.add_object (val2->v.val_double.high);
30855 break;
30856 case dw_val_class_wide_int:
30857 hstate.add (val2->v.val_wide->get_val (),
30858 get_full_len (*val2->v.val_wide)
30859 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30860 break;
30861 default:
30862 gcc_unreachable ();
30863 }
30864 }
30865 break;
30866
30867 default:
30868 /* Other codes have no operands. */
30869 break;
30870 }
30871 }
30872
30873 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30874
30875 static inline void
30876 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30877 {
30878 dw_loc_descr_ref l;
30879 bool sizes_computed = false;
30880   /* Sizes are computed lazily in the loop below, only when a DW_OP_skip
30881      or DW_OP_bra operand actually needs its dw_loc_addr checksummed.  */
30882
30883 for (l = loc; l != NULL; l = l->dw_loc_next)
30884 {
30885 enum dwarf_location_atom opc = l->dw_loc_opc;
30886 hstate.add_object (opc);
30887 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30888 {
30889 size_of_locs (loc);
30890 sizes_computed = true;
30891 }
30892 hash_loc_operands (l, hstate);
30893 }
30894 }
30895
30896 /* Compute hash of the whole location list LIST_HEAD. */
30897
30898 static inline void
30899 hash_loc_list (dw_loc_list_ref list_head)
30900 {
30901 dw_loc_list_ref curr = list_head;
30902 inchash::hash hstate;
30903
30904 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30905 {
30906 hstate.add (curr->begin, strlen (curr->begin) + 1);
30907 hstate.add (curr->end, strlen (curr->end) + 1);
30908 hstate.add_object (curr->vbegin);
30909 hstate.add_object (curr->vend);
30910 if (curr->section)
30911 hstate.add (curr->section, strlen (curr->section) + 1);
30912 hash_locs (curr->expr, hstate);
30913 }
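  /* Record the hash only on the list head; loc_list_hasher::hash reads
     it from there.  */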
30914 list_head->hash = hstate.end ();
30915 }
30916
30917 /* Return true if X and Y opcodes have the same operands. */
30918
30919 static inline bool
30920 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30921 {
30922 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30923 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30924 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30925 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30926
30927 switch (x->dw_loc_opc)
30928 {
30929 case DW_OP_const4u:
30930 case DW_OP_const8u:
30931 if (x->dtprel)
30932 goto hash_addr;
30933 /* FALLTHRU */
30934 case DW_OP_const1u:
30935 case DW_OP_const1s:
30936 case DW_OP_const2u:
30937 case DW_OP_const2s:
30938 case DW_OP_const4s:
30939 case DW_OP_const8s:
30940 case DW_OP_constu:
30941 case DW_OP_consts:
30942 case DW_OP_pick:
30943 case DW_OP_plus_uconst:
30944 case DW_OP_breg0:
30945 case DW_OP_breg1:
30946 case DW_OP_breg2:
30947 case DW_OP_breg3:
30948 case DW_OP_breg4:
30949 case DW_OP_breg5:
30950 case DW_OP_breg6:
30951 case DW_OP_breg7:
30952 case DW_OP_breg8:
30953 case DW_OP_breg9:
30954 case DW_OP_breg10:
30955 case DW_OP_breg11:
30956 case DW_OP_breg12:
30957 case DW_OP_breg13:
30958 case DW_OP_breg14:
30959 case DW_OP_breg15:
30960 case DW_OP_breg16:
30961 case DW_OP_breg17:
30962 case DW_OP_breg18:
30963 case DW_OP_breg19:
30964 case DW_OP_breg20:
30965 case DW_OP_breg21:
30966 case DW_OP_breg22:
30967 case DW_OP_breg23:
30968 case DW_OP_breg24:
30969 case DW_OP_breg25:
30970 case DW_OP_breg26:
30971 case DW_OP_breg27:
30972 case DW_OP_breg28:
30973 case DW_OP_breg29:
30974 case DW_OP_breg30:
30975 case DW_OP_breg31:
30976 case DW_OP_regx:
30977 case DW_OP_fbreg:
30978 case DW_OP_piece:
30979 case DW_OP_deref_size:
30980 case DW_OP_xderef_size:
30981 return valx1->v.val_int == valy1->v.val_int;
30982 case DW_OP_skip:
30983 case DW_OP_bra:
30984 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30985 can cause irrelevant differences in dw_loc_addr. */
30986 gcc_assert (valx1->val_class == dw_val_class_loc
30987 && valy1->val_class == dw_val_class_loc
30988 && (dwarf_split_debug_info
30989 || x->dw_loc_addr == y->dw_loc_addr));
30990 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30991 case DW_OP_implicit_value:
30992 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30993 || valx2->val_class != valy2->val_class)
30994 return false;
30995 switch (valx2->val_class)
30996 {
30997 case dw_val_class_const:
30998 return valx2->v.val_int == valy2->v.val_int;
30999 case dw_val_class_vec:
31000 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31001 && valx2->v.val_vec.length == valy2->v.val_vec.length
31002 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31003 valx2->v.val_vec.elt_size
31004 * valx2->v.val_vec.length) == 0;
31005 case dw_val_class_const_double:
31006 return valx2->v.val_double.low == valy2->v.val_double.low
31007 && valx2->v.val_double.high == valy2->v.val_double.high;
31008 case dw_val_class_wide_int:
31009 return *valx2->v.val_wide == *valy2->v.val_wide;
31010 case dw_val_class_addr:
31011 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
31012 default:
31013 gcc_unreachable ();
31014 }
31015 case DW_OP_bregx:
31016 case DW_OP_bit_piece:
31017 return valx1->v.val_int == valy1->v.val_int
31018 && valx2->v.val_int == valy2->v.val_int;
31019 case DW_OP_addr:
31020 hash_addr:
31021 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
31022 case DW_OP_GNU_addr_index:
31023 case DW_OP_addrx:
31024 case DW_OP_GNU_const_index:
31025 case DW_OP_constx:
31026 {
31027 rtx ax1 = valx1->val_entry->addr.rtl;
31028 rtx ay1 = valy1->val_entry->addr.rtl;
31029 return rtx_equal_p (ax1, ay1);
31030 }
31031 case DW_OP_implicit_pointer:
31032 case DW_OP_GNU_implicit_pointer:
31033 return valx1->val_class == dw_val_class_die_ref
31034 && valx1->val_class == valy1->val_class
31035 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
31036 && valx2->v.val_int == valy2->v.val_int;
31037 case DW_OP_entry_value:
31038 case DW_OP_GNU_entry_value:
31039 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
31040 case DW_OP_const_type:
31041 case DW_OP_GNU_const_type:
31042 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
31043 || valx2->val_class != valy2->val_class)
31044 return false;
31045 switch (valx2->val_class)
31046 {
31047 case dw_val_class_const:
31048 return valx2->v.val_int == valy2->v.val_int;
31049 case dw_val_class_vec:
31050 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31051 && valx2->v.val_vec.length == valy2->v.val_vec.length
31052 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31053 valx2->v.val_vec.elt_size
31054 * valx2->v.val_vec.length) == 0;
31055 case dw_val_class_const_double:
31056 return valx2->v.val_double.low == valy2->v.val_double.low
31057 && valx2->v.val_double.high == valy2->v.val_double.high;
31058 case dw_val_class_wide_int:
31059 return *valx2->v.val_wide == *valy2->v.val_wide;
31060 default:
31061 gcc_unreachable ();
31062 }
31063 case DW_OP_regval_type:
31064 case DW_OP_deref_type:
31065 case DW_OP_GNU_regval_type:
31066 case DW_OP_GNU_deref_type:
31067 return valx1->v.val_int == valy1->v.val_int
31068 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31069 case DW_OP_convert:
31070 case DW_OP_reinterpret:
31071 case DW_OP_GNU_convert:
31072 case DW_OP_GNU_reinterpret:
31073 if (valx1->val_class != valy1->val_class)
31074 return false;
31075 if (valx1->val_class == dw_val_class_unsigned_const)
31076 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31077 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31078 case DW_OP_GNU_parameter_ref:
31079 return valx1->val_class == dw_val_class_die_ref
31080 && valx1->val_class == valy1->val_class
31081 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31082 default:
31083 /* Other codes have no operands. */
31084 return true;
31085 }
31086 }
31087
31088 /* Return true if DWARF location expressions X and Y are the same. */
31089
31090 static inline bool
31091 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31092 {
31093 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31094 if (x->dw_loc_opc != y->dw_loc_opc
31095 || x->dtprel != y->dtprel
31096 || !compare_loc_operands (x, y))
31097 break;
31098 return x == NULL && y == NULL;
31099 }
31100
31101 /* Hashtable helpers. */
31102
31103 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31104 {
31105 static inline hashval_t hash (const dw_loc_list_struct *);
31106 static inline bool equal (const dw_loc_list_struct *,
31107 const dw_loc_list_struct *);
31108 };
31109
31110 /* Return precomputed hash of location list X. */
31111
31112 inline hashval_t
31113 loc_list_hasher::hash (const dw_loc_list_struct *x)
31114 {
31115 return x->hash;
31116 }
31117
31118 /* Return true if location lists A and B are the same. */
31119
31120 inline bool
31121 loc_list_hasher::equal (const dw_loc_list_struct *a,
31122 const dw_loc_list_struct *b)
31123 {
31124 if (a == b)
31125 return 1;
31126 if (a->hash != b->hash)
31127 return 0;
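  /* The hashes match; compare the lists entry by entry: range labels,
     section, view numbers and the location expression itself.  */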
31128 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31129 if (strcmp (a->begin, b->begin) != 0
31130 || strcmp (a->end, b->end) != 0
31131 || (a->section == NULL) != (b->section == NULL)
31132 || (a->section && strcmp (a->section, b->section) != 0)
31133 || a->vbegin != b->vbegin || a->vend != b->vend
31134 || !compare_locs (a->expr, b->expr))
31135 break;
31136 return a == NULL && b == NULL;
31137 }
31138
31139 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31140
31141
31142 /* Recursively optimize location lists referenced from DIE
31143 children and share them whenever possible. */
31144
31145 static void
31146 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31147 {
31148 dw_die_ref c;
31149 dw_attr_node *a;
31150 unsigned ix;
31151 dw_loc_list_struct **slot;
31152 bool drop_locviews = false;
31153 bool has_locviews = false;
31154
31155 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31156 if (AT_class (a) == dw_val_class_loc_list)
31157 {
31158 dw_loc_list_ref list = AT_loc_list (a);
31159 /* TODO: perform some optimizations here, before hashing
31160 it and storing into the hash table. */
31161 hash_loc_list (list);
31162 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31163 if (*slot == NULL)
31164 {
31165 *slot = list;
31166 if (loc_list_has_views (list))
31167 gcc_assert (list->vl_symbol);
31168 else if (list->vl_symbol)
31169 {
31170 drop_locviews = true;
31171 list->vl_symbol = NULL;
31172 }
31173 }
31174 else
31175 {
31176 if (list->vl_symbol && !(*slot)->vl_symbol)
31177 drop_locviews = true;
31178 a->dw_attr_val.v.val_loc_list = *slot;
31179 }
31180 }
31181 else if (AT_class (a) == dw_val_class_view_list)
31182 {
31183 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31184 has_locviews = true;
31185 }
31186
31187
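  /* If any shared list lost its view-list symbol above, the
     DW_AT_GNU_locviews attribute can no longer be emitted for this DIE,
     so drop it.  */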
31188 if (drop_locviews && has_locviews)
31189 remove_AT (die, DW_AT_GNU_locviews);
31190
31191 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31192 }
31193
31194
31195 /* Recursively assign each location list a unique index into the debug_addr
31196 section. */
31197
31198 static void
31199 index_location_lists (dw_die_ref die)
31200 {
31201 dw_die_ref c;
31202 dw_attr_node *a;
31203 unsigned ix;
31204
31205 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31206 if (AT_class (a) == dw_val_class_loc_list)
31207 {
31208 dw_loc_list_ref list = AT_loc_list (a);
31209 dw_loc_list_ref curr;
31210 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31211 {
31212 /* Don't index an entry that has already been indexed
31213 or won't be output. Make sure skip_loc_list_entry doesn't
31214 call size_of_locs, because that might cause circular dependency,
31215 index_location_lists requiring address table indexes to be
31216 computed, but adding new indexes through add_addr_table_entry
31217 and address table index computation requiring no new additions
31218 to the hash table. In the rare case of DWARF[234] >= 64KB
31219 location expression, we'll just waste unused address table entry
31220 for it. */
31221 if (curr->begin_entry != NULL
31222 || skip_loc_list_entry (curr))
31223 continue;
31224
31225 curr->begin_entry
31226 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31227 }
31228 }
31229
31230 FOR_EACH_CHILD (die, c, index_location_lists (c));
31231 }
31232
31233 /* Optimize location lists referenced from DIE
31234 children and share them whenever possible. */
31235
31236 static void
31237 optimize_location_lists (dw_die_ref die)
31238 {
31239 loc_list_hash_type htab (500);
31240 optimize_location_lists_1 (die, &htab);
31241 }
31242 \f
31243 /* Traverse the limbo die list, and add parent/child links. The only
31244 dies without parents that should be here are concrete instances of
31245 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31246 For concrete instances, we can get the parent die from the abstract
31247 instance. */
31248
31249 static void
31250 flush_limbo_die_list (void)
31251 {
31252 limbo_die_node *node;
31253
31254 /* get_context_die calls force_decl_die, which can put new DIEs on the
31255 limbo list in LTO mode when nested functions are put in a different
31256 partition than that of their parent function. */
31257 while ((node = limbo_die_list))
31258 {
31259 dw_die_ref die = node->die;
31260 limbo_die_list = node->next;
31261
31262 if (die->die_parent == NULL)
31263 {
31264 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31265
31266 if (origin && origin->die_parent)
31267 add_child_die (origin->die_parent, die);
31268 else if (is_cu_die (die))
31269 ;
31270 else if (seen_error ())
31271 /* It's OK to be confused by errors in the input. */
31272 add_child_die (comp_unit_die (), die);
31273 else
31274 {
31275 /* In certain situations, the lexical block containing a
31276 nested function can be optimized away, which results
31277 in the nested function die being orphaned. Likewise
31278 with the return type of that nested function. Force
31279 this to be a child of the containing function.
31280
31281 It may happen that even the containing function got fully
31282 inlined and optimized out. In that case we are lost and
31283                 assign the empty child.  This should not be a big issue as
31284 the function is likely unreachable too. */
31285 gcc_assert (node->created_for);
31286
31287 if (DECL_P (node->created_for))
31288 origin = get_context_die (DECL_CONTEXT (node->created_for));
31289 else if (TYPE_P (node->created_for))
31290 origin = scope_die_for (node->created_for, comp_unit_die ());
31291 else
31292 origin = comp_unit_die ();
31293
31294 add_child_die (origin, die);
31295 }
31296 }
31297 }
31298 }
31299
31300 /* Reset DIEs so we can output them again. */
31301
31302 static void
31303 reset_dies (dw_die_ref die)
31304 {
31305 dw_die_ref c;
31306
31307 /* Remove stuff we re-generate. */
31308 die->die_mark = 0;
31309 die->die_offset = 0;
31310 die->die_abbrev = 0;
31311 remove_AT (die, DW_AT_sibling);
31312
31313 FOR_EACH_CHILD (die, c, reset_dies (c));
31314 }
31315
31316 /* Output stuff that dwarf requires at the end of every file,
31317 and generate the DWARF-2 debugging info. */
31318
31319 static void
31320 dwarf2out_finish (const char *filename)
31321 {
31322 comdat_type_node *ctnode;
31323 dw_die_ref main_comp_unit_die;
31324 unsigned char checksum[16];
31325 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31326
31327 /* Flush out any latecomers to the limbo party. */
31328 flush_limbo_die_list ();
31329
31330 if (inline_entry_data_table)
31331 gcc_assert (inline_entry_data_table->is_empty ());
31332
31333 if (flag_checking)
31334 {
31335 verify_die (comp_unit_die ());
31336 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31337 verify_die (node->die);
31338 }
31339
31340 /* We shouldn't have any symbols with delayed asm names for
31341 DIEs generated after early finish. */
31342 gcc_assert (deferred_asm_name == NULL);
31343
31344 gen_remaining_tmpl_value_param_die_attribute ();
31345
31346 if (flag_generate_lto || flag_generate_offload)
31347 {
31348 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31349
31350 /* Prune stuff so that dwarf2out_finish runs successfully
31351 for the fat part of the object. */
31352 reset_dies (comp_unit_die ());
31353 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31354 reset_dies (node->die);
31355
31356 hash_table<comdat_type_hasher> comdat_type_table (100);
31357 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31358 {
31359 comdat_type_node **slot
31360 = comdat_type_table.find_slot (ctnode, INSERT);
31361
31362 /* Don't reset types twice. */
31363 if (*slot != HTAB_EMPTY_ENTRY)
31364 continue;
31365
31366 /* Remove the pointer to the line table. */
31367 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31368
31369 if (debug_info_level >= DINFO_LEVEL_TERSE)
31370 reset_dies (ctnode->root_die);
31371
31372 *slot = ctnode;
31373 }
31374
31375 /* Reset die CU symbol so we don't output it twice. */
31376 comp_unit_die ()->die_id.die_symbol = NULL;
31377
31378 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31379 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31380 if (have_macinfo)
31381 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31382
31383 /* Remove indirect string decisions. */
31384 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31385 if (debug_line_str_hash)
31386 {
31387 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31388 debug_line_str_hash = NULL;
31389 }
31390 }
31391
31392 #if ENABLE_ASSERT_CHECKING
31393 {
31394 dw_die_ref die = comp_unit_die (), c;
31395 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31396 }
31397 #endif
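  /* Resolve address and DIE references inside location expressions now
     that all DIEs exist; location attributes that cannot be resolved are
     removed.  */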
31398 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31399 resolve_addr (ctnode->root_die);
31400 resolve_addr (comp_unit_die ());
31401 move_marked_base_types ();
31402
31403 if (dump_file)
31404 {
31405 fprintf (dump_file, "DWARF for %s\n", filename);
31406 print_die (comp_unit_die (), dump_file);
31407 }
31408
31409 /* Initialize sections and labels used for actual assembler output. */
31410 unsigned generation = init_sections_and_labels (false);
31411
31412   /* Traverse the DIEs and add sibling attributes to those DIEs that
31413 have children. */
31414 add_sibling_attributes (comp_unit_die ());
31415 limbo_die_node *node;
31416 for (node = cu_die_list; node; node = node->next)
31417 add_sibling_attributes (node->die);
31418 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31419 add_sibling_attributes (ctnode->root_die);
31420
31421 /* When splitting DWARF info, we put some attributes in the
31422 skeleton compile_unit DIE that remains in the .o, while
31423 most attributes go in the DWO compile_unit_die. */
31424 if (dwarf_split_debug_info)
31425 {
31426 limbo_die_node *cu;
31427 main_comp_unit_die = gen_compile_unit_die (NULL);
31428 if (dwarf_version >= 5)
31429 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
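      /* gen_compile_unit_die placed the new skeleton DIE on the limbo
         list; move it over to cu_die_list.  */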
31430 cu = limbo_die_list;
31431 gcc_assert (cu->die == main_comp_unit_die);
31432 limbo_die_list = limbo_die_list->next;
31433 cu->next = cu_die_list;
31434 cu_die_list = cu;
31435 }
31436 else
31437 main_comp_unit_die = comp_unit_die ();
31438
31439 /* Output a terminator label for the .text section. */
31440 switch_to_section (text_section);
31441 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31442 if (cold_text_section)
31443 {
31444 switch_to_section (cold_text_section);
31445 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31446 }
31447
31448 /* We can only use the low/high_pc attributes if all of the code was
31449 in .text. */
31450 if (!have_multiple_function_sections
31451 || (dwarf_version < 3 && dwarf_strict))
31452 {
31453 /* Don't add if the CU has no associated code. */
31454 if (text_section_used)
31455 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31456 text_end_label, true);
31457 }
31458 else
31459 {
31460 unsigned fde_idx;
31461 dw_fde_ref fde;
31462 bool range_list_added = false;
31463
31464 if (text_section_used)
31465 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31466 text_end_label, &range_list_added, true);
31467 if (cold_text_section_used)
31468 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31469 cold_end_label, &range_list_added, true);
31470
31471 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31472 {
31473 if (DECL_IGNORED_P (fde->decl))
31474 continue;
31475 if (!fde->in_std_section)
31476 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31477 fde->dw_fde_end, &range_list_added,
31478 true);
31479 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31480 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31481 fde->dw_fde_second_end, &range_list_added,
31482 true);
31483 }
31484
31485 if (range_list_added)
31486 {
31487 /* We need to give .debug_loc and .debug_ranges an appropriate
31488 "base address". Use zero so that these addresses become
31489 absolute. Historically, we've emitted the unexpected
31490 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31491 Emit both to give time for other tools to adapt. */
31492 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31493 if (! dwarf_strict && dwarf_version < 4)
31494 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31495
31496 add_ranges (NULL);
31497 }
31498 }
31499
31500 /* AIX Assembler inserts the length, so adjust the reference to match the
31501 offset expected by debuggers. */
31502 strcpy (dl_section_ref, debug_line_section_label);
31503 if (XCOFF_DEBUGGING_INFO)
31504 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31505
31506 if (debug_info_level >= DINFO_LEVEL_TERSE)
31507 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31508 dl_section_ref);
31509
31510 if (have_macinfo)
31511 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31512 macinfo_section_label);
31513
31514 if (dwarf_split_debug_info)
31515 {
31516 if (have_location_lists)
31517 {
31518 /* Since we generate the loclists in the split DWARF .dwo
31519 file itself, we don't need to generate a loclists_base
31520 attribute for the split compile unit DIE. That attribute
31521 (and using relocatable sec_offset FORMs) isn't allowed
31522 for a split compile unit. Only if the .debug_loclists
31523 section was in the main file, would we need to generate a
31524 loclists_base attribute here (for the full or skeleton
31525 unit DIE). */
31526
31527 /* optimize_location_lists calculates the size of the lists,
31528 so index them first, and assign indices to the entries.
31529 Although optimize_location_lists will remove entries from
31530 the table, it only does so for duplicates, and therefore
31531 only reduces ref_counts to 1. */
31532 index_location_lists (comp_unit_die ());
31533 }
31534
31535 if (addr_index_table != NULL)
31536 {
31537 unsigned int index = 0;
31538 addr_index_table
31539 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31540 (&index);
31541 }
31542 }
31543
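  /* loc_list_idx counts location list offset table entries for DWARF 5;
     restart the count for this unit.  */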
31544 loc_list_idx = 0;
31545 if (have_location_lists)
31546 {
31547 optimize_location_lists (comp_unit_die ());
31548 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31549 if (dwarf_version >= 5 && dwarf_split_debug_info)
31550 assign_location_list_indexes (comp_unit_die ());
31551 }
31552
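  /* Register strings referenced from the macro info with the string
     table before it is indexed and output.  */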
31553 save_macinfo_strings ();
31554
31555 if (dwarf_split_debug_info)
31556 {
31557 unsigned int index = 0;
31558
31559 /* Add attributes common to skeleton compile_units and
31560 type_units. Because these attributes include strings, it
31561 must be done before freezing the string table. Top-level
31562 skeleton die attrs are added when the skeleton type unit is
31563 created, so ensure it is created by this point. */
31564 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31565 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31566 }
31567
31568 /* Output all of the compilation units. We put the main one last so that
31569 the offsets are available to output_pubnames. */
31570 for (node = cu_die_list; node; node = node->next)
31571 output_comp_unit (node->die, 0, NULL);
31572
31573 hash_table<comdat_type_hasher> comdat_type_table (100);
31574 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31575 {
31576 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31577
31578 /* Don't output duplicate types. */
31579 if (*slot != HTAB_EMPTY_ENTRY)
31580 continue;
31581
31582 /* Add a pointer to the line table for the main compilation unit
31583 so that the debugger can make sense of DW_AT_decl_file
31584 attributes. */
31585 if (debug_info_level >= DINFO_LEVEL_TERSE)
31586 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31587 (!dwarf_split_debug_info
31588 ? dl_section_ref
31589 : debug_skeleton_line_section_label));
31590
31591 output_comdat_type_unit (ctnode, false);
31592 *slot = ctnode;
31593 }
31594
31595 if (dwarf_split_debug_info)
31596 {
31597 int mark;
31598 struct md5_ctx ctx;
31599
31600 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31601 index_rnglists ();
31602
31603 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31604 md5_init_ctx (&ctx);
31605 mark = 0;
31606 die_checksum (comp_unit_die (), &ctx, &mark);
31607 unmark_all_dies (comp_unit_die ());
31608 md5_finish_ctx (&ctx, checksum);
31609
31610 if (dwarf_version < 5)
31611 {
31612 /* Use the first 8 bytes of the checksum as the dwo_id,
31613 and add it to both comp-unit DIEs. */
31614 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31615 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31616 }
31617
31618 /* Add the base offset of the ranges table to the skeleton
31619 comp-unit DIE. */
31620 if (!vec_safe_is_empty (ranges_table))
31621 {
31622 if (dwarf_version >= 5)
31623 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31624 ranges_base_label);
31625 else
31626 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31627 ranges_section_label);
31628 }
31629
31630 output_addr_table ();
31631 }
31632
31633 /* Output the main compilation unit if non-empty or if .debug_macinfo
31634 or .debug_macro will be emitted. */
31635 output_comp_unit (comp_unit_die (), have_macinfo,
31636 dwarf_split_debug_info ? checksum : NULL);
31637
31638 if (dwarf_split_debug_info && info_section_emitted)
31639 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31640
31641 /* Output the abbreviation table. */
31642 if (vec_safe_length (abbrev_die_table) != 1)
31643 {
31644 switch_to_section (debug_abbrev_section);
31645 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31646 output_abbrev_section ();
31647 }
31648
31649 /* Output location list section if necessary. */
31650 if (have_location_lists)
31651 {
31652 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31653 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31654 /* Output the location lists info. */
31655 switch_to_section (debug_loc_section);
31656 if (dwarf_version >= 5)
31657 {
31658 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31659 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31660 if (DWARF_INITIAL_LENGTH_SIZE - dwarf_offset_size == 4)
31661 dw2_asm_output_data (4, 0xffffffff,
31662 "Initial length escape value indicating "
31663 "64-bit DWARF extension");
31664 dw2_asm_output_delta (dwarf_offset_size, l2, l1,
31665 "Length of Location Lists");
31666 ASM_OUTPUT_LABEL (asm_out_file, l1);
31667 output_dwarf_version ();
31668 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31669 dw2_asm_output_data (1, 0, "Segment Size");
31670 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31671 "Offset Entry Count");
31672 }
31673 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31674 if (dwarf_version >= 5 && dwarf_split_debug_info)
31675 {
31676 unsigned int save_loc_list_idx = loc_list_idx;
31677 loc_list_idx = 0;
31678 output_loclists_offsets (comp_unit_die ());
31679 gcc_assert (save_loc_list_idx == loc_list_idx);
31680 }
31681 output_location_lists (comp_unit_die ());
31682 if (dwarf_version >= 5)
31683 ASM_OUTPUT_LABEL (asm_out_file, l2);
31684 }
31685
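  /* Emit the .debug_pubnames and .debug_pubtypes sections if they were
     requested.  */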
31686 output_pubtables ();
31687
31688 /* Output the address range information if a CU (.debug_info section)
31689 was emitted. We output an empty table even if we had no functions
31690      to put in it.  This is because the consumer has no way to tell the
31691 difference between an empty table that we omitted and failure to
31692 generate a table that would have contained data. */
31693 if (info_section_emitted)
31694 {
31695 switch_to_section (debug_aranges_section);
31696 output_aranges ();
31697 }
31698
31699 /* Output ranges section if necessary. */
31700 if (!vec_safe_is_empty (ranges_table))
31701 {
31702 if (dwarf_version >= 5)
31703 output_rnglists (generation);
31704 else
31705 output_ranges ();
31706 }
31707
31708 /* Have to end the macro section. */
31709 if (have_macinfo)
31710 {
31711 switch_to_section (debug_macinfo_section);
31712 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31713 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31714 : debug_skeleton_line_section_label, false);
31715 dw2_asm_output_data (1, 0, "End compilation unit");
31716 }
31717
31718 /* Output the source line correspondence table. We must do this
31719 even if there is no line information. Otherwise, on an empty
31720 translation unit, we will generate a present, but empty,
31721 .debug_info section. IRIX 6.5 `nm' will then complain when
31722 examining the file. This is done late so that any filenames
31723 used by the debug_info section are marked as 'used'. */
31724 switch_to_section (debug_line_section);
31725 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31726 if (! output_asm_line_debug_info ())
31727 output_line_info (false);
31728 else if (asm_outputs_debug_line_str ())
31729 {
31730 /* When gas outputs DWARF5 .debug_line[_str] then we have to
31731 tell it the comp_dir and main file name for the zero entry
31732 line table. */
31733 const char *comp_dir, *filename0;
31734
31735 comp_dir = comp_dir_string ();
31736 if (comp_dir == NULL)
31737 comp_dir = "";
31738
31739 filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
31740 if (filename0 == NULL)
31741 filename0 = "";
31742
31743 fprintf (asm_out_file, "\t.file 0 ");
31744 output_quoted_string (asm_out_file, remap_debug_filename (comp_dir));
31745 fputc (' ', asm_out_file);
31746 output_quoted_string (asm_out_file, remap_debug_filename (filename0));
31747 fputc ('\n', asm_out_file);
31748 }
31749
31750 if (dwarf_split_debug_info && info_section_emitted)
31751 {
31752 switch_to_section (debug_skeleton_line_section);
31753 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31754 output_line_info (true);
31755 }
31756
31757 /* If we emitted any indirect strings, output the string table too. */
31758 if (debug_str_hash || skeleton_debug_str_hash)
31759 output_indirect_strings ();
31760 if (debug_line_str_hash)
31761 {
31762 switch_to_section (debug_line_str_section);
31763 const enum dwarf_form form = DW_FORM_line_strp;
31764 debug_line_str_hash->traverse<enum dwarf_form,
31765 output_indirect_string> (form);
31766 }
31767
31768 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31769 symview_upper_bound = 0;
31770 if (zero_view_p)
31771 bitmap_clear (zero_view_p);
31772 }
31773
31774 /* Returns a hash value for X (which really is a variable_value_struct). */
31775
31776 inline hashval_t
31777 variable_value_hasher::hash (variable_value_struct *x)
31778 {
31779 return (hashval_t) x->decl_id;
31780 }
31781
31782 /* Return nonzero if decl_id of variable_value_struct X is the same as
31783 UID of decl Y. */
31784
31785 inline bool
31786 variable_value_hasher::equal (variable_value_struct *x, tree y)
31787 {
31788 return x->decl_id == DECL_UID (y);
31789 }
31790
31791 /* Helper function for resolve_variable_value, handle
31792 DW_OP_GNU_variable_value in one location expression.
31793 Return true if exprloc has been changed into loclist. */
31794
31795 static bool
31796 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31797 {
31798 dw_loc_descr_ref next;
31799 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31800 {
31801 next = loc->dw_loc_next;
31802 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31803 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31804 continue;
31805
31806 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31807 if (DECL_CONTEXT (decl) != current_function_decl)
31808 continue;
31809
31810 dw_die_ref ref = lookup_decl_die (decl);
31811 if (ref)
31812 {
31813 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31814 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31815 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31816 continue;
31817 }
31818 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31819 if (l == NULL)
31820 continue;
31821 if (l->dw_loc_next)
31822 {
31823 if (AT_class (a) != dw_val_class_loc)
31824 continue;
31825 switch (a->dw_attr)
31826 {
31827 /* Following attributes allow both exprloc and loclist
31828 classes, so we can change them into a loclist. */
31829 case DW_AT_location:
31830 case DW_AT_string_length:
31831 case DW_AT_return_addr:
31832 case DW_AT_data_member_location:
31833 case DW_AT_frame_base:
31834 case DW_AT_segment:
31835 case DW_AT_static_link:
31836 case DW_AT_use_location:
31837 case DW_AT_vtable_elem_location:
31838 if (prev)
31839 {
31840 prev->dw_loc_next = NULL;
31841 prepend_loc_descr_to_each (l, AT_loc (a));
31842 }
31843 if (next)
31844 add_loc_descr_to_each (l, next);
31845 a->dw_attr_val.val_class = dw_val_class_loc_list;
31846 a->dw_attr_val.val_entry = NULL;
31847 a->dw_attr_val.v.val_loc_list = l;
31848 have_location_lists = true;
31849 return true;
31850 /* Following attributes allow both exprloc and reference,
31851 so if the whole expression is DW_OP_GNU_variable_value alone
31852 we could transform it into reference. */
31853 case DW_AT_byte_size:
31854 case DW_AT_bit_size:
31855 case DW_AT_lower_bound:
31856 case DW_AT_upper_bound:
31857 case DW_AT_bit_stride:
31858 case DW_AT_count:
31859 case DW_AT_allocated:
31860 case DW_AT_associated:
31861 case DW_AT_byte_stride:
31862 if (prev == NULL && next == NULL)
31863 break;
31864 /* FALLTHRU */
31865 default:
31866 if (dwarf_strict)
31867 continue;
31868 break;
31869 }
31870 /* Create DW_TAG_variable that we can refer to. */
31871 gen_decl_die (decl, NULL_TREE, NULL,
31872 lookup_decl_die (current_function_decl));
31873 ref = lookup_decl_die (decl);
31874 if (ref)
31875 {
31876 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31877 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31878 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31879 }
31880 continue;
31881 }
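      /* The location is a single expression; splice it into the containing
         expression in place of the DW_OP_GNU_variable_value operation.  */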
31882 if (prev)
31883 {
31884 prev->dw_loc_next = l->expr;
31885 add_loc_descr (&prev->dw_loc_next, next);
31886 free_loc_descr (loc, NULL);
31887 next = prev->dw_loc_next;
31888 }
31889 else
31890 {
31891 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31892 add_loc_descr (&loc, next);
31893 next = loc;
31894 }
31895 loc = prev;
31896 }
31897 return false;
31898 }
31899
31900 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31901
31902 static void
31903 resolve_variable_value (dw_die_ref die)
31904 {
31905 dw_attr_node *a;
31906 dw_loc_list_ref loc;
31907 unsigned ix;
31908
31909 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31910 switch (AT_class (a))
31911 {
31912 case dw_val_class_loc:
31913 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31914 break;
31915 /* FALLTHRU */
31916 case dw_val_class_loc_list:
31917 loc = AT_loc_list (a);
31918 gcc_assert (loc);
31919 for (; loc; loc = loc->dw_loc_next)
31920 resolve_variable_value_in_expr (a, loc->expr);
31921 break;
31922 default:
31923 break;
31924 }
31925 }
31926
31927 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31928 temporaries in the current function. */
31929
31930 static void
31931 resolve_variable_values (void)
31932 {
31933 if (!variable_value_hash || !current_function_decl)
31934 return;
31935
31936 struct variable_value_struct *node
31937 = variable_value_hash->find_with_hash (current_function_decl,
31938 DECL_UID (current_function_decl));
31939
31940 if (node == NULL)
31941 return;
31942
31943 unsigned int i;
31944 dw_die_ref die;
31945 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31946 resolve_variable_value (die);
31947 }
31948
31949 /* Helper function for note_variable_value, handle one location
31950 expression. */
31951
31952 static void
31953 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31954 {
31955 for (; loc; loc = loc->dw_loc_next)
31956 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31957 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31958 {
31959 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31960 dw_die_ref ref = lookup_decl_die (decl);
31961 if (! ref && (flag_generate_lto || flag_generate_offload))
31962 {
31963 /* ??? This is somewhat a hack because we do not create DIEs
31964            for variables not in BLOCK trees early, but when generating
31965 early LTO output we need the dw_val_class_decl_ref to be
31966 fully resolved. For fat LTO objects we'd also like to
31967 undo this after LTO dwarf output. */
31968 gcc_assert (DECL_CONTEXT (decl));
31969 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31970 gcc_assert (ctx != NULL);
31971 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31972 ref = lookup_decl_die (decl);
31973 gcc_assert (ref != NULL);
31974 }
31975 if (ref)
31976 {
31977 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31978 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31979 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31980 continue;
31981 }
31982 if (VAR_P (decl)
31983 && DECL_CONTEXT (decl)
31984 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31985 && lookup_decl_die (DECL_CONTEXT (decl)))
31986 {
31987 if (!variable_value_hash)
31988 variable_value_hash
31989 = hash_table<variable_value_hasher>::create_ggc (10);
31990
31991 tree fndecl = DECL_CONTEXT (decl);
31992 struct variable_value_struct *node;
31993 struct variable_value_struct **slot
31994 = variable_value_hash->find_slot_with_hash (fndecl,
31995 DECL_UID (fndecl),
31996 INSERT);
31997 if (*slot == NULL)
31998 {
31999 node = ggc_cleared_alloc<variable_value_struct> ();
32000 node->decl_id = DECL_UID (fndecl);
32001 *slot = node;
32002 }
32003 else
32004 node = *slot;
32005
32006 vec_safe_push (node->dies, die);
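          /* Remember the DIE so that resolve_variable_values can retry the
             lookup while FNDECL is the current function.  */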
32007 }
32008 }
32009 }
32010
32011 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
32012 with dw_val_class_decl_ref operand. */
32013
32014 static void
32015 note_variable_value (dw_die_ref die)
32016 {
32017 dw_die_ref c;
32018 dw_attr_node *a;
32019 dw_loc_list_ref loc;
32020 unsigned ix;
32021
32022 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
32023 switch (AT_class (a))
32024 {
32025 case dw_val_class_loc_list:
32026 loc = AT_loc_list (a);
32027 gcc_assert (loc);
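      /* A location list may be referenced from more than one DIE; the
         noted_variable_value flag makes sure each list is walked only
         once.  */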
32028 if (!loc->noted_variable_value)
32029 {
32030 loc->noted_variable_value = 1;
32031 for (; loc; loc = loc->dw_loc_next)
32032 note_variable_value_in_expr (die, loc->expr);
32033 }
32034 break;
32035 case dw_val_class_loc:
32036 note_variable_value_in_expr (die, AT_loc (a));
32037 break;
32038 default:
32039 break;
32040 }
32041
32042 /* Mark children. */
32043 FOR_EACH_CHILD (die, c, note_variable_value (c));
32044 }
32045
32046 /* Perform any cleanups needed after the early debug generation pass
32047 has run. */
32048
32049 static void
32050 dwarf2out_early_finish (const char *filename)
32051 {
32052 set_early_dwarf s;
32053 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
32054
32055 /* PCH might result in DW_AT_producer string being restored from the
32056      header compilation, so always fill it with an empty string initially
32057      and overwrite it only here.  */
32058 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
32059
32060 if (dwarf_record_gcc_switches)
32061 producer_string = gen_producer_string (lang_hooks.name,
32062 save_decoded_options,
32063 save_decoded_options_count);
32064 else
32065 producer_string = concat (lang_hooks.name, " ", version_string, NULL);
32066
32067 producer->dw_attr_val.v.val_str->refcount--;
32068 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
32069
32070 /* Add the name for the main input file now. We delayed this from
32071 dwarf2out_init to avoid complications with PCH. */
32072 add_filename_attribute (comp_unit_die (), remap_debug_filename (filename));
32073 add_comp_dir_attribute (comp_unit_die ());
32074
32075 /* With LTO early dwarf was really finished at compile-time, so make
32076 sure to adjust the phase after annotating the LTRANS CU DIE. */
32077 if (in_lto_p)
32078 {
32079 early_dwarf_finished = true;
32080 if (dump_file)
32081 {
32082 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32083 print_die (comp_unit_die (), dump_file);
32084 }
32085 return;
32086 }
32087
32088 /* Walk through the list of incomplete types again, trying once more to
32089 emit full debugging info for them. */
32090 retry_incomplete_types ();
32091
32092 gen_scheduled_generic_parms_dies ();
32093 gen_remaining_tmpl_value_param_die_attribute ();
32094
32095 /* The point here is to flush out the limbo list so that it is empty
32096 and we don't need to stream it for LTO. */
32097 flush_limbo_die_list ();
32098
32099 /* Add DW_AT_linkage_name for all deferred DIEs. */
32100 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32101 {
32102 tree decl = node->created_for;
32103 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32104 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32105 ended up in deferred_asm_name before we knew it was
32106 constant and never written to disk. */
32107 && DECL_ASSEMBLER_NAME (decl))
32108 {
32109 add_linkage_attr (node->die, decl);
32110 move_linkage_attr (node->die);
32111 }
32112 }
32113 deferred_asm_name = NULL;
32114
32115 if (flag_eliminate_unused_debug_types)
32116 prune_unused_types ();
32117
32118 /* Generate separate COMDAT sections for type DIEs. */
32119 if (use_debug_types)
32120 {
32121 break_out_comdat_types (comp_unit_die ());
32122
32123 /* Each new type_unit DIE was added to the limbo die list when created.
32124 Since these have all been added to comdat_type_list, clear the
32125 limbo die list. */
32126 limbo_die_list = NULL;
32127
32128 /* For each new comdat type unit, copy declarations for incomplete
32129 types to make the new unit self-contained (i.e., no direct
32130 references to the main compile unit). */
32131 for (comdat_type_node *ctnode = comdat_type_list;
32132 ctnode != NULL; ctnode = ctnode->next)
32133 copy_decls_for_unworthy_types (ctnode->root_die);
32134 copy_decls_for_unworthy_types (comp_unit_die ());
32135
32136 /* In the process of copying declarations from one unit to another,
32137 we may have left some declarations behind that are no longer
32138 referenced. Prune them. */
32139 prune_unused_types ();
32140 }
32141
32142   /* Traverse the DIEs and note DIEs with DW_OP_GNU_variable_value still
32143 with dw_val_class_decl_ref operand. */
32144 note_variable_value (comp_unit_die ());
32145 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32146 note_variable_value (node->die);
32147 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32148 ctnode = ctnode->next)
32149 note_variable_value (ctnode->root_die);
32150 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32151 note_variable_value (node->die);
32152
32153 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32154 both the main_cu and all skeleton TUs. Making this call unconditional
32155 would end up either adding a second copy of the AT_pubnames attribute, or
32156 requiring a special case in add_top_level_skeleton_die_attrs. */
32157 if (!dwarf_split_debug_info)
32158 add_AT_pubnames (comp_unit_die ());
32159
32160 /* The early debug phase is now finished. */
32161 early_dwarf_finished = true;
32162 if (dump_file)
32163 {
32164 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32165 print_die (comp_unit_die (), dump_file);
32166 }
32167
32168 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32169 if ((!flag_generate_lto && !flag_generate_offload)
32170 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32171 copy_lto_debug_sections operation of the simple object support in
32172 libiberty is not implemented for them yet. */
32173 || TARGET_PECOFF || TARGET_COFF)
32174 return;
32175
32176   /* Now that we are going to output for LTO, initialize sections and
32177      labels to the LTO variants.  We don't need a random-seed postfix
32178      like other LTO sections, since linking the LTO debug sections into
32179      one in a partial link is fine.  */
32180 init_sections_and_labels (true);
32181
32182 /* The output below is modeled after dwarf2out_finish with all
32183 location related output removed and some LTO specific changes.
32184 Some refactoring might make both smaller and easier to match up. */
32185
32186   /* Traverse the DIEs and add sibling attributes to those DIEs
32187 that have children. */
32188 add_sibling_attributes (comp_unit_die ());
32189 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32190 add_sibling_attributes (node->die);
32191 for (comdat_type_node *ctnode = comdat_type_list;
32192 ctnode != NULL; ctnode = ctnode->next)
32193 add_sibling_attributes (ctnode->root_die);
32194
32195 /* AIX Assembler inserts the length, so adjust the reference to match the
32196 offset expected by debuggers. */
32197 strcpy (dl_section_ref, debug_line_section_label);
32198 if (XCOFF_DEBUGGING_INFO)
32199 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32200
32201 if (debug_info_level >= DINFO_LEVEL_TERSE)
32202 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32203
32204 if (have_macinfo)
32205 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32206 macinfo_section_label);
32207
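  /* As in dwarf2out_finish, register strings referenced from the macro
     info with the string table.  */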
32208 save_macinfo_strings ();
32209
32210 if (dwarf_split_debug_info)
32211 {
32212 unsigned int index = 0;
32213 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32214 }
32215
32216 /* Output all of the compilation units. We put the main one last so that
32217 the offsets are available to output_pubnames. */
32218 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32219 output_comp_unit (node->die, 0, NULL);
32220
32221 hash_table<comdat_type_hasher> comdat_type_table (100);
32222 for (comdat_type_node *ctnode = comdat_type_list;
32223 ctnode != NULL; ctnode = ctnode->next)
32224 {
32225 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32226
32227 /* Don't output duplicate types. */
32228 if (*slot != HTAB_EMPTY_ENTRY)
32229 continue;
32230
32231 /* Add a pointer to the line table for the main compilation unit
32232 so that the debugger can make sense of DW_AT_decl_file
32233 attributes. */
32234 if (debug_info_level >= DINFO_LEVEL_TERSE)
32235 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32236 (!dwarf_split_debug_info
32237 ? debug_line_section_label
32238 : debug_skeleton_line_section_label));
32239
32240 output_comdat_type_unit (ctnode, true);
32241 *slot = ctnode;
32242 }
32243
32244 /* Stick a unique symbol to the main debuginfo section. */
32245 compute_comp_unit_symbol (comp_unit_die ());
32246
32247 /* Output the main compilation unit. We always need it if only for
32248 the CU symbol. */
32249 output_comp_unit (comp_unit_die (), true, NULL);
32250
32251 /* Output the abbreviation table. */
32252 if (vec_safe_length (abbrev_die_table) != 1)
32253 {
32254 switch_to_section (debug_abbrev_section);
32255 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32256 output_abbrev_section ();
32257 }
32258
32259 /* Have to end the macro section. */
32260 if (have_macinfo)
32261 {
32262 /* We have to save macinfo state if we need to output it again
32263 for the FAT part of the object. */
32264 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32265 if (flag_fat_lto_objects)
32266 macinfo_table = macinfo_table->copy ();
32267
32268 switch_to_section (debug_macinfo_section);
32269 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32270 output_macinfo (debug_line_section_label, true);
32271 dw2_asm_output_data (1, 0, "End compilation unit");
32272
32273 if (flag_fat_lto_objects)
32274 {
32275 vec_free (macinfo_table);
32276 macinfo_table = saved_macinfo_table;
32277 }
32278 }
32279
32280 /* Emit a skeleton debug_line section. */
32281 switch_to_section (debug_line_section);
32282 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32283 output_line_info (true);
32284
32285 /* If we emitted any indirect strings, output the string table too. */
32286 if (debug_str_hash || skeleton_debug_str_hash)
32287 output_indirect_strings ();
32288 if (debug_line_str_hash)
32289 {
32290 switch_to_section (debug_line_str_section);
32291 const enum dwarf_form form = DW_FORM_line_strp;
32292 debug_line_str_hash->traverse<enum dwarf_form,
32293 output_indirect_string> (form);
32294 }
32295
32296 /* Switch back to the text section. */
32297 switch_to_section (text_section);
32298 }
32299
32300 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32301 within the same process. For use by toplev::finalize. */
32302
32303 void
32304 dwarf2out_c_finalize (void)
32305 {
32306 last_var_location_insn = NULL;
32307 cached_next_real_insn = NULL;
32308 used_rtx_array = NULL;
32309 incomplete_types = NULL;
32310 debug_info_section = NULL;
32311 debug_skeleton_info_section = NULL;
32312 debug_abbrev_section = NULL;
32313 debug_skeleton_abbrev_section = NULL;
32314 debug_aranges_section = NULL;
32315 debug_addr_section = NULL;
32316 debug_macinfo_section = NULL;
32317 debug_line_section = NULL;
32318 debug_skeleton_line_section = NULL;
32319 debug_loc_section = NULL;
32320 debug_pubnames_section = NULL;
32321 debug_pubtypes_section = NULL;
32322 debug_str_section = NULL;
32323 debug_line_str_section = NULL;
32324 debug_str_dwo_section = NULL;
32325 debug_str_offsets_section = NULL;
32326 debug_ranges_section = NULL;
32327 debug_frame_section = NULL;
32328 fde_vec = NULL;
32329 debug_str_hash = NULL;
32330 debug_line_str_hash = NULL;
32331 skeleton_debug_str_hash = NULL;
32332 dw2_string_counter = 0;
32333 have_multiple_function_sections = false;
32334 text_section_used = false;
32335 cold_text_section_used = false;
32336 cold_text_section = NULL;
32337 current_unit_personality = NULL;
32338
32339 early_dwarf = false;
32340 early_dwarf_finished = false;
32341
32342 next_die_offset = 0;
32343 single_comp_unit_die = NULL;
32344 comdat_type_list = NULL;
32345 limbo_die_list = NULL;
32346 file_table = NULL;
32347 decl_die_table = NULL;
32348 common_block_die_table = NULL;
32349 decl_loc_table = NULL;
32350 call_arg_locations = NULL;
32351 call_arg_loc_last = NULL;
32352 call_site_count = -1;
32353 tail_call_site_count = -1;
32354 cached_dw_loc_list_table = NULL;
32355 abbrev_die_table = NULL;
32356 delete dwarf_proc_stack_usage_map;
32357 dwarf_proc_stack_usage_map = NULL;
32358 line_info_label_num = 0;
32359 cur_line_info_table = NULL;
32360 text_section_line_info = NULL;
32361 cold_text_section_line_info = NULL;
32362 separate_line_info = NULL;
32363 info_section_emitted = false;
32364 pubname_table = NULL;
32365 pubtype_table = NULL;
32366 macinfo_table = NULL;
32367 ranges_table = NULL;
32368 ranges_by_label = NULL;
32369 rnglist_idx = 0;
32370 have_location_lists = false;
32371 loclabel_num = 0;
32372 poc_label_num = 0;
32373 last_emitted_file = NULL;
32374 label_num = 0;
32375 tmpl_value_parm_die_table = NULL;
32376 generic_type_instances = NULL;
32377 frame_pointer_fb_offset = 0;
32378 frame_pointer_fb_offset_valid = false;
32379 base_types.release ();
32380 XDELETEVEC (producer_string);
32381 producer_string = NULL;
32382 }
32383
32384 #include "gt-dwarf2out.h"