[LVU] reset view at function entry, omit views at line zero
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
  47        information that is common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
 148    completed at some later time.  incomplete_types needs to be a
 149    vec<tree, va_gc> * because we want to tell the garbage collector
 150    about it.  */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
 157    define type declaration DIEs.  */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
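/* As an illustrative sketch of the layouts described above: a 32-bit
   DWARF unit starts with a single 4-byte length, whereas a 64-bit DWARF
   unit starts with the 4-byte escape value 0xffffffff followed by an
   8-byte length, giving initial length sizes of 4 and 12 respectively.  */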
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
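/* For example (illustrative values), DWARF_ROUND (5, 4) and
   DWARF_ROUND (8, 4) both evaluate to 8.  */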
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
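/* A node for a string that may be emitted (directly or indirectly) in
   .debug_str or .debug_line_str: the string itself, the number of
   references to it, the DW_FORM eventually chosen for it, and the label
   and index assigned once that choice is made.  */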
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
 254    Rather than forcing all the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
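/* E.g. with a 64-bit HOST_WIDE_INT, a value needing at most 64 bits of
   precision occupies one element, while a 65-bit value needs two
   (illustrative figures).  */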
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697 do it now. This is typically performed to make sure that tables
698 associated with functions are dragged with them and not discarded in
699 garbage collecting links. We need to do this on a per function basis to
700 cope with -ffunction-sections. */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
 739 /* Output the call frame information, which records how to
 740    calculate the frame pointer and where the saved
 741    registers are located.  */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
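      /* As an illustrative example: with a personality routine, LSDA
	 references and a non-absolute FDE encoding, the augmentation
	 string becomes "zPLR", and the augmentation data that follows
	 holds the personality encoding and pointer, the LSDA encoding
	 and the FDE encoding, in that order.  */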
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942 /* Loop through all of the FDE's. */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
 1132   /* Output a label to mark the endpoint of the prologue code generated
 1133      for this function.  */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
 1142    for a function epilogue.  This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
 1155   /* Output a label to mark the start of the epilogue code generated for
 1156      this function.  */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
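/* A location view number; it distinguishes the different states a
   variable's location can go through at a single code address, so that
   location changes that emit no instructions can still be described.  */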
1294 typedef unsigned int var_loc_view;
1295
1296 /* Location lists are ranges + location descriptions for that range,
1297 so you can track variables that are in different places over
1298 their entire life. */
1299 typedef struct GTY(()) dw_loc_list_struct {
1300 dw_loc_list_ref dw_loc_next;
1301 const char *begin; /* Label and addr_entry for start of range */
1302 addr_table_entry *begin_entry;
1303 const char *end; /* Label for end of range */
1304 char *ll_symbol; /* Label for beginning of location list.
1305 Only on head of list. */
1306 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1307 const char *section; /* Section this loclist is relative to */
1308 dw_loc_descr_ref expr;
1309 var_loc_view vbegin, vend;
1310 hashval_t hash;
1311 /* True if all addresses in this and subsequent lists are known to be
1312 resolved. */
1313 bool resolved_addr;
1314 /* True if this list has been replaced by dw_loc_next. */
1315 bool replaced;
1316 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1317 section. */
1318 unsigned char emitted : 1;
1319 /* True if hash field is index rather than hash value. */
1320 unsigned char num_assigned : 1;
1321 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1322 unsigned char offset_emitted : 1;
1323 /* True if note_variable_value_in_expr has been called on it. */
1324 unsigned char noted_variable_value : 1;
1325 /* True if the range should be emitted even if begin and end
1326 are the same. */
1327 bool force;
1328 } dw_loc_list_node;
1329
1330 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1331 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1332
1333 /* Convert a DWARF stack opcode into its string name. */
1334
1335 static const char *
1336 dwarf_stack_op_name (unsigned int op)
1337 {
1338 const char *name = get_DW_OP_name (op);
1339
1340 if (name != NULL)
1341 return name;
1342
1343 return "OP_<unknown>";
1344 }
1345
1346 /* Return TRUE iff we're to output location view lists as a separate
1347 attribute next to the location lists, as an extension compatible
1348 with DWARF 2 and above. */
1349
1350 static inline bool
1351 dwarf2out_locviews_in_attribute ()
1352 {
1353 return debug_variable_location_views == 1;
1354 }
1355
1356 /* Return TRUE iff we're to output location view lists as part of the
1357 location lists, as proposed for standardization after DWARF 5. */
1358
1359 static inline bool
1360 dwarf2out_locviews_in_loclist ()
1361 {
1362 #ifndef DW_LLE_view_pair
1363 return false;
1364 #else
1365 return debug_variable_location_views == -1;
1366 #endif
1367 }
1368
1369 /* Return a pointer to a newly allocated location description. Location
1370 descriptions are simple expression terms that can be strung
1371 together to form more complicated location (address) descriptions. */
1372
1373 static inline dw_loc_descr_ref
1374 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1375 unsigned HOST_WIDE_INT oprnd2)
1376 {
1377 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1378
1379 descr->dw_loc_opc = op;
1380 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1381 descr->dw_loc_oprnd1.val_entry = NULL;
1382 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1383 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd2.val_entry = NULL;
1385 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1386
1387 return descr;
1388 }
1389
1390 /* Add a location description term to a location description expression. */
1391
1392 static inline void
1393 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1394 {
1395 dw_loc_descr_ref *d;
1396
1397 /* Find the end of the chain. */
1398 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1399 ;
1400
1401 *d = descr;
1402 }
1403
1404 /* Compare two location operands for exact equality. */
1405
1406 static bool
1407 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1408 {
1409 if (a->val_class != b->val_class)
1410 return false;
1411 switch (a->val_class)
1412 {
1413 case dw_val_class_none:
1414 return true;
1415 case dw_val_class_addr:
1416 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1417
1418 case dw_val_class_offset:
1419 case dw_val_class_unsigned_const:
1420 case dw_val_class_const:
1421 case dw_val_class_unsigned_const_implicit:
1422 case dw_val_class_const_implicit:
1423 case dw_val_class_range_list:
1424 /* These are all HOST_WIDE_INT, signed or unsigned. */
1425 return a->v.val_unsigned == b->v.val_unsigned;
1426
1427 case dw_val_class_loc:
1428 return a->v.val_loc == b->v.val_loc;
1429 case dw_val_class_loc_list:
1430 return a->v.val_loc_list == b->v.val_loc_list;
1431 case dw_val_class_view_list:
1432 return a->v.val_view_list == b->v.val_view_list;
1433 case dw_val_class_die_ref:
1434 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1435 case dw_val_class_fde_ref:
1436 return a->v.val_fde_index == b->v.val_fde_index;
1437 case dw_val_class_lbl_id:
1438 case dw_val_class_lineptr:
1439 case dw_val_class_macptr:
1440 case dw_val_class_loclistsptr:
1441 case dw_val_class_high_pc:
1442 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1443 case dw_val_class_str:
1444 return a->v.val_str == b->v.val_str;
1445 case dw_val_class_flag:
1446 return a->v.val_flag == b->v.val_flag;
1447 case dw_val_class_file:
1448 case dw_val_class_file_implicit:
1449 return a->v.val_file == b->v.val_file;
1450 case dw_val_class_decl_ref:
1451 return a->v.val_decl_ref == b->v.val_decl_ref;
1452
1453 case dw_val_class_const_double:
1454 return (a->v.val_double.high == b->v.val_double.high
1455 && a->v.val_double.low == b->v.val_double.low);
1456
1457 case dw_val_class_wide_int:
1458 return *a->v.val_wide == *b->v.val_wide;
1459
1460 case dw_val_class_vec:
1461 {
1462 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1463 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1464
1465 return (a_len == b_len
1466 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1467 }
1468
1469 case dw_val_class_data8:
1470 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1471
1472 case dw_val_class_vms_delta:
1473 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
 1474 	      && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1475
1476 case dw_val_class_discr_value:
1477 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1478 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1479 case dw_val_class_discr_list:
1480 /* It makes no sense comparing two discriminant value lists. */
1481 return false;
1482 }
1483 gcc_unreachable ();
1484 }
1485
1486 /* Compare two location atoms for exact equality. */
1487
1488 static bool
1489 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1490 {
1491 if (a->dw_loc_opc != b->dw_loc_opc)
1492 return false;
1493
1494 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1495 address size, but since we always allocate cleared storage it
1496 should be zero for other types of locations. */
1497 if (a->dtprel != b->dtprel)
1498 return false;
1499
1500 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1501 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1502 }
1503
1504 /* Compare two complete location expressions for exact equality. */
1505
1506 bool
1507 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1508 {
1509 while (1)
1510 {
1511 if (a == b)
1512 return true;
1513 if (a == NULL || b == NULL)
1514 return false;
1515 if (!loc_descr_equal_p_1 (a, b))
1516 return false;
1517
1518 a = a->dw_loc_next;
1519 b = b->dw_loc_next;
1520 }
1521 }
1522
1523
1524 /* Add a constant POLY_OFFSET to a location expression. */
1525
1526 static void
1527 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1528 {
1529 dw_loc_descr_ref loc;
1530 HOST_WIDE_INT *p;
1531
1532 gcc_assert (*list_head != NULL);
1533
1534 if (known_eq (poly_offset, 0))
1535 return;
1536
1537 /* Find the end of the chain. */
1538 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1539 ;
1540
1541 HOST_WIDE_INT offset;
1542 if (!poly_offset.is_constant (&offset))
1543 {
1544 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1545 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1546 return;
1547 }
1548
1549 p = NULL;
1550 if (loc->dw_loc_opc == DW_OP_fbreg
1551 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1552 p = &loc->dw_loc_oprnd1.v.val_int;
1553 else if (loc->dw_loc_opc == DW_OP_bregx)
1554 p = &loc->dw_loc_oprnd2.v.val_int;
1555
1556 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
 1557     offset.  Don't optimize if a signed integer overflow would happen.  */
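  /* For instance (illustrative numbers), a trailing DW_OP_fbreg -16
     followed by a loc_descr_plus_const of 24 simply becomes
     DW_OP_fbreg 8.  */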
1558 if (p != NULL
1559 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1560 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1561 *p += offset;
1562
1563 else if (offset > 0)
1564 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1565
1566 else
1567 {
1568 loc->dw_loc_next
1569 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1570 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1571 }
1572 }
1573
1574 /* Return a pointer to a newly allocated location description for
1575 REG and OFFSET. */
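/* E.g. register 3 with constant offset 8 becomes DW_OP_breg3 8, while
   register 40 with the same offset becomes DW_OP_bregx 40, 8
   (illustrative values).  */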
1576
1577 static inline dw_loc_descr_ref
1578 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1579 {
1580 HOST_WIDE_INT const_offset;
1581 if (offset.is_constant (&const_offset))
1582 {
1583 if (reg <= 31)
1584 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1585 const_offset, 0);
1586 else
1587 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1588 }
1589 else
1590 {
1591 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1592 loc_descr_plus_const (&ret, offset);
1593 return ret;
1594 }
1595 }
1596
1597 /* Add a constant OFFSET to a location list. */
1598
1599 static void
1600 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1601 {
1602 dw_loc_list_ref d;
1603 for (d = list_head; d != NULL; d = d->dw_loc_next)
1604 loc_descr_plus_const (&d->expr, offset);
1605 }
1606
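/* The size of a reference to another DIE, e.g. the operand of
   DW_OP_call_ref: address-sized in DWARF 2, offset-sized in DWARF 3
   and later.  */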
1607 #define DWARF_REF_SIZE \
1608 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1609
 1610 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1611 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1612 DW_FORM_data16 with 128 bits. */
1613 #define DWARF_LARGEST_DATA_FORM_BITS \
1614 (dwarf_version >= 5 ? 128 : 64)
1615
 1616 /* Utility inline function for constructing ops that were GNU extensions
1617 before DWARF 5. */
1618 static inline enum dwarf_location_atom
1619 dwarf_OP (enum dwarf_location_atom op)
1620 {
1621 switch (op)
1622 {
1623 case DW_OP_implicit_pointer:
1624 if (dwarf_version < 5)
1625 return DW_OP_GNU_implicit_pointer;
1626 break;
1627
1628 case DW_OP_entry_value:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_entry_value;
1631 break;
1632
1633 case DW_OP_const_type:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_const_type;
1636 break;
1637
1638 case DW_OP_regval_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_regval_type;
1641 break;
1642
1643 case DW_OP_deref_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_deref_type;
1646 break;
1647
1648 case DW_OP_convert:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_convert;
1651 break;
1652
1653 case DW_OP_reinterpret:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_reinterpret;
1656 break;
1657
1658 default:
1659 break;
1660 }
1661 return op;
1662 }
1663
1664 /* Similarly for attributes. */
1665 static inline enum dwarf_attribute
1666 dwarf_AT (enum dwarf_attribute at)
1667 {
1668 switch (at)
1669 {
1670 case DW_AT_call_return_pc:
1671 if (dwarf_version < 5)
1672 return DW_AT_low_pc;
1673 break;
1674
1675 case DW_AT_call_tail_call:
1676 if (dwarf_version < 5)
1677 return DW_AT_GNU_tail_call;
1678 break;
1679
1680 case DW_AT_call_origin:
1681 if (dwarf_version < 5)
1682 return DW_AT_abstract_origin;
1683 break;
1684
1685 case DW_AT_call_target:
1686 if (dwarf_version < 5)
1687 return DW_AT_GNU_call_site_target;
1688 break;
1689
1690 case DW_AT_call_target_clobbered:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_call_site_target_clobbered;
1693 break;
1694
1695 case DW_AT_call_parameter:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_value:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_value;
1703 break;
1704
1705 case DW_AT_call_data_value:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_data_value;
1708 break;
1709
1710 case DW_AT_call_all_calls:
1711 if (dwarf_version < 5)
1712 return DW_AT_GNU_all_call_sites;
1713 break;
1714
1715 case DW_AT_call_all_tail_calls:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_all_tail_call_sites;
1718 break;
1719
1720 case DW_AT_dwo_name:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_dwo_name;
1723 break;
1724
1725 default:
1726 break;
1727 }
1728 return at;
1729 }
1730
1731 /* And similarly for tags. */
1732 static inline enum dwarf_tag
1733 dwarf_TAG (enum dwarf_tag tag)
1734 {
1735 switch (tag)
1736 {
1737 case DW_TAG_call_site:
1738 if (dwarf_version < 5)
1739 return DW_TAG_GNU_call_site;
1740 break;
1741
1742 case DW_TAG_call_site_parameter:
1743 if (dwarf_version < 5)
1744 return DW_TAG_GNU_call_site_parameter;
1745 break;
1746
1747 default:
1748 break;
1749 }
1750 return tag;
1751 }
1752
1753 static unsigned long int get_base_type_offset (dw_die_ref);
1754
1755 /* Return the size of a location descriptor. */
1756
1757 static unsigned long
1758 size_of_loc_descr (dw_loc_descr_ref loc)
1759 {
1760 unsigned long size = 1;
1761
1762 switch (loc->dw_loc_opc)
1763 {
1764 case DW_OP_addr:
1765 size += DWARF2_ADDR_SIZE;
1766 break;
1767 case DW_OP_GNU_addr_index:
1768 case DW_OP_GNU_const_index:
1769 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1770 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1771 break;
1772 case DW_OP_const1u:
1773 case DW_OP_const1s:
1774 size += 1;
1775 break;
1776 case DW_OP_const2u:
1777 case DW_OP_const2s:
1778 size += 2;
1779 break;
1780 case DW_OP_const4u:
1781 case DW_OP_const4s:
1782 size += 4;
1783 break;
1784 case DW_OP_const8u:
1785 case DW_OP_const8s:
1786 size += 8;
1787 break;
1788 case DW_OP_constu:
1789 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1790 break;
1791 case DW_OP_consts:
1792 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1793 break;
1794 case DW_OP_pick:
1795 size += 1;
1796 break;
1797 case DW_OP_plus_uconst:
1798 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1799 break;
1800 case DW_OP_skip:
1801 case DW_OP_bra:
1802 size += 2;
1803 break;
1804 case DW_OP_breg0:
1805 case DW_OP_breg1:
1806 case DW_OP_breg2:
1807 case DW_OP_breg3:
1808 case DW_OP_breg4:
1809 case DW_OP_breg5:
1810 case DW_OP_breg6:
1811 case DW_OP_breg7:
1812 case DW_OP_breg8:
1813 case DW_OP_breg9:
1814 case DW_OP_breg10:
1815 case DW_OP_breg11:
1816 case DW_OP_breg12:
1817 case DW_OP_breg13:
1818 case DW_OP_breg14:
1819 case DW_OP_breg15:
1820 case DW_OP_breg16:
1821 case DW_OP_breg17:
1822 case DW_OP_breg18:
1823 case DW_OP_breg19:
1824 case DW_OP_breg20:
1825 case DW_OP_breg21:
1826 case DW_OP_breg22:
1827 case DW_OP_breg23:
1828 case DW_OP_breg24:
1829 case DW_OP_breg25:
1830 case DW_OP_breg26:
1831 case DW_OP_breg27:
1832 case DW_OP_breg28:
1833 case DW_OP_breg29:
1834 case DW_OP_breg30:
1835 case DW_OP_breg31:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_regx:
1839 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1840 break;
1841 case DW_OP_fbreg:
1842 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1843 break;
1844 case DW_OP_bregx:
1845 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1846 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1847 break;
1848 case DW_OP_piece:
1849 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1850 break;
1851 case DW_OP_bit_piece:
1852 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1853 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1854 break;
1855 case DW_OP_deref_size:
1856 case DW_OP_xderef_size:
1857 size += 1;
1858 break;
1859 case DW_OP_call2:
1860 size += 2;
1861 break;
1862 case DW_OP_call4:
1863 size += 4;
1864 break;
1865 case DW_OP_call_ref:
1866 case DW_OP_GNU_variable_value:
1867 size += DWARF_REF_SIZE;
1868 break;
1869 case DW_OP_implicit_value:
1870 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1871 + loc->dw_loc_oprnd1.v.val_unsigned;
1872 break;
1873 case DW_OP_implicit_pointer:
1874 case DW_OP_GNU_implicit_pointer:
1875 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1876 break;
1877 case DW_OP_entry_value:
1878 case DW_OP_GNU_entry_value:
1879 {
1880 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1881 size += size_of_uleb128 (op_size) + op_size;
1882 break;
1883 }
1884 case DW_OP_const_type:
1885 case DW_OP_GNU_const_type:
1886 {
1887 unsigned long o
1888 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1889 size += size_of_uleb128 (o) + 1;
1890 switch (loc->dw_loc_oprnd2.val_class)
1891 {
1892 case dw_val_class_vec:
1893 size += loc->dw_loc_oprnd2.v.val_vec.length
1894 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1895 break;
1896 case dw_val_class_const:
1897 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1898 break;
1899 case dw_val_class_const_double:
1900 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1901 break;
1902 case dw_val_class_wide_int:
1903 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1904 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1905 break;
1906 default:
1907 gcc_unreachable ();
1908 }
1909 break;
1910 }
1911 case DW_OP_regval_type:
1912 case DW_OP_GNU_regval_type:
1913 {
1914 unsigned long o
1915 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1916 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1917 + size_of_uleb128 (o);
1918 }
1919 break;
1920 case DW_OP_deref_type:
1921 case DW_OP_GNU_deref_type:
1922 {
1923 unsigned long o
1924 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1925 size += 1 + size_of_uleb128 (o);
1926 }
1927 break;
1928 case DW_OP_convert:
1929 case DW_OP_reinterpret:
1930 case DW_OP_GNU_convert:
1931 case DW_OP_GNU_reinterpret:
1932 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1933 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1934 else
1935 {
1936 unsigned long o
1937 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1938 size += size_of_uleb128 (o);
1939 }
1940 break;
1941 case DW_OP_GNU_parameter_ref:
1942 size += 4;
1943 break;
1944 default:
1945 break;
1946 }
1947
1948 return size;
1949 }
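
/* Worked example of the sizing above, assuming standard LEB128 encoding:
   DW_OP_plus_uconst with operand 300 takes 1 opcode byte plus
   size_of_uleb128 (300) == 2 bytes (300 encodes as 0xac 0x02), i.e. 3
   bytes in total; DW_OP_breg6 with offset -40 takes 1 opcode byte plus
   size_of_sleb128 (-40) == 1 byte (0x58), i.e. 2 bytes in total.  */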
1950
1951 /* Return the size of a series of location descriptors. */
1952
1953 unsigned long
1954 size_of_locs (dw_loc_descr_ref loc)
1955 {
1956 dw_loc_descr_ref l;
1957 unsigned long size;
1958
1959 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1960 field, to avoid writing to a PCH file. */
1961 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1962 {
1963 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1964 break;
1965 size += size_of_loc_descr (l);
1966 }
1967 if (! l)
1968 return size;
1969
1970 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1971 {
1972 l->dw_loc_addr = size;
1973 size += size_of_loc_descr (l);
1974 }
1975
1976 return size;
1977 }
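
/* A small sketch of how the dw_loc_addr offsets filled in above are
   consumed: for DW_OP_skip/DW_OP_bra the 2-byte operand emitted later is
   target->dw_loc_addr - (this->dw_loc_addr + 3), since the branch itself
   is 1 opcode byte plus a 2-byte operand.  E.g. a DW_OP_bra at offset 0
   whose target starts at offset 7 gets the operand 4.  */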
1978
1979 /* Return the size of the value in a DW_AT_discr_value attribute. */
1980
1981 static int
1982 size_of_discr_value (dw_discr_value *discr_value)
1983 {
1984 if (discr_value->pos)
1985 return size_of_uleb128 (discr_value->v.uval);
1986 else
1987 return size_of_sleb128 (discr_value->v.sval);
1988 }
1989
1990 /* Return the size of the value in a DW_AT_discr_list attribute. */
1991
1992 static int
1993 size_of_discr_list (dw_discr_list_ref discr_list)
1994 {
1995 int size = 0;
1996
1997 for (dw_discr_list_ref list = discr_list;
1998 list != NULL;
1999 list = list->dw_discr_next)
2000 {
2001 /* One byte for the discriminant value descriptor, and then one or two
2002 LEB128 numbers, depending on whether it's a single case label or a
2003 range label. */
2004 size += 1;
2005 size += size_of_discr_value (&list->dw_discr_lower_bound);
2006 if (list->dw_discr_range != 0)
2007 size += size_of_discr_value (&list->dw_discr_upper_bound);
2008 }
2009 return size;
2010 }
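
/* Worked example: a DW_AT_discr_list describing the single label 3 and
   the range 10..20 would, with unsigned (ULEB128) discriminants, take
   1 + 1 bytes for the label entry and 1 + 1 + 1 bytes for the range
   entry, i.e. 5 bytes in total (one descriptor byte plus one or two
   LEB128 numbers per entry, as computed above).  */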
2011
2012 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2013 static void get_ref_die_offset_label (char *, dw_die_ref);
2014 static unsigned long int get_ref_die_offset (dw_die_ref);
2015
2016 /* Output location description stack opcode's operands (if any).
2017 The for_eh_or_skip parameter controls whether register numbers are
2018 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2019 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2020 info). This should be suppressed for the cases that have not been converted
2021 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2022
2023 static void
2024 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2025 {
2026 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2027 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2028
2029 switch (loc->dw_loc_opc)
2030 {
2031 #ifdef DWARF2_DEBUGGING_INFO
2032 case DW_OP_const2u:
2033 case DW_OP_const2s:
2034 dw2_asm_output_data (2, val1->v.val_int, NULL);
2035 break;
2036 case DW_OP_const4u:
2037 if (loc->dtprel)
2038 {
2039 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2040 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2041 val1->v.val_addr);
2042 fputc ('\n', asm_out_file);
2043 break;
2044 }
2045 /* FALLTHRU */
2046 case DW_OP_const4s:
2047 dw2_asm_output_data (4, val1->v.val_int, NULL);
2048 break;
2049 case DW_OP_const8u:
2050 if (loc->dtprel)
2051 {
2052 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2053 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2054 val1->v.val_addr);
2055 fputc ('\n', asm_out_file);
2056 break;
2057 }
2058 /* FALLTHRU */
2059 case DW_OP_const8s:
2060 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2061 dw2_asm_output_data (8, val1->v.val_int, NULL);
2062 break;
2063 case DW_OP_skip:
2064 case DW_OP_bra:
2065 {
2066 int offset;
2067
2068 gcc_assert (val1->val_class == dw_val_class_loc);
2069 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2070
2071 dw2_asm_output_data (2, offset, NULL);
2072 }
2073 break;
2074 case DW_OP_implicit_value:
2075 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2076 switch (val2->val_class)
2077 {
2078 case dw_val_class_const:
2079 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2080 break;
2081 case dw_val_class_vec:
2082 {
2083 unsigned int elt_size = val2->v.val_vec.elt_size;
2084 unsigned int len = val2->v.val_vec.length;
2085 unsigned int i;
2086 unsigned char *p;
2087
2088 if (elt_size > sizeof (HOST_WIDE_INT))
2089 {
2090 elt_size /= 2;
2091 len *= 2;
2092 }
2093 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2094 i < len;
2095 i++, p += elt_size)
2096 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2097 "fp or vector constant word %u", i);
2098 }
2099 break;
2100 case dw_val_class_const_double:
2101 {
2102 unsigned HOST_WIDE_INT first, second;
2103
2104 if (WORDS_BIG_ENDIAN)
2105 {
2106 first = val2->v.val_double.high;
2107 second = val2->v.val_double.low;
2108 }
2109 else
2110 {
2111 first = val2->v.val_double.low;
2112 second = val2->v.val_double.high;
2113 }
2114 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2115 first, NULL);
2116 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2117 second, NULL);
2118 }
2119 break;
2120 case dw_val_class_wide_int:
2121 {
2122 int i;
2123 int len = get_full_len (*val2->v.val_wide);
2124 if (WORDS_BIG_ENDIAN)
2125 for (i = len - 1; i >= 0; --i)
2126 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2127 val2->v.val_wide->elt (i), NULL);
2128 else
2129 for (i = 0; i < len; ++i)
2130 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2131 val2->v.val_wide->elt (i), NULL);
2132 }
2133 break;
2134 case dw_val_class_addr:
2135 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2136 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2137 break;
2138 default:
2139 gcc_unreachable ();
2140 }
2141 break;
2142 #else
2143 case DW_OP_const2u:
2144 case DW_OP_const2s:
2145 case DW_OP_const4u:
2146 case DW_OP_const4s:
2147 case DW_OP_const8u:
2148 case DW_OP_const8s:
2149 case DW_OP_skip:
2150 case DW_OP_bra:
2151 case DW_OP_implicit_value:
2152 /* We currently don't make any attempt to make sure these are
2153 aligned properly like we do for the main unwind info, so
2154 don't support emitting things larger than a byte if we're
2155 only doing unwinding. */
2156 gcc_unreachable ();
2157 #endif
2158 case DW_OP_const1u:
2159 case DW_OP_const1s:
2160 dw2_asm_output_data (1, val1->v.val_int, NULL);
2161 break;
2162 case DW_OP_constu:
2163 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2164 break;
2165 case DW_OP_consts:
2166 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2167 break;
2168 case DW_OP_pick:
2169 dw2_asm_output_data (1, val1->v.val_int, NULL);
2170 break;
2171 case DW_OP_plus_uconst:
2172 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2173 break;
2174 case DW_OP_breg0:
2175 case DW_OP_breg1:
2176 case DW_OP_breg2:
2177 case DW_OP_breg3:
2178 case DW_OP_breg4:
2179 case DW_OP_breg5:
2180 case DW_OP_breg6:
2181 case DW_OP_breg7:
2182 case DW_OP_breg8:
2183 case DW_OP_breg9:
2184 case DW_OP_breg10:
2185 case DW_OP_breg11:
2186 case DW_OP_breg12:
2187 case DW_OP_breg13:
2188 case DW_OP_breg14:
2189 case DW_OP_breg15:
2190 case DW_OP_breg16:
2191 case DW_OP_breg17:
2192 case DW_OP_breg18:
2193 case DW_OP_breg19:
2194 case DW_OP_breg20:
2195 case DW_OP_breg21:
2196 case DW_OP_breg22:
2197 case DW_OP_breg23:
2198 case DW_OP_breg24:
2199 case DW_OP_breg25:
2200 case DW_OP_breg26:
2201 case DW_OP_breg27:
2202 case DW_OP_breg28:
2203 case DW_OP_breg29:
2204 case DW_OP_breg30:
2205 case DW_OP_breg31:
2206 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2207 break;
2208 case DW_OP_regx:
2209 {
2210 unsigned r = val1->v.val_unsigned;
2211 if (for_eh_or_skip >= 0)
2212 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2213 gcc_assert (size_of_uleb128 (r)
2214 == size_of_uleb128 (val1->v.val_unsigned));
2215 dw2_asm_output_data_uleb128 (r, NULL);
2216 }
2217 break;
2218 case DW_OP_fbreg:
2219 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2220 break;
2221 case DW_OP_bregx:
2222 {
2223 unsigned r = val1->v.val_unsigned;
2224 if (for_eh_or_skip >= 0)
2225 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2226 gcc_assert (size_of_uleb128 (r)
2227 == size_of_uleb128 (val1->v.val_unsigned));
2228 dw2_asm_output_data_uleb128 (r, NULL);
2229 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2230 }
2231 break;
2232 case DW_OP_piece:
2233 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2234 break;
2235 case DW_OP_bit_piece:
2236 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2237 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2238 break;
2239 case DW_OP_deref_size:
2240 case DW_OP_xderef_size:
2241 dw2_asm_output_data (1, val1->v.val_int, NULL);
2242 break;
2243
2244 case DW_OP_addr:
2245 if (loc->dtprel)
2246 {
2247 if (targetm.asm_out.output_dwarf_dtprel)
2248 {
2249 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2250 DWARF2_ADDR_SIZE,
2251 val1->v.val_addr);
2252 fputc ('\n', asm_out_file);
2253 }
2254 else
2255 gcc_unreachable ();
2256 }
2257 else
2258 {
2259 #ifdef DWARF2_DEBUGGING_INFO
2260 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2261 #else
2262 gcc_unreachable ();
2263 #endif
2264 }
2265 break;
2266
2267 case DW_OP_GNU_addr_index:
2268 case DW_OP_GNU_const_index:
2269 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2270 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2271 "(index into .debug_addr)");
2272 break;
2273
2274 case DW_OP_call2:
2275 case DW_OP_call4:
2276 {
2277 unsigned long die_offset
2278 = get_ref_die_offset (val1->v.val_die_ref.die);
2279 /* Make sure the offset has been computed and that we can encode it as
2280 an operand. */
2281 gcc_assert (die_offset > 0
2282 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2283 ? 0xffff
2284 : 0xffffffff));
2285 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2286 die_offset, NULL);
2287 }
2288 break;
2289
2290 case DW_OP_call_ref:
2291 case DW_OP_GNU_variable_value:
2292 {
2293 char label[MAX_ARTIFICIAL_LABEL_BYTES
2294 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2295 gcc_assert (val1->val_class == dw_val_class_die_ref);
2296 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2297 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2298 }
2299 break;
2300
2301 case DW_OP_implicit_pointer:
2302 case DW_OP_GNU_implicit_pointer:
2303 {
2304 char label[MAX_ARTIFICIAL_LABEL_BYTES
2305 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2306 gcc_assert (val1->val_class == dw_val_class_die_ref);
2307 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2308 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2309 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2310 }
2311 break;
2312
2313 case DW_OP_entry_value:
2314 case DW_OP_GNU_entry_value:
2315 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2316 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2317 break;
2318
2319 case DW_OP_const_type:
2320 case DW_OP_GNU_const_type:
2321 {
2322 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2323 gcc_assert (o);
2324 dw2_asm_output_data_uleb128 (o, NULL);
2325 switch (val2->val_class)
2326 {
2327 case dw_val_class_const:
2328 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2329 dw2_asm_output_data (1, l, NULL);
2330 dw2_asm_output_data (l, val2->v.val_int, NULL);
2331 break;
2332 case dw_val_class_vec:
2333 {
2334 unsigned int elt_size = val2->v.val_vec.elt_size;
2335 unsigned int len = val2->v.val_vec.length;
2336 unsigned int i;
2337 unsigned char *p;
2338
2339 l = len * elt_size;
2340 dw2_asm_output_data (1, l, NULL);
2341 if (elt_size > sizeof (HOST_WIDE_INT))
2342 {
2343 elt_size /= 2;
2344 len *= 2;
2345 }
2346 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2347 i < len;
2348 i++, p += elt_size)
2349 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2350 "fp or vector constant word %u", i);
2351 }
2352 break;
2353 case dw_val_class_const_double:
2354 {
2355 unsigned HOST_WIDE_INT first, second;
2356 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2357
2358 dw2_asm_output_data (1, 2 * l, NULL);
2359 if (WORDS_BIG_ENDIAN)
2360 {
2361 first = val2->v.val_double.high;
2362 second = val2->v.val_double.low;
2363 }
2364 else
2365 {
2366 first = val2->v.val_double.low;
2367 second = val2->v.val_double.high;
2368 }
2369 dw2_asm_output_data (l, first, NULL);
2370 dw2_asm_output_data (l, second, NULL);
2371 }
2372 break;
2373 case dw_val_class_wide_int:
2374 {
2375 int i;
2376 int len = get_full_len (*val2->v.val_wide);
2377 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2378
2379 dw2_asm_output_data (1, len * l, NULL);
2380 if (WORDS_BIG_ENDIAN)
2381 for (i = len - 1; i >= 0; --i)
2382 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2383 else
2384 for (i = 0; i < len; ++i)
2385 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2386 }
2387 break;
2388 default:
2389 gcc_unreachable ();
2390 }
2391 }
2392 break;
2393 case DW_OP_regval_type:
2394 case DW_OP_GNU_regval_type:
2395 {
2396 unsigned r = val1->v.val_unsigned;
2397 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2398 gcc_assert (o);
2399 if (for_eh_or_skip >= 0)
2400 {
2401 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2402 gcc_assert (size_of_uleb128 (r)
2403 == size_of_uleb128 (val1->v.val_unsigned));
2404 }
2405 dw2_asm_output_data_uleb128 (r, NULL);
2406 dw2_asm_output_data_uleb128 (o, NULL);
2407 }
2408 break;
2409 case DW_OP_deref_type:
2410 case DW_OP_GNU_deref_type:
2411 {
2412 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2413 gcc_assert (o);
2414 dw2_asm_output_data (1, val1->v.val_int, NULL);
2415 dw2_asm_output_data_uleb128 (o, NULL);
2416 }
2417 break;
2418 case DW_OP_convert:
2419 case DW_OP_reinterpret:
2420 case DW_OP_GNU_convert:
2421 case DW_OP_GNU_reinterpret:
2422 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2423 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2424 else
2425 {
2426 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2427 gcc_assert (o);
2428 dw2_asm_output_data_uleb128 (o, NULL);
2429 }
2430 break;
2431
2432 case DW_OP_GNU_parameter_ref:
2433 {
2434 unsigned long o;
2435 gcc_assert (val1->val_class == dw_val_class_die_ref);
2436 o = get_ref_die_offset (val1->v.val_die_ref.die);
2437 dw2_asm_output_data (4, o, NULL);
2438 }
2439 break;
2440
2441 default:
2442 /* Other codes have no operands. */
2443 break;
2444 }
2445 }
2446
2447 /* Output a sequence of location operations.
2448 The for_eh_or_skip parameter controls whether register numbers are
2449 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2450 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2451 info). This should be suppressed for the cases that have not been converted
2452 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2453
2454 void
2455 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2456 {
2457 for (; loc != NULL; loc = loc->dw_loc_next)
2458 {
2459 enum dwarf_location_atom opc = loc->dw_loc_opc;
2460 /* Output the opcode. */
2461 if (for_eh_or_skip >= 0
2462 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2463 {
2464 unsigned r = (opc - DW_OP_breg0);
2465 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2466 gcc_assert (r <= 31);
2467 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2468 }
2469 else if (for_eh_or_skip >= 0
2470 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2471 {
2472 unsigned r = (opc - DW_OP_reg0);
2473 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2474 gcc_assert (r <= 31);
2475 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2476 }
2477
2478 dw2_asm_output_data (1, opc,
2479 "%s", dwarf_stack_op_name (opc));
2480
2481 /* Output the operand(s) (if any). */
2482 output_loc_operands (loc, for_eh_or_skip);
2483 }
2484 }
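
/* For illustration, assuming the standard DWARF opcode values, a simple
   location expression such as DW_OP_fbreg -16 comes out of this routine
   as the two bytes 0x91 0x70 (opcode followed by the SLEB128-encoded
   offset), and DW_OP_plus_uconst 300 as 0x23 0xac 0x02.  */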
2485
2486 /* Output location description stack opcode's operands (if any).
2487 The output is single bytes on a line, suitable for .cfi_escape. */
2488
2489 static void
2490 output_loc_operands_raw (dw_loc_descr_ref loc)
2491 {
2492 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2493 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2494
2495 switch (loc->dw_loc_opc)
2496 {
2497 case DW_OP_addr:
2498 case DW_OP_GNU_addr_index:
2499 case DW_OP_GNU_const_index:
2500 case DW_OP_implicit_value:
2501 /* We cannot output addresses in .cfi_escape, only bytes. */
2502 gcc_unreachable ();
2503
2504 case DW_OP_const1u:
2505 case DW_OP_const1s:
2506 case DW_OP_pick:
2507 case DW_OP_deref_size:
2508 case DW_OP_xderef_size:
2509 fputc (',', asm_out_file);
2510 dw2_asm_output_data_raw (1, val1->v.val_int);
2511 break;
2512
2513 case DW_OP_const2u:
2514 case DW_OP_const2s:
2515 fputc (',', asm_out_file);
2516 dw2_asm_output_data_raw (2, val1->v.val_int);
2517 break;
2518
2519 case DW_OP_const4u:
2520 case DW_OP_const4s:
2521 fputc (',', asm_out_file);
2522 dw2_asm_output_data_raw (4, val1->v.val_int);
2523 break;
2524
2525 case DW_OP_const8u:
2526 case DW_OP_const8s:
2527 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2528 fputc (',', asm_out_file);
2529 dw2_asm_output_data_raw (8, val1->v.val_int);
2530 break;
2531
2532 case DW_OP_skip:
2533 case DW_OP_bra:
2534 {
2535 int offset;
2536
2537 gcc_assert (val1->val_class == dw_val_class_loc);
2538 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2539
2540 fputc (',', asm_out_file);
2541 dw2_asm_output_data_raw (2, offset);
2542 }
2543 break;
2544
2545 case DW_OP_regx:
2546 {
2547 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2548 gcc_assert (size_of_uleb128 (r)
2549 == size_of_uleb128 (val1->v.val_unsigned));
2550 fputc (',', asm_out_file);
2551 dw2_asm_output_data_uleb128_raw (r);
2552 }
2553 break;
2554
2555 case DW_OP_constu:
2556 case DW_OP_plus_uconst:
2557 case DW_OP_piece:
2558 fputc (',', asm_out_file);
2559 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2560 break;
2561
2562 case DW_OP_bit_piece:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2565 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2566 break;
2567
2568 case DW_OP_consts:
2569 case DW_OP_breg0:
2570 case DW_OP_breg1:
2571 case DW_OP_breg2:
2572 case DW_OP_breg3:
2573 case DW_OP_breg4:
2574 case DW_OP_breg5:
2575 case DW_OP_breg6:
2576 case DW_OP_breg7:
2577 case DW_OP_breg8:
2578 case DW_OP_breg9:
2579 case DW_OP_breg10:
2580 case DW_OP_breg11:
2581 case DW_OP_breg12:
2582 case DW_OP_breg13:
2583 case DW_OP_breg14:
2584 case DW_OP_breg15:
2585 case DW_OP_breg16:
2586 case DW_OP_breg17:
2587 case DW_OP_breg18:
2588 case DW_OP_breg19:
2589 case DW_OP_breg20:
2590 case DW_OP_breg21:
2591 case DW_OP_breg22:
2592 case DW_OP_breg23:
2593 case DW_OP_breg24:
2594 case DW_OP_breg25:
2595 case DW_OP_breg26:
2596 case DW_OP_breg27:
2597 case DW_OP_breg28:
2598 case DW_OP_breg29:
2599 case DW_OP_breg30:
2600 case DW_OP_breg31:
2601 case DW_OP_fbreg:
2602 fputc (',', asm_out_file);
2603 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2604 break;
2605
2606 case DW_OP_bregx:
2607 {
2608 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2609 gcc_assert (size_of_uleb128 (r)
2610 == size_of_uleb128 (val1->v.val_unsigned));
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (r);
2613 fputc (',', asm_out_file);
2614 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2615 }
2616 break;
2617
2618 case DW_OP_implicit_pointer:
2619 case DW_OP_entry_value:
2620 case DW_OP_const_type:
2621 case DW_OP_regval_type:
2622 case DW_OP_deref_type:
2623 case DW_OP_convert:
2624 case DW_OP_reinterpret:
2625 case DW_OP_GNU_implicit_pointer:
2626 case DW_OP_GNU_entry_value:
2627 case DW_OP_GNU_const_type:
2628 case DW_OP_GNU_regval_type:
2629 case DW_OP_GNU_deref_type:
2630 case DW_OP_GNU_convert:
2631 case DW_OP_GNU_reinterpret:
2632 case DW_OP_GNU_parameter_ref:
2633 gcc_unreachable ();
2634 break;
2635
2636 default:
2637 /* Other codes have no operands. */
2638 break;
2639 }
2640 }
2641
2642 void
2643 output_loc_sequence_raw (dw_loc_descr_ref loc)
2644 {
2645 while (1)
2646 {
2647 enum dwarf_location_atom opc = loc->dw_loc_opc;
2648 /* Output the opcode. */
2649 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2650 {
2651 unsigned r = (opc - DW_OP_breg0);
2652 r = DWARF2_FRAME_REG_OUT (r, 1);
2653 gcc_assert (r <= 31);
2654 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2655 }
2656 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2657 {
2658 unsigned r = (opc - DW_OP_reg0);
2659 r = DWARF2_FRAME_REG_OUT (r, 1);
2660 gcc_assert (r <= 31);
2661 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2662 }
2663 /* Output the opcode. */
2664 fprintf (asm_out_file, "%#x", opc);
2665 output_loc_operands_raw (loc);
2666
2667 if (!loc->dw_loc_next)
2668 break;
2669 loc = loc->dw_loc_next;
2670
2671 fputc (',', asm_out_file);
2672 }
2673 }
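
/* A sketch of the raw form produced above, e.g. for use in a .cfi_escape
   directive: the expression DW_OP_breg6 -8 (assuming no frame register
   remapping) prints as "0x76,0x78" -- the opcode in hex followed by the
   comma-separated operand bytes.  */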
2674
2675 /* This function builds a dwarf location descriptor sequence from a
2676 dw_cfa_location, adding the given OFFSET to the result of the
2677 expression. */
2678
2679 struct dw_loc_descr_node *
2680 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2681 {
2682 struct dw_loc_descr_node *head, *tmp;
2683
2684 offset += cfa->offset;
2685
2686 if (cfa->indirect)
2687 {
2688 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2689 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2690 head->dw_loc_oprnd1.val_entry = NULL;
2691 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2692 add_loc_descr (&head, tmp);
2693 loc_descr_plus_const (&head, offset);
2694 }
2695 else
2696 head = new_reg_loc_descr (cfa->reg, offset);
2697
2698 return head;
2699 }
2700
2701 /* This function builds a dwarf location descriptor sequence for
2702 the address at OFFSET from the CFA when the stack is aligned to
2703 ALIGNMENT bytes. */
2704
2705 struct dw_loc_descr_node *
2706 build_cfa_aligned_loc (dw_cfa_location *cfa,
2707 poly_int64 offset, HOST_WIDE_INT alignment)
2708 {
2709 struct dw_loc_descr_node *head;
2710 unsigned int dwarf_fp
2711 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2712
2713 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2714 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2715 {
2716 head = new_reg_loc_descr (dwarf_fp, 0);
2717 add_loc_descr (&head, int_loc_descriptor (alignment));
2718 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2719 loc_descr_plus_const (&head, offset);
2720 }
2721 else
2722 head = new_reg_loc_descr (dwarf_fp, offset);
2723 return head;
2724 }
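
/* The expression built above, when the CFA is the to-be-realigned hard
   frame pointer, has roughly the shape

     DW_OP_bregN 0; <constant ALIGNMENT>; DW_OP_and; <plus OFFSET>

   i.e. the frame pointer value is masked with the ALIGNMENT constant to
   emulate the runtime stack re-alignment before OFFSET is applied.  */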
2725 \f
2726 /* And now, the support for symbolic debugging information. */
2727
2728 /* .debug_str support. */
2729
2730 static void dwarf2out_init (const char *);
2731 static void dwarf2out_finish (const char *);
2732 static void dwarf2out_early_finish (const char *);
2733 static void dwarf2out_assembly_start (void);
2734 static void dwarf2out_define (unsigned int, const char *);
2735 static void dwarf2out_undef (unsigned int, const char *);
2736 static void dwarf2out_start_source_file (unsigned, const char *);
2737 static void dwarf2out_end_source_file (unsigned);
2738 static void dwarf2out_function_decl (tree);
2739 static void dwarf2out_begin_block (unsigned, unsigned);
2740 static void dwarf2out_end_block (unsigned, unsigned);
2741 static bool dwarf2out_ignore_block (const_tree);
2742 static void dwarf2out_early_global_decl (tree);
2743 static void dwarf2out_late_global_decl (tree);
2744 static void dwarf2out_type_decl (tree, int);
2745 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2746 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2747 dw_die_ref);
2748 static void dwarf2out_abstract_function (tree);
2749 static void dwarf2out_var_location (rtx_insn *);
2750 static void dwarf2out_inline_entry (tree);
2751 static void dwarf2out_size_function (tree);
2752 static void dwarf2out_begin_function (tree);
2753 static void dwarf2out_end_function (unsigned int);
2754 static void dwarf2out_register_main_translation_unit (tree unit);
2755 static void dwarf2out_set_name (tree, tree);
2756 static void dwarf2out_register_external_die (tree decl, const char *sym,
2757 unsigned HOST_WIDE_INT off);
2758 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2759 unsigned HOST_WIDE_INT *off);
2760
2761 /* The debug hooks structure. */
2762
2763 const struct gcc_debug_hooks dwarf2_debug_hooks =
2764 {
2765 dwarf2out_init,
2766 dwarf2out_finish,
2767 dwarf2out_early_finish,
2768 dwarf2out_assembly_start,
2769 dwarf2out_define,
2770 dwarf2out_undef,
2771 dwarf2out_start_source_file,
2772 dwarf2out_end_source_file,
2773 dwarf2out_begin_block,
2774 dwarf2out_end_block,
2775 dwarf2out_ignore_block,
2776 dwarf2out_source_line,
2777 dwarf2out_begin_prologue,
2778 #if VMS_DEBUGGING_INFO
2779 dwarf2out_vms_end_prologue,
2780 dwarf2out_vms_begin_epilogue,
2781 #else
2782 debug_nothing_int_charstar,
2783 debug_nothing_int_charstar,
2784 #endif
2785 dwarf2out_end_epilogue,
2786 dwarf2out_begin_function,
2787 dwarf2out_end_function, /* end_function */
2788 dwarf2out_register_main_translation_unit,
2789 dwarf2out_function_decl, /* function_decl */
2790 dwarf2out_early_global_decl,
2791 dwarf2out_late_global_decl,
2792 dwarf2out_type_decl, /* type_decl */
2793 dwarf2out_imported_module_or_decl,
2794 dwarf2out_die_ref_for_decl,
2795 dwarf2out_register_external_die,
2796 debug_nothing_tree, /* deferred_inline_function */
2797 /* The DWARF 2 backend tries to reduce debugging bloat by not
2798 emitting the abstract description of inline functions until
2799 something tries to reference them. */
2800 dwarf2out_abstract_function, /* outlining_inline_function */
2801 debug_nothing_rtx_code_label, /* label */
2802 debug_nothing_int, /* handle_pch */
2803 dwarf2out_var_location,
2804 dwarf2out_inline_entry, /* inline_entry */
2805 dwarf2out_size_function, /* size_function */
2806 dwarf2out_switch_text_section,
2807 dwarf2out_set_name,
2808 1, /* start_end_main_source_file */
2809 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2810 };
2811
2812 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2813 {
2814 dwarf2out_init,
2815 debug_nothing_charstar,
2816 debug_nothing_charstar,
2817 dwarf2out_assembly_start,
2818 debug_nothing_int_charstar,
2819 debug_nothing_int_charstar,
2820 debug_nothing_int_charstar,
2821 debug_nothing_int,
2822 debug_nothing_int_int, /* begin_block */
2823 debug_nothing_int_int, /* end_block */
2824 debug_true_const_tree, /* ignore_block */
2825 dwarf2out_source_line, /* source_line */
2826 debug_nothing_int_int_charstar, /* begin_prologue */
2827 debug_nothing_int_charstar, /* end_prologue */
2828 debug_nothing_int_charstar, /* begin_epilogue */
2829 debug_nothing_int_charstar, /* end_epilogue */
2830 debug_nothing_tree, /* begin_function */
2831 debug_nothing_int, /* end_function */
2832 debug_nothing_tree, /* register_main_translation_unit */
2833 debug_nothing_tree, /* function_decl */
2834 debug_nothing_tree, /* early_global_decl */
2835 debug_nothing_tree, /* late_global_decl */
2836 debug_nothing_tree_int, /* type_decl */
2837 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2838 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2839 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2840 debug_nothing_tree, /* deferred_inline_function */
2841 debug_nothing_tree, /* outlining_inline_function */
2842 debug_nothing_rtx_code_label, /* label */
2843 debug_nothing_int, /* handle_pch */
2844 debug_nothing_rtx_insn, /* var_location */
2845 debug_nothing_tree, /* inline_entry */
2846 debug_nothing_tree, /* size_function */
2847 debug_nothing_void, /* switch_text_section */
2848 debug_nothing_tree_tree, /* set_name */
2849 0, /* start_end_main_source_file */
2850 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2851 };
2852 \f
2853 /* NOTE: In the comments in this file, many references are made to
2854 "Debugging Information Entries". This term is abbreviated as `DIE'
2855 throughout the remainder of this file. */
2856
2857 /* An internal representation of the DWARF output is built, and then
2858 walked to generate the DWARF debugging info. The walk of the internal
2859 representation is done after the entire program has been compiled.
2860 The types below are used to describe the internal representation. */
2861
2862 /* Whether to put type DIEs into their own section .debug_types instead
2863 of making them part of the .debug_info section. This is only
2864 supported for DWARF 4 or higher, and only if the user hasn't disabled
2865 it via -fno-debug-types-section. It is more efficient to put type
2866 DIEs in separate comdat sections since the linker will then be able
2867 to remove duplicates. But not all tools support .debug_types sections
2868 yet. For DWARF 5 or higher .debug_types no longer exists; type units
2869 are emitted as DW_UT_type units in the .debug_info section. */
2870
2871 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2872
2873 /* Various DIE's use offsets relative to the beginning of the
2874 .debug_info section to refer to each other. */
2875
2876 typedef long int dw_offset;
2877
2878 struct comdat_type_node;
2879
2880 /* The entries in the line_info table more-or-less mirror the opcodes
2881 that are used in the real dwarf line table. Arrays of these entries
2882 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2883 supported. */
2884
2885 enum dw_line_info_opcode {
2886 /* Emit DW_LNE_set_address; the operand is the label index. */
2887 LI_set_address,
2888
2889 /* Emit a row to the matrix with the given line. This may be done
2890 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2891 special opcodes. */
2892 LI_set_line,
2893
2894 /* Emit a DW_LNS_set_file. */
2895 LI_set_file,
2896
2897 /* Emit a DW_LNS_set_column. */
2898 LI_set_column,
2899
2900 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2901 LI_negate_stmt,
2902
2903 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2904 LI_set_prologue_end,
2905 LI_set_epilogue_begin,
2906
2907 /* Emit a DW_LNE_set_discriminator. */
2908 LI_set_discriminator,
2909
2910 /* Output a Fixed Advance PC; the target PC is the label index; the
2911 base PC is the previous LI_adv_address or LI_set_address entry.
2912 We only use this when emitting debug views without assembler
2913 support, at explicit user request. Ideally, we should only use
2914 it when the offset might be zero but we can't tell: it's the only
2915 way to maybe change the PC without resetting the view number. */
2916 LI_adv_address
2917 };
2918
2919 typedef struct GTY(()) dw_line_info_struct {
2920 enum dw_line_info_opcode opcode;
2921 unsigned int val;
2922 } dw_line_info_entry;
2923
2924
2925 struct GTY(()) dw_line_info_table {
2926 /* The label that marks the end of this section. */
2927 const char *end_label;
2928
2929 /* The values for the last row of the matrix, as collected in the table.
2930 These are used to minimize the changes to the next row. */
2931 unsigned int file_num;
2932 unsigned int line_num;
2933 unsigned int column_num;
2934 int discrim_num;
2935 bool is_stmt;
2936 bool in_use;
2937
2938 /* This denotes the NEXT view number.
2939
2940 If it is 0, it is known that the NEXT view will be the first view
2941 at the given PC.
2942
2943 If it is -1, we're forcing the view number to be reset, e.g. at a
2944 function entry.
2945
2946 The meaning of other nonzero values depends on whether we're
2947 computing views internally or leaving it for the assembler to do
2948 so. If we're emitting them internally, view denotes the view
2949 number since the last known advance of PC. If we're leaving it
2950 for the assembler, it denotes the LVU label number that we're
2951 going to ask the assembler to assign. */
2952 var_loc_view view;
2953
2954 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2955 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2956 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
2957 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
2958
2959 vec<dw_line_info_entry, va_gc> *entries;
2960 };
2961
2962 /* If we're keeping track of location views and their reset points, and
2963 INSN is a reset point (i.e., it necessarily advances the PC), mark
2964 the next view in TABLE as reset. */
2965
2966 static void
2967 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
2968 {
2969 if (!debug_internal_reset_location_views)
2970 return;
2971
2972 /* Maybe turn (part of?) this test into a default target hook. */
2973 int reset = 0;
2974
2975 if (targetm.reset_location_view)
2976 reset = targetm.reset_location_view (insn);
2977
2978 if (reset)
2979 ;
2980 else if (JUMP_TABLE_DATA_P (insn))
2981 reset = 1;
2982 else if (GET_CODE (insn) == USE
2983 || GET_CODE (insn) == CLOBBER
2984 || GET_CODE (insn) == ASM_INPUT
2985 || asm_noperands (insn) >= 0)
2986 ;
2987 else if (get_attr_min_length (insn) > 0)
2988 reset = 1;
2989
2990 if (reset > 0 && !RESETTING_VIEW_P (table->view))
2991 RESET_NEXT_VIEW (table->view);
2992 }
2993
2994 /* Each DIE attribute has a field specifying the attribute kind,
2995 a link to the next attribute in the chain, and an attribute value.
2996 Attributes are typically linked below the DIE they modify. */
2997
2998 typedef struct GTY(()) dw_attr_struct {
2999 enum dwarf_attribute dw_attr;
3000 dw_val_node dw_attr_val;
3001 }
3002 dw_attr_node;
3003
3004
3005 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3006 The children of each node form a circular list linked by
3007 die_sib. die_child points to the node *before* the "first" child node. */
3008
3009 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3010 union die_symbol_or_type_node
3011 {
3012 const char * GTY ((tag ("0"))) die_symbol;
3013 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3014 }
3015 GTY ((desc ("%0.comdat_type_p"))) die_id;
3016 vec<dw_attr_node, va_gc> *die_attr;
3017 dw_die_ref die_parent;
3018 dw_die_ref die_child;
3019 dw_die_ref die_sib;
3020 dw_die_ref die_definition; /* ref from a specification to its definition */
3021 dw_offset die_offset;
3022 unsigned long die_abbrev;
3023 int die_mark;
3024 unsigned int decl_id;
3025 enum dwarf_tag die_tag;
3026 /* Die is used and must not be pruned as unused. */
3027 BOOL_BITFIELD die_perennial_p : 1;
3028 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3029 /* For an external ref to die_symbol if die_offset contains an extra
3030 offset to that symbol. */
3031 BOOL_BITFIELD with_offset : 1;
3032 /* Whether this DIE was removed from the DIE tree, for example via
3033 prune_unused_types. We don't consider those present from the
3034 DIE lookup routines. */
3035 BOOL_BITFIELD removed : 1;
3036 /* Lots of spare bits. */
3037 }
3038 die_node;
3039
3040 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3041 static bool early_dwarf;
3042 static bool early_dwarf_finished;
3043 struct set_early_dwarf {
3044 bool saved;
3045 set_early_dwarf () : saved(early_dwarf)
3046 {
3047 gcc_assert (! early_dwarf_finished);
3048 early_dwarf = true;
3049 }
3050 ~set_early_dwarf () { early_dwarf = saved; }
3051 };
3052
3053 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3054 #define FOR_EACH_CHILD(die, c, expr) do { \
3055 c = die->die_child; \
3056 if (c) do { \
3057 c = c->die_sib; \
3058 expr; \
3059 } while (c != die->die_child); \
3060 } while (0)
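
/* For example, counting the children of a DIE with the macro above
   (a minimal sketch; DIE is the node being examined and C is the
   iteration variable the macro expects to be declared):

     dw_die_ref c;
     unsigned count = 0;
     FOR_EACH_CHILD (die, c, count++);
*/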
3061
3062 /* The pubname structure */
3063
3064 typedef struct GTY(()) pubname_struct {
3065 dw_die_ref die;
3066 const char *name;
3067 }
3068 pubname_entry;
3069
3070
3071 struct GTY(()) dw_ranges {
3072 const char *label;
3073 /* If this is positive, it's a block number, otherwise it's a
3074 bitwise-negated index into dw_ranges_by_label. */
3075 int num;
3076 /* Index for the range list for DW_FORM_rnglistx. */
3077 unsigned int idx : 31;
3078 /* True if this range might possibly be in a different section
3079 from the previous entry. */
3080 unsigned int maybe_new_sec : 1;
3081 };
3082
3083 /* A structure to hold a macinfo entry. */
3084
3085 typedef struct GTY(()) macinfo_struct {
3086 unsigned char code;
3087 unsigned HOST_WIDE_INT lineno;
3088 const char *info;
3089 }
3090 macinfo_entry;
3091
3092
3093 struct GTY(()) dw_ranges_by_label {
3094 const char *begin;
3095 const char *end;
3096 };
3097
3098 /* The comdat type node structure. */
3099 struct GTY(()) comdat_type_node
3100 {
3101 dw_die_ref root_die;
3102 dw_die_ref type_die;
3103 dw_die_ref skeleton_die;
3104 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3105 comdat_type_node *next;
3106 };
3107
3108 /* A list of DIEs for which we can't determine ancestry (parent_die
3109 field) just yet. Later in dwarf2out_finish we will fill in the
3110 missing bits. */
3111 typedef struct GTY(()) limbo_die_struct {
3112 dw_die_ref die;
3113 /* The tree for which this DIE was created. We use this to
3114 determine ancestry later. */
3115 tree created_for;
3116 struct limbo_die_struct *next;
3117 }
3118 limbo_die_node;
3119
3120 typedef struct skeleton_chain_struct
3121 {
3122 dw_die_ref old_die;
3123 dw_die_ref new_die;
3124 struct skeleton_chain_struct *parent;
3125 }
3126 skeleton_chain_node;
3127
3128 /* Define a macro which returns nonzero for a TYPE_DECL which was
3129 implicitly generated for a type.
3130
3131 Note that, unlike the C front-end (which generates a NULL named
3132 TYPE_DECL node for each complete tagged type, each array type,
3133 and each function type node created), the C++ front-end generates
3134 a _named_ TYPE_DECL node for each tagged type node created.
3135 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3136 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3137 front-end, but for each type, tagged or not. */
3138
3139 #define TYPE_DECL_IS_STUB(decl) \
3140 (DECL_NAME (decl) == NULL_TREE \
3141 || (DECL_ARTIFICIAL (decl) \
3142 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3143 /* This is necessary for stub decls that \
3144 appear in nested inline functions. */ \
3145 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3146 && (decl_ultimate_origin (decl) \
3147 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3148
3149 /* Information concerning the compilation unit's programming
3150 language, and compiler version. */
3151
3152 /* Fixed size portion of the DWARF compilation unit header. */
3153 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3154 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3155 + (dwarf_version >= 5 ? 4 : 3))
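
/* E.g. with 32-bit DWARF (4-byte initial length and offsets) this is
   4 + 4 + 3 == 11 bytes for DWARF 2-4 (2-byte version, 1-byte address
   size) and 4 + 4 + 4 == 12 bytes for DWARF 5, which adds a 1-byte
   unit type field.  */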
3156
3157 /* Fixed size portion of the DWARF comdat type unit header. */
3158 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3159 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3160 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3161
3162 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3163 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3164 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3165
3166 /* Fixed size portion of public names info. */
3167 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3168
3169 /* Fixed size portion of the address range info. */
3170 #define DWARF_ARANGES_HEADER_SIZE \
3171 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3172 DWARF2_ADDR_SIZE * 2) \
3173 - DWARF_INITIAL_LENGTH_SIZE)
3174
3175 /* Size of padding portion in the address range info. It must be
3176 aligned to twice the pointer size. */
3177 #define DWARF_ARANGES_PAD_SIZE \
3178 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3179 DWARF2_ADDR_SIZE * 2) \
3180 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
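
/* Worked example for the two macros above, assuming 32-bit DWARF
   (4-byte initial length and offsets) and 8-byte target addresses:
   the unrounded header is 4 + 4 + 4 == 12 bytes, rounded up to 16
   (twice the address size), so DWARF_ARANGES_HEADER_SIZE is
   16 - 4 == 12 and DWARF_ARANGES_PAD_SIZE is 16 - 12 == 4 bytes of
   padding.  */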
3181
3182 /* Use assembler line directives if available. */
3183 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3184 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3185 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3186 #else
3187 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3188 #endif
3189 #endif
3190
3191 /* Use assembler views in line directives if available. */
3192 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3193 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3194 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3195 #else
3196 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3197 #endif
3198 #endif
3199
3200 /* Return true if GCC configure detected assembler support for .loc. */
3201
3202 bool
3203 dwarf2out_default_as_loc_support (void)
3204 {
3205 return DWARF2_ASM_LINE_DEBUG_INFO;
3206 #if (GCC_VERSION >= 3000)
3207 # undef DWARF2_ASM_LINE_DEBUG_INFO
3208 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3209 #endif
3210 }
3211
3212 /* Return true if GCC configure detected assembler support for views
3213 in .loc directives. */
3214
3215 bool
3216 dwarf2out_default_as_locview_support (void)
3217 {
3218 return DWARF2_ASM_VIEW_DEBUG_INFO;
3219 #if (GCC_VERSION >= 3000)
3220 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3221 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3222 #endif
3223 }
3224
3225 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3226 view computation, and it refers to a view identifier for which we
3227 will not emit a label because it is known to map to a view number
3228 zero. We won't allocate the bitmap if we're not using assembler
3229 support for location views, but we have to make the variable
3230 visible for GGC and for code that will be optimized out for lack of
3231 support but that's still parsed and compiled. We could abstract it
3232 out with macros, but it's not worth it. */
3233 static GTY(()) bitmap zero_view_p;
3234
3235 /* Evaluate to TRUE iff N is known to identify the first location view
3236 at its PC. When not using assembler location view computation,
3237 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3238 and the view label numbers recorded in it are the ones known to be
3239 zero. */
3240 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3241 || (N) == (var_loc_view)-1 \
3242 || (zero_view_p \
3243 && bitmap_bit_p (zero_view_p, (N))))
3244
3245 /* Return true iff we're to emit .loc directives for the assembler to
3246 generate line number sections.
3247
3248 When we're not emitting views, all we need from the assembler is
3249 support for .loc directives.
3250
3251 If we are emitting views, we can only use the assembler's .loc
3252 support if it also supports views.
3253
3254 When the compiler is emitting the line number programs and
3255 computing view numbers itself, it resets view numbers at known PC
3256 changes and counts from that, and then it emits view numbers as
3257 literal constants in locviewlists. There are cases in which the
3258 compiler is not sure about PC changes, e.g. when extra alignment is
3259 requested for a label. In these cases, the compiler may not reset
3260 the view counter, and the potential PC advance in the line number
3261 program will use an opcode that does not reset the view counter
3262 even if the PC actually changes, so that compiler and debug info
3263 consumer can keep view numbers in sync.
3264
3265 When the compiler defers view computation to the assembler, it
3266 emits symbolic view numbers in locviewlists, with the exception of
3267 views known to be zero (forced resets, or reset after
3268 compiler-visible PC changes): instead of emitting symbols for
3269 these, we emit literal zero and assert the assembler agrees with
3270 the compiler's assessment. We could use symbolic views everywhere,
3271 instead of special-casing zero views, but then we'd be unable to
3272 optimize out locviewlists that contain only zeros. */
3273
3274 static bool
3275 output_asm_line_debug_info (void)
3276 {
3277 return (dwarf2out_as_loc_support
3278 && (dwarf2out_as_locview_support
3279 || !debug_variable_location_views));
3280 }
3281
3282 /* Minimum line offset in a special line info. opcode.
3283 This value was chosen to give a reasonable range of values. */
3284 #define DWARF_LINE_BASE -10
3285
3286 /* First special line opcode - leave room for the standard opcodes. */
3287 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3288
3289 /* Range of line offsets in a special line info. opcode. */
3290 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
3291
3292 /* Flag that indicates the initial value of the is_stmt_start flag.
3293 In the present implementation, we do not mark any lines as
3294 the beginning of a source statement, because that information
3295 is not made available by the GCC front-end. */
3296 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3297
3298 /* Maximum number of operations per instruction bundle. */
3299 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3300 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3301 #endif
3302
3303 /* This location is used by calc_die_sizes() to keep track
3304 of the offset of each DIE within the .debug_info section. */
3305 static unsigned long next_die_offset;
3306
3307 /* Record the root of the DIE's built for the current compilation unit. */
3308 static GTY(()) dw_die_ref single_comp_unit_die;
3309
3310 /* A list of type DIEs that have been separated into comdat sections. */
3311 static GTY(()) comdat_type_node *comdat_type_list;
3312
3313 /* A list of CU DIEs that have been separated. */
3314 static GTY(()) limbo_die_node *cu_die_list;
3315
3316 /* A list of DIEs with a NULL parent waiting to be relocated. */
3317 static GTY(()) limbo_die_node *limbo_die_list;
3318
3319 /* A list of DIEs for which we may have to generate
3320 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3321 static GTY(()) limbo_die_node *deferred_asm_name;
3322
3323 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3324 {
3325 typedef const char *compare_type;
3326
3327 static hashval_t hash (dwarf_file_data *);
3328 static bool equal (dwarf_file_data *, const char *);
3329 };
3330
3331 /* Filenames referenced by this compilation unit. */
3332 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3333
3334 struct decl_die_hasher : ggc_ptr_hash<die_node>
3335 {
3336 typedef tree compare_type;
3337
3338 static hashval_t hash (die_node *);
3339 static bool equal (die_node *, tree);
3340 };
3341 /* A hash table of references to DIE's that describe declarations.
3342 The key is a DECL_UID() which is a unique number identifying each decl. */
3343 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3344
3345 struct GTY ((for_user)) variable_value_struct {
3346 unsigned int decl_id;
3347 vec<dw_die_ref, va_gc> *dies;
3348 };
3349
3350 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3351 {
3352 typedef tree compare_type;
3353
3354 static hashval_t hash (variable_value_struct *);
3355 static bool equal (variable_value_struct *, tree);
3356 };
3357 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3358 dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs that are
3359 the DECL_CONTEXT of the referenced VAR_DECLs. */
3360 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3361
3362 struct block_die_hasher : ggc_ptr_hash<die_struct>
3363 {
3364 static hashval_t hash (die_struct *);
3365 static bool equal (die_struct *, die_struct *);
3366 };
3367
3368 /* A hash table of references to DIE's that describe COMMON blocks.
3369 The key is DECL_UID() ^ die_parent. */
3370 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3371
3372 typedef struct GTY(()) die_arg_entry_struct {
3373 dw_die_ref die;
3374 tree arg;
3375 } die_arg_entry;
3376
3377
3378 /* Node of the variable location list. */
3379 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3380 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3381 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3382 in mode of the EXPR_LIST node and first EXPR_LIST operand
3383 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3384 location or NULL for padding. For larger bitsizes,
3385 mode is 0 and first operand is a CONCAT with bitsize
3386 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3387 NULL as second operand. */
3388 rtx GTY (()) loc;
3389 const char * GTY (()) label;
3390 struct var_loc_node * GTY (()) next;
3391 var_loc_view view;
3392 };
3393
3394 /* Variable location list. */
3395 struct GTY ((for_user)) var_loc_list_def {
3396 struct var_loc_node * GTY (()) first;
3397
3398 /* Pointer to the last but one or last element of the
3399 chained list. If the list is empty, both first and
3400 last are NULL. If the list contains just one node,
3401 or the last node is certainly not redundant, it points
3402 to the last node; otherwise it points to the last but one.
3403 Do not mark it for GC because it is marked through the chain. */
3404 struct var_loc_node * GTY ((skip ("%h"))) last;
3405
3406 /* Pointer to the last element before a section switch;
3407 if NULL, either sections weren't switched or FIRST
3408 is after the section switch. */
3409 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3410
3411 /* DECL_UID of the variable decl. */
3412 unsigned int decl_id;
3413 };
3414 typedef struct var_loc_list_def var_loc_list;
3415
3416 /* Call argument location list. */
3417 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3418 rtx GTY (()) call_arg_loc_note;
3419 const char * GTY (()) label;
3420 tree GTY (()) block;
3421 bool tail_call_p;
3422 rtx GTY (()) symbol_ref;
3423 struct call_arg_loc_node * GTY (()) next;
3424 };
3425
3426
3427 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3428 {
3429 typedef const_tree compare_type;
3430
3431 static hashval_t hash (var_loc_list *);
3432 static bool equal (var_loc_list *, const_tree);
3433 };
3434
3435 /* Table of decl location linked lists. */
3436 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3437
3438 /* Head and tail of call_arg_loc chain. */
3439 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3440 static struct call_arg_loc_node *call_arg_loc_last;
3441
3442 /* Number of call sites in the current function. */
3443 static int call_site_count = -1;
3444 /* Number of tail call sites in the current function. */
3445 static int tail_call_site_count = -1;
3446
3447 /* A cached location list. */
3448 struct GTY ((for_user)) cached_dw_loc_list_def {
3449 /* The DECL_UID of the decl that this entry describes. */
3450 unsigned int decl_id;
3451
3452 /* The cached location list. */
3453 dw_loc_list_ref loc_list;
3454 };
3455 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3456
3457 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3458 {
3459
3460 typedef const_tree compare_type;
3461
3462 static hashval_t hash (cached_dw_loc_list *);
3463 static bool equal (cached_dw_loc_list *, const_tree);
3464 };
3465
3466 /* Table of cached location lists. */
3467 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3468
3469 /* A vector of references to DIE's that are uniquely identified by their tag,
3470 presence/absence of children DIE's, and list of attribute/value pairs. */
3471 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3472
3473 /* A hash map to remember the stack usage for DWARF procedures. The value
3474 stored is the stack size difference between before the DWARF procedure
3475 invocation and after it returns. In other words, for a DWARF procedure
3476 that consumes N stack slots and pushes M, this stores M - N. */
3477 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3478
3479 /* A global counter for generating labels for line number data. */
3480 static unsigned int line_info_label_num;
3481
3482 /* The current table to which we should emit line number information
3483 for the current function. This will be set up at the beginning of
3484 assembly for the function. */
3485 static GTY(()) dw_line_info_table *cur_line_info_table;
3486
3487 /* The two default tables of line number info. */
3488 static GTY(()) dw_line_info_table *text_section_line_info;
3489 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3490
3491 /* The set of all non-default tables of line number info. */
3492 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3493
3494 /* A flag to tell pubnames/types export if there is an info section to
3495 refer to. */
3496 static bool info_section_emitted;
3497
3498 /* A pointer to the base of a table that contains a list of publicly
3499 accessible names. */
3500 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3501
3502 /* A pointer to the base of a table that contains a list of publicly
3503 accessible types. */
3504 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3505
3506 /* A pointer to the base of a table that contains a list of macro
3507 defines/undefines (and file start/end markers). */
3508 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3509
3510 /* True if .debug_macinfo or .debug_macros section is going to be
3511 emitted. */
3512 #define have_macinfo \
3513 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3514 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3515 && !macinfo_table->is_empty ())
3516
3517 /* Vector of dies for which we should generate .debug_ranges info. */
3518 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3519
3520 /* Vector of pairs of labels referenced in ranges_table. */
3521 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3522
3523 /* Whether we have location lists that need outputting. */
3524 static GTY(()) bool have_location_lists;
3525
3526 /* Unique label counter. */
3527 static GTY(()) unsigned int loclabel_num;
3528
3529 /* Unique label counter for point-of-call tables. */
3530 static GTY(()) unsigned int poc_label_num;
3531
3532 /* The last file entry emitted by maybe_emit_file(). */
3533 static GTY(()) struct dwarf_file_data * last_emitted_file;
3534
3535 /* Number of internal labels generated by gen_internal_sym(). */
3536 static GTY(()) int label_num;
3537
3538 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3539
3540 /* Instances of generic types for which we need to generate debug
3541 info that describe their generic parameters and arguments. That
3542 generation needs to happen once all types are properly laid out so
3543 we do it at the end of compilation. */
3544 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3545
3546 /* Offset from the "steady-state frame pointer" to the frame base,
3547 within the current function. */
3548 static poly_int64 frame_pointer_fb_offset;
3549 static bool frame_pointer_fb_offset_valid;
3550
3551 static vec<dw_die_ref> base_types;
3552
3553 /* Flags to represent a set of attribute classes for attributes that represent
3554 a scalar value (bounds, pointers, ...). */
3555 enum dw_scalar_form
3556 {
3557 dw_scalar_form_constant = 0x01,
3558 dw_scalar_form_exprloc = 0x02,
3559 dw_scalar_form_reference = 0x04
3560 };
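/* These values are combined into a bitmask (e.g. dw_scalar_form_constant
   | dw_scalar_form_exprloc) and passed to add_scalar_info and add_bound_info
   (declared below) to say which representations the caller can accept. */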
3561
3562 /* Forward declarations for functions defined in this file. */
3563
3564 static int is_pseudo_reg (const_rtx);
3565 static tree type_main_variant (tree);
3566 static int is_tagged_type (const_tree);
3567 static const char *dwarf_tag_name (unsigned);
3568 static const char *dwarf_attr_name (unsigned);
3569 static const char *dwarf_form_name (unsigned);
3570 static tree decl_ultimate_origin (const_tree);
3571 static tree decl_class_context (tree);
3572 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3573 static inline enum dw_val_class AT_class (dw_attr_node *);
3574 static inline unsigned int AT_index (dw_attr_node *);
3575 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3576 static inline unsigned AT_flag (dw_attr_node *);
3577 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3578 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3579 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3580 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3581 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3582 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3583 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3584 unsigned int, unsigned char *);
3585 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3586 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3587 static inline const char *AT_string (dw_attr_node *);
3588 static enum dwarf_form AT_string_form (dw_attr_node *);
3589 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3590 static void add_AT_specification (dw_die_ref, dw_die_ref);
3591 static inline dw_die_ref AT_ref (dw_attr_node *);
3592 static inline int AT_ref_external (dw_attr_node *);
3593 static inline void set_AT_ref_external (dw_attr_node *, int);
3594 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3595 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3596 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3597 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3598 dw_loc_list_ref);
3599 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3600 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3602 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3603 static void remove_addr_table_entry (addr_table_entry *);
3604 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3605 static inline rtx AT_addr (dw_attr_node *);
3606 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3607 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3608 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3609 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3610 const char *);
3611 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3612 unsigned HOST_WIDE_INT);
3613 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3614 unsigned long, bool);
3615 static inline const char *AT_lbl (dw_attr_node *);
3616 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3617 static const char *get_AT_low_pc (dw_die_ref);
3618 static const char *get_AT_hi_pc (dw_die_ref);
3619 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3620 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3621 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3622 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3623 static bool is_cxx (void);
3624 static bool is_cxx (const_tree);
3625 static bool is_fortran (void);
3626 static bool is_ada (void);
3627 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3628 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3629 static void add_child_die (dw_die_ref, dw_die_ref);
3630 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3631 static dw_die_ref lookup_type_die (tree);
3632 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3633 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3634 static void equate_type_number_to_die (tree, dw_die_ref);
3635 static dw_die_ref lookup_decl_die (tree);
3636 static var_loc_list *lookup_decl_loc (const_tree);
3637 static void equate_decl_number_to_die (tree, dw_die_ref);
3638 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3639 static void print_spaces (FILE *);
3640 static void print_die (dw_die_ref, FILE *);
3641 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3642 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3643 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3644 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3645 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3646 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3647 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3648 struct md5_ctx *, int *);
3649 struct checksum_attributes;
3650 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3651 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3652 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3653 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3654 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3655 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3656 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3657 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3658 static int is_type_die (dw_die_ref);
3659 static int is_comdat_die (dw_die_ref);
3660 static inline bool is_template_instantiation (dw_die_ref);
3661 static int is_declaration_die (dw_die_ref);
3662 static int should_move_die_to_comdat (dw_die_ref);
3663 static dw_die_ref clone_as_declaration (dw_die_ref);
3664 static dw_die_ref clone_die (dw_die_ref);
3665 static dw_die_ref clone_tree (dw_die_ref);
3666 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3667 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3668 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3669 static dw_die_ref generate_skeleton (dw_die_ref);
3670 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3671 dw_die_ref,
3672 dw_die_ref);
3673 static void break_out_comdat_types (dw_die_ref);
3674 static void copy_decls_for_unworthy_types (dw_die_ref);
3675
3676 static void add_sibling_attributes (dw_die_ref);
3677 static void output_location_lists (dw_die_ref);
3678 static int constant_size (unsigned HOST_WIDE_INT);
3679 static unsigned long size_of_die (dw_die_ref);
3680 static void calc_die_sizes (dw_die_ref);
3681 static void calc_base_type_die_sizes (void);
3682 static void mark_dies (dw_die_ref);
3683 static void unmark_dies (dw_die_ref);
3684 static void unmark_all_dies (dw_die_ref);
3685 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3686 static unsigned long size_of_aranges (void);
3687 static enum dwarf_form value_format (dw_attr_node *);
3688 static void output_value_format (dw_attr_node *);
3689 static void output_abbrev_section (void);
3690 static void output_die_abbrevs (unsigned long, dw_die_ref);
3691 static void output_die (dw_die_ref);
3692 static void output_compilation_unit_header (enum dwarf_unit_type);
3693 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3694 static void output_comdat_type_unit (comdat_type_node *);
3695 static const char *dwarf2_name (tree, int);
3696 static void add_pubname (tree, dw_die_ref);
3697 static void add_enumerator_pubname (const char *, dw_die_ref);
3698 static void add_pubname_string (const char *, dw_die_ref);
3699 static void add_pubtype (tree, dw_die_ref);
3700 static void output_pubnames (vec<pubname_entry, va_gc> *);
3701 static void output_aranges (void);
3702 static unsigned int add_ranges (const_tree, bool = false);
3703 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3704 bool *, bool);
3705 static void output_ranges (void);
3706 static dw_line_info_table *new_line_info_table (void);
3707 static void output_line_info (bool);
3708 static void output_file_names (void);
3709 static dw_die_ref base_type_die (tree, bool);
3710 static int is_base_type (tree);
3711 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3712 static int decl_quals (const_tree);
3713 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3714 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3715 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3716 static int type_is_enum (const_tree);
3717 static unsigned int dbx_reg_number (const_rtx);
3718 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3719 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3720 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3721 enum var_init_status);
3722 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3723 enum var_init_status);
3724 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3725 enum var_init_status);
3726 static int is_based_loc (const_rtx);
3727 static bool resolve_one_addr (rtx *);
3728 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3729 enum var_init_status);
3730 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3731 enum var_init_status);
3732 struct loc_descr_context;
3733 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3734 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3735 static dw_loc_list_ref loc_list_from_tree (tree, int,
3736 struct loc_descr_context *);
3737 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3738 struct loc_descr_context *);
3739 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3740 static tree field_type (const_tree);
3741 static unsigned int simple_type_align_in_bits (const_tree);
3742 static unsigned int simple_decl_align_in_bits (const_tree);
3743 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3744 struct vlr_context;
3745 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3746 HOST_WIDE_INT *);
3747 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3748 dw_loc_list_ref);
3749 static void add_data_member_location_attribute (dw_die_ref, tree,
3750 struct vlr_context *);
3751 static bool add_const_value_attribute (dw_die_ref, rtx);
3752 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3753 static void insert_wide_int (const wide_int &, unsigned char *, int);
3754 static void insert_float (const_rtx, unsigned char *);
3755 static rtx rtl_for_decl_location (tree);
3756 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3757 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3758 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3759 static void add_name_attribute (dw_die_ref, const char *);
3760 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3761 static void add_comp_dir_attribute (dw_die_ref);
3762 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3763 struct loc_descr_context *);
3764 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3765 struct loc_descr_context *);
3766 static void add_subscript_info (dw_die_ref, tree, bool);
3767 static void add_byte_size_attribute (dw_die_ref, tree);
3768 static void add_alignment_attribute (dw_die_ref, tree);
3769 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3770 struct vlr_context *);
3771 static void add_bit_size_attribute (dw_die_ref, tree);
3772 static void add_prototyped_attribute (dw_die_ref, tree);
3773 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3774 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3775 static void add_src_coords_attributes (dw_die_ref, tree);
3776 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3777 static void add_discr_value (dw_die_ref, dw_discr_value *);
3778 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3779 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3780 static void push_decl_scope (tree);
3781 static void pop_decl_scope (void);
3782 static dw_die_ref scope_die_for (tree, dw_die_ref);
3783 static inline int local_scope_p (dw_die_ref);
3784 static inline int class_scope_p (dw_die_ref);
3785 static inline int class_or_namespace_scope_p (dw_die_ref);
3786 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3787 static void add_calling_convention_attribute (dw_die_ref, tree);
3788 static const char *type_tag (const_tree);
3789 static tree member_declared_type (const_tree);
3790 #if 0
3791 static const char *decl_start_label (tree);
3792 #endif
3793 static void gen_array_type_die (tree, dw_die_ref);
3794 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3795 #if 0
3796 static void gen_entry_point_die (tree, dw_die_ref);
3797 #endif
3798 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3799 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3800 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3801 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3802 static void gen_formal_types_die (tree, dw_die_ref);
3803 static void gen_subprogram_die (tree, dw_die_ref);
3804 static void gen_variable_die (tree, tree, dw_die_ref);
3805 static void gen_const_die (tree, dw_die_ref);
3806 static void gen_label_die (tree, dw_die_ref);
3807 static void gen_lexical_block_die (tree, dw_die_ref);
3808 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3809 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3810 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3811 static dw_die_ref gen_compile_unit_die (const char *);
3812 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3813 static void gen_member_die (tree, dw_die_ref);
3814 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3815 enum debug_info_usage);
3816 static void gen_subroutine_type_die (tree, dw_die_ref);
3817 static void gen_typedef_die (tree, dw_die_ref);
3818 static void gen_type_die (tree, dw_die_ref);
3819 static void gen_block_die (tree, dw_die_ref);
3820 static void decls_for_scope (tree, dw_die_ref);
3821 static bool is_naming_typedef_decl (const_tree);
3822 static inline dw_die_ref get_context_die (tree);
3823 static void gen_namespace_die (tree, dw_die_ref);
3824 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3825 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3826 static dw_die_ref force_decl_die (tree);
3827 static dw_die_ref force_type_die (tree);
3828 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3829 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3830 static struct dwarf_file_data * lookup_filename (const char *);
3831 static void retry_incomplete_types (void);
3832 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3833 static void gen_generic_params_dies (tree);
3834 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3835 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3836 static void splice_child_die (dw_die_ref, dw_die_ref);
3837 static int file_info_cmp (const void *, const void *);
3838 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3839 const char *, var_loc_view, const char *);
3840 static void output_loc_list (dw_loc_list_ref);
3841 static char *gen_internal_sym (const char *);
3842 static bool want_pubnames (void);
3843
3844 static void prune_unmark_dies (dw_die_ref);
3845 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3846 static void prune_unused_types_mark (dw_die_ref, int);
3847 static void prune_unused_types_walk (dw_die_ref);
3848 static void prune_unused_types_walk_attribs (dw_die_ref);
3849 static void prune_unused_types_prune (dw_die_ref);
3850 static void prune_unused_types (void);
3851 static int maybe_emit_file (struct dwarf_file_data *fd);
3852 static inline const char *AT_vms_delta1 (dw_attr_node *);
3853 static inline const char *AT_vms_delta2 (dw_attr_node *);
3854 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3855 const char *, const char *);
3856 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3857 static void gen_remaining_tmpl_value_param_die_attribute (void);
3858 static bool generic_type_p (tree);
3859 static void schedule_generic_params_dies_gen (tree t);
3860 static void gen_scheduled_generic_parms_dies (void);
3861 static void resolve_variable_values (void);
3862
3863 static const char *comp_dir_string (void);
3864
3865 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3866
3867 /* enum for tracking thread-local variables whose address is really an offset
3868 relative to the TLS pointer, which will need link-time relocation, but will
3869 not need relocation by the DWARF consumer. */
3870
3871 enum dtprel_bool
3872 {
3873 dtprel_false = 0,
3874 dtprel_true = 1
3875 };
3876
3877 /* Return the operator to use for an address of a variable. For dtprel_true, we
3878 use DW_OP_const*. For regular variables, which need both link-time
3879 relocation and consumer-level relocation (e.g., to account for shared objects
3880 loaded at a random address), we use DW_OP_addr*. */
3881
3882 static inline enum dwarf_location_atom
3883 dw_addr_op (enum dtprel_bool dtprel)
3884 {
3885 if (dtprel == dtprel_true)
3886 return (dwarf_split_debug_info ? DW_OP_GNU_const_index
3887 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3888 else
3889 return dwarf_split_debug_info ? DW_OP_GNU_addr_index : DW_OP_addr;
3890 }
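/* For example, under -gsplit-dwarf a dtprel (TLS offset) value is emitted
   via DW_OP_GNU_const_index and an ordinary address via DW_OP_GNU_addr_index;
   without split DWARF the same cases use DW_OP_const4u or DW_OP_const8u
   (depending on DWARF2_ADDR_SIZE) and DW_OP_addr respectively. */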
3891
3892 /* Return a pointer to a newly allocated address location description. If
3893 dwarf_split_debug_info is true, then record the address with the appropriate
3894 relocation. */
3895 static inline dw_loc_descr_ref
3896 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3897 {
3898 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3899
3900 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3901 ref->dw_loc_oprnd1.v.val_addr = addr;
3902 ref->dtprel = dtprel;
3903 if (dwarf_split_debug_info)
3904 ref->dw_loc_oprnd1.val_entry
3905 = add_addr_table_entry (addr,
3906 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3907 else
3908 ref->dw_loc_oprnd1.val_entry = NULL;
3909
3910 return ref;
3911 }
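/* For instance, a caller building a location expression for an ordinary
   symbol would do something like new_addr_loc_descr (rtl, dtprel_false),
   while TLS offsets pass dtprel_true. */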
3912
3913 /* Section names used to hold DWARF debugging information. */
3914
3915 #ifndef DEBUG_INFO_SECTION
3916 #define DEBUG_INFO_SECTION ".debug_info"
3917 #endif
3918 #ifndef DEBUG_DWO_INFO_SECTION
3919 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3920 #endif
3921 #ifndef DEBUG_LTO_INFO_SECTION
3922 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3923 #endif
3924 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3925 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3926 #endif
3927 #ifndef DEBUG_ABBREV_SECTION
3928 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3929 #endif
3930 #ifndef DEBUG_LTO_ABBREV_SECTION
3931 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3932 #endif
3933 #ifndef DEBUG_DWO_ABBREV_SECTION
3934 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3935 #endif
3936 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3937 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3938 #endif
3939 #ifndef DEBUG_ARANGES_SECTION
3940 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3941 #endif
3942 #ifndef DEBUG_ADDR_SECTION
3943 #define DEBUG_ADDR_SECTION ".debug_addr"
3944 #endif
3945 #ifndef DEBUG_MACINFO_SECTION
3946 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3947 #endif
3948 #ifndef DEBUG_LTO_MACINFO_SECTION
3949 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3950 #endif
3951 #ifndef DEBUG_DWO_MACINFO_SECTION
3952 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
3953 #endif
3954 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
3955 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
3956 #endif
3957 #ifndef DEBUG_MACRO_SECTION
3958 #define DEBUG_MACRO_SECTION ".debug_macro"
3959 #endif
3960 #ifndef DEBUG_LTO_MACRO_SECTION
3961 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
3962 #endif
3963 #ifndef DEBUG_DWO_MACRO_SECTION
3964 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
3965 #endif
3966 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
3967 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
3968 #endif
3969 #ifndef DEBUG_LINE_SECTION
3970 #define DEBUG_LINE_SECTION ".debug_line"
3971 #endif
3972 #ifndef DEBUG_LTO_LINE_SECTION
3973 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
3974 #endif
3975 #ifndef DEBUG_DWO_LINE_SECTION
3976 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
3977 #endif
3978 #ifndef DEBUG_LTO_DWO_LINE_SECTION
3979 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
3980 #endif
3981 #ifndef DEBUG_LOC_SECTION
3982 #define DEBUG_LOC_SECTION ".debug_loc"
3983 #endif
3984 #ifndef DEBUG_DWO_LOC_SECTION
3985 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
3986 #endif
3987 #ifndef DEBUG_LOCLISTS_SECTION
3988 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
3989 #endif
3990 #ifndef DEBUG_DWO_LOCLISTS_SECTION
3991 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
3992 #endif
3993 #ifndef DEBUG_PUBNAMES_SECTION
3994 #define DEBUG_PUBNAMES_SECTION \
3995 ((debug_generate_pub_sections == 2) \
3996 ? ".debug_gnu_pubnames" : ".debug_pubnames")
3997 #endif
3998 #ifndef DEBUG_PUBTYPES_SECTION
3999 #define DEBUG_PUBTYPES_SECTION \
4000 ((debug_generate_pub_sections == 2) \
4001 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4002 #endif
4003 #ifndef DEBUG_STR_OFFSETS_SECTION
4004 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4005 #endif
4006 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4007 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4008 #endif
4009 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4010 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4011 #endif
4012 #ifndef DEBUG_STR_SECTION
4013 #define DEBUG_STR_SECTION ".debug_str"
4014 #endif
4015 #ifndef DEBUG_LTO_STR_SECTION
4016 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4017 #endif
4018 #ifndef DEBUG_STR_DWO_SECTION
4019 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4020 #endif
4021 #ifndef DEBUG_LTO_STR_DWO_SECTION
4022 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4023 #endif
4024 #ifndef DEBUG_RANGES_SECTION
4025 #define DEBUG_RANGES_SECTION ".debug_ranges"
4026 #endif
4027 #ifndef DEBUG_RNGLISTS_SECTION
4028 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4029 #endif
4030 #ifndef DEBUG_LINE_STR_SECTION
4031 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4032 #endif
4033 #ifndef DEBUG_LTO_LINE_STR_SECTION
4034 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4035 #endif
4036
4037 /* Standard ELF section names for compiled code and data. */
4038 #ifndef TEXT_SECTION_NAME
4039 #define TEXT_SECTION_NAME ".text"
4040 #endif
4041
4042 /* Section flags for .debug_str section. */
4043 #define DEBUG_STR_SECTION_FLAGS \
4044 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4045 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4046 : SECTION_DEBUG)
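/* In the mergeable case above, the "| 1" stores the entity size (one byte
   per string character) in the low bits of the section flags, as expected
   for SHF_MERGE string sections. */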
4047
4048 /* Section flags for .debug_str.dwo section. */
4049 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4050
4051 /* Attribute used to refer to the macro section. */
4052 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4053 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
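/* That is: for DWARF 5 and later the standard DW_AT_macros is used; with
   -gstrict-dwarf on earlier versions we fall back to DW_AT_macro_info;
   otherwise the GNU extension DW_AT_GNU_macros is used. */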
4054
4055 /* Labels we insert at the beginning of sections, so that we can reference
4056 them instead of the section names themselves. */
4057
4058 #ifndef TEXT_SECTION_LABEL
4059 #define TEXT_SECTION_LABEL "Ltext"
4060 #endif
4061 #ifndef COLD_TEXT_SECTION_LABEL
4062 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4063 #endif
4064 #ifndef DEBUG_LINE_SECTION_LABEL
4065 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4066 #endif
4067 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4068 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4069 #endif
4070 #ifndef DEBUG_INFO_SECTION_LABEL
4071 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4072 #endif
4073 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4074 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4075 #endif
4076 #ifndef DEBUG_ABBREV_SECTION_LABEL
4077 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4078 #endif
4079 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4080 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4081 #endif
4082 #ifndef DEBUG_ADDR_SECTION_LABEL
4083 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4084 #endif
4085 #ifndef DEBUG_LOC_SECTION_LABEL
4086 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4087 #endif
4088 #ifndef DEBUG_RANGES_SECTION_LABEL
4089 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4090 #endif
4091 #ifndef DEBUG_MACINFO_SECTION_LABEL
4092 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4093 #endif
4094 #ifndef DEBUG_MACRO_SECTION_LABEL
4095 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4096 #endif
4097 #define SKELETON_COMP_DIE_ABBREV 1
4098 #define SKELETON_TYPE_DIE_ABBREV 2
4099
4100 /* Definitions of defaults for formats and names of various special
4101 (artificial) labels which may be generated within this file (when the -g
4102 option is used and DWARF2_DEBUGGING_INFO is in effect).
4103 If necessary, these may be overridden from within the tm.h file, but
4104 typically, overriding these defaults is unnecessary. */
4105
4106 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4107 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4108 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4109 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4110 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4111 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4112 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4113 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4114 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4115 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4116 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4117 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4118 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4119 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4120 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4121
4122 #ifndef TEXT_END_LABEL
4123 #define TEXT_END_LABEL "Letext"
4124 #endif
4125 #ifndef COLD_END_LABEL
4126 #define COLD_END_LABEL "Letext_cold"
4127 #endif
4128 #ifndef BLOCK_BEGIN_LABEL
4129 #define BLOCK_BEGIN_LABEL "LBB"
4130 #endif
4131 #ifndef BLOCK_INLINE_ENTRY_LABEL
4132 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4133 #endif
4134 #ifndef BLOCK_END_LABEL
4135 #define BLOCK_END_LABEL "LBE"
4136 #endif
4137 #ifndef LINE_CODE_LABEL
4138 #define LINE_CODE_LABEL "LM"
4139 #endif
4140
4141 \f
4142 /* Return the root of the DIE's built for the current compilation unit. */
4143 static dw_die_ref
4144 comp_unit_die (void)
4145 {
4146 if (!single_comp_unit_die)
4147 single_comp_unit_die = gen_compile_unit_die (NULL);
4148 return single_comp_unit_die;
4149 }
4150
4151 /* We allow a language front-end to designate a function that is to be
4152 called to "demangle" any name before it is put into a DIE. */
4153
4154 static const char *(*demangle_name_func) (const char *);
4155
4156 void
4157 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4158 {
4159 demangle_name_func = func;
4160 }
4161
4162 /* Test if rtl node points to a pseudo register. */
4163
4164 static inline int
4165 is_pseudo_reg (const_rtx rtl)
4166 {
4167 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4168 || (GET_CODE (rtl) == SUBREG
4169 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4170 }
4171
4172 /* Return a reference to a type, with its const and volatile qualifiers
4173 removed. */
4174
4175 static inline tree
4176 type_main_variant (tree type)
4177 {
4178 type = TYPE_MAIN_VARIANT (type);
4179
4180 /* ??? There really should be only one main variant among any group of
4181 variants of a given type (and all of the MAIN_VARIANT values for all
4182 members of the group should point to that one type) but sometimes the C
4183 front-end messes this up for array types, so we work around that bug
4184 here. */
4185 if (TREE_CODE (type) == ARRAY_TYPE)
4186 while (type != TYPE_MAIN_VARIANT (type))
4187 type = TYPE_MAIN_VARIANT (type);
4188
4189 return type;
4190 }
4191
4192 /* Return nonzero if the given type node represents a tagged type. */
4193
4194 static inline int
4195 is_tagged_type (const_tree type)
4196 {
4197 enum tree_code code = TREE_CODE (type);
4198
4199 return (code == RECORD_TYPE || code == UNION_TYPE
4200 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4201 }
4202
4203 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4204
4205 static void
4206 get_ref_die_offset_label (char *label, dw_die_ref ref)
4207 {
4208 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4209 }
4210
4211 /* Return die_offset of a DIE reference to a base type. */
4212
4213 static unsigned long int
4214 get_base_type_offset (dw_die_ref ref)
4215 {
4216 if (ref->die_offset)
4217 return ref->die_offset;
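/* A zero offset means the base type DIE has not been laid out yet;
   once the compilation unit has its abbrevs assigned,
   calc_base_type_die_sizes can compute the offset lazily. */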
4218 if (comp_unit_die ()->die_abbrev)
4219 {
4220 calc_base_type_die_sizes ();
4221 gcc_assert (ref->die_offset);
4222 }
4223 return ref->die_offset;
4224 }
4225
4226 /* Return die_offset of a DIE reference other than base type. */
4227
4228 static unsigned long int
4229 get_ref_die_offset (dw_die_ref ref)
4230 {
4231 gcc_assert (ref->die_offset);
4232 return ref->die_offset;
4233 }
4234
4235 /* Convert a DIE tag into its string name. */
4236
4237 static const char *
4238 dwarf_tag_name (unsigned int tag)
4239 {
4240 const char *name = get_DW_TAG_name (tag);
4241
4242 if (name != NULL)
4243 return name;
4244
4245 return "DW_TAG_<unknown>";
4246 }
4247
4248 /* Convert a DWARF attribute code into its string name. */
4249
4250 static const char *
4251 dwarf_attr_name (unsigned int attr)
4252 {
4253 const char *name;
4254
4255 switch (attr)
4256 {
4257 #if VMS_DEBUGGING_INFO
4258 case DW_AT_HP_prologue:
4259 return "DW_AT_HP_prologue";
4260 #else
4261 case DW_AT_MIPS_loop_unroll_factor:
4262 return "DW_AT_MIPS_loop_unroll_factor";
4263 #endif
4264
4265 #if VMS_DEBUGGING_INFO
4266 case DW_AT_HP_epilogue:
4267 return "DW_AT_HP_epilogue";
4268 #else
4269 case DW_AT_MIPS_stride:
4270 return "DW_AT_MIPS_stride";
4271 #endif
4272 }
4273
4274 name = get_DW_AT_name (attr);
4275
4276 if (name != NULL)
4277 return name;
4278
4279 return "DW_AT_<unknown>";
4280 }
4281
4282 /* Convert a DWARF value form code into its string name. */
4283
4284 static const char *
4285 dwarf_form_name (unsigned int form)
4286 {
4287 const char *name = get_DW_FORM_name (form);
4288
4289 if (name != NULL)
4290 return name;
4291
4292 return "DW_FORM_<unknown>";
4293 }
4294 \f
4295 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4296 instance of an inlined instance of a decl which is local to an inline
4297 function, so we have to trace all of the way back through the origin chain
4298 to find out what sort of node actually served as the original seed for the
4299 given block. */
4300
4301 static tree
4302 decl_ultimate_origin (const_tree decl)
4303 {
4304 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4305 return NULL_TREE;
4306
4307 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4308 we're trying to output the abstract instance of this function. */
4309 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4310 return NULL_TREE;
4311
4312 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4313 most distant ancestor, this should never happen. */
4314 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4315
4316 return DECL_ABSTRACT_ORIGIN (decl);
4317 }
4318
4319 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4320 of a virtual function may refer to a base class, so we check the 'this'
4321 parameter. */
4322
4323 static tree
4324 decl_class_context (tree decl)
4325 {
4326 tree context = NULL_TREE;
4327
4328 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4329 context = DECL_CONTEXT (decl);
4330 else
4331 context = TYPE_MAIN_VARIANT
4332 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4333
4334 if (context && !TYPE_P (context))
4335 context = NULL_TREE;
4336
4337 return context;
4338 }
4339 \f
4340 /* Add an attribute/value pair to a DIE. */
4341
4342 static inline void
4343 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4344 {
4345 /* Maybe this should be an assert? */
4346 if (die == NULL)
4347 return;
4348
4349 if (flag_checking)
4350 {
4351 /* Check we do not add duplicate attrs. Can't use get_AT here
4352 because that recurses to the specification/abstract origin DIE. */
4353 dw_attr_node *a;
4354 unsigned ix;
4355 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4356 gcc_assert (a->dw_attr != attr->dw_attr);
4357 }
4358
4359 vec_safe_reserve (die->die_attr, 1);
4360 vec_safe_push (die->die_attr, *attr);
4361 }
4362
4363 static inline enum dw_val_class
4364 AT_class (dw_attr_node *a)
4365 {
4366 return a->dw_attr_val.val_class;
4367 }
4368
4369 /* Return the index for any attribute that will be referenced with a
4370 DW_FORM_GNU_addr_index or DW_FORM_GNU_str_index. String indices
4371 are stored in dw_attr_val.v.val_str for reference counting
4372 pruning. */
4373
4374 static inline unsigned int
4375 AT_index (dw_attr_node *a)
4376 {
4377 if (AT_class (a) == dw_val_class_str)
4378 return a->dw_attr_val.v.val_str->index;
4379 else if (a->dw_attr_val.val_entry != NULL)
4380 return a->dw_attr_val.val_entry->index;
4381 return NOT_INDEXED;
4382 }
4383
4384 /* Add a flag value attribute to a DIE. */
4385
4386 static inline void
4387 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4388 {
4389 dw_attr_node attr;
4390
4391 attr.dw_attr = attr_kind;
4392 attr.dw_attr_val.val_class = dw_val_class_flag;
4393 attr.dw_attr_val.val_entry = NULL;
4394 attr.dw_attr_val.v.val_flag = flag;
4395 add_dwarf_attr (die, &attr);
4396 }
4397
4398 static inline unsigned
4399 AT_flag (dw_attr_node *a)
4400 {
4401 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4402 return a->dw_attr_val.v.val_flag;
4403 }
4404
4405 /* Add a signed integer attribute value to a DIE. */
4406
4407 static inline void
4408 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4409 {
4410 dw_attr_node attr;
4411
4412 attr.dw_attr = attr_kind;
4413 attr.dw_attr_val.val_class = dw_val_class_const;
4414 attr.dw_attr_val.val_entry = NULL;
4415 attr.dw_attr_val.v.val_int = int_val;
4416 add_dwarf_attr (die, &attr);
4417 }
4418
4419 static inline HOST_WIDE_INT
4420 AT_int (dw_attr_node *a)
4421 {
4422 gcc_assert (a && (AT_class (a) == dw_val_class_const
4423 || AT_class (a) == dw_val_class_const_implicit));
4424 return a->dw_attr_val.v.val_int;
4425 }
4426
4427 /* Add an unsigned integer attribute value to a DIE. */
4428
4429 static inline void
4430 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4431 unsigned HOST_WIDE_INT unsigned_val)
4432 {
4433 dw_attr_node attr;
4434
4435 attr.dw_attr = attr_kind;
4436 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4437 attr.dw_attr_val.val_entry = NULL;
4438 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4439 add_dwarf_attr (die, &attr);
4440 }
4441
4442 static inline unsigned HOST_WIDE_INT
4443 AT_unsigned (dw_attr_node *a)
4444 {
4445 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4446 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4447 return a->dw_attr_val.v.val_unsigned;
4448 }
4449
4450 /* Add an unsigned wide integer attribute value to a DIE. */
4451
4452 static inline void
4453 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4454 const wide_int& w)
4455 {
4456 dw_attr_node attr;
4457
4458 attr.dw_attr = attr_kind;
4459 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4460 attr.dw_attr_val.val_entry = NULL;
4461 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4462 *attr.dw_attr_val.v.val_wide = w;
4463 add_dwarf_attr (die, &attr);
4464 }
4465
4466 /* Add an unsigned double integer attribute value to a DIE. */
4467
4468 static inline void
4469 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4470 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4471 {
4472 dw_attr_node attr;
4473
4474 attr.dw_attr = attr_kind;
4475 attr.dw_attr_val.val_class = dw_val_class_const_double;
4476 attr.dw_attr_val.val_entry = NULL;
4477 attr.dw_attr_val.v.val_double.high = high;
4478 attr.dw_attr_val.v.val_double.low = low;
4479 add_dwarf_attr (die, &attr);
4480 }
4481
4482 /* Add a vector attribute value (e.g. a floating-point constant's bytes) to a DIE. */
4483
4484 static inline void
4485 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4486 unsigned int length, unsigned int elt_size, unsigned char *array)
4487 {
4488 dw_attr_node attr;
4489
4490 attr.dw_attr = attr_kind;
4491 attr.dw_attr_val.val_class = dw_val_class_vec;
4492 attr.dw_attr_val.val_entry = NULL;
4493 attr.dw_attr_val.v.val_vec.length = length;
4494 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4495 attr.dw_attr_val.v.val_vec.array = array;
4496 add_dwarf_attr (die, &attr);
4497 }
4498
4499 /* Add an 8-byte data attribute value to a DIE. */
4500
4501 static inline void
4502 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4503 unsigned char data8[8])
4504 {
4505 dw_attr_node attr;
4506
4507 attr.dw_attr = attr_kind;
4508 attr.dw_attr_val.val_class = dw_val_class_data8;
4509 attr.dw_attr_val.val_entry = NULL;
4510 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4511 add_dwarf_attr (die, &attr);
4512 }
4513
4514 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4515 dwarf_split_debug_info, address attributes in dies destined for the
4516 final executable have force_direct set to avoid using indexed
4517 references. */
4518
4519 static inline void
4520 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4521 bool force_direct)
4522 {
4523 dw_attr_node attr;
4524 char * lbl_id;
4525
4526 lbl_id = xstrdup (lbl_low);
4527 attr.dw_attr = DW_AT_low_pc;
4528 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4529 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4530 if (dwarf_split_debug_info && !force_direct)
4531 attr.dw_attr_val.val_entry
4532 = add_addr_table_entry (lbl_id, ate_kind_label);
4533 else
4534 attr.dw_attr_val.val_entry = NULL;
4535 add_dwarf_attr (die, &attr);
4536
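/* From DWARF 4 on, DW_AT_high_pc may be emitted as an offset from
   DW_AT_low_pc (dw_val_class_high_pc) rather than as an address label,
   in which case no address table entry is needed. */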
4537 attr.dw_attr = DW_AT_high_pc;
4538 if (dwarf_version < 4)
4539 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4540 else
4541 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4542 lbl_id = xstrdup (lbl_high);
4543 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4544 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4545 && dwarf_split_debug_info && !force_direct)
4546 attr.dw_attr_val.val_entry
4547 = add_addr_table_entry (lbl_id, ate_kind_label);
4548 else
4549 attr.dw_attr_val.val_entry = NULL;
4550 add_dwarf_attr (die, &attr);
4551 }
4552
4553 /* Hash and equality functions for debug_str_hash. */
4554
4555 hashval_t
4556 indirect_string_hasher::hash (indirect_string_node *x)
4557 {
4558 return htab_hash_string (x->str);
4559 }
4560
4561 bool
4562 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4563 {
4564 return strcmp (x1->str, x2) == 0;
4565 }
4566
4567 /* Add STR to the given string hash table. */
4568
4569 static struct indirect_string_node *
4570 find_AT_string_in_table (const char *str,
4571 hash_table<indirect_string_hasher> *table)
4572 {
4573 struct indirect_string_node *node;
4574
4575 indirect_string_node **slot
4576 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4577 if (*slot == NULL)
4578 {
4579 node = ggc_cleared_alloc<indirect_string_node> ();
4580 node->str = ggc_strdup (str);
4581 *slot = node;
4582 }
4583 else
4584 node = *slot;
4585
4586 node->refcount++;
4587 return node;
4588 }
4589
4590 /* Add STR to the indirect string hash table. */
4591
4592 static struct indirect_string_node *
4593 find_AT_string (const char *str)
4594 {
4595 if (! debug_str_hash)
4596 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4597
4598 return find_AT_string_in_table (str, debug_str_hash);
4599 }
4600
4601 /* Add a string attribute value to a DIE. */
4602
4603 static inline void
4604 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4605 {
4606 dw_attr_node attr;
4607 struct indirect_string_node *node;
4608
4609 node = find_AT_string (str);
4610
4611 attr.dw_attr = attr_kind;
4612 attr.dw_attr_val.val_class = dw_val_class_str;
4613 attr.dw_attr_val.val_entry = NULL;
4614 attr.dw_attr_val.v.val_str = node;
4615 add_dwarf_attr (die, &attr);
4616 }
4617
4618 static inline const char *
4619 AT_string (dw_attr_node *a)
4620 {
4621 gcc_assert (a && AT_class (a) == dw_val_class_str);
4622 return a->dw_attr_val.v.val_str->str;
4623 }
4624
4625 /* Call this function directly to bypass AT_string_form's logic to put
4626 the string inline in the die. */
4627
4628 static void
4629 set_indirect_string (struct indirect_string_node *node)
4630 {
4631 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4632 /* If the string is already indirect, this is a no-op. */
4633 if (node->form == DW_FORM_strp
4634 || node->form == DW_FORM_line_strp
4635 || node->form == DW_FORM_GNU_str_index)
4636 {
4637 gcc_assert (node->label);
4638 return;
4639 }
4640 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4641 ++dw2_string_counter;
4642 node->label = xstrdup (label);
4643
4644 if (!dwarf_split_debug_info)
4645 {
4646 node->form = DW_FORM_strp;
4647 node->index = NOT_INDEXED;
4648 }
4649 else
4650 {
4651 node->form = DW_FORM_GNU_str_index;
4652 node->index = NO_INDEX_ASSIGNED;
4653 }
4654 }
4655
4656 /* A helper function for dwarf2out_finish, called to reset indirect
4657 string decisions done for early LTO dwarf output before fat object
4658 dwarf output. */
4659
4660 int
4661 reset_indirect_string (indirect_string_node **h, void *)
4662 {
4663 struct indirect_string_node *node = *h;
4664 if (node->form == DW_FORM_strp || node->form == DW_FORM_GNU_str_index)
4665 {
4666 free (node->label);
4667 node->label = NULL;
4668 node->form = (dwarf_form) 0;
4669 node->index = 0;
4670 }
4671 return 1;
4672 }
4673
4674 /* Find out whether a string should be output inline in DIE
4675 or out-of-line in .debug_str section. */
4676
4677 static enum dwarf_form
4678 find_string_form (struct indirect_string_node *node)
4679 {
4680 unsigned int len;
4681
4682 if (node->form)
4683 return node->form;
4684
4685 len = strlen (node->str) + 1;
4686
4687 /* If the string is shorter than or equal to the size of the reference, it is
4688 always better to put it inline. */
4689 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4690 return node->form = DW_FORM_string;
4691
4692 /* If we cannot expect the linker to merge strings in .debug_str
4693 section, only put it into .debug_str if doing so pays off even within
4694 this single module. */
4695 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4696 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4697 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4698 return node->form = DW_FORM_string;
4699
4700 set_indirect_string (node);
4701
4702 return node->form;
4703 }
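/* A sketch of the arithmetic behind the last test above, assuming the
   linker cannot merge strings: an N-byte string (counting the NUL)
   referenced R times costs N + R * DWARF_OFFSET_SIZE bytes out of line
   versus R * N bytes inline, so moving it to .debug_str only pays off
   when (N - DWARF_OFFSET_SIZE) * R > N. */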
4704
4705 /* Find out whether the string referenced from the attribute should be
4706 output inline in DIE or out-of-line in .debug_str section. */
4707
4708 static enum dwarf_form
4709 AT_string_form (dw_attr_node *a)
4710 {
4711 gcc_assert (a && AT_class (a) == dw_val_class_str);
4712 return find_string_form (a->dw_attr_val.v.val_str);
4713 }
4714
4715 /* Add a DIE reference attribute value to a DIE. */
4716
4717 static inline void
4718 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4719 {
4720 dw_attr_node attr;
4721 gcc_checking_assert (targ_die != NULL);
4722
4723 /* With LTO we can end up trying to reference something we didn't create
4724 a DIE for. Avoid crashing later on a NULL referenced DIE (the checking assert above still flags it in checking builds). */
4725 if (targ_die == NULL)
4726 return;
4727
4728 attr.dw_attr = attr_kind;
4729 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4730 attr.dw_attr_val.val_entry = NULL;
4731 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4732 attr.dw_attr_val.v.val_die_ref.external = 0;
4733 add_dwarf_attr (die, &attr);
4734 }
4735
4736 /* Change DIE reference REF to point to NEW_DIE instead. */
4737
4738 static inline void
4739 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4740 {
4741 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4742 ref->dw_attr_val.v.val_die_ref.die = new_die;
4743 ref->dw_attr_val.v.val_die_ref.external = 0;
4744 }
4745
4746 /* Add an AT_specification attribute to a DIE, and also make the back
4747 pointer from the specification to the definition. */
4748
4749 static inline void
4750 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4751 {
4752 add_AT_die_ref (die, DW_AT_specification, targ_die);
4753 gcc_assert (!targ_die->die_definition);
4754 targ_die->die_definition = die;
4755 }
4756
4757 static inline dw_die_ref
4758 AT_ref (dw_attr_node *a)
4759 {
4760 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4761 return a->dw_attr_val.v.val_die_ref.die;
4762 }
4763
4764 static inline int
4765 AT_ref_external (dw_attr_node *a)
4766 {
4767 if (a && AT_class (a) == dw_val_class_die_ref)
4768 return a->dw_attr_val.v.val_die_ref.external;
4769
4770 return 0;
4771 }
4772
4773 static inline void
4774 set_AT_ref_external (dw_attr_node *a, int i)
4775 {
4776 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4777 a->dw_attr_val.v.val_die_ref.external = i;
4778 }
4779
4780 /* Add an FDE reference attribute value to a DIE. */
4781
4782 static inline void
4783 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4784 {
4785 dw_attr_node attr;
4786
4787 attr.dw_attr = attr_kind;
4788 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4789 attr.dw_attr_val.val_entry = NULL;
4790 attr.dw_attr_val.v.val_fde_index = targ_fde;
4791 add_dwarf_attr (die, &attr);
4792 }
4793
4794 /* Add a location description attribute value to a DIE. */
4795
4796 static inline void
4797 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4798 {
4799 dw_attr_node attr;
4800
4801 attr.dw_attr = attr_kind;
4802 attr.dw_attr_val.val_class = dw_val_class_loc;
4803 attr.dw_attr_val.val_entry = NULL;
4804 attr.dw_attr_val.v.val_loc = loc;
4805 add_dwarf_attr (die, &attr);
4806 }
4807
4808 static inline dw_loc_descr_ref
4809 AT_loc (dw_attr_node *a)
4810 {
4811 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4812 return a->dw_attr_val.v.val_loc;
4813 }
4814
4815 static inline void
4816 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4817 {
4818 dw_attr_node attr;
4819
4820 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4821 return;
4822
4823 attr.dw_attr = attr_kind;
4824 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4825 attr.dw_attr_val.val_entry = NULL;
4826 attr.dw_attr_val.v.val_loc_list = loc_list;
4827 add_dwarf_attr (die, &attr);
4828 have_location_lists = true;
4829 }
4830
4831 static inline dw_loc_list_ref
4832 AT_loc_list (dw_attr_node *a)
4833 {
4834 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4835 return a->dw_attr_val.v.val_loc_list;
4836 }
4837
4838 /* Add a view list attribute to DIE, which must already have a DW_AT_location
4839 attribute, because the view list complements the location list. */
4840
4841 static inline void
4842 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4843 {
4844 dw_attr_node attr;
4845
4846 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4847 return;
4848
4849 attr.dw_attr = attr_kind;
4850 attr.dw_attr_val.val_class = dw_val_class_view_list;
4851 attr.dw_attr_val.val_entry = NULL;
4852 attr.dw_attr_val.v.val_view_list = die;
4853 add_dwarf_attr (die, &attr);
4854 gcc_checking_assert (get_AT (die, DW_AT_location));
4855 gcc_assert (have_location_lists);
4856 }
4857
4858 /* Return a pointer to the location list referenced by the attribute.
4859 If the named attribute is a view list, look up the corresponding
4860 DW_AT_location attribute and return its location list. */
4861
4862 static inline dw_loc_list_ref *
4863 AT_loc_list_ptr (dw_attr_node *a)
4864 {
4865 gcc_assert (a);
4866 switch (AT_class (a))
4867 {
4868 case dw_val_class_loc_list:
4869 return &a->dw_attr_val.v.val_loc_list;
4870 case dw_val_class_view_list:
4871 {
4872 dw_attr_node *l;
4873 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4874 if (!l)
4875 return NULL;
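/* The view-list attribute is expected to have been added immediately
   after DW_AT_location (see add_AT_view_list), hence the adjacency
   check below. */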
4876 gcc_checking_assert (l + 1 == a);
4877 return AT_loc_list_ptr (l);
4878 }
4879 default:
4880 gcc_unreachable ();
4881 }
4882 }
4883
4884 /* Return the location attribute value associated with a view list
4885 attribute value. */
4886
4887 static inline dw_val_node *
4888 view_list_to_loc_list_val_node (dw_val_node *val)
4889 {
4890 gcc_assert (val->val_class == dw_val_class_view_list);
4891 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4892 if (!loc)
4893 return NULL;
4894 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4895 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4896 return &loc->dw_attr_val;
4897 }
4898
4899 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4900 {
4901 static hashval_t hash (addr_table_entry *);
4902 static bool equal (addr_table_entry *, addr_table_entry *);
4903 };
4904
4905 /* Table of entries into the .debug_addr section. */
4906
4907 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4908
4909 /* Hash an address_table_entry. */
4910
4911 hashval_t
4912 addr_hasher::hash (addr_table_entry *a)
4913 {
4914 inchash::hash hstate;
4915 switch (a->kind)
4916 {
4917 case ate_kind_rtx:
4918 hstate.add_int (0);
4919 break;
4920 case ate_kind_rtx_dtprel:
4921 hstate.add_int (1);
4922 break;
4923 case ate_kind_label:
4924 return htab_hash_string (a->addr.label);
4925 default:
4926 gcc_unreachable ();
4927 }
4928 inchash::add_rtx (a->addr.rtl, hstate);
4929 return hstate.end ();
4930 }
4931
4932 /* Determine equality for two address_table_entries. */
4933
4934 bool
4935 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4936 {
4937 if (a1->kind != a2->kind)
4938 return 0;
4939 switch (a1->kind)
4940 {
4941 case ate_kind_rtx:
4942 case ate_kind_rtx_dtprel:
4943 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4944 case ate_kind_label:
4945 return strcmp (a1->addr.label, a2->addr.label) == 0;
4946 default:
4947 gcc_unreachable ();
4948 }
4949 }
4950
4951 /* Initialize an addr_table_entry. */
4952
4953 void
4954 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4955 {
4956 e->kind = kind;
4957 switch (kind)
4958 {
4959 case ate_kind_rtx:
4960 case ate_kind_rtx_dtprel:
4961 e->addr.rtl = (rtx) addr;
4962 break;
4963 case ate_kind_label:
4964 e->addr.label = (char *) addr;
4965 break;
4966 }
4967 e->refcount = 0;
4968 e->index = NO_INDEX_ASSIGNED;
4969 }
4970
4971 /* Add an entry for ADDR of the given KIND to the address table, creating
4972 it if necessary. Defer setting an index until output time. */
4973
4974 static addr_table_entry *
4975 add_addr_table_entry (void *addr, enum ate_kind kind)
4976 {
4977 addr_table_entry *node;
4978 addr_table_entry finder;
4979
4980 gcc_assert (dwarf_split_debug_info);
4981 if (! addr_index_table)
4982 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
4983 init_addr_table_entry (&finder, kind, addr);
4984 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
4985
4986 if (*slot == HTAB_EMPTY_ENTRY)
4987 {
4988 node = ggc_cleared_alloc<addr_table_entry> ();
4989 init_addr_table_entry (node, kind, addr);
4990 *slot = node;
4991 }
4992 else
4993 node = *slot;
4994
4995 node->refcount++;
4996 return node;
4997 }
4998
4999 /* Remove an entry from the addr table by decrementing its refcount.
5000 Strictly, decrementing the refcount would be enough, but the
5001 assertion that the entry is actually in the table has found
5002 bugs. */
5003
5004 static void
5005 remove_addr_table_entry (addr_table_entry *entry)
5006 {
5007 gcc_assert (dwarf_split_debug_info && addr_index_table);
5008 /* After an index is assigned, the table is frozen. */
5009 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5010 entry->refcount--;
5011 }
5012
5013 /* Given a location list, remove all addresses it refers to from the
5014 address_table. */
5015
5016 static void
5017 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5018 {
5019 for (; descr; descr = descr->dw_loc_next)
5020 if (descr->dw_loc_oprnd1.val_entry != NULL)
5021 {
5022 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5023 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5024 }
5025 }
5026
5027 /* A helper function for dwarf2out_finish called through
5028 htab_traverse. Assign an addr_table_entry its index. All entries
5029 must be collected into the table when this function is called,
5030 because the indexing code relies on htab_traverse to traverse nodes
5031 in the same order for each run. */
5032
5033 int
5034 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5035 {
5036 addr_table_entry *node = *h;
5037
5038 /* Don't index unreferenced nodes. */
5039 if (node->refcount == 0)
5040 return 1;
5041
5042 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5043 node->index = *index;
5044 *index += 1;
5045
5046 return 1;
5047 }
5048
5049 /* Add an address constant attribute value to a DIE. When using
5050 dwarf_split_debug_info, address attributes in dies destined for the
5051 final executable should be direct references--setting the parameter
5052 force_direct ensures this behavior. */
5053
5054 static inline void
5055 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5056 bool force_direct)
5057 {
5058 dw_attr_node attr;
5059
5060 attr.dw_attr = attr_kind;
5061 attr.dw_attr_val.val_class = dw_val_class_addr;
5062 attr.dw_attr_val.v.val_addr = addr;
5063 if (dwarf_split_debug_info && !force_direct)
5064 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5065 else
5066 attr.dw_attr_val.val_entry = NULL;
5067 add_dwarf_attr (die, &attr);
5068 }
5069
5070 /* Get the RTX from an address DIE attribute. */
5071
5072 static inline rtx
5073 AT_addr (dw_attr_node *a)
5074 {
5075 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5076 return a->dw_attr_val.v.val_addr;
5077 }
5078
5079 /* Add a file attribute value to a DIE. */
5080
5081 static inline void
5082 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5083 struct dwarf_file_data *fd)
5084 {
5085 dw_attr_node attr;
5086
5087 attr.dw_attr = attr_kind;
5088 attr.dw_attr_val.val_class = dw_val_class_file;
5089 attr.dw_attr_val.val_entry = NULL;
5090 attr.dw_attr_val.v.val_file = fd;
5091 add_dwarf_attr (die, &attr);
5092 }
5093
5094 /* Get the dwarf_file_data from a file DIE attribute. */
5095
5096 static inline struct dwarf_file_data *
5097 AT_file (dw_attr_node *a)
5098 {
5099 gcc_assert (a && (AT_class (a) == dw_val_class_file
5100 || AT_class (a) == dw_val_class_file_implicit));
5101 return a->dw_attr_val.v.val_file;
5102 }
5103
5104 /* Add a vms delta attribute value to a DIE. */
5105
5106 static inline void
5107 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5108 const char *lbl1, const char *lbl2)
5109 {
5110 dw_attr_node attr;
5111
5112 attr.dw_attr = attr_kind;
5113 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5114 attr.dw_attr_val.val_entry = NULL;
5115 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5116 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5117 add_dwarf_attr (die, &attr);
5118 }
5119
5120 /* Add a label identifier attribute value to a DIE. */
5121
5122 static inline void
5123 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5124 const char *lbl_id)
5125 {
5126 dw_attr_node attr;
5127
5128 attr.dw_attr = attr_kind;
5129 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5130 attr.dw_attr_val.val_entry = NULL;
5131 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5132 if (dwarf_split_debug_info)
5133 attr.dw_attr_val.val_entry
5134 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5135 ate_kind_label);
5136 add_dwarf_attr (die, &attr);
5137 }
5138
5139 /* Add a section offset attribute value to a DIE, an offset into the
5140 debug_line section. */
5141
5142 static inline void
5143 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5144 const char *label)
5145 {
5146 dw_attr_node attr;
5147
5148 attr.dw_attr = attr_kind;
5149 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5150 attr.dw_attr_val.val_entry = NULL;
5151 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5152 add_dwarf_attr (die, &attr);
5153 }
5154
5155 /* Add a section offset attribute value to a DIE, an offset into the
5156 debug_loclists section. */
5157
5158 static inline void
5159 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5160 const char *label)
5161 {
5162 dw_attr_node attr;
5163
5164 attr.dw_attr = attr_kind;
5165 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5166 attr.dw_attr_val.val_entry = NULL;
5167 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5168 add_dwarf_attr (die, &attr);
5169 }
5170
5171 /* Add a section offset attribute value to a DIE, an offset into the
5172 debug_macinfo section. */
5173
5174 static inline void
5175 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5176 const char *label)
5177 {
5178 dw_attr_node attr;
5179
5180 attr.dw_attr = attr_kind;
5181 attr.dw_attr_val.val_class = dw_val_class_macptr;
5182 attr.dw_attr_val.val_entry = NULL;
5183 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5184 add_dwarf_attr (die, &attr);
5185 }
5186
5187 /* Add an offset attribute value to a DIE. */
5188
5189 static inline void
5190 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5191 unsigned HOST_WIDE_INT offset)
5192 {
5193 dw_attr_node attr;
5194
5195 attr.dw_attr = attr_kind;
5196 attr.dw_attr_val.val_class = dw_val_class_offset;
5197 attr.dw_attr_val.val_entry = NULL;
5198 attr.dw_attr_val.v.val_offset = offset;
5199 add_dwarf_attr (die, &attr);
5200 }
5201
5202 /* Add a range_list attribute value to a DIE. When using
5203 dwarf_split_debug_info, address attributes in dies destined for the
5204 final executable should be direct references--setting the parameter
5205 force_direct ensures this behavior. */
5206
5207 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5208 #define RELOCATED_OFFSET (NULL)
5209
5210 static void
5211 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5212 long unsigned int offset, bool force_direct)
5213 {
5214 dw_attr_node attr;
5215
5216 attr.dw_attr = attr_kind;
5217 attr.dw_attr_val.val_class = dw_val_class_range_list;
5218 /* For the range_list attribute, use val_entry to store whether the
5219 offset should follow split-debug-info or normal semantics. This
5220 value is read in output_range_list_offset. */
5221 if (dwarf_split_debug_info && !force_direct)
5222 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5223 else
5224 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5225 attr.dw_attr_val.v.val_offset = offset;
5226 add_dwarf_attr (die, &attr);
5227 }
5228
5229 /* Return the start label of a delta attribute. */
5230
5231 static inline const char *
5232 AT_vms_delta1 (dw_attr_node *a)
5233 {
5234 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5235 return a->dw_attr_val.v.val_vms_delta.lbl1;
5236 }
5237
5238 /* Return the end label of a delta attribute. */
5239
5240 static inline const char *
5241 AT_vms_delta2 (dw_attr_node *a)
5242 {
5243 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5244 return a->dw_attr_val.v.val_vms_delta.lbl2;
5245 }
5246
5247 static inline const char *
5248 AT_lbl (dw_attr_node *a)
5249 {
5250 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5251 || AT_class (a) == dw_val_class_lineptr
5252 || AT_class (a) == dw_val_class_macptr
5253 || AT_class (a) == dw_val_class_loclistsptr
5254 || AT_class (a) == dw_val_class_high_pc));
5255 return a->dw_attr_val.v.val_lbl_id;
5256 }
5257
5258 /* Get the attribute of type attr_kind. */
5259
5260 static dw_attr_node *
5261 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5262 {
5263 dw_attr_node *a;
5264 unsigned ix;
5265 dw_die_ref spec = NULL;
5266
5267 if (! die)
5268 return NULL;
5269
5270 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5271 if (a->dw_attr == attr_kind)
5272 return a;
5273 else if (a->dw_attr == DW_AT_specification
5274 || a->dw_attr == DW_AT_abstract_origin)
5275 spec = AT_ref (a);
5276
5277 if (spec)
5278 return get_AT (spec, attr_kind);
5279
5280 return NULL;
5281 }
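
/* Example, for illustration only: for a C++ member function defined
   out of line, the definition DIE usually has no DW_AT_name of its
   own but carries DW_AT_specification pointing back at the in-class
   declaration, so

     const char *name = get_AT_string (subprogram_die, DW_AT_name);

   still finds the name via the recursion through SPEC above.
   subprogram_die is a hypothetical variable used only for this
   example.  */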
5282
5283 /* Returns the parent of the declaration of DIE. */
5284
5285 static dw_die_ref
5286 get_die_parent (dw_die_ref die)
5287 {
5288 dw_die_ref t;
5289
5290 if (!die)
5291 return NULL;
5292
5293 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5294 || (t = get_AT_ref (die, DW_AT_specification)))
5295 die = t;
5296
5297 return die->die_parent;
5298 }
5299
5300 /* Return the "low pc" attribute value, typically associated with a subprogram
5301 DIE. Return null if the "low pc" attribute is either not present, or if it
5302 cannot be represented as an assembler label identifier. */
5303
5304 static inline const char *
5305 get_AT_low_pc (dw_die_ref die)
5306 {
5307 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5308
5309 return a ? AT_lbl (a) : NULL;
5310 }
5311
5312 /* Return the "high pc" attribute value, typically associated with a subprogram
5313 DIE. Return null if the "high pc" attribute is either not present, or if it
5314 cannot be represented as an assembler label identifier. */
5315
5316 static inline const char *
5317 get_AT_hi_pc (dw_die_ref die)
5318 {
5319 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5320
5321 return a ? AT_lbl (a) : NULL;
5322 }
5323
5324 /* Return the value of the string attribute designated by ATTR_KIND, or
5325 NULL if it is not present. */
5326
5327 static inline const char *
5328 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5329 {
5330 dw_attr_node *a = get_AT (die, attr_kind);
5331
5332 return a ? AT_string (a) : NULL;
5333 }
5334
5335 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5336 if it is not present. */
5337
5338 static inline int
5339 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5340 {
5341 dw_attr_node *a = get_AT (die, attr_kind);
5342
5343 return a ? AT_flag (a) : 0;
5344 }
5345
5346 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5347 if it is not present. */
5348
5349 static inline unsigned
5350 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5351 {
5352 dw_attr_node *a = get_AT (die, attr_kind);
5353
5354 return a ? AT_unsigned (a) : 0;
5355 }
5356
5357 static inline dw_die_ref
5358 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5359 {
5360 dw_attr_node *a = get_AT (die, attr_kind);
5361
5362 return a ? AT_ref (a) : NULL;
5363 }
5364
5365 static inline struct dwarf_file_data *
5366 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5367 {
5368 dw_attr_node *a = get_AT (die, attr_kind);
5369
5370 return a ? AT_file (a) : NULL;
5371 }
5372
5373 /* Return TRUE if the language is C++. */
5374
5375 static inline bool
5376 is_cxx (void)
5377 {
5378 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5379
5380 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5381 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5382 }
5383
5384 /* Return TRUE if DECL was created by the C++ frontend. */
5385
5386 static bool
5387 is_cxx (const_tree decl)
5388 {
5389 if (in_lto_p)
5390 {
5391 const_tree context = get_ultimate_context (decl);
5392 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5393 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5394 }
5395 return is_cxx ();
5396 }
5397
5398 /* Return TRUE if the language is Fortran. */
5399
5400 static inline bool
5401 is_fortran (void)
5402 {
5403 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5404
5405 return (lang == DW_LANG_Fortran77
5406 || lang == DW_LANG_Fortran90
5407 || lang == DW_LANG_Fortran95
5408 || lang == DW_LANG_Fortran03
5409 || lang == DW_LANG_Fortran08);
5410 }
5411
5412 static inline bool
5413 is_fortran (const_tree decl)
5414 {
5415 if (in_lto_p)
5416 {
5417 const_tree context = get_ultimate_context (decl);
5418 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5419 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5420 "GNU Fortran", 11) == 0
5421 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5422 "GNU F77") == 0);
5423 }
5424 return is_fortran ();
5425 }
5426
5427 /* Return TRUE if the language is Ada. */
5428
5429 static inline bool
5430 is_ada (void)
5431 {
5432 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5433
5434 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5435 }
5436
5437 /* Remove the specified attribute if present. Return TRUE if removal
5438 was successful. */
5439
5440 static bool
5441 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5442 {
5443 dw_attr_node *a;
5444 unsigned ix;
5445
5446 if (! die)
5447 return false;
5448
5449 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5450 if (a->dw_attr == attr_kind)
5451 {
5452 if (AT_class (a) == dw_val_class_str)
5453 if (a->dw_attr_val.v.val_str->refcount)
5454 a->dw_attr_val.v.val_str->refcount--;
5455
5456 /* vec::ordered_remove should help reduce the number of abbrevs
5457 that are needed. */
5458 die->die_attr->ordered_remove (ix);
5459 return true;
5460 }
5461 return false;
5462 }
5463
5464 /* Remove CHILD from its parent. PREV must have the property that
5465 PREV->DIE_SIB == CHILD. Clears CHILD's die_sib, but alters nothing else in CHILD. */
5466
5467 static void
5468 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5469 {
5470 gcc_assert (child->die_parent == prev->die_parent);
5471 gcc_assert (prev->die_sib == child);
5472 if (prev == child)
5473 {
5474 gcc_assert (child->die_parent->die_child == child);
5475 prev = NULL;
5476 }
5477 else
5478 prev->die_sib = child->die_sib;
5479 if (child->die_parent->die_child == child)
5480 child->die_parent->die_child = prev;
5481 child->die_sib = NULL;
5482 }
5483
5484 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5485 PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib, but alters nothing else in it. */
5486
5487 static void
5488 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5489 {
5490 dw_die_ref parent = old_child->die_parent;
5491
5492 gcc_assert (parent == prev->die_parent);
5493 gcc_assert (prev->die_sib == old_child);
5494
5495 new_child->die_parent = parent;
5496 if (prev == old_child)
5497 {
5498 gcc_assert (parent->die_child == old_child);
5499 new_child->die_sib = new_child;
5500 }
5501 else
5502 {
5503 prev->die_sib = new_child;
5504 new_child->die_sib = old_child->die_sib;
5505 }
5506 if (old_child->die_parent->die_child == old_child)
5507 old_child->die_parent->die_child = new_child;
5508 old_child->die_sib = NULL;
5509 }
5510
5511 /* Move all children from OLD_PARENT to NEW_PARENT. */
5512
5513 static void
5514 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5515 {
5516 dw_die_ref c;
5517 new_parent->die_child = old_parent->die_child;
5518 old_parent->die_child = NULL;
5519 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5520 }
5521
5522 /* Remove all children of DIE whose die_tag is TAG. Do nothing if no
5523 child matches TAG. */
5524
5525 static void
5526 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5527 {
5528 dw_die_ref c;
5529
5530 c = die->die_child;
5531 if (c) do {
5532 dw_die_ref prev = c;
5533 c = c->die_sib;
5534 while (c->die_tag == tag)
5535 {
5536 remove_child_with_prev (c, prev);
5537 c->die_parent = NULL;
5538 /* Might have removed every child. */
5539 if (die->die_child == NULL)
5540 return;
5541 c = prev->die_sib;
5542 }
5543 } while (c != die->die_child);
5544 }
5545
5546 /* Add a CHILD_DIE as the last child of DIE. */
5547
5548 static void
5549 add_child_die (dw_die_ref die, dw_die_ref child_die)
5550 {
5551 /* FIXME this should probably be an assert. */
5552 if (! die || ! child_die)
5553 return;
5554 gcc_assert (die != child_die);
5555
5556 child_die->die_parent = die;
5557 if (die->die_child)
5558 {
5559 child_die->die_sib = die->die_child->die_sib;
5560 die->die_child->die_sib = child_die;
5561 }
5562 else
5563 child_die->die_sib = child_die;
5564 die->die_child = child_die;
5565 }
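
/* Layout note, for illustration only: the children of a DIE form a
   circular singly-linked list through die_sib, and die_child points
   at the *last* child, so die_child->die_sib is the first one.
   Adding A, B and C in that order with add_child_die yields

     parent->die_child == C
     A->die_sib == B,  B->die_sib == C,  C->die_sib == A

   which is why FOR_EACH_CHILD starts from die_child->die_sib and
   stops once it wraps back around to die_child.  */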
5566
5567 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5568
5569 static void
5570 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5571 dw_die_ref after_die)
5572 {
5573 gcc_assert (die
5574 && child_die
5575 && after_die
5576 && die->die_child
5577 && die != child_die);
5578
5579 child_die->die_parent = die;
5580 child_die->die_sib = after_die->die_sib;
5581 after_die->die_sib = child_die;
5582 if (die->die_child == after_die)
5583 die->die_child = child_die;
5584 }
5585
5586 /* Unassociate CHILD from its parent, and make its parent be
5587 NEW_PARENT. */
5588
5589 static void
5590 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5591 {
5592 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5593 if (p->die_sib == child)
5594 {
5595 remove_child_with_prev (child, p);
5596 break;
5597 }
5598 add_child_die (new_parent, child);
5599 }
5600
5601 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5602 is the specification, to the end of PARENT's list of children.
5603 This is done by removing and re-adding it. */
5604
5605 static void
5606 splice_child_die (dw_die_ref parent, dw_die_ref child)
5607 {
5608 /* We want the declaration DIE from inside the class, not the
5609 specification DIE at toplevel. */
5610 if (child->die_parent != parent)
5611 {
5612 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5613
5614 if (tmp)
5615 child = tmp;
5616 }
5617
5618 gcc_assert (child->die_parent == parent
5619 || (child->die_parent
5620 == get_AT_ref (parent, DW_AT_specification)));
5621
5622 reparent_child (child, parent);
5623 }
5624
5625 /* Create and return a new die with TAG_VALUE as tag. */
5626
5627 static inline dw_die_ref
5628 new_die_raw (enum dwarf_tag tag_value)
5629 {
5630 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5631 die->die_tag = tag_value;
5632 return die;
5633 }
5634
5635 /* Create and return a new die with a parent of PARENT_DIE. If
5636 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5637 associated tree T must be supplied to determine parenthood
5638 later. */
5639
5640 static inline dw_die_ref
5641 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5642 {
5643 dw_die_ref die = new_die_raw (tag_value);
5644
5645 if (parent_die != NULL)
5646 add_child_die (parent_die, die);
5647 else
5648 {
5649 limbo_die_node *limbo_node;
5650
5651 /* No DIEs created after early dwarf should end up in limbo,
5652 because the limbo list should not persist past LTO
5653 streaming. */
5654 if (tag_value != DW_TAG_compile_unit
5655 /* These are allowed because they're generated while
5656 breaking out COMDAT units late. */
5657 && tag_value != DW_TAG_type_unit
5658 && tag_value != DW_TAG_skeleton_unit
5659 && !early_dwarf
5660 /* Allow nested functions to live in limbo because they will
5661 only temporarily live there, as decls_for_scope will fix
5662 them up. */
5663 && (TREE_CODE (t) != FUNCTION_DECL
5664 || !decl_function_context (t))
5665 /* Same as nested functions above but for types. Types that
5666 are local to a function will be fixed in
5667 decls_for_scope. */
5668 && (!RECORD_OR_UNION_TYPE_P (t)
5669 || !TYPE_CONTEXT (t)
5670 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5671 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5672 especially in the ltrans stage, but once we implement LTO
5673 dwarf streaming, we should remove this exception. */
5674 && !in_lto_p)
5675 {
5676 fprintf (stderr, "symbol ended up in limbo too late:");
5677 debug_generic_stmt (t);
5678 gcc_unreachable ();
5679 }
5680
5681 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5682 limbo_node->die = die;
5683 limbo_node->created_for = t;
5684 limbo_node->next = limbo_die_list;
5685 limbo_die_list = limbo_node;
5686 }
5687
5688 return die;
5689 }
5690
5691 /* Return the DIE associated with the given type specifier. */
5692
5693 static inline dw_die_ref
5694 lookup_type_die (tree type)
5695 {
5696 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5697 if (die && die->removed)
5698 {
5699 TYPE_SYMTAB_DIE (type) = NULL;
5700 return NULL;
5701 }
5702 return die;
5703 }
5704
5705 /* Given TYPE_DIE representing the type TYPE, if TYPE is an anonymous
5706 type named by a typedef and TYPE_DIE is the naming typedef's DIE, return
5707 the DIE of the anonymous type instead of the one of the naming typedef. */
5708
5709 static inline dw_die_ref
5710 strip_naming_typedef (tree type, dw_die_ref type_die)
5711 {
5712 if (type
5713 && TREE_CODE (type) == RECORD_TYPE
5714 && type_die
5715 && type_die->die_tag == DW_TAG_typedef
5716 && is_naming_typedef_decl (TYPE_NAME (type)))
5717 type_die = get_AT_ref (type_die, DW_AT_type);
5718 return type_die;
5719 }
5720
5721 /* Like lookup_type_die, but if type is an anonymous type named by a
5722 typedef[1], return the DIE of the anonymous type instead of the one of
5723 the naming typedef. This is because in gen_typedef_die, we equated
5724 the anonymous struct named by the typedef with the DIE of the naming
5725 typedef. So by default, lookup_type_die on an anonymous struct
5726 yields the DIE of the naming typedef.
5727
5728 [1]: Read the comment of is_naming_typedef_decl to learn about what
5729 a naming typedef is. */
5730
5731 static inline dw_die_ref
5732 lookup_type_die_strip_naming_typedef (tree type)
5733 {
5734 dw_die_ref die = lookup_type_die (type);
5735 return strip_naming_typedef (type, die);
5736 }
5737
5738 /* Equate a DIE to a given type specifier. */
5739
5740 static inline void
5741 equate_type_number_to_die (tree type, dw_die_ref type_die)
5742 {
5743 TYPE_SYMTAB_DIE (type) = type_die;
5744 }
5745
5746 /* Returns a hash value for X (which really is a die_struct). */
5747
5748 inline hashval_t
5749 decl_die_hasher::hash (die_node *x)
5750 {
5751 return (hashval_t) x->decl_id;
5752 }
5753
5754 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5755
5756 inline bool
5757 decl_die_hasher::equal (die_node *x, tree y)
5758 {
5759 return (x->decl_id == DECL_UID (y));
5760 }
5761
5762 /* Return the DIE associated with a given declaration. */
5763
5764 static inline dw_die_ref
5765 lookup_decl_die (tree decl)
5766 {
5767 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5768 NO_INSERT);
5769 if (!die)
5770 return NULL;
5771 if ((*die)->removed)
5772 {
5773 decl_die_table->clear_slot (die);
5774 return NULL;
5775 }
5776 return *die;
5777 }
5778
5779
5780 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5781 style reference. Return true if we found one referring to a DIE for
5782 DECL, otherwise return false. */
5783
5784 static bool
5785 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5786 unsigned HOST_WIDE_INT *off)
5787 {
5788 dw_die_ref die;
5789
5790 if (flag_wpa && !decl_die_table)
5791 return false;
5792
5793 if (TREE_CODE (decl) == BLOCK)
5794 die = BLOCK_DIE (decl);
5795 else
5796 die = lookup_decl_die (decl);
5797 if (!die)
5798 return false;
5799
5800 /* During WPA stage we currently use DIEs to store the
5801 decl <-> label + offset map. That's quite inefficient but it
5802 works for now. */
5803 if (flag_wpa)
5804 {
5805 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5806 if (!ref)
5807 {
5808 gcc_assert (die == comp_unit_die ());
5809 return false;
5810 }
5811 *off = ref->die_offset;
5812 *sym = ref->die_id.die_symbol;
5813 return true;
5814 }
5815
5816 /* Similar to get_ref_die_offset_label, but using the "correct"
5817 label. */
5818 *off = die->die_offset;
5819 while (die->die_parent)
5820 die = die->die_parent;
5821 /* For the containing CU DIE we compute a die_symbol in
5822 compute_comp_unit_symbol. */
5823 gcc_assert (die->die_tag == DW_TAG_compile_unit
5824 && die->die_id.die_symbol != NULL);
5825 *sym = die->die_id.die_symbol;
5826 return true;
5827 }
5828
5829 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5830
5831 static void
5832 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5833 const char *symbol, HOST_WIDE_INT offset)
5834 {
5835 /* Create a fake DIE that contains the reference. Don't use
5836 new_die because we don't want to end up in the limbo list. */
5837 dw_die_ref ref = new_die_raw (die->die_tag);
5838 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5839 ref->die_offset = offset;
5840 ref->with_offset = 1;
5841 add_AT_die_ref (die, attr_kind, ref);
5842 }
5843
5844 /* Create a DIE for DECL if required and add a reference to a DIE
5845 at SYMBOL + OFFSET which contains attributes dumped early. */
5846
5847 static void
5848 dwarf2out_register_external_die (tree decl, const char *sym,
5849 unsigned HOST_WIDE_INT off)
5850 {
5851 if (debug_info_level == DINFO_LEVEL_NONE)
5852 return;
5853
5854 if (flag_wpa && !decl_die_table)
5855 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5856
5857 dw_die_ref die
5858 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5859 gcc_assert (!die);
5860
5861 tree ctx;
5862 dw_die_ref parent = NULL;
5863 /* Need to look up a DIE for the decl's context - the containing
5864 function or translation unit. */
5865 if (TREE_CODE (decl) == BLOCK)
5866 {
5867 ctx = BLOCK_SUPERCONTEXT (decl);
5868 /* ??? We do not output DIEs for all scopes thus skip as
5869 many DIEs as needed. */
5870 while (TREE_CODE (ctx) == BLOCK
5871 && !BLOCK_DIE (ctx))
5872 ctx = BLOCK_SUPERCONTEXT (ctx);
5873 }
5874 else
5875 ctx = DECL_CONTEXT (decl);
5876 while (ctx && TYPE_P (ctx))
5877 ctx = TYPE_CONTEXT (ctx);
5878 if (ctx)
5879 {
5880 if (TREE_CODE (ctx) == BLOCK)
5881 parent = BLOCK_DIE (ctx);
5882 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5883 /* Keep the 1:1 association during WPA. */
5884 && !flag_wpa)
5885 /* Otherwise all late annotations go to the main CU which
5886 imports the original CUs. */
5887 parent = comp_unit_die ();
5888 else if (TREE_CODE (ctx) == FUNCTION_DECL
5889 && TREE_CODE (decl) != PARM_DECL
5890 && TREE_CODE (decl) != BLOCK)
5891 /* Leave determining the parent of function-local entities to when
5892 we process scope vars. */
5893 ;
5894 else
5895 parent = lookup_decl_die (ctx);
5896 }
5897 else
5898 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5899 Handle this case gracefully by globalizing stuff. */
5900 parent = comp_unit_die ();
5901 /* Create a DIE "stub". */
5902 switch (TREE_CODE (decl))
5903 {
5904 case TRANSLATION_UNIT_DECL:
5905 if (! flag_wpa)
5906 {
5907 die = comp_unit_die ();
5908 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5909 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5910 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5911 to create a DIE for the original CUs. */
5912 return;
5913 }
5914 /* Keep the 1:1 association during WPA. */
5915 die = new_die (DW_TAG_compile_unit, NULL, decl);
5916 break;
5917 case NAMESPACE_DECL:
5918 if (is_fortran (decl))
5919 die = new_die (DW_TAG_module, parent, decl);
5920 else
5921 die = new_die (DW_TAG_namespace, parent, decl);
5922 break;
5923 case FUNCTION_DECL:
5924 die = new_die (DW_TAG_subprogram, parent, decl);
5925 break;
5926 case VAR_DECL:
5927 die = new_die (DW_TAG_variable, parent, decl);
5928 break;
5929 case RESULT_DECL:
5930 die = new_die (DW_TAG_variable, parent, decl);
5931 break;
5932 case PARM_DECL:
5933 die = new_die (DW_TAG_formal_parameter, parent, decl);
5934 break;
5935 case CONST_DECL:
5936 die = new_die (DW_TAG_constant, parent, decl);
5937 break;
5938 case LABEL_DECL:
5939 die = new_die (DW_TAG_label, parent, decl);
5940 break;
5941 case BLOCK:
5942 die = new_die (DW_TAG_lexical_block, parent, decl);
5943 break;
5944 default:
5945 gcc_unreachable ();
5946 }
5947 if (TREE_CODE (decl) == BLOCK)
5948 BLOCK_DIE (decl) = die;
5949 else
5950 equate_decl_number_to_die (decl, die);
5951
5952 /* Add a reference to the DIE providing early debug at $sym + off. */
5953 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
5954 }
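
/* Sketch of the intended LTO flow, stated as an assumption for
   exposition rather than a definitive description: at compile time
   dwarf2out_die_ref_for_decl hands the streamer a "section symbol +
   offset" reference to the decl's early DIE; at LTRANS time
   dwarf2out_register_external_die recreates a stub DIE for the decl
   whose DW_AT_abstract_origin points back at that external location,
   so late dwarf only has to add what early dwarf could not know,
   such as addresses, ranges and variable locations.  */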
5955
5956 /* Returns a hash value for X (which really is a var_loc_list). */
5957
5958 inline hashval_t
5959 decl_loc_hasher::hash (var_loc_list *x)
5960 {
5961 return (hashval_t) x->decl_id;
5962 }
5963
5964 /* Return nonzero if decl_id of var_loc_list X is the same as
5965 UID of decl *Y. */
5966
5967 inline bool
5968 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
5969 {
5970 return (x->decl_id == DECL_UID (y));
5971 }
5972
5973 /* Return the var_loc list associated with a given declaration. */
5974
5975 static inline var_loc_list *
5976 lookup_decl_loc (const_tree decl)
5977 {
5978 if (!decl_loc_table)
5979 return NULL;
5980 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
5981 }
5982
5983 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
5984
5985 inline hashval_t
5986 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
5987 {
5988 return (hashval_t) x->decl_id;
5989 }
5990
5991 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
5992 UID of decl *Y. */
5993
5994 inline bool
5995 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
5996 {
5997 return (x->decl_id == DECL_UID (y));
5998 }
5999
6000 /* Equate a DIE to a particular declaration. */
6001
6002 static void
6003 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6004 {
6005 unsigned int decl_id = DECL_UID (decl);
6006
6007 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6008 decl_die->decl_id = decl_id;
6009 }
6010
6011 /* Return how many bits the PIECE EXPR_LIST covers. */
6012
6013 static HOST_WIDE_INT
6014 decl_piece_bitsize (rtx piece)
6015 {
6016 int ret = (int) GET_MODE (piece);
6017 if (ret)
6018 return ret;
6019 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6020 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6021 return INTVAL (XEXP (XEXP (piece, 0), 0));
6022 }
6023
6024 /* Return a pointer to the location note stored in the PIECE EXPR_LIST. */
6025
6026 static rtx *
6027 decl_piece_varloc_ptr (rtx piece)
6028 {
6029 if ((int) GET_MODE (piece))
6030 return &XEXP (piece, 0);
6031 else
6032 return &XEXP (XEXP (piece, 0), 1);
6033 }
6034
6035 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6036 NEXT is the chain of following piece nodes. */
6037
6038 static rtx_expr_list *
6039 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6040 {
6041 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6042 return alloc_EXPR_LIST (bitsize, loc_note, next);
6043 else
6044 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6045 GEN_INT (bitsize),
6046 loc_note), next);
6047 }
6048
6049 /* Return rtx that should be stored into loc field for
6050 LOC_NOTE and BITPOS/BITSIZE. */
6051
6052 static rtx
6053 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6054 HOST_WIDE_INT bitsize)
6055 {
6056 if (bitsize != -1)
6057 {
6058 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6059 if (bitpos != 0)
6060 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6061 }
6062 return loc_note;
6063 }
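
/* Worked example, for illustration only: for a fragment located at
   BITPOS == 32 with BITSIZE == 32, construct_piece_list builds

     (32-bit piece, no location) -> (32-bit piece, LOC_NOTE)

   i.e. an empty padding piece for bits [0,32) followed by the located
   piece for bits [32,64); the location-list machinery later renders
   each piece as a DW_OP_piece (or DW_OP_bit_piece) operation.  */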
6064
6065 /* This function either modifies location piece list *DEST in
6066 place (if SRC and INNER are NULL), or copies location piece list
6067 *SRC to *DEST while modifying it. Location BITPOS is modified
6068 to contain LOC_NOTE; any pieces overlapping it are removed (or,
6069 when copying, not copied) and some padding is added around it
6070 if needed. When modifying in place, DEST should point to the
6071 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
6072 copying, SRC points to the start of the whole list and INNER points
6073 to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
6074
6075 static void
6076 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6077 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6078 HOST_WIDE_INT bitsize, rtx loc_note)
6079 {
6080 HOST_WIDE_INT diff;
6081 bool copy = inner != NULL;
6082
6083 if (copy)
6084 {
6085 /* First copy all nodes preceding the current bitpos. */
6086 while (src != inner)
6087 {
6088 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6089 decl_piece_bitsize (*src), NULL_RTX);
6090 dest = &XEXP (*dest, 1);
6091 src = &XEXP (*src, 1);
6092 }
6093 }
6094 /* Add padding if needed. */
6095 if (bitpos != piece_bitpos)
6096 {
6097 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6098 copy ? NULL_RTX : *dest);
6099 dest = &XEXP (*dest, 1);
6100 }
6101 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6102 {
6103 gcc_assert (!copy);
6104 /* A piece with the correct bitpos and bitsize already exists;
6105 just update the location for it and return. */
6106 *decl_piece_varloc_ptr (*dest) = loc_note;
6107 return;
6108 }
6109 /* Add the piece that changed. */
6110 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6111 dest = &XEXP (*dest, 1);
6112 /* Skip over pieces that overlap it. */
6113 diff = bitpos - piece_bitpos + bitsize;
6114 if (!copy)
6115 src = dest;
6116 while (diff > 0 && *src)
6117 {
6118 rtx piece = *src;
6119 diff -= decl_piece_bitsize (piece);
6120 if (copy)
6121 src = &XEXP (piece, 1);
6122 else
6123 {
6124 *src = XEXP (piece, 1);
6125 free_EXPR_LIST_node (piece);
6126 }
6127 }
6128 /* Add padding if needed. */
6129 if (diff < 0 && *src)
6130 {
6131 if (!copy)
6132 dest = src;
6133 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6134 dest = &XEXP (*dest, 1);
6135 }
6136 if (!copy)
6137 return;
6138 /* Finally copy all nodes following it. */
6139 while (*src)
6140 {
6141 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6142 decl_piece_bitsize (*src), NULL_RTX);
6143 dest = &XEXP (*dest, 1);
6144 src = &XEXP (*src, 1);
6145 }
6146 }
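
/* Worked example, for illustration only, of the in-place case (SRC and
   INNER NULL): suppose the existing list has pieces [0,16), [16,48)
   and [48,64), and a new note for bits [16,32) arrives with DEST
   pointing at the [16,48) node and PIECE_BITPOS == 16.  The sizes
   differ, so the old 32-bit piece is dropped and the new 16-bit piece
   linked in; because the removed piece extended 16 bits past the new
   one, a 16-bit padding piece with no location is re-created for
   [32,48).  The list becomes [0,16), [16,32) with LOC_NOTE, [32,48)
   padding, [48,64) unchanged.  */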
6147
6148 /* Add a variable location node to the linked list for DECL. */
6149
6150 static struct var_loc_node *
6151 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6152 {
6153 unsigned int decl_id;
6154 var_loc_list *temp;
6155 struct var_loc_node *loc = NULL;
6156 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6157
6158 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6159 {
6160 tree realdecl = DECL_DEBUG_EXPR (decl);
6161 if (handled_component_p (realdecl)
6162 || (TREE_CODE (realdecl) == MEM_REF
6163 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6164 {
6165 bool reverse;
6166 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6167 &bitsize, &reverse);
6168 if (!innerdecl
6169 || !DECL_P (innerdecl)
6170 || DECL_IGNORED_P (innerdecl)
6171 || TREE_STATIC (innerdecl)
6172 || bitsize == 0
6173 || bitpos + bitsize > 256)
6174 return NULL;
6175 decl = innerdecl;
6176 }
6177 }
6178
6179 decl_id = DECL_UID (decl);
6180 var_loc_list **slot
6181 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6182 if (*slot == NULL)
6183 {
6184 temp = ggc_cleared_alloc<var_loc_list> ();
6185 temp->decl_id = decl_id;
6186 *slot = temp;
6187 }
6188 else
6189 temp = *slot;
6190
6191 /* For PARM_DECLs try to keep around the original incoming value,
6192 even if that means we'll emit a zero-range .debug_loc entry. */
6193 if (temp->last
6194 && temp->first == temp->last
6195 && TREE_CODE (decl) == PARM_DECL
6196 && NOTE_P (temp->first->loc)
6197 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6198 && DECL_INCOMING_RTL (decl)
6199 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6200 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6201 == GET_CODE (DECL_INCOMING_RTL (decl))
6202 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6203 && (bitsize != -1
6204 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6205 NOTE_VAR_LOCATION_LOC (loc_note))
6206 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6207 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6208 {
6209 loc = ggc_cleared_alloc<var_loc_node> ();
6210 temp->first->next = loc;
6211 temp->last = loc;
6212 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6213 }
6214 else if (temp->last)
6215 {
6216 struct var_loc_node *last = temp->last, *unused = NULL;
6217 rtx *piece_loc = NULL, last_loc_note;
6218 HOST_WIDE_INT piece_bitpos = 0;
6219 if (last->next)
6220 {
6221 last = last->next;
6222 gcc_assert (last->next == NULL);
6223 }
6224 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6225 {
6226 piece_loc = &last->loc;
6227 do
6228 {
6229 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6230 if (piece_bitpos + cur_bitsize > bitpos)
6231 break;
6232 piece_bitpos += cur_bitsize;
6233 piece_loc = &XEXP (*piece_loc, 1);
6234 }
6235 while (*piece_loc);
6236 }
6237 /* TEMP->LAST here is a pointer either to the last-but-one or to
6238 the last element in the chained list; LAST is a pointer to the
6239 last element. */
6240 if (label && strcmp (last->label, label) == 0 && last->view == view)
6241 {
6242 /* For SRA-optimized variables, if there weren't any real
6243 insns since the last note, just modify the last node. */
6244 if (piece_loc != NULL)
6245 {
6246 adjust_piece_list (piece_loc, NULL, NULL,
6247 bitpos, piece_bitpos, bitsize, loc_note);
6248 return NULL;
6249 }
6250 /* If the last note doesn't cover any instructions, remove it. */
6251 if (temp->last != last)
6252 {
6253 temp->last->next = NULL;
6254 unused = last;
6255 last = temp->last;
6256 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6257 }
6258 else
6259 {
6260 gcc_assert (temp->first == temp->last
6261 || (temp->first->next == temp->last
6262 && TREE_CODE (decl) == PARM_DECL));
6263 memset (temp->last, '\0', sizeof (*temp->last));
6264 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6265 return temp->last;
6266 }
6267 }
6268 if (bitsize == -1 && NOTE_P (last->loc))
6269 last_loc_note = last->loc;
6270 else if (piece_loc != NULL
6271 && *piece_loc != NULL_RTX
6272 && piece_bitpos == bitpos
6273 && decl_piece_bitsize (*piece_loc) == bitsize)
6274 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6275 else
6276 last_loc_note = NULL_RTX;
6277 /* If the current location is the same as the end of the list,
6278 and either both or neither of the locations is uninitialized,
6279 we have nothing to do. */
6280 if (last_loc_note == NULL_RTX
6281 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6282 NOTE_VAR_LOCATION_LOC (loc_note)))
6283 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6284 != NOTE_VAR_LOCATION_STATUS (loc_note))
6285 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6286 == VAR_INIT_STATUS_UNINITIALIZED)
6287 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6288 == VAR_INIT_STATUS_UNINITIALIZED))))
6289 {
6290 /* Add LOC to the end of list and update LAST. If the last
6291 element of the list has been removed above, reuse its
6292 memory for the new node, otherwise allocate a new one. */
6293 if (unused)
6294 {
6295 loc = unused;
6296 memset (loc, '\0', sizeof (*loc));
6297 }
6298 else
6299 loc = ggc_cleared_alloc<var_loc_node> ();
6300 if (bitsize == -1 || piece_loc == NULL)
6301 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6302 else
6303 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6304 bitpos, piece_bitpos, bitsize, loc_note);
6305 last->next = loc;
6306 /* Ensure TEMP->LAST will point either to the new last but one
6307 element of the chain, or to the last element in it. */
6308 if (last != temp->last)
6309 temp->last = last;
6310 }
6311 else if (unused)
6312 ggc_free (unused);
6313 }
6314 else
6315 {
6316 loc = ggc_cleared_alloc<var_loc_node> ();
6317 temp->first = loc;
6318 temp->last = loc;
6319 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6320 }
6321 return loc;
6322 }
6323 \f
6324 /* Keep track of the number of spaces used to indent the
6325 output of the debugging routines that print the structure of
6326 the DIE internal representation. */
6327 static int print_indent;
6328
6329 /* Indent the line the number of spaces given by print_indent. */
6330
6331 static inline void
6332 print_spaces (FILE *outfile)
6333 {
6334 fprintf (outfile, "%*s", print_indent, "");
6335 }
6336
6337 /* Print a type signature in hex. */
6338
6339 static inline void
6340 print_signature (FILE *outfile, char *sig)
6341 {
6342 int i;
6343
6344 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6345 fprintf (outfile, "%02x", sig[i] & 0xff);
6346 }
6347
6348 static inline void
6349 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6350 {
6351 if (discr_value->pos)
6352 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6353 else
6354 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6355 }
6356
6357 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6358
6359 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6360 RECURSE, output location descriptor operations. */
6361
6362 static void
6363 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6364 {
6365 switch (val->val_class)
6366 {
6367 case dw_val_class_addr:
6368 fprintf (outfile, "address");
6369 break;
6370 case dw_val_class_offset:
6371 fprintf (outfile, "offset");
6372 break;
6373 case dw_val_class_loc:
6374 fprintf (outfile, "location descriptor");
6375 if (val->v.val_loc == NULL)
6376 fprintf (outfile, " -> <null>\n");
6377 else if (recurse)
6378 {
6379 fprintf (outfile, ":\n");
6380 print_indent += 4;
6381 print_loc_descr (val->v.val_loc, outfile);
6382 print_indent -= 4;
6383 }
6384 else
6385 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6386 break;
6387 case dw_val_class_loc_list:
6388 fprintf (outfile, "location list -> label:%s",
6389 val->v.val_loc_list->ll_symbol);
6390 break;
6391 case dw_val_class_view_list:
6392 val = view_list_to_loc_list_val_node (val);
6393 fprintf (outfile, "location list with views -> labels:%s and %s",
6394 val->v.val_loc_list->ll_symbol,
6395 val->v.val_loc_list->vl_symbol);
6396 break;
6397 case dw_val_class_range_list:
6398 fprintf (outfile, "range list");
6399 break;
6400 case dw_val_class_const:
6401 case dw_val_class_const_implicit:
6402 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6403 break;
6404 case dw_val_class_unsigned_const:
6405 case dw_val_class_unsigned_const_implicit:
6406 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6407 break;
6408 case dw_val_class_const_double:
6409 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6410 HOST_WIDE_INT_PRINT_UNSIGNED")",
6411 val->v.val_double.high,
6412 val->v.val_double.low);
6413 break;
6414 case dw_val_class_wide_int:
6415 {
6416 int i = val->v.val_wide->get_len ();
6417 fprintf (outfile, "constant (");
6418 gcc_assert (i > 0);
6419 if (val->v.val_wide->elt (i - 1) == 0)
6420 fprintf (outfile, "0x");
6421 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6422 val->v.val_wide->elt (--i));
6423 while (--i >= 0)
6424 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6425 val->v.val_wide->elt (i));
6426 fprintf (outfile, ")");
6427 break;
6428 }
6429 case dw_val_class_vec:
6430 fprintf (outfile, "floating-point or vector constant");
6431 break;
6432 case dw_val_class_flag:
6433 fprintf (outfile, "%u", val->v.val_flag);
6434 break;
6435 case dw_val_class_die_ref:
6436 if (val->v.val_die_ref.die != NULL)
6437 {
6438 dw_die_ref die = val->v.val_die_ref.die;
6439
6440 if (die->comdat_type_p)
6441 {
6442 fprintf (outfile, "die -> signature: ");
6443 print_signature (outfile,
6444 die->die_id.die_type_node->signature);
6445 }
6446 else if (die->die_id.die_symbol)
6447 {
6448 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6449 if (die->with_offset)
6450 fprintf (outfile, " + %ld", die->die_offset);
6451 }
6452 else
6453 fprintf (outfile, "die -> %ld", die->die_offset);
6454 fprintf (outfile, " (%p)", (void *) die);
6455 }
6456 else
6457 fprintf (outfile, "die -> <null>");
6458 break;
6459 case dw_val_class_vms_delta:
6460 fprintf (outfile, "delta: @slotcount(%s-%s)",
6461 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6462 break;
6463 case dw_val_class_lbl_id:
6464 case dw_val_class_lineptr:
6465 case dw_val_class_macptr:
6466 case dw_val_class_loclistsptr:
6467 case dw_val_class_high_pc:
6468 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6469 break;
6470 case dw_val_class_str:
6471 if (val->v.val_str->str != NULL)
6472 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6473 else
6474 fprintf (outfile, "<null>");
6475 break;
6476 case dw_val_class_file:
6477 case dw_val_class_file_implicit:
6478 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6479 val->v.val_file->emitted_number);
6480 break;
6481 case dw_val_class_data8:
6482 {
6483 int i;
6484
6485 for (i = 0; i < 8; i++)
6486 fprintf (outfile, "%02x", val->v.val_data8[i]);
6487 break;
6488 }
6489 case dw_val_class_discr_value:
6490 print_discr_value (outfile, &val->v.val_discr_value);
6491 break;
6492 case dw_val_class_discr_list:
6493 for (dw_discr_list_ref node = val->v.val_discr_list;
6494 node != NULL;
6495 node = node->dw_discr_next)
6496 {
6497 if (node->dw_discr_range)
6498 {
6499 print_discr_value (outfile, &node->dw_discr_lower_bound);
6500 fprintf (outfile, " .. ");
6501 print_discr_value (outfile, &node->dw_discr_upper_bound);
6502 }
6503 else
6504 print_discr_value (outfile, &node->dw_discr_lower_bound);
6505
6506 if (node->dw_discr_next != NULL)
6507 fprintf (outfile, " | ");
6508 }
6509 default:
6510 break;
6511 }
6512 }
6513
6514 /* Likewise, for a DIE attribute. */
6515
6516 static void
6517 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6518 {
6519 print_dw_val (&a->dw_attr_val, recurse, outfile);
6520 }
6521
6522
6523 /* Print the list of operands in the LOC location description to OUTFILE. This
6524 routine is a debugging aid only. */
6525
6526 static void
6527 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6528 {
6529 dw_loc_descr_ref l = loc;
6530
6531 if (loc == NULL)
6532 {
6533 print_spaces (outfile);
6534 fprintf (outfile, "<null>\n");
6535 return;
6536 }
6537
6538 for (l = loc; l != NULL; l = l->dw_loc_next)
6539 {
6540 print_spaces (outfile);
6541 fprintf (outfile, "(%p) %s",
6542 (void *) l,
6543 dwarf_stack_op_name (l->dw_loc_opc));
6544 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6545 {
6546 fprintf (outfile, " ");
6547 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6548 }
6549 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6550 {
6551 fprintf (outfile, ", ");
6552 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6553 }
6554 fprintf (outfile, "\n");
6555 }
6556 }
6557
6558 /* Print the information associated with a given DIE, and its children.
6559 This routine is a debugging aid only. */
6560
6561 static void
6562 print_die (dw_die_ref die, FILE *outfile)
6563 {
6564 dw_attr_node *a;
6565 dw_die_ref c;
6566 unsigned ix;
6567
6568 print_spaces (outfile);
6569 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6570 die->die_offset, dwarf_tag_name (die->die_tag),
6571 (void*) die);
6572 print_spaces (outfile);
6573 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6574 fprintf (outfile, " offset: %ld", die->die_offset);
6575 fprintf (outfile, " mark: %d\n", die->die_mark);
6576
6577 if (die->comdat_type_p)
6578 {
6579 print_spaces (outfile);
6580 fprintf (outfile, " signature: ");
6581 print_signature (outfile, die->die_id.die_type_node->signature);
6582 fprintf (outfile, "\n");
6583 }
6584
6585 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6586 {
6587 print_spaces (outfile);
6588 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6589
6590 print_attribute (a, true, outfile);
6591 fprintf (outfile, "\n");
6592 }
6593
6594 if (die->die_child != NULL)
6595 {
6596 print_indent += 4;
6597 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6598 print_indent -= 4;
6599 }
6600 if (print_indent == 0)
6601 fprintf (outfile, "\n");
6602 }
6603
6604 /* Print the list of operations in the LOC location description. */
6605
6606 DEBUG_FUNCTION void
6607 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6608 {
6609 print_loc_descr (loc, stderr);
6610 }
6611
6612 /* Print the information collected for a given DIE. */
6613
6614 DEBUG_FUNCTION void
6615 debug_dwarf_die (dw_die_ref die)
6616 {
6617 print_die (die, stderr);
6618 }
6619
6620 DEBUG_FUNCTION void
6621 debug (die_struct &ref)
6622 {
6623 print_die (&ref, stderr);
6624 }
6625
6626 DEBUG_FUNCTION void
6627 debug (die_struct *ptr)
6628 {
6629 if (ptr)
6630 debug (*ptr);
6631 else
6632 fprintf (stderr, "<nil>\n");
6633 }
6634
6635
6636 /* Print all DWARF information collected for the compilation unit.
6637 This routine is a debugging aid only. */
6638
6639 DEBUG_FUNCTION void
6640 debug_dwarf (void)
6641 {
6642 print_indent = 0;
6643 print_die (comp_unit_die (), stderr);
6644 }
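
/* Usage note, for illustration only: the DEBUG_FUNCTION hooks above
   are meant to be called by hand from a debugger while debugging the
   compiler itself, e.g.

     (gdb) call debug_dwarf_die (die)
     (gdb) call debug_dwarf ()

   both of which print to stderr via print_die.  */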
6645
6646 /* Verify the DIE tree structure. */
6647
6648 DEBUG_FUNCTION void
6649 verify_die (dw_die_ref die)
6650 {
6651 gcc_assert (!die->die_mark);
6652 if (die->die_parent == NULL
6653 && die->die_sib == NULL)
6654 return;
6655 /* Verify the die_sib list is cyclic. */
6656 dw_die_ref x = die;
6657 do
6658 {
6659 x->die_mark = 1;
6660 x = x->die_sib;
6661 }
6662 while (x && !x->die_mark);
6663 gcc_assert (x == die);
6664 x = die;
6665 do
6666 {
6667 /* Verify all dies have the same parent. */
6668 gcc_assert (x->die_parent == die->die_parent);
6669 if (x->die_child)
6670 {
6671 /* Verify the child has the proper parent and recurse. */
6672 gcc_assert (x->die_child->die_parent == x);
6673 verify_die (x->die_child);
6674 }
6675 x->die_mark = 0;
6676 x = x->die_sib;
6677 }
6678 while (x && x->die_mark);
6679 }
6680
6681 /* Sanity checks on DIEs. */
6682
6683 static void
6684 check_die (dw_die_ref die)
6685 {
6686 unsigned ix;
6687 dw_attr_node *a;
6688 bool inline_found = false;
6689 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6690 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6691 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6692 {
6693 switch (a->dw_attr)
6694 {
6695 case DW_AT_inline:
6696 if (a->dw_attr_val.v.val_unsigned)
6697 inline_found = true;
6698 break;
6699 case DW_AT_location:
6700 ++n_location;
6701 break;
6702 case DW_AT_low_pc:
6703 ++n_low_pc;
6704 break;
6705 case DW_AT_high_pc:
6706 ++n_high_pc;
6707 break;
6708 case DW_AT_artificial:
6709 ++n_artificial;
6710 break;
6711 case DW_AT_decl_column:
6712 ++n_decl_column;
6713 break;
6714 case DW_AT_decl_line:
6715 ++n_decl_line;
6716 break;
6717 case DW_AT_decl_file:
6718 ++n_decl_file;
6719 break;
6720 default:
6721 break;
6722 }
6723 }
6724 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6725 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6726 {
6727 fprintf (stderr, "Duplicate attributes in DIE:\n");
6728 debug_dwarf_die (die);
6729 gcc_unreachable ();
6730 }
6731 if (inline_found)
6732 {
6733 /* A debugging information entry that is a member of an abstract
6734 instance tree [that has DW_AT_inline] should not contain any
6735 attributes which describe aspects of the subroutine which vary
6736 between distinct inlined expansions or distinct out-of-line
6737 expansions. */
6738 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6739 gcc_assert (a->dw_attr != DW_AT_low_pc
6740 && a->dw_attr != DW_AT_high_pc
6741 && a->dw_attr != DW_AT_location
6742 && a->dw_attr != DW_AT_frame_base
6743 && a->dw_attr != DW_AT_call_all_calls
6744 && a->dw_attr != DW_AT_GNU_all_call_sites);
6745 }
6746 }
6747 \f
6748 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6749 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6750 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6751
6752 /* Calculate the checksum of a location expression. */
6753
6754 static inline void
6755 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6756 {
6757 int tem;
6758 inchash::hash hstate;
6759 hashval_t hash;
6760
6761 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6762 CHECKSUM (tem);
6763 hash_loc_operands (loc, hstate);
6764 hash = hstate.end();
6765 CHECKSUM (hash);
6766 }
6767
6768 /* Calculate the checksum of an attribute. */
6769
6770 static void
6771 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6772 {
6773 dw_loc_descr_ref loc;
6774 rtx r;
6775
6776 CHECKSUM (at->dw_attr);
6777
6778 /* We don't care that this was compiled with a different compiler
6779 snapshot; if the output is the same, that's what matters. */
6780 if (at->dw_attr == DW_AT_producer)
6781 return;
6782
6783 switch (AT_class (at))
6784 {
6785 case dw_val_class_const:
6786 case dw_val_class_const_implicit:
6787 CHECKSUM (at->dw_attr_val.v.val_int);
6788 break;
6789 case dw_val_class_unsigned_const:
6790 case dw_val_class_unsigned_const_implicit:
6791 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6792 break;
6793 case dw_val_class_const_double:
6794 CHECKSUM (at->dw_attr_val.v.val_double);
6795 break;
6796 case dw_val_class_wide_int:
6797 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6798 get_full_len (*at->dw_attr_val.v.val_wide)
6799 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6800 break;
6801 case dw_val_class_vec:
6802 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6803 (at->dw_attr_val.v.val_vec.length
6804 * at->dw_attr_val.v.val_vec.elt_size));
6805 break;
6806 case dw_val_class_flag:
6807 CHECKSUM (at->dw_attr_val.v.val_flag);
6808 break;
6809 case dw_val_class_str:
6810 CHECKSUM_STRING (AT_string (at));
6811 break;
6812
6813 case dw_val_class_addr:
6814 r = AT_addr (at);
6815 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6816 CHECKSUM_STRING (XSTR (r, 0));
6817 break;
6818
6819 case dw_val_class_offset:
6820 CHECKSUM (at->dw_attr_val.v.val_offset);
6821 break;
6822
6823 case dw_val_class_loc:
6824 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6825 loc_checksum (loc, ctx);
6826 break;
6827
6828 case dw_val_class_die_ref:
6829 die_checksum (AT_ref (at), ctx, mark);
6830 break;
6831
6832 case dw_val_class_fde_ref:
6833 case dw_val_class_vms_delta:
6834 case dw_val_class_lbl_id:
6835 case dw_val_class_lineptr:
6836 case dw_val_class_macptr:
6837 case dw_val_class_loclistsptr:
6838 case dw_val_class_high_pc:
6839 break;
6840
6841 case dw_val_class_file:
6842 case dw_val_class_file_implicit:
6843 CHECKSUM_STRING (AT_file (at)->filename);
6844 break;
6845
6846 case dw_val_class_data8:
6847 CHECKSUM (at->dw_attr_val.v.val_data8);
6848 break;
6849
6850 default:
6851 break;
6852 }
6853 }
6854
6855 /* Calculate the checksum of a DIE. */
6856
6857 static void
6858 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6859 {
6860 dw_die_ref c;
6861 dw_attr_node *a;
6862 unsigned ix;
6863
6864 /* To avoid infinite recursion. */
6865 if (die->die_mark)
6866 {
6867 CHECKSUM (die->die_mark);
6868 return;
6869 }
6870 die->die_mark = ++(*mark);
6871
6872 CHECKSUM (die->die_tag);
6873
6874 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6875 attr_checksum (a, ctx, mark);
6876
6877 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6878 }
6879
6880 #undef CHECKSUM
6881 #undef CHECKSUM_BLOCK
6882 #undef CHECKSUM_STRING
6883
6884 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6885 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6886 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6887 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6888 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6889 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6890 #define CHECKSUM_ATTR(FOO) \
6891 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6892
6893 /* Calculate the checksum of a number in signed LEB128 format. */
6894
6895 static void
6896 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6897 {
6898 unsigned char byte;
6899 bool more;
6900
6901 while (1)
6902 {
6903 byte = (value & 0x7f);
6904 value >>= 7;
6905 more = !((value == 0 && (byte & 0x40) == 0)
6906 || (value == -1 && (byte & 0x40) != 0));
6907 if (more)
6908 byte |= 0x80;
6909 CHECKSUM (byte);
6910 if (!more)
6911 break;
6912 }
6913 }
6914
6915 /* Calculate the checksum of a number in unsigned LEB128 format. */
6916
6917 static void
6918 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6919 {
6920 while (1)
6921 {
6922 unsigned char byte = (value & 0x7f);
6923 value >>= 7;
6924 if (value != 0)
6925 /* More bytes to follow. */
6926 byte |= 0x80;
6927 CHECKSUM (byte);
6928 if (value == 0)
6929 break;
6930 }
6931 }
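
/* Worked example, for illustration only: the value 129 is checksummed
   as the ULEB128 bytes 0x81 0x01 (low seven bits 0000001 with the
   continuation bit set, then 0000001), while -2 is checksummed as the
   single SLEB128 byte 0x7e, because after one seven-bit group the
   remaining value is -1 and bit 6 of 0x7e is already set, so no
   continuation byte is needed.  */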
6932
6933 /* Checksum the context of the DIE. This adds the names of any
6934 surrounding namespaces or structures to the checksum. */
6935
6936 static void
6937 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
6938 {
6939 const char *name;
6940 dw_die_ref spec;
6941 int tag = die->die_tag;
6942
6943 if (tag != DW_TAG_namespace
6944 && tag != DW_TAG_structure_type
6945 && tag != DW_TAG_class_type)
6946 return;
6947
6948 name = get_AT_string (die, DW_AT_name);
6949
6950 spec = get_AT_ref (die, DW_AT_specification);
6951 if (spec != NULL)
6952 die = spec;
6953
6954 if (die->die_parent != NULL)
6955 checksum_die_context (die->die_parent, ctx);
6956
6957 CHECKSUM_ULEB128 ('C');
6958 CHECKSUM_ULEB128 (tag);
6959 if (name != NULL)
6960 CHECKSUM_STRING (name);
6961 }
6962
6963 /* Calculate the checksum of a location expression. */
6964
6965 static inline void
6966 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6967 {
6968 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
6969 were emitted as a DW_FORM_sdata instead of a location expression. */
6970 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
6971 {
6972 CHECKSUM_ULEB128 (DW_FORM_sdata);
6973 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
6974 return;
6975 }
6976
6977 /* Otherwise, just checksum the raw location expression. */
6978 while (loc != NULL)
6979 {
6980 inchash::hash hstate;
6981 hashval_t hash;
6982
6983 CHECKSUM_ULEB128 (loc->dtprel);
6984 CHECKSUM_ULEB128 (loc->dw_loc_opc);
6985 hash_loc_operands (loc, hstate);
6986 hash = hstate.end ();
6987 CHECKSUM (hash);
6988 loc = loc->dw_loc_next;
6989 }
6990 }
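
/* Note (illustration only): the special case above makes a member location
   written as the expression "DW_OP_plus_uconst 8" and one written as the
   plain constant 8 contribute the same bytes, DW_FORM_sdata followed by
   sleb128 (8), so either representation of e.g. DW_AT_data_member_location
   produces the same type signature, mirroring the DWARF 4 type-signature
   computation rules.  */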
6991
6992 /* Calculate the checksum of an attribute. */
6993
6994 static void
6995 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
6996 struct md5_ctx *ctx, int *mark)
6997 {
6998 dw_loc_descr_ref loc;
6999 rtx r;
7000
7001 if (AT_class (at) == dw_val_class_die_ref)
7002 {
7003 dw_die_ref target_die = AT_ref (at);
7004
7005 /* For pointer and reference types, we checksum only the (qualified)
7006 name of the target type (if there is a name). For friend entries,
7007 we checksum only the (qualified) name of the target type or function.
7008 This allows the checksum to remain the same whether the target type
7009 is complete or not. */
7010 if ((at->dw_attr == DW_AT_type
7011 && (tag == DW_TAG_pointer_type
7012 || tag == DW_TAG_reference_type
7013 || tag == DW_TAG_rvalue_reference_type
7014 || tag == DW_TAG_ptr_to_member_type))
7015 || (at->dw_attr == DW_AT_friend
7016 && tag == DW_TAG_friend))
7017 {
7018 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7019
7020 if (name_attr != NULL)
7021 {
7022 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7023
7024 if (decl == NULL)
7025 decl = target_die;
7026 CHECKSUM_ULEB128 ('N');
7027 CHECKSUM_ULEB128 (at->dw_attr);
7028 if (decl->die_parent != NULL)
7029 checksum_die_context (decl->die_parent, ctx);
7030 CHECKSUM_ULEB128 ('E');
7031 CHECKSUM_STRING (AT_string (name_attr));
7032 return;
7033 }
7034 }
7035
7036 /* For all other references to another DIE, we check to see if the
7037 target DIE has already been visited. If it has, we emit a
7038 backward reference; if not, we descend recursively. */
7039 if (target_die->die_mark > 0)
7040 {
7041 CHECKSUM_ULEB128 ('R');
7042 CHECKSUM_ULEB128 (at->dw_attr);
7043 CHECKSUM_ULEB128 (target_die->die_mark);
7044 }
7045 else
7046 {
7047 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7048
7049 if (decl == NULL)
7050 decl = target_die;
7051 target_die->die_mark = ++(*mark);
7052 CHECKSUM_ULEB128 ('T');
7053 CHECKSUM_ULEB128 (at->dw_attr);
7054 if (decl->die_parent != NULL)
7055 checksum_die_context (decl->die_parent, ctx);
7056 die_checksum_ordered (target_die, ctx, mark);
7057 }
7058 return;
7059 }
7060
7061 CHECKSUM_ULEB128 ('A');
7062 CHECKSUM_ULEB128 (at->dw_attr);
7063
7064 switch (AT_class (at))
7065 {
7066 case dw_val_class_const:
7067 case dw_val_class_const_implicit:
7068 CHECKSUM_ULEB128 (DW_FORM_sdata);
7069 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7070 break;
7071
7072 case dw_val_class_unsigned_const:
7073 case dw_val_class_unsigned_const_implicit:
7074 CHECKSUM_ULEB128 (DW_FORM_sdata);
7075 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7076 break;
7077
7078 case dw_val_class_const_double:
7079 CHECKSUM_ULEB128 (DW_FORM_block);
7080 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7081 CHECKSUM (at->dw_attr_val.v.val_double);
7082 break;
7083
7084 case dw_val_class_wide_int:
7085 CHECKSUM_ULEB128 (DW_FORM_block);
7086 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7087 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7088 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7089 get_full_len (*at->dw_attr_val.v.val_wide)
7090 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7091 break;
7092
7093 case dw_val_class_vec:
7094 CHECKSUM_ULEB128 (DW_FORM_block);
7095 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7096 * at->dw_attr_val.v.val_vec.elt_size);
7097 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7098 (at->dw_attr_val.v.val_vec.length
7099 * at->dw_attr_val.v.val_vec.elt_size));
7100 break;
7101
7102 case dw_val_class_flag:
7103 CHECKSUM_ULEB128 (DW_FORM_flag);
7104 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7105 break;
7106
7107 case dw_val_class_str:
7108 CHECKSUM_ULEB128 (DW_FORM_string);
7109 CHECKSUM_STRING (AT_string (at));
7110 break;
7111
7112 case dw_val_class_addr:
7113 r = AT_addr (at);
7114 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7115 CHECKSUM_ULEB128 (DW_FORM_string);
7116 CHECKSUM_STRING (XSTR (r, 0));
7117 break;
7118
7119 case dw_val_class_offset:
7120 CHECKSUM_ULEB128 (DW_FORM_sdata);
7121 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7122 break;
7123
7124 case dw_val_class_loc:
7125 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7126 loc_checksum_ordered (loc, ctx);
7127 break;
7128
7129 case dw_val_class_fde_ref:
7130 case dw_val_class_lbl_id:
7131 case dw_val_class_lineptr:
7132 case dw_val_class_macptr:
7133 case dw_val_class_loclistsptr:
7134 case dw_val_class_high_pc:
7135 break;
7136
7137 case dw_val_class_file:
7138 case dw_val_class_file_implicit:
7139 CHECKSUM_ULEB128 (DW_FORM_string);
7140 CHECKSUM_STRING (AT_file (at)->filename);
7141 break;
7142
7143 case dw_val_class_data8:
7144 CHECKSUM (at->dw_attr_val.v.val_data8);
7145 break;
7146
7147 default:
7148 break;
7149 }
7150 }
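
/* Illustration only (attribute codes and mark values hypothetical) of the
   three reference encodings used above:

     DW_AT_type on a DW_TAG_pointer_type whose target is a named type "Foo"
     in namespace "N" contributes
       'N', DW_AT_type, 'C', DW_TAG_namespace, "N\0", 'E', "Foo\0"

     a reference to a DIE already visited with die_mark == 3 contributes
       'R', <attribute code>, 3

     a first reference to any other DIE contributes
       'T', <attribute code>, <context of its parent>, followed by the full
       ordered checksum of the referenced DIE.  */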
7151
7152 struct checksum_attributes
7153 {
7154 dw_attr_node *at_name;
7155 dw_attr_node *at_type;
7156 dw_attr_node *at_friend;
7157 dw_attr_node *at_accessibility;
7158 dw_attr_node *at_address_class;
7159 dw_attr_node *at_alignment;
7160 dw_attr_node *at_allocated;
7161 dw_attr_node *at_artificial;
7162 dw_attr_node *at_associated;
7163 dw_attr_node *at_binary_scale;
7164 dw_attr_node *at_bit_offset;
7165 dw_attr_node *at_bit_size;
7166 dw_attr_node *at_bit_stride;
7167 dw_attr_node *at_byte_size;
7168 dw_attr_node *at_byte_stride;
7169 dw_attr_node *at_const_value;
7170 dw_attr_node *at_containing_type;
7171 dw_attr_node *at_count;
7172 dw_attr_node *at_data_location;
7173 dw_attr_node *at_data_member_location;
7174 dw_attr_node *at_decimal_scale;
7175 dw_attr_node *at_decimal_sign;
7176 dw_attr_node *at_default_value;
7177 dw_attr_node *at_digit_count;
7178 dw_attr_node *at_discr;
7179 dw_attr_node *at_discr_list;
7180 dw_attr_node *at_discr_value;
7181 dw_attr_node *at_encoding;
7182 dw_attr_node *at_endianity;
7183 dw_attr_node *at_explicit;
7184 dw_attr_node *at_is_optional;
7185 dw_attr_node *at_location;
7186 dw_attr_node *at_lower_bound;
7187 dw_attr_node *at_mutable;
7188 dw_attr_node *at_ordering;
7189 dw_attr_node *at_picture_string;
7190 dw_attr_node *at_prototyped;
7191 dw_attr_node *at_small;
7192 dw_attr_node *at_segment;
7193 dw_attr_node *at_string_length;
7194 dw_attr_node *at_string_length_bit_size;
7195 dw_attr_node *at_string_length_byte_size;
7196 dw_attr_node *at_threads_scaled;
7197 dw_attr_node *at_upper_bound;
7198 dw_attr_node *at_use_location;
7199 dw_attr_node *at_use_UTF8;
7200 dw_attr_node *at_variable_parameter;
7201 dw_attr_node *at_virtuality;
7202 dw_attr_node *at_visibility;
7203 dw_attr_node *at_vtable_elem_location;
7204 };
7205
7206 /* Collect the attributes that we will want to use for the checksum. */
7207
7208 static void
7209 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7210 {
7211 dw_attr_node *a;
7212 unsigned ix;
7213
7214 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7215 {
7216 switch (a->dw_attr)
7217 {
7218 case DW_AT_name:
7219 attrs->at_name = a;
7220 break;
7221 case DW_AT_type:
7222 attrs->at_type = a;
7223 break;
7224 case DW_AT_friend:
7225 attrs->at_friend = a;
7226 break;
7227 case DW_AT_accessibility:
7228 attrs->at_accessibility = a;
7229 break;
7230 case DW_AT_address_class:
7231 attrs->at_address_class = a;
7232 break;
7233 case DW_AT_alignment:
7234 attrs->at_alignment = a;
7235 break;
7236 case DW_AT_allocated:
7237 attrs->at_allocated = a;
7238 break;
7239 case DW_AT_artificial:
7240 attrs->at_artificial = a;
7241 break;
7242 case DW_AT_associated:
7243 attrs->at_associated = a;
7244 break;
7245 case DW_AT_binary_scale:
7246 attrs->at_binary_scale = a;
7247 break;
7248 case DW_AT_bit_offset:
7249 attrs->at_bit_offset = a;
7250 break;
7251 case DW_AT_bit_size:
7252 attrs->at_bit_size = a;
7253 break;
7254 case DW_AT_bit_stride:
7255 attrs->at_bit_stride = a;
7256 break;
7257 case DW_AT_byte_size:
7258 attrs->at_byte_size = a;
7259 break;
7260 case DW_AT_byte_stride:
7261 attrs->at_byte_stride = a;
7262 break;
7263 case DW_AT_const_value:
7264 attrs->at_const_value = a;
7265 break;
7266 case DW_AT_containing_type:
7267 attrs->at_containing_type = a;
7268 break;
7269 case DW_AT_count:
7270 attrs->at_count = a;
7271 break;
7272 case DW_AT_data_location:
7273 attrs->at_data_location = a;
7274 break;
7275 case DW_AT_data_member_location:
7276 attrs->at_data_member_location = a;
7277 break;
7278 case DW_AT_decimal_scale:
7279 attrs->at_decimal_scale = a;
7280 break;
7281 case DW_AT_decimal_sign:
7282 attrs->at_decimal_sign = a;
7283 break;
7284 case DW_AT_default_value:
7285 attrs->at_default_value = a;
7286 break;
7287 case DW_AT_digit_count:
7288 attrs->at_digit_count = a;
7289 break;
7290 case DW_AT_discr:
7291 attrs->at_discr = a;
7292 break;
7293 case DW_AT_discr_list:
7294 attrs->at_discr_list = a;
7295 break;
7296 case DW_AT_discr_value:
7297 attrs->at_discr_value = a;
7298 break;
7299 case DW_AT_encoding:
7300 attrs->at_encoding = a;
7301 break;
7302 case DW_AT_endianity:
7303 attrs->at_endianity = a;
7304 break;
7305 case DW_AT_explicit:
7306 attrs->at_explicit = a;
7307 break;
7308 case DW_AT_is_optional:
7309 attrs->at_is_optional = a;
7310 break;
7311 case DW_AT_location:
7312 attrs->at_location = a;
7313 break;
7314 case DW_AT_lower_bound:
7315 attrs->at_lower_bound = a;
7316 break;
7317 case DW_AT_mutable:
7318 attrs->at_mutable = a;
7319 break;
7320 case DW_AT_ordering:
7321 attrs->at_ordering = a;
7322 break;
7323 case DW_AT_picture_string:
7324 attrs->at_picture_string = a;
7325 break;
7326 case DW_AT_prototyped:
7327 attrs->at_prototyped = a;
7328 break;
7329 case DW_AT_small:
7330 attrs->at_small = a;
7331 break;
7332 case DW_AT_segment:
7333 attrs->at_segment = a;
7334 break;
7335 case DW_AT_string_length:
7336 attrs->at_string_length = a;
7337 break;
7338 case DW_AT_string_length_bit_size:
7339 attrs->at_string_length_bit_size = a;
7340 break;
7341 case DW_AT_string_length_byte_size:
7342 attrs->at_string_length_byte_size = a;
7343 break;
7344 case DW_AT_threads_scaled:
7345 attrs->at_threads_scaled = a;
7346 break;
7347 case DW_AT_upper_bound:
7348 attrs->at_upper_bound = a;
7349 break;
7350 case DW_AT_use_location:
7351 attrs->at_use_location = a;
7352 break;
7353 case DW_AT_use_UTF8:
7354 attrs->at_use_UTF8 = a;
7355 break;
7356 case DW_AT_variable_parameter:
7357 attrs->at_variable_parameter = a;
7358 break;
7359 case DW_AT_virtuality:
7360 attrs->at_virtuality = a;
7361 break;
7362 case DW_AT_visibility:
7363 attrs->at_visibility = a;
7364 break;
7365 case DW_AT_vtable_elem_location:
7366 attrs->at_vtable_elem_location = a;
7367 break;
7368 default:
7369 break;
7370 }
7371 }
7372 }
7373
7374 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7375
7376 static void
7377 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7378 {
7379 dw_die_ref c;
7380 dw_die_ref decl;
7381 struct checksum_attributes attrs;
7382
7383 CHECKSUM_ULEB128 ('D');
7384 CHECKSUM_ULEB128 (die->die_tag);
7385
7386 memset (&attrs, 0, sizeof (attrs));
7387
7388 decl = get_AT_ref (die, DW_AT_specification);
7389 if (decl != NULL)
7390 collect_checksum_attributes (&attrs, decl);
7391 collect_checksum_attributes (&attrs, die);
7392
7393 CHECKSUM_ATTR (attrs.at_name);
7394 CHECKSUM_ATTR (attrs.at_accessibility);
7395 CHECKSUM_ATTR (attrs.at_address_class);
7396 CHECKSUM_ATTR (attrs.at_allocated);
7397 CHECKSUM_ATTR (attrs.at_artificial);
7398 CHECKSUM_ATTR (attrs.at_associated);
7399 CHECKSUM_ATTR (attrs.at_binary_scale);
7400 CHECKSUM_ATTR (attrs.at_bit_offset);
7401 CHECKSUM_ATTR (attrs.at_bit_size);
7402 CHECKSUM_ATTR (attrs.at_bit_stride);
7403 CHECKSUM_ATTR (attrs.at_byte_size);
7404 CHECKSUM_ATTR (attrs.at_byte_stride);
7405 CHECKSUM_ATTR (attrs.at_const_value);
7406 CHECKSUM_ATTR (attrs.at_containing_type);
7407 CHECKSUM_ATTR (attrs.at_count);
7408 CHECKSUM_ATTR (attrs.at_data_location);
7409 CHECKSUM_ATTR (attrs.at_data_member_location);
7410 CHECKSUM_ATTR (attrs.at_decimal_scale);
7411 CHECKSUM_ATTR (attrs.at_decimal_sign);
7412 CHECKSUM_ATTR (attrs.at_default_value);
7413 CHECKSUM_ATTR (attrs.at_digit_count);
7414 CHECKSUM_ATTR (attrs.at_discr);
7415 CHECKSUM_ATTR (attrs.at_discr_list);
7416 CHECKSUM_ATTR (attrs.at_discr_value);
7417 CHECKSUM_ATTR (attrs.at_encoding);
7418 CHECKSUM_ATTR (attrs.at_endianity);
7419 CHECKSUM_ATTR (attrs.at_explicit);
7420 CHECKSUM_ATTR (attrs.at_is_optional);
7421 CHECKSUM_ATTR (attrs.at_location);
7422 CHECKSUM_ATTR (attrs.at_lower_bound);
7423 CHECKSUM_ATTR (attrs.at_mutable);
7424 CHECKSUM_ATTR (attrs.at_ordering);
7425 CHECKSUM_ATTR (attrs.at_picture_string);
7426 CHECKSUM_ATTR (attrs.at_prototyped);
7427 CHECKSUM_ATTR (attrs.at_small);
7428 CHECKSUM_ATTR (attrs.at_segment);
7429 CHECKSUM_ATTR (attrs.at_string_length);
7430 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7431 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7432 CHECKSUM_ATTR (attrs.at_threads_scaled);
7433 CHECKSUM_ATTR (attrs.at_upper_bound);
7434 CHECKSUM_ATTR (attrs.at_use_location);
7435 CHECKSUM_ATTR (attrs.at_use_UTF8);
7436 CHECKSUM_ATTR (attrs.at_variable_parameter);
7437 CHECKSUM_ATTR (attrs.at_virtuality);
7438 CHECKSUM_ATTR (attrs.at_visibility);
7439 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7440 CHECKSUM_ATTR (attrs.at_type);
7441 CHECKSUM_ATTR (attrs.at_friend);
7442 CHECKSUM_ATTR (attrs.at_alignment);
7443
7444 /* Checksum the child DIEs. */
7445 c = die->die_child;
7446 if (c) do {
7447 dw_attr_node *name_attr;
7448
7449 c = c->die_sib;
7450 name_attr = get_AT (c, DW_AT_name);
7451 if (is_template_instantiation (c))
7452 {
7453 /* Ignore instantiations of member type and function templates. */
7454 }
7455 else if (name_attr != NULL
7456 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7457 {
7458 /* Use a shallow checksum for named nested types and member
7459 functions. */
7460 CHECKSUM_ULEB128 ('S');
7461 CHECKSUM_ULEB128 (c->die_tag);
7462 CHECKSUM_STRING (AT_string (name_attr));
7463 }
7464 else
7465 {
7466 /* Use a deep checksum for other children. */
7467 /* Mark this DIE so it gets processed when unmarking. */
7468 if (c->die_mark == 0)
7469 c->die_mark = -1;
7470 die_checksum_ordered (c, ctx, mark);
7471 }
7472 } while (c != die->die_child);
7473
7474 CHECKSUM_ULEB128 (0);
7475 }
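
/* Illustration only: a named member function such as "size" inside a class
   contributes just the shallow record
     'S', DW_TAG_subprogram, "size\0"
   to its parent's signature, so its parameter list and other details do not
   enter the enclosing type's signature.  Children that are not named types
   or subprograms (data members, base-class entries, enumerators, ...) are
   checksummed in full by the recursive call above.  */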
7476
7477 /* Add a type name and tag to a hash. */
7478 static void
7479 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7480 {
7481 CHECKSUM_ULEB128 (tag);
7482 CHECKSUM_STRING (name);
7483 }
7484
7485 #undef CHECKSUM
7486 #undef CHECKSUM_STRING
7487 #undef CHECKSUM_ATTR
7488 #undef CHECKSUM_SLEB128
7489 #undef CHECKSUM_ULEB128
7490
7491 /* Generate the type signature for DIE. This is computed by generating an
7492 MD5 checksum over the DIE's tag, its relevant attributes, and its
7493 children. Attributes that are references to other DIEs are processed
7494 by recursion, using the MARK field to prevent infinite recursion.
7495 If the DIE is nested inside a namespace or another type, we also
7496 need to include that context in the signature. The lower 64 bits
7497 of the resulting MD5 checksum comprise the signature. */
7498
7499 static void
7500 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7501 {
7502 int mark;
7503 const char *name;
7504 unsigned char checksum[16];
7505 struct md5_ctx ctx;
7506 dw_die_ref decl;
7507 dw_die_ref parent;
7508
7509 name = get_AT_string (die, DW_AT_name);
7510 decl = get_AT_ref (die, DW_AT_specification);
7511 parent = get_die_parent (die);
7512
7513 /* First, compute a signature for just the type name (and its surrounding
7514 context, if any). This is stored in the type unit DIE for link-time
7515 ODR (one-definition rule) checking. */
7516
7517 if (is_cxx () && name != NULL)
7518 {
7519 md5_init_ctx (&ctx);
7520
7521 /* Checksum the names of surrounding namespaces and structures. */
7522 if (parent != NULL)
7523 checksum_die_context (parent, &ctx);
7524
7525 /* Checksum the current DIE. */
7526 die_odr_checksum (die->die_tag, name, &ctx);
7527 md5_finish_ctx (&ctx, checksum);
7528
7529 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7530 }
7531
7532 /* Next, compute the complete type signature. */
7533
7534 md5_init_ctx (&ctx);
7535 mark = 1;
7536 die->die_mark = mark;
7537
7538 /* Checksum the names of surrounding namespaces and structures. */
7539 if (parent != NULL)
7540 checksum_die_context (parent, &ctx);
7541
7542 /* Checksum the DIE and its children. */
7543 die_checksum_ordered (die, &ctx, &mark);
7544 unmark_all_dies (die);
7545 md5_finish_ctx (&ctx, checksum);
7546
7547 /* Store the signature in the type node and link the type DIE and the
7548 type node together. */
7549 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7550 DWARF_TYPE_SIGNATURE_SIZE);
7551 die->comdat_type_p = true;
7552 die->die_id.die_type_node = type_node;
7553 type_node->type_die = die;
7554
7555 /* If the DIE is a specification, link its declaration to the type node
7556 as well. */
7557 if (decl != NULL)
7558 {
7559 decl->comdat_type_p = true;
7560 decl->die_id.die_type_node = type_node;
7561 }
7562 }
7563
7564 /* Do the location expressions look the same? */
7565 static inline int
7566 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7567 {
7568 return loc1->dw_loc_opc == loc2->dw_loc_opc
7569 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7570 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7571 }
7572
7573 /* Do the values look the same? */
7574 static int
7575 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7576 {
7577 dw_loc_descr_ref loc1, loc2;
7578 rtx r1, r2;
7579
7580 if (v1->val_class != v2->val_class)
7581 return 0;
7582
7583 switch (v1->val_class)
7584 {
7585 case dw_val_class_const:
7586 case dw_val_class_const_implicit:
7587 return v1->v.val_int == v2->v.val_int;
7588 case dw_val_class_unsigned_const:
7589 case dw_val_class_unsigned_const_implicit:
7590 return v1->v.val_unsigned == v2->v.val_unsigned;
7591 case dw_val_class_const_double:
7592 return v1->v.val_double.high == v2->v.val_double.high
7593 && v1->v.val_double.low == v2->v.val_double.low;
7594 case dw_val_class_wide_int:
7595 return *v1->v.val_wide == *v2->v.val_wide;
7596 case dw_val_class_vec:
7597 if (v1->v.val_vec.length != v2->v.val_vec.length
7598 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7599 return 0;
7600 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7601 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7602 return 0;
7603 return 1;
7604 case dw_val_class_flag:
7605 return v1->v.val_flag == v2->v.val_flag;
7606 case dw_val_class_str:
7607 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7608
7609 case dw_val_class_addr:
7610 r1 = v1->v.val_addr;
7611 r2 = v2->v.val_addr;
7612 if (GET_CODE (r1) != GET_CODE (r2))
7613 return 0;
7614 return rtx_equal_p (r1, r2);
7615
7616 case dw_val_class_offset:
7617 return v1->v.val_offset == v2->v.val_offset;
7618
7619 case dw_val_class_loc:
7620 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7621 loc1 && loc2;
7622 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7623 if (!same_loc_p (loc1, loc2, mark))
7624 return 0;
7625 return !loc1 && !loc2;
7626
7627 case dw_val_class_die_ref:
7628 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7629
7630 case dw_val_class_fde_ref:
7631 case dw_val_class_vms_delta:
7632 case dw_val_class_lbl_id:
7633 case dw_val_class_lineptr:
7634 case dw_val_class_macptr:
7635 case dw_val_class_loclistsptr:
7636 case dw_val_class_high_pc:
7637 return 1;
7638
7639 case dw_val_class_file:
7640 case dw_val_class_file_implicit:
7641 return v1->v.val_file == v2->v.val_file;
7642
7643 case dw_val_class_data8:
7644 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7645
7646 default:
7647 return 1;
7648 }
7649 }
7650
7651 /* Do the attributes look the same? */
7652
7653 static int
7654 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7655 {
7656 if (at1->dw_attr != at2->dw_attr)
7657 return 0;
7658
7659 /* We don't care that this was compiled with a different compiler
7660 snapshot; if the output is the same, that's what matters. */
7661 if (at1->dw_attr == DW_AT_producer)
7662 return 1;
7663
7664 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7665 }
7666
7667 /* Do the DIEs look the same? */
7668
7669 static int
7670 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7671 {
7672 dw_die_ref c1, c2;
7673 dw_attr_node *a1;
7674 unsigned ix;
7675
7676 /* To avoid infinite recursion. */
7677 if (die1->die_mark)
7678 return die1->die_mark == die2->die_mark;
7679 die1->die_mark = die2->die_mark = ++(*mark);
7680
7681 if (die1->die_tag != die2->die_tag)
7682 return 0;
7683
7684 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7685 return 0;
7686
7687 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7688 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7689 return 0;
7690
7691 c1 = die1->die_child;
7692 c2 = die2->die_child;
7693 if (! c1)
7694 {
7695 if (c2)
7696 return 0;
7697 }
7698 else
7699 for (;;)
7700 {
7701 if (!same_die_p (c1, c2, mark))
7702 return 0;
7703 c1 = c1->die_sib;
7704 c2 = c2->die_sib;
7705 if (c1 == die1->die_child)
7706 {
7707 if (c2 == die2->die_child)
7708 break;
7709 else
7710 return 0;
7711 }
7712 }
7713
7714 return 1;
7715 }
7716
7717 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7718 children, and set die_symbol. */
7719
7720 static void
7721 compute_comp_unit_symbol (dw_die_ref unit_die)
7722 {
7723 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7724 const char *base = die_name ? lbasename (die_name) : "anonymous";
7725 char *name = XALLOCAVEC (char, strlen (base) + 64);
7726 char *p;
7727 int i, mark;
7728 unsigned char checksum[16];
7729 struct md5_ctx ctx;
7730
7731 /* Compute the checksum of the DIE, then append part of it as hex digits to
7732 the file name of the unit. */
7733
7734 md5_init_ctx (&ctx);
7735 mark = 0;
7736 die_checksum (unit_die, &ctx, &mark);
7737 unmark_all_dies (unit_die);
7738 md5_finish_ctx (&ctx, checksum);
7739
7740 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7741 not start with a letter but with anything valid for filenames and
7742 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7743 character is not a letter. */
7744 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7745 clean_symbol_name (name);
7746
7747 p = name + strlen (name);
7748 for (i = 0; i < 4; i++)
7749 {
7750 sprintf (p, "%.2x", checksum[i]);
7751 p += 2;
7752 }
7753
7754 unit_die->die_id.die_symbol = xstrdup (name);
7755 }
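
/* Illustration only (hypothetical values): for a unit whose DW_AT_name is
   "src/foo.c" and whose checksum starts with the bytes 0x1a 0x2b 0x3c 0x4d,
   the code above builds "foo.c." from the basename, lets clean_symbol_name
   replace the characters that are not valid in an assembler symbol, and
   appends the first four checksum bytes in hex, giving a die_symbol along
   the lines of "foo_c_1a2b3c4d".  */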
7756
7757 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7758
7759 static int
7760 is_type_die (dw_die_ref die)
7761 {
7762 switch (die->die_tag)
7763 {
7764 case DW_TAG_array_type:
7765 case DW_TAG_class_type:
7766 case DW_TAG_interface_type:
7767 case DW_TAG_enumeration_type:
7768 case DW_TAG_pointer_type:
7769 case DW_TAG_reference_type:
7770 case DW_TAG_rvalue_reference_type:
7771 case DW_TAG_string_type:
7772 case DW_TAG_structure_type:
7773 case DW_TAG_subroutine_type:
7774 case DW_TAG_union_type:
7775 case DW_TAG_ptr_to_member_type:
7776 case DW_TAG_set_type:
7777 case DW_TAG_subrange_type:
7778 case DW_TAG_base_type:
7779 case DW_TAG_const_type:
7780 case DW_TAG_file_type:
7781 case DW_TAG_packed_type:
7782 case DW_TAG_volatile_type:
7783 case DW_TAG_typedef:
7784 return 1;
7785 default:
7786 return 0;
7787 }
7788 }
7789
7790 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7791 Basically, we want to choose the bits that are likely to be shared between
7792 compilations (types) and leave out the bits that are specific to individual
7793 compilations (functions). */
7794
7795 static int
7796 is_comdat_die (dw_die_ref c)
7797 {
7798 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7799 we do for stabs. The advantage is a greater likelihood of sharing between
7800 objects that don't include headers in the same order (and therefore would
7801 put the base types in a different comdat). jason 8/28/00 */
7802
7803 if (c->die_tag == DW_TAG_base_type)
7804 return 0;
7805
7806 if (c->die_tag == DW_TAG_pointer_type
7807 || c->die_tag == DW_TAG_reference_type
7808 || c->die_tag == DW_TAG_rvalue_reference_type
7809 || c->die_tag == DW_TAG_const_type
7810 || c->die_tag == DW_TAG_volatile_type)
7811 {
7812 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7813
7814 return t ? is_comdat_die (t) : 0;
7815 }
7816
7817 return is_type_die (c);
7818 }
7819
7820 /* Returns true iff C is a compile-unit DIE. */
7821
7822 static inline bool
7823 is_cu_die (dw_die_ref c)
7824 {
7825 return c && (c->die_tag == DW_TAG_compile_unit
7826 || c->die_tag == DW_TAG_skeleton_unit);
7827 }
7828
7829 /* Returns true iff C is a unit DIE of some sort. */
7830
7831 static inline bool
7832 is_unit_die (dw_die_ref c)
7833 {
7834 return c && (c->die_tag == DW_TAG_compile_unit
7835 || c->die_tag == DW_TAG_partial_unit
7836 || c->die_tag == DW_TAG_type_unit
7837 || c->die_tag == DW_TAG_skeleton_unit);
7838 }
7839
7840 /* Returns true iff C is a namespace DIE. */
7841
7842 static inline bool
7843 is_namespace_die (dw_die_ref c)
7844 {
7845 return c && c->die_tag == DW_TAG_namespace;
7846 }
7847
7848 /* Returns true iff C is a class or structure DIE. */
7849
7850 static inline bool
7851 is_class_die (dw_die_ref c)
7852 {
7853 return c && (c->die_tag == DW_TAG_class_type
7854 || c->die_tag == DW_TAG_structure_type);
7855 }
7856
7857 /* Return non-zero if this DIE is a template parameter. */
7858
7859 static inline bool
7860 is_template_parameter (dw_die_ref die)
7861 {
7862 switch (die->die_tag)
7863 {
7864 case DW_TAG_template_type_param:
7865 case DW_TAG_template_value_param:
7866 case DW_TAG_GNU_template_template_param:
7867 case DW_TAG_GNU_template_parameter_pack:
7868 return true;
7869 default:
7870 return false;
7871 }
7872 }
7873
7874 /* Return non-zero if this DIE represents a template instantiation. */
7875
7876 static inline bool
7877 is_template_instantiation (dw_die_ref die)
7878 {
7879 dw_die_ref c;
7880
7881 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7882 return false;
7883 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7884 return false;
7885 }
7886
7887 static char *
7888 gen_internal_sym (const char *prefix)
7889 {
7890 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7891
7892 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7893 return xstrdup (buf);
7894 }
7895
7896 /* Return non-zero if this DIE is a declaration. */
7897
7898 static int
7899 is_declaration_die (dw_die_ref die)
7900 {
7901 dw_attr_node *a;
7902 unsigned ix;
7903
7904 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7905 if (a->dw_attr == DW_AT_declaration)
7906 return 1;
7907
7908 return 0;
7909 }
7910
7911 /* Return non-zero if this DIE is nested inside a subprogram. */
7912
7913 static int
7914 is_nested_in_subprogram (dw_die_ref die)
7915 {
7916 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7917
7918 if (decl == NULL)
7919 decl = die;
7920 return local_scope_p (decl);
7921 }
7922
7923 /* Return non-zero if this DIE contains a defining declaration of a
7924 subprogram. */
7925
7926 static int
7927 contains_subprogram_definition (dw_die_ref die)
7928 {
7929 dw_die_ref c;
7930
7931 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7932 return 1;
7933 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7934 return 0;
7935 }
7936
7937 /* Return non-zero if this is a type DIE that should be moved to a
7938 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7939 unit type. */
7940
7941 static int
7942 should_move_die_to_comdat (dw_die_ref die)
7943 {
7944 switch (die->die_tag)
7945 {
7946 case DW_TAG_class_type:
7947 case DW_TAG_structure_type:
7948 case DW_TAG_enumeration_type:
7949 case DW_TAG_union_type:
7950 /* Don't move declarations, inlined instances, types nested in a
7951 subprogram, or types that contain subprogram definitions. */
7952 if (is_declaration_die (die)
7953 || get_AT (die, DW_AT_abstract_origin)
7954 || is_nested_in_subprogram (die)
7955 || contains_subprogram_definition (die))
7956 return 0;
7957 return 1;
7958 case DW_TAG_array_type:
7959 case DW_TAG_interface_type:
7960 case DW_TAG_pointer_type:
7961 case DW_TAG_reference_type:
7962 case DW_TAG_rvalue_reference_type:
7963 case DW_TAG_string_type:
7964 case DW_TAG_subroutine_type:
7965 case DW_TAG_ptr_to_member_type:
7966 case DW_TAG_set_type:
7967 case DW_TAG_subrange_type:
7968 case DW_TAG_base_type:
7969 case DW_TAG_const_type:
7970 case DW_TAG_file_type:
7971 case DW_TAG_packed_type:
7972 case DW_TAG_volatile_type:
7973 case DW_TAG_typedef:
7974 default:
7975 return 0;
7976 }
7977 }
7978
7979 /* Make a clone of DIE. */
7980
7981 static dw_die_ref
7982 clone_die (dw_die_ref die)
7983 {
7984 dw_die_ref clone = new_die_raw (die->die_tag);
7985 dw_attr_node *a;
7986 unsigned ix;
7987
7988 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7989 add_dwarf_attr (clone, a);
7990
7991 return clone;
7992 }
7993
7994 /* Make a clone of the tree rooted at DIE. */
7995
7996 static dw_die_ref
7997 clone_tree (dw_die_ref die)
7998 {
7999 dw_die_ref c;
8000 dw_die_ref clone = clone_die (die);
8001
8002 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8003
8004 return clone;
8005 }
8006
8007 /* Make a clone of DIE as a declaration. */
8008
8009 static dw_die_ref
8010 clone_as_declaration (dw_die_ref die)
8011 {
8012 dw_die_ref clone;
8013 dw_die_ref decl;
8014 dw_attr_node *a;
8015 unsigned ix;
8016
8017 /* If the DIE is already a declaration, just clone it. */
8018 if (is_declaration_die (die))
8019 return clone_die (die);
8020
8021 /* If the DIE is a specification, just clone its declaration DIE. */
8022 decl = get_AT_ref (die, DW_AT_specification);
8023 if (decl != NULL)
8024 {
8025 clone = clone_die (decl);
8026 if (die->comdat_type_p)
8027 add_AT_die_ref (clone, DW_AT_signature, die);
8028 return clone;
8029 }
8030
8031 clone = new_die_raw (die->die_tag);
8032
8033 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8034 {
8035 /* We don't want to copy over all attributes.
8036 For example, we don't want DW_AT_byte_size, because otherwise we would
8037 no longer have a declaration and GDB would treat it as a definition. */
8038
8039 switch (a->dw_attr)
8040 {
8041 case DW_AT_abstract_origin:
8042 case DW_AT_artificial:
8043 case DW_AT_containing_type:
8044 case DW_AT_external:
8045 case DW_AT_name:
8046 case DW_AT_type:
8047 case DW_AT_virtuality:
8048 case DW_AT_linkage_name:
8049 case DW_AT_MIPS_linkage_name:
8050 add_dwarf_attr (clone, a);
8051 break;
8052 case DW_AT_byte_size:
8053 case DW_AT_alignment:
8054 default:
8055 break;
8056 }
8057 }
8058
8059 if (die->comdat_type_p)
8060 add_AT_die_ref (clone, DW_AT_signature, die);
8061
8062 add_AT_flag (clone, DW_AT_declaration, 1);
8063 return clone;
8064 }
8065
8066
8067 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8068
8069 struct decl_table_entry
8070 {
8071 dw_die_ref orig;
8072 dw_die_ref copy;
8073 };
8074
8075 /* Helpers to manipulate hash table of copied declarations. */
8076
8077 /* Hashtable helpers. */
8078
8079 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8080 {
8081 typedef die_struct *compare_type;
8082 static inline hashval_t hash (const decl_table_entry *);
8083 static inline bool equal (const decl_table_entry *, const die_struct *);
8084 };
8085
8086 inline hashval_t
8087 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8088 {
8089 return htab_hash_pointer (entry->orig);
8090 }
8091
8092 inline bool
8093 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8094 const die_struct *entry2)
8095 {
8096 return entry1->orig == entry2;
8097 }
8098
8099 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8100
8101 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8102 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8103 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8104 to check if the ancestor has already been copied into UNIT. */
8105
8106 static dw_die_ref
8107 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8108 decl_hash_type *decl_table)
8109 {
8110 dw_die_ref parent = die->die_parent;
8111 dw_die_ref new_parent = unit;
8112 dw_die_ref copy;
8113 decl_table_entry **slot = NULL;
8114 struct decl_table_entry *entry = NULL;
8115
8116 if (decl_table)
8117 {
8118 /* Check if the entry has already been copied to UNIT. */
8119 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8120 INSERT);
8121 if (*slot != HTAB_EMPTY_ENTRY)
8122 {
8123 entry = *slot;
8124 return entry->copy;
8125 }
8126
8127 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8128 entry = XCNEW (struct decl_table_entry);
8129 entry->orig = die;
8130 entry->copy = NULL;
8131 *slot = entry;
8132 }
8133
8134 if (parent != NULL)
8135 {
8136 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8137 if (spec != NULL)
8138 parent = spec;
8139 if (!is_unit_die (parent))
8140 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8141 }
8142
8143 copy = clone_as_declaration (die);
8144 add_child_die (new_parent, copy);
8145
8146 if (decl_table)
8147 {
8148 /* Record the pointer to the copy. */
8149 entry->copy = copy;
8150 }
8151
8152 return copy;
8153 }
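
/* Illustration only: called on the DIE for a struct S nested in namespace N,
   copy_ancestor_tree adds declaration-only clones
     DW_TAG_namespace "N"
       DW_TAG_structure_type "S"
   under UNIT and returns the clone of S, so a DIE moved into the type unit
   keeps its qualified context without dragging in the full enclosing
   definitions.  */
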
8154 /* Copy the declaration context to the new type unit DIE. This includes
8155 any surrounding namespace or type declarations. If the DIE has a
8156 DW_AT_specification attribute, the attributes and children attached to
8157 the specification are copied as well, and a pointer to the original
8158 parent of the declaration DIE is returned. Returns NULL otherwise. */
8159
8160 static dw_die_ref
8161 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8162 {
8163 dw_die_ref decl;
8164 dw_die_ref new_decl;
8165 dw_die_ref orig_parent = NULL;
8166
8167 decl = get_AT_ref (die, DW_AT_specification);
8168 if (decl == NULL)
8169 decl = die;
8170 else
8171 {
8172 unsigned ix;
8173 dw_die_ref c;
8174 dw_attr_node *a;
8175
8176 /* The original DIE will be changed to a declaration, and must
8177 be moved to be a child of the original declaration DIE. */
8178 orig_parent = decl->die_parent;
8179
8180 /* Copy the type node pointer from the new DIE to the original
8181 declaration DIE so we can forward references later. */
8182 decl->comdat_type_p = true;
8183 decl->die_id.die_type_node = die->die_id.die_type_node;
8184
8185 remove_AT (die, DW_AT_specification);
8186
8187 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8188 {
8189 if (a->dw_attr != DW_AT_name
8190 && a->dw_attr != DW_AT_declaration
8191 && a->dw_attr != DW_AT_external)
8192 add_dwarf_attr (die, a);
8193 }
8194
8195 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8196 }
8197
8198 if (decl->die_parent != NULL
8199 && !is_unit_die (decl->die_parent))
8200 {
8201 new_decl = copy_ancestor_tree (unit, decl, NULL);
8202 if (new_decl != NULL)
8203 {
8204 remove_AT (new_decl, DW_AT_signature);
8205 add_AT_specification (die, new_decl);
8206 }
8207 }
8208
8209 return orig_parent;
8210 }
8211
8212 /* Generate the skeleton ancestor tree for the given NODE, then clone
8213 the DIE and add the clone into the tree. */
8214
8215 static void
8216 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8217 {
8218 if (node->new_die != NULL)
8219 return;
8220
8221 node->new_die = clone_as_declaration (node->old_die);
8222
8223 if (node->parent != NULL)
8224 {
8225 generate_skeleton_ancestor_tree (node->parent);
8226 add_child_die (node->parent->new_die, node->new_die);
8227 }
8228 }
8229
8230 /* Generate a skeleton tree of DIEs containing any declarations that are
8231 found in the original tree. We traverse the tree looking for declaration
8232 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8233
8234 static void
8235 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8236 {
8237 skeleton_chain_node node;
8238 dw_die_ref c;
8239 dw_die_ref first;
8240 dw_die_ref prev = NULL;
8241 dw_die_ref next = NULL;
8242
8243 node.parent = parent;
8244
8245 first = c = parent->old_die->die_child;
8246 if (c)
8247 next = c->die_sib;
8248 if (c) do {
8249 if (prev == NULL || prev->die_sib == c)
8250 prev = c;
8251 c = next;
8252 next = (c == first ? NULL : c->die_sib);
8253 node.old_die = c;
8254 node.new_die = NULL;
8255 if (is_declaration_die (c))
8256 {
8257 if (is_template_instantiation (c))
8258 {
8259 /* Instantiated templates do not need to be cloned into the
8260 type unit. Just move the DIE and its children back to
8261 the skeleton tree (in the main CU). */
8262 remove_child_with_prev (c, prev);
8263 add_child_die (parent->new_die, c);
8264 c = prev;
8265 }
8266 else if (c->comdat_type_p)
8267 {
8268 /* This is the skeleton of a type broken out earlier by
8269 break_out_comdat_types. Clone the existing DIE, but keep the
8270 children under the original (which is in the main CU). */
8271 dw_die_ref clone = clone_die (c);
8272
8273 replace_child (c, clone, prev);
8274 generate_skeleton_ancestor_tree (parent);
8275 add_child_die (parent->new_die, c);
8276 c = clone;
8277 continue;
8278 }
8279 else
8280 {
8281 /* Clone the existing DIE, move the original to the skeleton
8282 tree (which is in the main CU), and put the clone, with
8283 all the original's children, where the original came from
8284 (which is about to be moved to the type unit). */
8285 dw_die_ref clone = clone_die (c);
8286 move_all_children (c, clone);
8287
8288 /* If the original has a DW_AT_object_pointer attribute,
8289 it would now point to a child DIE just moved to the
8290 cloned tree, so we need to remove that attribute from
8291 the original. */
8292 remove_AT (c, DW_AT_object_pointer);
8293
8294 replace_child (c, clone, prev);
8295 generate_skeleton_ancestor_tree (parent);
8296 add_child_die (parent->new_die, c);
8297 node.old_die = clone;
8298 node.new_die = c;
8299 c = clone;
8300 }
8301 }
8302 generate_skeleton_bottom_up (&node);
8303 } while (next != NULL);
8304 }
8305
8306 /* Wrapper function for generate_skeleton_bottom_up. */
8307
8308 static dw_die_ref
8309 generate_skeleton (dw_die_ref die)
8310 {
8311 skeleton_chain_node node;
8312
8313 node.old_die = die;
8314 node.new_die = NULL;
8315 node.parent = NULL;
8316
8317 /* If this type definition is nested inside another type,
8318 and is not an instantiation of a template, always leave
8319 at least a declaration in its place. */
8320 if (die->die_parent != NULL
8321 && is_type_die (die->die_parent)
8322 && !is_template_instantiation (die))
8323 node.new_die = clone_as_declaration (die);
8324
8325 generate_skeleton_bottom_up (&node);
8326 return node.new_die;
8327 }
8328
8329 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8330 declaration. The original DIE is moved to a new compile unit so that
8331 existing references to it follow it to the new location. If any of the
8332 original DIE's descendants is a declaration, we need to replace the
8333 original DIE with a skeleton tree and move the declarations back into the
8334 skeleton tree. */
8335
8336 static dw_die_ref
8337 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8338 dw_die_ref prev)
8339 {
8340 dw_die_ref skeleton, orig_parent;
8341
8342 /* Copy the declaration context to the type unit DIE. If the returned
8343 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8344 that DIE. */
8345 orig_parent = copy_declaration_context (unit, child);
8346
8347 skeleton = generate_skeleton (child);
8348 if (skeleton == NULL)
8349 remove_child_with_prev (child, prev);
8350 else
8351 {
8352 skeleton->comdat_type_p = true;
8353 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8354
8355 /* If the original DIE was a specification, we need to put
8356 the skeleton under the parent DIE of the declaration.
8357 This leaves the original declaration in the tree, but
8358 it will be pruned later since there are no longer any
8359 references to it. */
8360 if (orig_parent != NULL)
8361 {
8362 remove_child_with_prev (child, prev);
8363 add_child_die (orig_parent, skeleton);
8364 }
8365 else
8366 replace_child (child, skeleton, prev);
8367 }
8368
8369 return skeleton;
8370 }
8371
8372 static void
8373 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8374 comdat_type_node *type_node,
8375 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8376
8377 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8378 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8379 DWARF procedure references in the DW_AT_location attribute. */
8380
8381 static dw_die_ref
8382 copy_dwarf_procedure (dw_die_ref die,
8383 comdat_type_node *type_node,
8384 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8385 {
8386 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8387
8388 /* DWARF procedures are not supposed to have children... */
8389 gcc_assert (die->die_child == NULL);
8390
8391 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8392 gcc_assert (vec_safe_length (die->die_attr) == 1
8393 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8394
8395 /* Do not copy DWARF procedures more than once. */
8396 bool existed;
8397 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8398 if (existed)
8399 return die_copy;
8400
8401 die_copy = clone_die (die);
8402 add_child_die (type_node->root_die, die_copy);
8403 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8404 return die_copy;
8405 }
8406
8407 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8408 procedures in DIE's attributes. */
8409
8410 static void
8411 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8412 comdat_type_node *type_node,
8413 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8414 {
8415 dw_attr_node *a;
8416 unsigned i;
8417
8418 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8419 {
8420 dw_loc_descr_ref loc;
8421
8422 if (a->dw_attr_val.val_class != dw_val_class_loc)
8423 continue;
8424
8425 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8426 {
8427 switch (loc->dw_loc_opc)
8428 {
8429 case DW_OP_call2:
8430 case DW_OP_call4:
8431 case DW_OP_call_ref:
8432 gcc_assert (loc->dw_loc_oprnd1.val_class
8433 == dw_val_class_die_ref);
8434 loc->dw_loc_oprnd1.v.val_die_ref.die
8435 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8436 type_node,
8437 copied_dwarf_procs);
8438 break;
8439 default:
8440 break;
8441 }
8442 }
8443 }
8444 }
8445
8446 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8447 rewrite references to point to the copies.
8448
8449 References are looked for in the location-description attributes of DIE
8450 and, recursively, of all its children. COPIED_DWARF_PROCS is a mapping
8451 from old DWARF procedures to their copies; it is used to avoid copying
8452 the same DWARF procedure twice under TYPE_NODE. */
8453
8454 static void
8455 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8456 comdat_type_node *type_node,
8457 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8458 {
8459 dw_die_ref c;
8460
8461 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8462 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8463 type_node,
8464 copied_dwarf_procs));
8465 }
8466
8467 /* Traverse the DIE and set up additional .debug_types or .debug_info
8468 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8469 section. */
8470
8471 static void
8472 break_out_comdat_types (dw_die_ref die)
8473 {
8474 dw_die_ref c;
8475 dw_die_ref first;
8476 dw_die_ref prev = NULL;
8477 dw_die_ref next = NULL;
8478 dw_die_ref unit = NULL;
8479
8480 first = c = die->die_child;
8481 if (c)
8482 next = c->die_sib;
8483 if (c) do {
8484 if (prev == NULL || prev->die_sib == c)
8485 prev = c;
8486 c = next;
8487 next = (c == first ? NULL : c->die_sib);
8488 if (should_move_die_to_comdat (c))
8489 {
8490 dw_die_ref replacement;
8491 comdat_type_node *type_node;
8492
8493 /* Break out nested types into their own type units. */
8494 break_out_comdat_types (c);
8495
8496 /* Create a new type unit DIE as the root for the new tree, and
8497 add it to the list of comdat types. */
8498 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8499 add_AT_unsigned (unit, DW_AT_language,
8500 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8501 type_node = ggc_cleared_alloc<comdat_type_node> ();
8502 type_node->root_die = unit;
8503 type_node->next = comdat_type_list;
8504 comdat_type_list = type_node;
8505
8506 /* Generate the type signature. */
8507 generate_type_signature (c, type_node);
8508
8509 /* Copy the declaration context, attributes, and children of the
8510 declaration into the new type unit DIE, then remove this DIE
8511 from the main CU (or replace it with a skeleton if necessary). */
8512 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8513 type_node->skeleton_die = replacement;
8514
8515 /* Add the DIE to the new compunit. */
8516 add_child_die (unit, c);
8517
8518 /* Types can reference DWARF procedures for type size or data location
8519 expressions. Calls in DWARF expressions cannot target procedures
8520 that are not in the same section. So we must copy DWARF procedures
8521 along with this type and then rewrite references to them. */
8522 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8523 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8524
8525 if (replacement != NULL)
8526 c = replacement;
8527 }
8528 else if (c->die_tag == DW_TAG_namespace
8529 || c->die_tag == DW_TAG_class_type
8530 || c->die_tag == DW_TAG_structure_type
8531 || c->die_tag == DW_TAG_union_type)
8532 {
8533 /* Look for nested types that can be broken out. */
8534 break_out_comdat_types (c);
8535 }
8536 } while (next != NULL);
8537 }
8538
8539 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8540 Enter all the cloned children into the hash table decl_table. */
8541
8542 static dw_die_ref
8543 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8544 {
8545 dw_die_ref c;
8546 dw_die_ref clone;
8547 struct decl_table_entry *entry;
8548 decl_table_entry **slot;
8549
8550 if (die->die_tag == DW_TAG_subprogram)
8551 clone = clone_as_declaration (die);
8552 else
8553 clone = clone_die (die);
8554
8555 slot = decl_table->find_slot_with_hash (die,
8556 htab_hash_pointer (die), INSERT);
8557
8558 /* Assert that DIE isn't in the hash table yet. If it were already there,
8559 its ancestors would necessarily be there as well, and clone_tree_partial
8560 wouldn't have been called. */
8561 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8562
8563 entry = XCNEW (struct decl_table_entry);
8564 entry->orig = die;
8565 entry->copy = clone;
8566 *slot = entry;
8567
8568 if (die->die_tag != DW_TAG_subprogram)
8569 FOR_EACH_CHILD (die, c,
8570 add_child_die (clone, clone_tree_partial (c, decl_table)));
8571
8572 return clone;
8573 }
8574
8575 /* Walk the DIE and its children, looking for references to incomplete
8576 or trivial types that are unmarked (i.e., that are not in the current
8577 type_unit). */
8578
8579 static void
8580 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8581 {
8582 dw_die_ref c;
8583 dw_attr_node *a;
8584 unsigned ix;
8585
8586 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8587 {
8588 if (AT_class (a) == dw_val_class_die_ref)
8589 {
8590 dw_die_ref targ = AT_ref (a);
8591 decl_table_entry **slot;
8592 struct decl_table_entry *entry;
8593
8594 if (targ->die_mark != 0 || targ->comdat_type_p)
8595 continue;
8596
8597 slot = decl_table->find_slot_with_hash (targ,
8598 htab_hash_pointer (targ),
8599 INSERT);
8600
8601 if (*slot != HTAB_EMPTY_ENTRY)
8602 {
8603 /* TARG has already been copied, so we just need to
8604 modify the reference to point to the copy. */
8605 entry = *slot;
8606 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8607 }
8608 else
8609 {
8610 dw_die_ref parent = unit;
8611 dw_die_ref copy = clone_die (targ);
8612
8613 /* Record in DECL_TABLE that TARG has been copied.
8614 Need to do this now, before the recursive call,
8615 because DECL_TABLE may be expanded and SLOT
8616 would no longer be a valid pointer. */
8617 entry = XCNEW (struct decl_table_entry);
8618 entry->orig = targ;
8619 entry->copy = copy;
8620 *slot = entry;
8621
8622 /* If TARG is not a declaration DIE, we need to copy its
8623 children. */
8624 if (!is_declaration_die (targ))
8625 {
8626 FOR_EACH_CHILD (
8627 targ, c,
8628 add_child_die (copy,
8629 clone_tree_partial (c, decl_table)));
8630 }
8631
8632 /* Make sure the cloned tree is marked as part of the
8633 type unit. */
8634 mark_dies (copy);
8635
8636 /* If TARG has surrounding context, copy its ancestor tree
8637 into the new type unit. */
8638 if (targ->die_parent != NULL
8639 && !is_unit_die (targ->die_parent))
8640 parent = copy_ancestor_tree (unit, targ->die_parent,
8641 decl_table);
8642
8643 add_child_die (parent, copy);
8644 a->dw_attr_val.v.val_die_ref.die = copy;
8645
8646 /* Make sure the newly-copied DIE is walked. If it was
8647 installed in a previously-added context, it won't
8648 get visited otherwise. */
8649 if (parent != unit)
8650 {
8651 /* Find the highest point of the newly-added tree,
8652 mark each node along the way, and walk from there. */
8653 parent->die_mark = 1;
8654 while (parent->die_parent
8655 && parent->die_parent->die_mark == 0)
8656 {
8657 parent = parent->die_parent;
8658 parent->die_mark = 1;
8659 }
8660 copy_decls_walk (unit, parent, decl_table);
8661 }
8662 }
8663 }
8664 }
8665
8666 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8667 }
8668
8669 /* Copy declarations for "unworthy" types into the new comdat section.
8670 Incomplete types, modified types, and certain other types aren't broken
8671 out into comdat sections of their own, so they don't have a signature,
8672 and we need to copy the declaration into the same section so that we
8673 don't have an external reference. */
8674
8675 static void
8676 copy_decls_for_unworthy_types (dw_die_ref unit)
8677 {
8678 mark_dies (unit);
8679 decl_hash_type decl_table (10);
8680 copy_decls_walk (unit, unit, &decl_table);
8681 unmark_dies (unit);
8682 }
8683
8684 /* Traverse the DIE and add a sibling attribute if it may have the
8685 effect of speeding up access to siblings. To save some space,
8686 avoid generating sibling attributes for DIEs without children. */
8687
8688 static void
8689 add_sibling_attributes (dw_die_ref die)
8690 {
8691 dw_die_ref c;
8692
8693 if (! die->die_child)
8694 return;
8695
8696 if (die->die_parent && die != die->die_parent->die_child)
8697 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8698
8699 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8700 }
8701
8702 /* Output all location lists for the DIE and its children. */
8703
8704 static void
8705 output_location_lists (dw_die_ref die)
8706 {
8707 dw_die_ref c;
8708 dw_attr_node *a;
8709 unsigned ix;
8710
8711 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8712 if (AT_class (a) == dw_val_class_loc_list)
8713 output_loc_list (AT_loc_list (a));
8714
8715 FOR_EACH_CHILD (die, c, output_location_lists (c));
8716 }
8717
8718 /* During assign_location_list_indexes and output_loclists_offsets this is
8719 the current index; afterwards it is the number of assigned indexes
8720 (i.e. how large the .debug_loclists* offset table should be). */
8721 static unsigned int loc_list_idx;
8722
8723 /* Output all location list offsets for the DIE and its children. */
8724
8725 static void
8726 output_loclists_offsets (dw_die_ref die)
8727 {
8728 dw_die_ref c;
8729 dw_attr_node *a;
8730 unsigned ix;
8731
8732 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8733 if (AT_class (a) == dw_val_class_loc_list)
8734 {
8735 dw_loc_list_ref l = AT_loc_list (a);
8736 if (l->offset_emitted)
8737 continue;
8738 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8739 loc_section_label, NULL);
8740 gcc_assert (l->hash == loc_list_idx);
8741 loc_list_idx++;
8742 l->offset_emitted = true;
8743 }
8744
8745 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8746 }
8747
8748 /* Recursively set indexes of location lists. */
8749
8750 static void
8751 assign_location_list_indexes (dw_die_ref die)
8752 {
8753 dw_die_ref c;
8754 dw_attr_node *a;
8755 unsigned ix;
8756
8757 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8758 if (AT_class (a) == dw_val_class_loc_list)
8759 {
8760 dw_loc_list_ref list = AT_loc_list (a);
8761 if (!list->num_assigned)
8762 {
8763 list->num_assigned = true;
8764 list->hash = loc_list_idx++;
8765 }
8766 }
8767
8768 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8769 }
8770
8771 /* We want to limit the number of external references, because they are
8772 larger than local references: a relocation takes multiple words, and
8773 even a sig8 reference is always eight bytes, whereas a local reference
8774 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8775 So if we encounter multiple external references to the same type DIE, we
8776 make a local typedef stub for it and redirect all references there.
8777
8778 This is the element of the hash table for keeping track of these
8779 references. */
8780
8781 struct external_ref
8782 {
8783 dw_die_ref type;
8784 dw_die_ref stub;
8785 unsigned n_refs;
8786 };
8787
8788 /* Hashtable helpers. */
8789
8790 struct external_ref_hasher : free_ptr_hash <external_ref>
8791 {
8792 static inline hashval_t hash (const external_ref *);
8793 static inline bool equal (const external_ref *, const external_ref *);
8794 };
8795
8796 inline hashval_t
8797 external_ref_hasher::hash (const external_ref *r)
8798 {
8799 dw_die_ref die = r->type;
8800 hashval_t h = 0;
8801
8802 /* We can't use the address of the DIE for hashing, because
8803 that will make the order of the stub DIEs non-deterministic. */
8804 if (! die->comdat_type_p)
8805 /* We have a symbol; use it to compute a hash. */
8806 h = htab_hash_string (die->die_id.die_symbol);
8807 else
8808 {
8809 /* We have a type signature; use a subset of the bits as the hash.
8810 The 8-byte signature is at least as large as hashval_t. */
8811 comdat_type_node *type_node = die->die_id.die_type_node;
8812 memcpy (&h, type_node->signature, sizeof (h));
8813 }
8814 return h;
8815 }
8816
8817 inline bool
8818 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8819 {
8820 return r1->type == r2->type;
8821 }
8822
8823 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8824
8825 /* Return a pointer to the external_ref for references to DIE. */
8826
8827 static struct external_ref *
8828 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8829 {
8830 struct external_ref ref, *ref_p;
8831 external_ref **slot;
8832
8833 ref.type = die;
8834 slot = map->find_slot (&ref, INSERT);
8835 if (*slot != HTAB_EMPTY_ENTRY)
8836 return *slot;
8837
8838 ref_p = XCNEW (struct external_ref);
8839 ref_p->type = die;
8840 *slot = ref_p;
8841 return ref_p;
8842 }
8843
8844 /* Subroutine of optimize_external_refs, below.
8845
8846 If we see a type skeleton, record it as our stub. If we see external
8847 references, remember how many we've seen. */
8848
8849 static void
8850 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8851 {
8852 dw_die_ref c;
8853 dw_attr_node *a;
8854 unsigned ix;
8855 struct external_ref *ref_p;
8856
8857 if (is_type_die (die)
8858 && (c = get_AT_ref (die, DW_AT_signature)))
8859 {
8860 /* This is a local skeleton; use it for local references. */
8861 ref_p = lookup_external_ref (map, c);
8862 ref_p->stub = die;
8863 }
8864
8865 /* Scan the DIE references, and remember any that refer to DIEs from
8866 other CUs (i.e. those which are not marked). */
8867 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8868 if (AT_class (a) == dw_val_class_die_ref
8869 && (c = AT_ref (a))->die_mark == 0
8870 && is_type_die (c))
8871 {
8872 ref_p = lookup_external_ref (map, c);
8873 ref_p->n_refs++;
8874 }
8875
8876 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8877 }
8878
8879 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8880 points to an external_ref, DATA is the CU we're processing. If we don't
8881 already have a local stub, and we have multiple refs, build a stub. */
8882
8883 int
8884 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8885 {
8886 struct external_ref *ref_p = *slot;
8887
8888 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8889 {
8890 /* We have multiple references to this type, so build a small stub.
8891 Both of these forms are a bit dodgy from the perspective of the
8892 DWARF standard, since technically they should have names. */
8893 dw_die_ref cu = data;
8894 dw_die_ref type = ref_p->type;
8895 dw_die_ref stub = NULL;
8896
8897 if (type->comdat_type_p)
8898 {
8899 /* If we refer to this type via sig8, use AT_signature. */
8900 stub = new_die (type->die_tag, cu, NULL_TREE);
8901 add_AT_die_ref (stub, DW_AT_signature, type);
8902 }
8903 else
8904 {
8905 /* Otherwise, use a typedef with no name. */
8906 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8907 add_AT_die_ref (stub, DW_AT_type, type);
8908 }
8909
8910 stub->die_mark++;
8911 ref_p->stub = stub;
8912 }
8913 return 1;
8914 }
8915
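/* Illustrative sketch only (not part of the build): given two or more
   references from this CU to a type T defined in another unit, the stub
   built above looks roughly like

     DW_TAG_typedef            <- or T's own tag when T is a comdat type,
       DW_AT_type -> T            in which case DW_AT_signature is used

   and build_abbrev_table then redirects each in-CU reference to the stub,
   so every user pays only a small CU-local reference (DW_FORM_ref4 with
   4-byte offsets) instead of a DW_FORM_ref_addr or DW_FORM_ref_sig8 to
   the external DIE.  The exact savings depend on DWARF_OFFSET_SIZE and
   on how many references share the stub.  */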
8916 /* DIE is a unit; look through all the DIE references to see if there are
8917 any external references to types, and if so, create local stubs for
8918 them which will be applied in build_abbrev_table. This is useful because
8919 references to local DIEs are smaller. */
8920
8921 static external_ref_hash_type *
8922 optimize_external_refs (dw_die_ref die)
8923 {
8924 external_ref_hash_type *map = new external_ref_hash_type (10);
8925 optimize_external_refs_1 (die, map);
8926 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8927 return map;
8928 }
8929
8930 /* The following 3 variables are temporaries that are computed only during the
8931 build_abbrev_table call and used and released during the following
8932 optimize_abbrev_table call. */
8933
8934 /* First abbrev_id that can be optimized based on usage. */
8935 static unsigned int abbrev_opt_start;
8936
 8937 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
 8938 abbrev_id smaller than this, because they must already have been sized
 8939 during build_abbrev_table). */
8940 static unsigned int abbrev_opt_base_type_end;
8941
8942 /* Vector of usage counts during build_abbrev_table. Indexed by
8943 abbrev_id - abbrev_opt_start. */
8944 static vec<unsigned int> abbrev_usage_count;
8945
8946 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8947 static vec<dw_die_ref> sorted_abbrev_dies;
8948
8949 /* The format of each DIE (and its attribute value pairs) is encoded in an
8950 abbreviation table. This routine builds the abbreviation table and assigns
8951 a unique abbreviation id for each abbreviation entry. The children of each
8952 die are visited recursively. */
8953
8954 static void
8955 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8956 {
8957 unsigned int abbrev_id = 0;
8958 dw_die_ref c;
8959 dw_attr_node *a;
8960 unsigned ix;
8961 dw_die_ref abbrev;
8962
8963 /* Scan the DIE references, and replace any that refer to
8964 DIEs from other CUs (i.e. those which are not marked) with
8965 the local stubs we built in optimize_external_refs. */
8966 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8967 if (AT_class (a) == dw_val_class_die_ref
8968 && (c = AT_ref (a))->die_mark == 0)
8969 {
8970 struct external_ref *ref_p;
8971 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
8972
8973 ref_p = lookup_external_ref (extern_map, c);
8974 if (ref_p->stub && ref_p->stub != die)
8975 change_AT_die_ref (a, ref_p->stub);
8976 else
8977 /* We aren't changing this reference, so mark it external. */
8978 set_AT_ref_external (a, 1);
8979 }
8980
8981 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
8982 {
8983 dw_attr_node *die_a, *abbrev_a;
8984 unsigned ix;
8985 bool ok = true;
8986
8987 if (abbrev_id == 0)
8988 continue;
8989 if (abbrev->die_tag != die->die_tag)
8990 continue;
8991 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
8992 continue;
8993
8994 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
8995 continue;
8996
8997 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
8998 {
8999 abbrev_a = &(*abbrev->die_attr)[ix];
9000 if ((abbrev_a->dw_attr != die_a->dw_attr)
9001 || (value_format (abbrev_a) != value_format (die_a)))
9002 {
9003 ok = false;
9004 break;
9005 }
9006 }
9007 if (ok)
9008 break;
9009 }
9010
9011 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9012 {
9013 vec_safe_push (abbrev_die_table, die);
9014 if (abbrev_opt_start)
9015 abbrev_usage_count.safe_push (0);
9016 }
9017 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9018 {
9019 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9020 sorted_abbrev_dies.safe_push (die);
9021 }
9022
9023 die->die_abbrev = abbrev_id;
9024 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9025 }
9026
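/* Illustrative example only: the matching loop above lets two DIEs such as

     DW_TAG_variable  { DW_AT_name/DW_FORM_strp, DW_AT_type/DW_FORM_ref4 }
     DW_TAG_variable  { DW_AT_name/DW_FORM_strp, DW_AT_type/DW_FORM_ref4 }

   share a single abbreviation entry, because the tag, the children flag
   and every (attribute, form) pair agree.  A third DW_TAG_variable whose
   name happened to come out as DW_FORM_string instead of DW_FORM_strp
   would get a fresh abbrev_id, since value_format differs for that
   attribute.  */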
9027 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9028 by die_abbrev's usage count, from the most commonly used
9029 abbreviation to the least. */
9030
9031 static int
9032 die_abbrev_cmp (const void *p1, const void *p2)
9033 {
9034 dw_die_ref die1 = *(const dw_die_ref *) p1;
9035 dw_die_ref die2 = *(const dw_die_ref *) p2;
9036
9037 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9038 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9039
9040 if (die1->die_abbrev >= abbrev_opt_base_type_end
9041 && die2->die_abbrev >= abbrev_opt_base_type_end)
9042 {
9043 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9044 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9045 return -1;
9046 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9047 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9048 return 1;
9049 }
9050
9051 /* Stabilize the sort. */
9052 if (die1->die_abbrev < die2->die_abbrev)
9053 return -1;
9054 if (die1->die_abbrev > die2->die_abbrev)
9055 return 1;
9056
9057 return 0;
9058 }
9059
 9060 /* Convert dw_val_class_const, dw_val_class_unsigned_const and
 9061 dw_val_class_file attributes of the DIEs in
 9062 sorted_abbrev_dies[first_id .. end - 1] into dw_val_class_const_implicit,
 9063 dw_val_class_unsigned_const_implicit or dw_val_class_file_implicit. */
9064
9065 static void
9066 optimize_implicit_const (unsigned int first_id, unsigned int end,
9067 vec<bool> &implicit_consts)
9068 {
9069 /* It never makes sense if there is just one DIE using the abbreviation. */
9070 if (end < first_id + 2)
9071 return;
9072
9073 dw_attr_node *a;
9074 unsigned ix, i;
9075 dw_die_ref die = sorted_abbrev_dies[first_id];
9076 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9077 if (implicit_consts[ix])
9078 {
9079 enum dw_val_class new_class = dw_val_class_none;
9080 switch (AT_class (a))
9081 {
9082 case dw_val_class_unsigned_const:
9083 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9084 continue;
9085
9086 /* The .debug_abbrev section will grow by
9087 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9088 in all the DIEs using that abbreviation. */
9089 if (constant_size (AT_unsigned (a)) * (end - first_id)
9090 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9091 continue;
9092
9093 new_class = dw_val_class_unsigned_const_implicit;
9094 break;
9095
9096 case dw_val_class_const:
9097 new_class = dw_val_class_const_implicit;
9098 break;
9099
9100 case dw_val_class_file:
9101 new_class = dw_val_class_file_implicit;
9102 break;
9103
9104 default:
9105 continue;
9106 }
9107 for (i = first_id; i < end; i++)
9108 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9109 = new_class;
9110 }
9111 }
9112
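/* Worked example (illustrative only): a DW_AT_byte_size of 4 repeated in
   100 DIEs sharing one abbreviation costs constant_size (4) * 100
   = 100 bytes in .debug_info; converted to DW_FORM_implicit_const the
   value is stored once in .debug_abbrev as size_of_sleb128 (4) = 1 byte,
   so the size check above accepts the conversion.  With a single user the
   early return above already rejects it, because moving one byte into
   .debug_abbrev can never win.  */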
 9113 /* Attempt to optimize the abbreviation table: reassign the abbreviations
 9114 numbered abbrev_opt_start and above so the most used ones come first. */
9115
9116 static void
9117 optimize_abbrev_table (void)
9118 {
9119 if (abbrev_opt_start
9120 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9121 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9122 {
9123 auto_vec<bool, 32> implicit_consts;
9124 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9125
9126 unsigned int abbrev_id = abbrev_opt_start - 1;
9127 unsigned int first_id = ~0U;
9128 unsigned int last_abbrev_id = 0;
9129 unsigned int i;
9130 dw_die_ref die;
9131 if (abbrev_opt_base_type_end > abbrev_opt_start)
9132 abbrev_id = abbrev_opt_base_type_end - 1;
 9133 /* Reassign abbreviation ids from abbrev_opt_start above, so that the
 9134 most commonly used abbreviations come first. */
9135 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9136 {
9137 dw_attr_node *a;
9138 unsigned ix;
9139
9140 /* If calc_base_type_die_sizes has been called, the CU and
9141 base types after it can't be optimized, because we've already
9142 calculated their DIE offsets. We've sorted them first. */
9143 if (die->die_abbrev < abbrev_opt_base_type_end)
9144 continue;
9145 if (die->die_abbrev != last_abbrev_id)
9146 {
9147 last_abbrev_id = die->die_abbrev;
9148 if (dwarf_version >= 5 && first_id != ~0U)
9149 optimize_implicit_const (first_id, i, implicit_consts);
9150 abbrev_id++;
9151 (*abbrev_die_table)[abbrev_id] = die;
9152 if (dwarf_version >= 5)
9153 {
9154 first_id = i;
9155 implicit_consts.truncate (0);
9156
9157 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9158 switch (AT_class (a))
9159 {
9160 case dw_val_class_const:
9161 case dw_val_class_unsigned_const:
9162 case dw_val_class_file:
9163 implicit_consts.safe_push (true);
9164 break;
9165 default:
9166 implicit_consts.safe_push (false);
9167 break;
9168 }
9169 }
9170 }
9171 else if (dwarf_version >= 5)
9172 {
9173 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9174 if (!implicit_consts[ix])
9175 continue;
9176 else
9177 {
9178 dw_attr_node *other_a
9179 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9180 if (!dw_val_equal_p (&a->dw_attr_val,
9181 &other_a->dw_attr_val))
9182 implicit_consts[ix] = false;
9183 }
9184 }
9185 die->die_abbrev = abbrev_id;
9186 }
9187 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9188 if (dwarf_version >= 5 && first_id != ~0U)
9189 optimize_implicit_const (first_id, i, implicit_consts);
9190 }
9191
9192 abbrev_opt_start = 0;
9193 abbrev_opt_base_type_end = 0;
9194 abbrev_usage_count.release ();
9195 sorted_abbrev_dies.release ();
9196 }
9197 \f
9198 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9199
9200 static int
9201 constant_size (unsigned HOST_WIDE_INT value)
9202 {
9203 int log;
9204
9205 if (value == 0)
9206 log = 0;
9207 else
9208 log = floor_log2 (value);
9209
9210 log = log / 8;
9211 log = 1 << (floor_log2 (log) + 1);
9212
9213 return log;
9214 }
9215
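/* For reference, the mapping implied by the code above (illustrative
   values, not exercised anywhere):

     constant_size (0)           == 1
     constant_size (0xff)        == 1
     constant_size (0x100)       == 2
     constant_size (0xffff)      == 2
     constant_size (0x10000)     == 4
     constant_size (0xffffffff)  == 4
     anything larger             == 8

   i.e. the result is always 1, 2, 4 or 8 bytes.  */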
9216 /* Return the size of a DIE as it is represented in the
9217 .debug_info section. */
9218
9219 static unsigned long
9220 size_of_die (dw_die_ref die)
9221 {
9222 unsigned long size = 0;
9223 dw_attr_node *a;
9224 unsigned ix;
9225 enum dwarf_form form;
9226
9227 size += size_of_uleb128 (die->die_abbrev);
9228 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9229 {
9230 switch (AT_class (a))
9231 {
9232 case dw_val_class_addr:
9233 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9234 {
9235 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9236 size += size_of_uleb128 (AT_index (a));
9237 }
9238 else
9239 size += DWARF2_ADDR_SIZE;
9240 break;
9241 case dw_val_class_offset:
9242 size += DWARF_OFFSET_SIZE;
9243 break;
9244 case dw_val_class_loc:
9245 {
9246 unsigned long lsize = size_of_locs (AT_loc (a));
9247
9248 /* Block length. */
9249 if (dwarf_version >= 4)
9250 size += size_of_uleb128 (lsize);
9251 else
9252 size += constant_size (lsize);
9253 size += lsize;
9254 }
9255 break;
9256 case dw_val_class_loc_list:
9257 case dw_val_class_view_list:
9258 if (dwarf_split_debug_info && dwarf_version >= 5)
9259 {
9260 gcc_assert (AT_loc_list (a)->num_assigned);
9261 size += size_of_uleb128 (AT_loc_list (a)->hash);
9262 }
9263 else
9264 size += DWARF_OFFSET_SIZE;
9265 break;
9266 case dw_val_class_range_list:
9267 if (value_format (a) == DW_FORM_rnglistx)
9268 {
9269 gcc_assert (rnglist_idx);
9270 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9271 size += size_of_uleb128 (r->idx);
9272 }
9273 else
9274 size += DWARF_OFFSET_SIZE;
9275 break;
9276 case dw_val_class_const:
9277 size += size_of_sleb128 (AT_int (a));
9278 break;
9279 case dw_val_class_unsigned_const:
9280 {
9281 int csize = constant_size (AT_unsigned (a));
9282 if (dwarf_version == 3
9283 && a->dw_attr == DW_AT_data_member_location
9284 && csize >= 4)
9285 size += size_of_uleb128 (AT_unsigned (a));
9286 else
9287 size += csize;
9288 }
9289 break;
9290 case dw_val_class_const_implicit:
9291 case dw_val_class_unsigned_const_implicit:
9292 case dw_val_class_file_implicit:
9293 /* These occupy no size in the DIE, just an extra sleb128 in
9294 .debug_abbrev. */
9295 break;
9296 case dw_val_class_const_double:
9297 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9298 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9299 size++; /* block */
9300 break;
9301 case dw_val_class_wide_int:
9302 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9303 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9304 if (get_full_len (*a->dw_attr_val.v.val_wide)
9305 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9306 size++; /* block */
9307 break;
9308 case dw_val_class_vec:
9309 size += constant_size (a->dw_attr_val.v.val_vec.length
9310 * a->dw_attr_val.v.val_vec.elt_size)
9311 + a->dw_attr_val.v.val_vec.length
9312 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9313 break;
9314 case dw_val_class_flag:
9315 if (dwarf_version >= 4)
9316 /* Currently all add_AT_flag calls pass in 1 as last argument,
9317 so DW_FORM_flag_present can be used. If that ever changes,
9318 we'll need to use DW_FORM_flag and have some optimization
9319 in build_abbrev_table that will change those to
9320 DW_FORM_flag_present if it is set to 1 in all DIEs using
9321 the same abbrev entry. */
9322 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9323 else
9324 size += 1;
9325 break;
9326 case dw_val_class_die_ref:
9327 if (AT_ref_external (a))
9328 {
9329 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9330 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9331 is sized by target address length, whereas in DWARF3
9332 it's always sized as an offset. */
9333 if (use_debug_types)
9334 size += DWARF_TYPE_SIGNATURE_SIZE;
9335 else if (dwarf_version == 2)
9336 size += DWARF2_ADDR_SIZE;
9337 else
9338 size += DWARF_OFFSET_SIZE;
9339 }
9340 else
9341 size += DWARF_OFFSET_SIZE;
9342 break;
9343 case dw_val_class_fde_ref:
9344 size += DWARF_OFFSET_SIZE;
9345 break;
9346 case dw_val_class_lbl_id:
9347 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9348 {
9349 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9350 size += size_of_uleb128 (AT_index (a));
9351 }
9352 else
9353 size += DWARF2_ADDR_SIZE;
9354 break;
9355 case dw_val_class_lineptr:
9356 case dw_val_class_macptr:
9357 case dw_val_class_loclistsptr:
9358 size += DWARF_OFFSET_SIZE;
9359 break;
9360 case dw_val_class_str:
9361 form = AT_string_form (a);
9362 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9363 size += DWARF_OFFSET_SIZE;
9364 else if (form == DW_FORM_GNU_str_index)
9365 size += size_of_uleb128 (AT_index (a));
9366 else
9367 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9368 break;
9369 case dw_val_class_file:
9370 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9371 break;
9372 case dw_val_class_data8:
9373 size += 8;
9374 break;
9375 case dw_val_class_vms_delta:
9376 size += DWARF_OFFSET_SIZE;
9377 break;
9378 case dw_val_class_high_pc:
9379 size += DWARF2_ADDR_SIZE;
9380 break;
9381 case dw_val_class_discr_value:
9382 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9383 break;
9384 case dw_val_class_discr_list:
9385 {
9386 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9387
9388 /* This is a block, so we have the block length and then its
9389 data. */
9390 size += constant_size (block_size) + block_size;
9391 }
9392 break;
9393 default:
9394 gcc_unreachable ();
9395 }
9396 }
9397
9398 return size;
9399 }
9400
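/* Worked example (illustrative only, assuming DWARF_OFFSET_SIZE == 4, an
   abbrev code below 128 and a name emitted as DW_FORM_strp): a typical
   DW_TAG_base_type DIE with DW_AT_byte_size, DW_AT_encoding and
   DW_AT_name sizes to

     1 (abbrev uleb128) + 1 (data1) + 1 (data1) + 4 (strp offset) = 7

   bytes in .debug_info.  */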
9401 /* Size the debugging information associated with a given DIE. Visits the
 9402 DIE's children recursively. Updates the global variable next_die_offset
 9403 each time through. Uses the current value of next_die_offset to update the
9404 die_offset field in each DIE. */
9405
9406 static void
9407 calc_die_sizes (dw_die_ref die)
9408 {
9409 dw_die_ref c;
9410
9411 gcc_assert (die->die_offset == 0
9412 || (unsigned long int) die->die_offset == next_die_offset);
9413 die->die_offset = next_die_offset;
9414 next_die_offset += size_of_die (die);
9415
9416 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9417
9418 if (die->die_child != NULL)
9419 /* Count the null byte used to terminate sibling lists. */
9420 next_die_offset += 1;
9421 }
9422
 9423 /* Size just the base type children at the start of the CU.
 9424 This is needed because build_abbrev_table needs to size location
 9425 descriptions, and sizing of type-based stack ops needs to know the
 9426 die_offset values of the base types. */
9427
9428 static void
9429 calc_base_type_die_sizes (void)
9430 {
9431 unsigned long die_offset = (dwarf_split_debug_info
9432 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9433 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9434 unsigned int i;
9435 dw_die_ref base_type;
9436 #if ENABLE_ASSERT_CHECKING
9437 dw_die_ref prev = comp_unit_die ()->die_child;
9438 #endif
9439
9440 die_offset += size_of_die (comp_unit_die ());
9441 for (i = 0; base_types.iterate (i, &base_type); i++)
9442 {
9443 #if ENABLE_ASSERT_CHECKING
9444 gcc_assert (base_type->die_offset == 0
9445 && prev->die_sib == base_type
9446 && base_type->die_child == NULL
9447 && base_type->die_abbrev);
9448 prev = base_type;
9449 #endif
9450 if (abbrev_opt_start
9451 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9452 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9453 base_type->die_offset = die_offset;
9454 die_offset += size_of_die (base_type);
9455 }
9456 }
9457
9458 /* Set the marks for a die and its children. We do this so
 9459 that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9460 DIEs in the same CU will be marked. We used to clear out the offset
9461 and use that as the flag, but ran into ordering problems. */
9462
9463 static void
9464 mark_dies (dw_die_ref die)
9465 {
9466 dw_die_ref c;
9467
9468 gcc_assert (!die->die_mark);
9469
9470 die->die_mark = 1;
9471 FOR_EACH_CHILD (die, c, mark_dies (c));
9472 }
9473
9474 /* Clear the marks for a die and its children. */
9475
9476 static void
9477 unmark_dies (dw_die_ref die)
9478 {
9479 dw_die_ref c;
9480
9481 if (! use_debug_types)
9482 gcc_assert (die->die_mark);
9483
9484 die->die_mark = 0;
9485 FOR_EACH_CHILD (die, c, unmark_dies (c));
9486 }
9487
9488 /* Clear the marks for a die, its children and referred dies. */
9489
9490 static void
9491 unmark_all_dies (dw_die_ref die)
9492 {
9493 dw_die_ref c;
9494 dw_attr_node *a;
9495 unsigned ix;
9496
9497 if (!die->die_mark)
9498 return;
9499 die->die_mark = 0;
9500
9501 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9502
9503 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9504 if (AT_class (a) == dw_val_class_die_ref)
9505 unmark_all_dies (AT_ref (a));
9506 }
9507
 9508 /* Calculate if the entry should appear in the final output file. It may be
 9509 from a pruned type. */
9510
9511 static bool
9512 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9513 {
9514 /* By limiting gnu pubnames to definitions only, gold can generate a
9515 gdb index without entries for declarations, which don't include
9516 enough information to be useful. */
9517 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9518 return false;
9519
9520 if (table == pubname_table)
9521 {
9522 /* Enumerator names are part of the pubname table, but the
9523 parent DW_TAG_enumeration_type die may have been pruned.
9524 Don't output them if that is the case. */
 9525 if (p->die->die_tag == DW_TAG_enumerator
 9526 && (p->die->die_parent == NULL
 9527 || !p->die->die_parent->die_perennial_p))
9528 return false;
9529
9530 /* Everything else in the pubname table is included. */
9531 return true;
9532 }
9533
9534 /* The pubtypes table shouldn't include types that have been
9535 pruned. */
9536 return (p->die->die_offset != 0
9537 || !flag_eliminate_unused_debug_types);
9538 }
9539
9540 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9541 generated for the compilation unit. */
9542
9543 static unsigned long
9544 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9545 {
9546 unsigned long size;
9547 unsigned i;
9548 pubname_entry *p;
9549 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9550
9551 size = DWARF_PUBNAMES_HEADER_SIZE;
9552 FOR_EACH_VEC_ELT (*names, i, p)
9553 if (include_pubname_in_output (names, p))
9554 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9555
9556 size += DWARF_OFFSET_SIZE;
9557 return size;
9558 }
9559
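/* Worked example (illustrative only): with 4-byte offsets and
   debug_generate_pub_sections != 2, a pubname entry for "main"
   contributes 4 (DIE offset) + strlen ("main") + 1 = 9 bytes, on top of
   the fixed header and the terminating zero offset counted above.  */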
9560 /* Return the size of the information in the .debug_aranges section. */
9561
9562 static unsigned long
9563 size_of_aranges (void)
9564 {
9565 unsigned long size;
9566
9567 size = DWARF_ARANGES_HEADER_SIZE;
9568
9569 /* Count the address/length pair for this compilation unit. */
9570 if (text_section_used)
9571 size += 2 * DWARF2_ADDR_SIZE;
9572 if (cold_text_section_used)
9573 size += 2 * DWARF2_ADDR_SIZE;
9574 if (have_multiple_function_sections)
9575 {
9576 unsigned fde_idx;
9577 dw_fde_ref fde;
9578
9579 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9580 {
9581 if (DECL_IGNORED_P (fde->decl))
9582 continue;
9583 if (!fde->in_std_section)
9584 size += 2 * DWARF2_ADDR_SIZE;
9585 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9586 size += 2 * DWARF2_ADDR_SIZE;
9587 }
9588 }
9589
 9590 /* Count the two zero words used to terminate the address range table. */
9591 size += 2 * DWARF2_ADDR_SIZE;
9592 return size;
9593 }
9594 \f
9595 /* Select the encoding of an attribute value. */
9596
9597 static enum dwarf_form
9598 value_format (dw_attr_node *a)
9599 {
9600 switch (AT_class (a))
9601 {
9602 case dw_val_class_addr:
9603 /* Only very few attributes allow DW_FORM_addr. */
9604 switch (a->dw_attr)
9605 {
9606 case DW_AT_low_pc:
9607 case DW_AT_high_pc:
9608 case DW_AT_entry_pc:
9609 case DW_AT_trampoline:
9610 return (AT_index (a) == NOT_INDEXED
9611 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9612 default:
9613 break;
9614 }
9615 switch (DWARF2_ADDR_SIZE)
9616 {
9617 case 1:
9618 return DW_FORM_data1;
9619 case 2:
9620 return DW_FORM_data2;
9621 case 4:
9622 return DW_FORM_data4;
9623 case 8:
9624 return DW_FORM_data8;
9625 default:
9626 gcc_unreachable ();
9627 }
9628 case dw_val_class_loc_list:
9629 case dw_val_class_view_list:
9630 if (dwarf_split_debug_info
9631 && dwarf_version >= 5
9632 && AT_loc_list (a)->num_assigned)
9633 return DW_FORM_loclistx;
9634 /* FALLTHRU */
9635 case dw_val_class_range_list:
 9636 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo,
 9637 but in .debug_info use DW_FORM_sec_offset. The latter is shorter if we
 9638 care about the sizes of the .debug* sections in shared libraries and
 9639 executables and don't take into account relocations that affect just
 9640 relocatable objects - for DW_FORM_rnglistx we'd also have to emit an
 9641 offset table in the .debug_rnglists section. */
9642 if (dwarf_split_debug_info
9643 && dwarf_version >= 5
9644 && AT_class (a) == dw_val_class_range_list
9645 && rnglist_idx
9646 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9647 return DW_FORM_rnglistx;
9648 if (dwarf_version >= 4)
9649 return DW_FORM_sec_offset;
9650 /* FALLTHRU */
9651 case dw_val_class_vms_delta:
9652 case dw_val_class_offset:
9653 switch (DWARF_OFFSET_SIZE)
9654 {
9655 case 4:
9656 return DW_FORM_data4;
9657 case 8:
9658 return DW_FORM_data8;
9659 default:
9660 gcc_unreachable ();
9661 }
9662 case dw_val_class_loc:
9663 if (dwarf_version >= 4)
9664 return DW_FORM_exprloc;
9665 switch (constant_size (size_of_locs (AT_loc (a))))
9666 {
9667 case 1:
9668 return DW_FORM_block1;
9669 case 2:
9670 return DW_FORM_block2;
9671 case 4:
9672 return DW_FORM_block4;
9673 default:
9674 gcc_unreachable ();
9675 }
9676 case dw_val_class_const:
9677 return DW_FORM_sdata;
9678 case dw_val_class_unsigned_const:
9679 switch (constant_size (AT_unsigned (a)))
9680 {
9681 case 1:
9682 return DW_FORM_data1;
9683 case 2:
9684 return DW_FORM_data2;
9685 case 4:
9686 /* In DWARF3 DW_AT_data_member_location with
9687 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9688 constant, so we need to use DW_FORM_udata if we need
9689 a large constant. */
9690 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9691 return DW_FORM_udata;
9692 return DW_FORM_data4;
9693 case 8:
9694 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9695 return DW_FORM_udata;
9696 return DW_FORM_data8;
9697 default:
9698 gcc_unreachable ();
9699 }
9700 case dw_val_class_const_implicit:
9701 case dw_val_class_unsigned_const_implicit:
9702 case dw_val_class_file_implicit:
9703 return DW_FORM_implicit_const;
9704 case dw_val_class_const_double:
9705 switch (HOST_BITS_PER_WIDE_INT)
9706 {
9707 case 8:
9708 return DW_FORM_data2;
9709 case 16:
9710 return DW_FORM_data4;
9711 case 32:
9712 return DW_FORM_data8;
9713 case 64:
9714 if (dwarf_version >= 5)
9715 return DW_FORM_data16;
9716 /* FALLTHRU */
9717 default:
9718 return DW_FORM_block1;
9719 }
9720 case dw_val_class_wide_int:
9721 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9722 {
9723 case 8:
9724 return DW_FORM_data1;
9725 case 16:
9726 return DW_FORM_data2;
9727 case 32:
9728 return DW_FORM_data4;
9729 case 64:
9730 return DW_FORM_data8;
9731 case 128:
9732 if (dwarf_version >= 5)
9733 return DW_FORM_data16;
9734 /* FALLTHRU */
9735 default:
9736 return DW_FORM_block1;
9737 }
9738 case dw_val_class_vec:
9739 switch (constant_size (a->dw_attr_val.v.val_vec.length
9740 * a->dw_attr_val.v.val_vec.elt_size))
9741 {
9742 case 1:
9743 return DW_FORM_block1;
9744 case 2:
9745 return DW_FORM_block2;
9746 case 4:
9747 return DW_FORM_block4;
9748 default:
9749 gcc_unreachable ();
9750 }
9751 case dw_val_class_flag:
9752 if (dwarf_version >= 4)
9753 {
9754 /* Currently all add_AT_flag calls pass in 1 as last argument,
9755 so DW_FORM_flag_present can be used. If that ever changes,
9756 we'll need to use DW_FORM_flag and have some optimization
9757 in build_abbrev_table that will change those to
9758 DW_FORM_flag_present if it is set to 1 in all DIEs using
9759 the same abbrev entry. */
9760 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9761 return DW_FORM_flag_present;
9762 }
9763 return DW_FORM_flag;
9764 case dw_val_class_die_ref:
9765 if (AT_ref_external (a))
9766 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9767 else
9768 return DW_FORM_ref;
9769 case dw_val_class_fde_ref:
9770 return DW_FORM_data;
9771 case dw_val_class_lbl_id:
9772 return (AT_index (a) == NOT_INDEXED
9773 ? DW_FORM_addr : DW_FORM_GNU_addr_index);
9774 case dw_val_class_lineptr:
9775 case dw_val_class_macptr:
9776 case dw_val_class_loclistsptr:
9777 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9778 case dw_val_class_str:
9779 return AT_string_form (a);
9780 case dw_val_class_file:
9781 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9782 {
9783 case 1:
9784 return DW_FORM_data1;
9785 case 2:
9786 return DW_FORM_data2;
9787 case 4:
9788 return DW_FORM_data4;
9789 default:
9790 gcc_unreachable ();
9791 }
9792
9793 case dw_val_class_data8:
9794 return DW_FORM_data8;
9795
9796 case dw_val_class_high_pc:
9797 switch (DWARF2_ADDR_SIZE)
9798 {
9799 case 1:
9800 return DW_FORM_data1;
9801 case 2:
9802 return DW_FORM_data2;
9803 case 4:
9804 return DW_FORM_data4;
9805 case 8:
9806 return DW_FORM_data8;
9807 default:
9808 gcc_unreachable ();
9809 }
9810
9811 case dw_val_class_discr_value:
9812 return (a->dw_attr_val.v.val_discr_value.pos
9813 ? DW_FORM_udata
9814 : DW_FORM_sdata);
9815 case dw_val_class_discr_list:
9816 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9817 {
9818 case 1:
9819 return DW_FORM_block1;
9820 case 2:
9821 return DW_FORM_block2;
9822 case 4:
9823 return DW_FORM_block4;
9824 default:
9825 gcc_unreachable ();
9826 }
9827
9828 default:
9829 gcc_unreachable ();
9830 }
9831 }
9832
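/* A few illustrative mappings implied by the code above (not exhaustive):

     unsigned constant 300                      -> DW_FORM_data2
     signed constant (any value)                -> DW_FORM_sdata
     flag, -gdwarf-4 and later                  -> DW_FORM_flag_present
     DW_AT_data_member_location needing 4 bytes
       under -gdwarf-3                          -> DW_FORM_udata
     local DIE reference, 4-byte offsets        -> DW_FORM_ref4  */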
9833 /* Output the encoding of an attribute value. */
9834
9835 static void
9836 output_value_format (dw_attr_node *a)
9837 {
9838 enum dwarf_form form = value_format (a);
9839
9840 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9841 }
9842
9843 /* Given a die and id, produce the appropriate abbreviations. */
9844
9845 static void
9846 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9847 {
9848 unsigned ix;
9849 dw_attr_node *a_attr;
9850
9851 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9852 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9853 dwarf_tag_name (abbrev->die_tag));
9854
9855 if (abbrev->die_child != NULL)
9856 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9857 else
9858 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9859
9860 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9861 {
9862 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9863 dwarf_attr_name (a_attr->dw_attr));
9864 output_value_format (a_attr);
9865 if (value_format (a_attr) == DW_FORM_implicit_const)
9866 {
9867 if (AT_class (a_attr) == dw_val_class_file_implicit)
9868 {
9869 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9870 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9871 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9872 }
9873 else
9874 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9875 }
9876 }
9877
9878 dw2_asm_output_data (1, 0, NULL);
9879 dw2_asm_output_data (1, 0, NULL);
9880 }
9881
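/* Illustrative only: on a target with .uleb128 support this routine
   emits, for a hypothetical base-type abbreviation, roughly

	.uleb128 0x3	; (abbrev code)
	.uleb128 0x24	; (TAG: DW_TAG_base_type)
	.byte	0	; DW_children_no
	.uleb128 0xb	; (DW_AT_byte_size)
	.uleb128 0xb	; (DW_FORM_data1)
	.uleb128 0x3e	; (DW_AT_encoding)
	.uleb128 0xb	; (DW_FORM_data1)
	.uleb128 0x3	; (DW_AT_name)
	.uleb128 0xe	; (DW_FORM_strp)
	.byte	0
	.byte	0

   with the comment character and exact directives depending on the
   target assembler.  */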
9882
9883 /* Output the .debug_abbrev section which defines the DIE abbreviation
9884 table. */
9885
9886 static void
9887 output_abbrev_section (void)
9888 {
9889 unsigned int abbrev_id;
9890 dw_die_ref abbrev;
9891
9892 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9893 if (abbrev_id != 0)
9894 output_die_abbrevs (abbrev_id, abbrev);
9895
9896 /* Terminate the table. */
9897 dw2_asm_output_data (1, 0, NULL);
9898 }
9899
9900 /* Return a new location list, given the begin and end range, and the
9901 expression. */
9902
9903 static inline dw_loc_list_ref
9904 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9905 const char *end, var_loc_view vend,
9906 const char *section)
9907 {
9908 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9909
9910 retlist->begin = begin;
9911 retlist->begin_entry = NULL;
9912 retlist->end = end;
9913 retlist->expr = expr;
9914 retlist->section = section;
9915 retlist->vbegin = vbegin;
9916 retlist->vend = vend;
9917
9918 return retlist;
9919 }
9920
9921 /* Return true iff there's any nonzero view number in the loc list. */
9922
9923 static bool
9924 loc_list_has_views (dw_loc_list_ref list)
9925 {
9926 if (!debug_variable_location_views)
9927 return false;
9928
9929 for (dw_loc_list_ref loc = list;
9930 loc != NULL; loc = loc->dw_loc_next)
9931 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
9932 return true;
9933
9934 return false;
9935 }
9936
9937 /* Generate a new internal symbol for this location list node, if it
9938 hasn't got one yet. */
9939
9940 static inline void
9941 gen_llsym (dw_loc_list_ref list)
9942 {
9943 gcc_assert (!list->ll_symbol);
9944 list->ll_symbol = gen_internal_sym ("LLST");
9945
9946 if (!loc_list_has_views (list))
9947 return;
9948
9949 if (dwarf2out_locviews_in_attribute ())
9950 {
9951 /* Use the same label_num for the view list. */
9952 label_num--;
9953 list->vl_symbol = gen_internal_sym ("LVUS");
9954 }
9955 else
9956 list->vl_symbol = list->ll_symbol;
9957 }
9958
9959 /* Generate a symbol for the list, but only if we really want to emit
9960 it as a list. */
9961
9962 static inline void
9963 maybe_gen_llsym (dw_loc_list_ref list)
9964 {
9965 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
9966 return;
9967
9968 gen_llsym (list);
9969 }
9970
9971 /* Determine whether or not to skip loc_list entry CURR. If we're not
9972 to skip it, and SIZEP is non-null, store the size of CURR->expr's
9973 representation in *SIZEP. */
9974
9975 static bool
9976 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = 0)
9977 {
9978 /* Don't output an entry that starts and ends at the same address. */
9979 if (strcmp (curr->begin, curr->end) == 0
9980 && curr->vbegin == curr->vend && !curr->force)
9981 return true;
9982
9983 unsigned long size = size_of_locs (curr->expr);
9984
9985 /* If the expression is too large, drop it on the floor. We could
9986 perhaps put it into DW_TAG_dwarf_procedure and refer to that
9987 in the expression, but >= 64KB expressions for a single value
 9988 in a single range are unlikely to be very useful. */
9989 if (dwarf_version < 5 && size > 0xffff)
9990 return true;
9991
9992 if (sizep)
9993 *sizep = size;
9994
9995 return false;
9996 }
9997
9998 /* Output a view pair loclist entry for CURR, if it requires one. */
9999
10000 static void
10001 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10002 {
10003 if (!dwarf2out_locviews_in_loclist ())
10004 return;
10005
10006 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10007 return;
10008
10009 #ifdef DW_LLE_view_pair
10010 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10011
10012 if (dwarf2out_as_locview_support)
10013 {
10014 if (ZERO_VIEW_P (curr->vbegin))
10015 dw2_asm_output_data_uleb128 (0, "Location view begin");
10016 else
10017 {
10018 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10019 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10020 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10021 }
10022
10023 if (ZERO_VIEW_P (curr->vend))
10024 dw2_asm_output_data_uleb128 (0, "Location view end");
10025 else
10026 {
10027 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10028 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10029 dw2_asm_output_symname_uleb128 (label, "Location view end");
10030 }
10031 }
10032 else
10033 {
10034 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10035 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10036 }
10037 #endif /* DW_LLE_view_pair */
10038
10039 return;
10040 }
10041
10042 /* Output the location list given to us. */
10043
10044 static void
10045 output_loc_list (dw_loc_list_ref list_head)
10046 {
10047 int vcount = 0, lcount = 0;
10048
10049 if (list_head->emitted)
10050 return;
10051 list_head->emitted = true;
10052
10053 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10054 {
10055 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10056
10057 for (dw_loc_list_ref curr = list_head; curr != NULL;
10058 curr = curr->dw_loc_next)
10059 {
10060 if (skip_loc_list_entry (curr))
10061 continue;
10062
10063 vcount++;
10064
10065 /* ?? dwarf_split_debug_info? */
10066 if (dwarf2out_as_locview_support)
10067 {
10068 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10069
10070 if (!ZERO_VIEW_P (curr->vbegin))
10071 {
10072 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10073 dw2_asm_output_symname_uleb128 (label,
10074 "View list begin (%s)",
10075 list_head->vl_symbol);
10076 }
10077 else
10078 dw2_asm_output_data_uleb128 (0,
10079 "View list begin (%s)",
10080 list_head->vl_symbol);
10081
10082 if (!ZERO_VIEW_P (curr->vend))
10083 {
10084 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10085 dw2_asm_output_symname_uleb128 (label,
10086 "View list end (%s)",
10087 list_head->vl_symbol);
10088 }
10089 else
10090 dw2_asm_output_data_uleb128 (0,
10091 "View list end (%s)",
10092 list_head->vl_symbol);
10093 }
10094 else
10095 {
10096 dw2_asm_output_data_uleb128 (curr->vbegin,
10097 "View list begin (%s)",
10098 list_head->vl_symbol);
10099 dw2_asm_output_data_uleb128 (curr->vend,
10100 "View list end (%s)",
10101 list_head->vl_symbol);
10102 }
10103 }
10104 }
10105
10106 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10107
10108 const char *last_section = NULL;
10109 const char *base_label = NULL;
10110
10111 /* Walk the location list, and output each range + expression. */
10112 for (dw_loc_list_ref curr = list_head; curr != NULL;
10113 curr = curr->dw_loc_next)
10114 {
10115 unsigned long size;
10116
10117 /* Skip this entry? If we skip it here, we must skip it in the
10118 view list above as well. */
10119 if (skip_loc_list_entry (curr, &size))
10120 continue;
10121
10122 lcount++;
10123
10124 if (dwarf_version >= 5)
10125 {
10126 if (dwarf_split_debug_info)
10127 {
10128 dwarf2out_maybe_output_loclist_view_pair (curr);
 10129 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
 10130 uleb128 index into .debug_addr and a uleb128 length. */
10131 dw2_asm_output_data (1, DW_LLE_startx_length,
10132 "DW_LLE_startx_length (%s)",
10133 list_head->ll_symbol);
10134 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10135 "Location list range start index "
10136 "(%s)", curr->begin);
10137 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10138 For that case we probably need to emit DW_LLE_startx_endx,
10139 but we'd need 2 .debug_addr entries rather than just one. */
10140 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10141 "Location list length (%s)",
10142 list_head->ll_symbol);
10143 }
10144 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10145 {
10146 dwarf2out_maybe_output_loclist_view_pair (curr);
10147 /* If all code is in .text section, the base address is
10148 already provided by the CU attributes. Use
10149 DW_LLE_offset_pair where both addresses are uleb128 encoded
10150 offsets against that base. */
10151 dw2_asm_output_data (1, DW_LLE_offset_pair,
10152 "DW_LLE_offset_pair (%s)",
10153 list_head->ll_symbol);
10154 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10155 "Location list begin address (%s)",
10156 list_head->ll_symbol);
10157 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10158 "Location list end address (%s)",
10159 list_head->ll_symbol);
10160 }
10161 else if (HAVE_AS_LEB128)
10162 {
10163 /* Otherwise, find out how many consecutive entries could share
10164 the same base entry. If just one, emit DW_LLE_start_length,
10165 otherwise emit DW_LLE_base_address for the base address
10166 followed by a series of DW_LLE_offset_pair. */
10167 if (last_section == NULL || curr->section != last_section)
10168 {
10169 dw_loc_list_ref curr2;
10170 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10171 curr2 = curr2->dw_loc_next)
10172 {
10173 if (strcmp (curr2->begin, curr2->end) == 0
10174 && !curr2->force)
10175 continue;
10176 break;
10177 }
10178 if (curr2 == NULL || curr->section != curr2->section)
10179 last_section = NULL;
10180 else
10181 {
10182 last_section = curr->section;
10183 base_label = curr->begin;
10184 dw2_asm_output_data (1, DW_LLE_base_address,
10185 "DW_LLE_base_address (%s)",
10186 list_head->ll_symbol);
10187 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10188 "Base address (%s)",
10189 list_head->ll_symbol);
10190 }
10191 }
10192 /* Only one entry with the same base address. Use
10193 DW_LLE_start_length with absolute address and uleb128
10194 length. */
10195 if (last_section == NULL)
10196 {
10197 dwarf2out_maybe_output_loclist_view_pair (curr);
10198 dw2_asm_output_data (1, DW_LLE_start_length,
10199 "DW_LLE_start_length (%s)",
10200 list_head->ll_symbol);
10201 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10202 "Location list begin address (%s)",
10203 list_head->ll_symbol);
10204 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10205 "Location list length "
10206 "(%s)", list_head->ll_symbol);
10207 }
10208 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10209 DW_LLE_base_address. */
10210 else
10211 {
10212 dwarf2out_maybe_output_loclist_view_pair (curr);
10213 dw2_asm_output_data (1, DW_LLE_offset_pair,
10214 "DW_LLE_offset_pair (%s)",
10215 list_head->ll_symbol);
10216 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10217 "Location list begin address "
10218 "(%s)", list_head->ll_symbol);
10219 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10220 "Location list end address "
10221 "(%s)", list_head->ll_symbol);
10222 }
10223 }
 10224 /* The assembler does not support the .uleb128 directive. Emit
 10225 DW_LLE_start_end with a pair of absolute addresses. */
10226 else
10227 {
10228 dwarf2out_maybe_output_loclist_view_pair (curr);
10229 dw2_asm_output_data (1, DW_LLE_start_end,
10230 "DW_LLE_start_end (%s)",
10231 list_head->ll_symbol);
10232 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10233 "Location list begin address (%s)",
10234 list_head->ll_symbol);
10235 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10236 "Location list end address (%s)",
10237 list_head->ll_symbol);
10238 }
10239 }
10240 else if (dwarf_split_debug_info)
10241 {
 10242 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit an index into .debug_addr
 10243 and a 4-byte length. */
10244 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10245 "Location list start/length entry (%s)",
10246 list_head->ll_symbol);
10247 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10248 "Location list range start index (%s)",
10249 curr->begin);
10250 /* The length field is 4 bytes. If we ever need to support
10251 an 8-byte length, we can add a new DW_LLE code or fall back
10252 to DW_LLE_GNU_start_end_entry. */
10253 dw2_asm_output_delta (4, curr->end, curr->begin,
10254 "Location list range length (%s)",
10255 list_head->ll_symbol);
10256 }
10257 else if (!have_multiple_function_sections)
10258 {
10259 /* Pair of relative addresses against start of text section. */
10260 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10261 "Location list begin address (%s)",
10262 list_head->ll_symbol);
10263 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10264 "Location list end address (%s)",
10265 list_head->ll_symbol);
10266 }
10267 else
10268 {
10269 /* Pair of absolute addresses. */
10270 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10271 "Location list begin address (%s)",
10272 list_head->ll_symbol);
10273 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10274 "Location list end address (%s)",
10275 list_head->ll_symbol);
10276 }
10277
10278 /* Output the block length for this list of location operations. */
10279 if (dwarf_version >= 5)
10280 dw2_asm_output_data_uleb128 (size, "Location expression size");
10281 else
10282 {
10283 gcc_assert (size <= 0xffff);
10284 dw2_asm_output_data (2, size, "Location expression size");
10285 }
10286
10287 output_loc_sequence (curr->expr, -1);
10288 }
10289
10290 /* And finally list termination. */
10291 if (dwarf_version >= 5)
10292 dw2_asm_output_data (1, DW_LLE_end_of_list,
10293 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10294 else if (dwarf_split_debug_info)
10295 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10296 "Location list terminator (%s)",
10297 list_head->ll_symbol);
10298 else
10299 {
10300 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10301 "Location list terminator begin (%s)",
10302 list_head->ll_symbol);
10303 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10304 "Location list terminator end (%s)",
10305 list_head->ll_symbol);
10306 }
10307
10308 gcc_assert (!list_head->vl_symbol
10309 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10310 }
10311
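/* Illustrative only (hypothetical labels, single-section DWARF 5 case):
   the DW_LLE_offset_pair path above emits one entry roughly as

	.byte	0x4			; DW_LLE_offset_pair (.LLST0)
	.uleb128 .LVL1-.Ltext0		; Location list begin address (.LLST0)
	.uleb128 .LVL2-.Ltext0		; Location list end address (.LLST0)
	.uleb128 0x1			; Location expression size
	.byte	0x50			; DW_OP_reg0

   and the whole list ends with a single

	.byte	0			; DW_LLE_end_of_list (.LLST0)

   Directives and the comment character vary with the target assembler.  */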
10312 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10313 section. Emit a relocated reference if val_entry is NULL, otherwise,
10314 emit an indirect reference. */
10315
10316 static void
10317 output_range_list_offset (dw_attr_node *a)
10318 {
10319 const char *name = dwarf_attr_name (a->dw_attr);
10320
10321 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10322 {
10323 if (dwarf_version >= 5)
10324 {
10325 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10326 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10327 debug_ranges_section, "%s", name);
10328 }
10329 else
10330 {
10331 char *p = strchr (ranges_section_label, '\0');
10332 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10333 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10334 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10335 debug_ranges_section, "%s", name);
10336 *p = '\0';
10337 }
10338 }
10339 else if (dwarf_version >= 5)
10340 {
10341 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10342 gcc_assert (rnglist_idx);
10343 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10344 }
10345 else
10346 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10347 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10348 "%s (offset from %s)", name, ranges_section_label);
10349 }
10350
10351 /* Output the offset into the debug_loc section. */
10352
10353 static void
10354 output_loc_list_offset (dw_attr_node *a)
10355 {
10356 char *sym = AT_loc_list (a)->ll_symbol;
10357
10358 gcc_assert (sym);
10359 if (!dwarf_split_debug_info)
10360 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10361 "%s", dwarf_attr_name (a->dw_attr));
10362 else if (dwarf_version >= 5)
10363 {
10364 gcc_assert (AT_loc_list (a)->num_assigned);
10365 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10366 dwarf_attr_name (a->dw_attr),
10367 sym);
10368 }
10369 else
10370 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10371 "%s", dwarf_attr_name (a->dw_attr));
10372 }
10373
 10374 /* Output the offset of a location view list into the debug_loc section. */
10375
10376 static void
10377 output_view_list_offset (dw_attr_node *a)
10378 {
10379 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10380
10381 gcc_assert (sym);
10382 if (dwarf_split_debug_info)
10383 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10384 "%s", dwarf_attr_name (a->dw_attr));
10385 else
10386 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10387 "%s", dwarf_attr_name (a->dw_attr));
10388 }
10389
10390 /* Output an attribute's index or value appropriately. */
10391
10392 static void
10393 output_attr_index_or_value (dw_attr_node *a)
10394 {
10395 const char *name = dwarf_attr_name (a->dw_attr);
10396
10397 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10398 {
10399 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10400 return;
10401 }
10402 switch (AT_class (a))
10403 {
10404 case dw_val_class_addr:
10405 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10406 break;
10407 case dw_val_class_high_pc:
10408 case dw_val_class_lbl_id:
10409 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10410 break;
10411 default:
10412 gcc_unreachable ();
10413 }
10414 }
10415
10416 /* Output a type signature. */
10417
10418 static inline void
10419 output_signature (const char *sig, const char *name)
10420 {
10421 int i;
10422
10423 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10424 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10425 }
10426
10427 /* Output a discriminant value. */
10428
10429 static inline void
10430 output_discr_value (dw_discr_value *discr_value, const char *name)
10431 {
10432 if (discr_value->pos)
10433 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10434 else
10435 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10436 }
10437
10438 /* Output the DIE and its attributes. Called recursively to generate
10439 the definitions of each child DIE. */
10440
10441 static void
10442 output_die (dw_die_ref die)
10443 {
10444 dw_attr_node *a;
10445 dw_die_ref c;
10446 unsigned long size;
10447 unsigned ix;
10448
10449 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10450 (unsigned long)die->die_offset,
10451 dwarf_tag_name (die->die_tag));
10452
10453 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10454 {
10455 const char *name = dwarf_attr_name (a->dw_attr);
10456
10457 switch (AT_class (a))
10458 {
10459 case dw_val_class_addr:
10460 output_attr_index_or_value (a);
10461 break;
10462
10463 case dw_val_class_offset:
10464 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10465 "%s", name);
10466 break;
10467
10468 case dw_val_class_range_list:
10469 output_range_list_offset (a);
10470 break;
10471
10472 case dw_val_class_loc:
10473 size = size_of_locs (AT_loc (a));
10474
10475 /* Output the block length for this list of location operations. */
10476 if (dwarf_version >= 4)
10477 dw2_asm_output_data_uleb128 (size, "%s", name);
10478 else
10479 dw2_asm_output_data (constant_size (size), size, "%s", name);
10480
10481 output_loc_sequence (AT_loc (a), -1);
10482 break;
10483
10484 case dw_val_class_const:
 10485 /* ??? It would be slightly more efficient to use a scheme like the one
 10486 used for unsigned constants below, but gdb 4.x does not sign
 10487 extend. Gdb 5.x does sign extend. */
10488 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10489 break;
10490
10491 case dw_val_class_unsigned_const:
10492 {
10493 int csize = constant_size (AT_unsigned (a));
10494 if (dwarf_version == 3
10495 && a->dw_attr == DW_AT_data_member_location
10496 && csize >= 4)
10497 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10498 else
10499 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10500 }
10501 break;
10502
10503 case dw_val_class_const_implicit:
10504 if (flag_debug_asm)
10505 fprintf (asm_out_file, "\t\t\t%s %s ("
10506 HOST_WIDE_INT_PRINT_DEC ")\n",
10507 ASM_COMMENT_START, name, AT_int (a));
10508 break;
10509
10510 case dw_val_class_unsigned_const_implicit:
10511 if (flag_debug_asm)
10512 fprintf (asm_out_file, "\t\t\t%s %s ("
10513 HOST_WIDE_INT_PRINT_HEX ")\n",
10514 ASM_COMMENT_START, name, AT_unsigned (a));
10515 break;
10516
10517 case dw_val_class_const_double:
10518 {
10519 unsigned HOST_WIDE_INT first, second;
10520
10521 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10522 dw2_asm_output_data (1,
10523 HOST_BITS_PER_DOUBLE_INT
10524 / HOST_BITS_PER_CHAR,
10525 NULL);
10526
10527 if (WORDS_BIG_ENDIAN)
10528 {
10529 first = a->dw_attr_val.v.val_double.high;
10530 second = a->dw_attr_val.v.val_double.low;
10531 }
10532 else
10533 {
10534 first = a->dw_attr_val.v.val_double.low;
10535 second = a->dw_attr_val.v.val_double.high;
10536 }
10537
10538 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10539 first, "%s", name);
10540 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10541 second, NULL);
10542 }
10543 break;
10544
10545 case dw_val_class_wide_int:
10546 {
10547 int i;
10548 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10549 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10550 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10551 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10552 * l, NULL);
10553
10554 if (WORDS_BIG_ENDIAN)
10555 for (i = len - 1; i >= 0; --i)
10556 {
10557 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10558 "%s", name);
10559 name = "";
10560 }
10561 else
10562 for (i = 0; i < len; ++i)
10563 {
10564 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10565 "%s", name);
10566 name = "";
10567 }
10568 }
10569 break;
10570
10571 case dw_val_class_vec:
10572 {
10573 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10574 unsigned int len = a->dw_attr_val.v.val_vec.length;
10575 unsigned int i;
10576 unsigned char *p;
10577
10578 dw2_asm_output_data (constant_size (len * elt_size),
10579 len * elt_size, "%s", name);
10580 if (elt_size > sizeof (HOST_WIDE_INT))
10581 {
10582 elt_size /= 2;
10583 len *= 2;
10584 }
10585 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10586 i < len;
10587 i++, p += elt_size)
10588 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10589 "fp or vector constant word %u", i);
10590 break;
10591 }
10592
10593 case dw_val_class_flag:
10594 if (dwarf_version >= 4)
10595 {
10596 /* Currently all add_AT_flag calls pass in 1 as last argument,
10597 so DW_FORM_flag_present can be used. If that ever changes,
10598 we'll need to use DW_FORM_flag and have some optimization
10599 in build_abbrev_table that will change those to
10600 DW_FORM_flag_present if it is set to 1 in all DIEs using
10601 the same abbrev entry. */
10602 gcc_assert (AT_flag (a) == 1);
10603 if (flag_debug_asm)
10604 fprintf (asm_out_file, "\t\t\t%s %s\n",
10605 ASM_COMMENT_START, name);
10606 break;
10607 }
10608 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10609 break;
10610
10611 case dw_val_class_loc_list:
10612 output_loc_list_offset (a);
10613 break;
10614
10615 case dw_val_class_view_list:
10616 output_view_list_offset (a);
10617 break;
10618
10619 case dw_val_class_die_ref:
10620 if (AT_ref_external (a))
10621 {
10622 if (AT_ref (a)->comdat_type_p)
10623 {
10624 comdat_type_node *type_node
10625 = AT_ref (a)->die_id.die_type_node;
10626
10627 gcc_assert (type_node);
10628 output_signature (type_node->signature, name);
10629 }
10630 else
10631 {
10632 const char *sym = AT_ref (a)->die_id.die_symbol;
10633 int size;
10634
10635 gcc_assert (sym);
10636 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10637 length, whereas in DWARF3 it's always sized as an
10638 offset. */
10639 if (dwarf_version == 2)
10640 size = DWARF2_ADDR_SIZE;
10641 else
10642 size = DWARF_OFFSET_SIZE;
 10643 /* ??? We cannot unconditionally output die_offset when it is
 10644 non-zero - others might create references to those
 10645 DIEs via symbols.
 10646 And we do not clear the DIE offset after outputting it
 10647 (the label refers to the actual DIE, not to the
 10648 DWARF CU header, which is where using label + offset
 10649 would be the correct thing to do).
 10650 ??? This is the reason for the with_offset flag. */
10651 if (AT_ref (a)->with_offset)
10652 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10653 debug_info_section, "%s", name);
10654 else
10655 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10656 name);
10657 }
10658 }
10659 else
10660 {
10661 gcc_assert (AT_ref (a)->die_offset);
10662 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10663 "%s", name);
10664 }
10665 break;
10666
10667 case dw_val_class_fde_ref:
10668 {
10669 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10670
10671 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10672 a->dw_attr_val.v.val_fde_index * 2);
10673 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10674 "%s", name);
10675 }
10676 break;
10677
10678 case dw_val_class_vms_delta:
10679 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10680 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10681 AT_vms_delta2 (a), AT_vms_delta1 (a),
10682 "%s", name);
10683 #else
10684 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10685 AT_vms_delta2 (a), AT_vms_delta1 (a),
10686 "%s", name);
10687 #endif
10688 break;
10689
10690 case dw_val_class_lbl_id:
10691 output_attr_index_or_value (a);
10692 break;
10693
10694 case dw_val_class_lineptr:
10695 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10696 debug_line_section, "%s", name);
10697 break;
10698
10699 case dw_val_class_macptr:
10700 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10701 debug_macinfo_section, "%s", name);
10702 break;
10703
10704 case dw_val_class_loclistsptr:
10705 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10706 debug_loc_section, "%s", name);
10707 break;
10708
10709 case dw_val_class_str:
10710 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10711 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10712 a->dw_attr_val.v.val_str->label,
10713 debug_str_section,
10714 "%s: \"%s\"", name, AT_string (a));
10715 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10716 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10717 a->dw_attr_val.v.val_str->label,
10718 debug_line_str_section,
10719 "%s: \"%s\"", name, AT_string (a));
10720 else if (a->dw_attr_val.v.val_str->form == DW_FORM_GNU_str_index)
10721 dw2_asm_output_data_uleb128 (AT_index (a),
10722 "%s: \"%s\"", name, AT_string (a));
10723 else
10724 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10725 break;
10726
10727 case dw_val_class_file:
10728 {
10729 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10730
10731 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10732 a->dw_attr_val.v.val_file->filename);
10733 break;
10734 }
10735
10736 case dw_val_class_file_implicit:
10737 if (flag_debug_asm)
10738 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10739 ASM_COMMENT_START, name,
10740 maybe_emit_file (a->dw_attr_val.v.val_file),
10741 a->dw_attr_val.v.val_file->filename);
10742 break;
10743
10744 case dw_val_class_data8:
10745 {
10746 int i;
10747
10748 for (i = 0; i < 8; i++)
10749 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10750 i == 0 ? "%s" : NULL, name);
10751 break;
10752 }
10753
10754 case dw_val_class_high_pc:
10755 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10756 get_AT_low_pc (die), "DW_AT_high_pc");
10757 break;
10758
10759 case dw_val_class_discr_value:
10760 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10761 break;
10762
10763 case dw_val_class_discr_list:
10764 {
10765 dw_discr_list_ref list = AT_discr_list (a);
10766 const int size = size_of_discr_list (list);
10767
10768 /* This is a block, so output its length first. */
10769 dw2_asm_output_data (constant_size (size), size,
10770 "%s: block size", name);
10771
10772 for (; list != NULL; list = list->dw_discr_next)
10773 {
10774 /* One byte for the discriminant value descriptor, and then as
10775 many LEB128 numbers as required. */
10776 if (list->dw_discr_range)
10777 dw2_asm_output_data (1, DW_DSC_range,
10778 "%s: DW_DSC_range", name);
10779 else
10780 dw2_asm_output_data (1, DW_DSC_label,
10781 "%s: DW_DSC_label", name);
10782
10783 output_discr_value (&list->dw_discr_lower_bound, name);
10784 if (list->dw_discr_range)
10785 output_discr_value (&list->dw_discr_upper_bound, name);
10786 }
10787 break;
10788 }
10789
10790 default:
10791 gcc_unreachable ();
10792 }
10793 }
10794
10795 FOR_EACH_CHILD (die, c, output_die (c));
10796
10797 /* Add null byte to terminate sibling list. */
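/* For example, a DIE with two children is emitted depth first as the
parent's abbreviation code and attribute values, then child 1, then
child 2 (each recursively in the same format), and finally this single
0 byte where the next abbreviation code would otherwise appear. */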
10798 if (die->die_child != NULL)
10799 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10800 (unsigned long) die->die_offset);
10801 }
10802
10803 /* Output the dwarf version number. */
10804
10805 static void
10806 output_dwarf_version ()
10807 {
10808 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10809 views in loclist. That will change eventually. */
10810 if (dwarf_version == 6)
10811 {
10812 static bool once;
10813 if (!once)
10814 {
10815 warning (0,
10816 "-gdwarf-6 is output as version 5 with incompatibilities");
10817 once = true;
10818 }
10819 dw2_asm_output_data (2, 5, "DWARF version number");
10820 }
10821 else
10822 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10823 }
10824
10825 /* Output the compilation unit that appears at the beginning of the
10826 .debug_info section, and precedes the DIE descriptions. */
10827
10828 static void
10829 output_compilation_unit_header (enum dwarf_unit_type ut)
10830 {
10831 if (!XCOFF_DEBUGGING_INFO)
10832 {
10833 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10834 dw2_asm_output_data (4, 0xffffffff,
10835 "Initial length escape value indicating 64-bit DWARF extension");
10836 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10837 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10838 "Length of Compilation Unit Info");
10839 }
10840
10841 output_dwarf_version ();
10842 if (dwarf_version >= 5)
10843 {
10844 const char *name;
10845 switch (ut)
10846 {
10847 case DW_UT_compile: name = "DW_UT_compile"; break;
10848 case DW_UT_type: name = "DW_UT_type"; break;
10849 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10850 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10851 default: gcc_unreachable ();
10852 }
10853 dw2_asm_output_data (1, ut, "%s", name);
10854 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10855 }
10856 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10857 debug_abbrev_section,
10858 "Offset Into Abbrev. Section");
10859 if (dwarf_version < 5)
10860 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10861 }
10862
10863 /* Output the compilation unit DIE and its children. */
10864
10865 static void
10866 output_comp_unit (dw_die_ref die, int output_if_empty,
10867 const unsigned char *dwo_id)
10868 {
10869 const char *secname, *oldsym;
10870 char *tmp;
10871
10872 /* Unless we are outputting the main CU, we may throw away empty ones. */
10873 if (!output_if_empty && die->die_child == NULL)
10874 return;
10875
10876 /* Even if there are no children of this DIE, we must output the information
10877 about the compilation unit. Otherwise, on an empty translation unit, we
10878 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10879 will then complain when examining the file. First mark all the DIEs in
10880 this CU so we know which get local refs. */
10881 mark_dies (die);
10882
10883 external_ref_hash_type *extern_map = optimize_external_refs (die);
10884
10885 /* For now, optimize only the main CU; in order to optimize the rest
10886 we'd need to see all of them earlier. Leave the rest for post-linking
10887 tools like DWZ. */
10888 if (die == comp_unit_die ())
10889 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10890
10891 build_abbrev_table (die, extern_map);
10892
10893 optimize_abbrev_table ();
10894
10895 delete extern_map;
10896
10897 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10898 next_die_offset = (dwo_id
10899 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10900 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10901 calc_die_sizes (die);
10902
10903 oldsym = die->die_id.die_symbol;
10904 if (oldsym && die->comdat_type_p)
10905 {
10906 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
10907
10908 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
10909 secname = tmp;
10910 die->die_id.die_symbol = NULL;
10911 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
10912 }
10913 else
10914 {
10915 switch_to_section (debug_info_section);
10916 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
10917 info_section_emitted = true;
10918 }
10919
10920 /* For LTO cross unit DIE refs we want a symbol on the start of the
10921 debuginfo section, not on the CU DIE. */
10922 if ((flag_generate_lto || flag_generate_offload) && oldsym)
10923 {
10924 /* ??? No way to get visibility assembled without a decl. */
10925 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
10926 get_identifier (oldsym), char_type_node);
10927 TREE_PUBLIC (decl) = true;
10928 TREE_STATIC (decl) = true;
10929 DECL_ARTIFICIAL (decl) = true;
10930 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
10931 DECL_VISIBILITY_SPECIFIED (decl) = true;
10932 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
10933 #ifdef ASM_WEAKEN_LABEL
10934 /* We prefer a .weak because that handles duplicates from duplicate
10935 archive members in a graceful way. */
10936 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
10937 #else
10938 targetm.asm_out.globalize_label (asm_out_file, oldsym);
10939 #endif
10940 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
10941 }
10942
10943 /* Output debugging information. */
10944 output_compilation_unit_header (dwo_id
10945 ? DW_UT_split_compile : DW_UT_compile);
10946 if (dwarf_version >= 5)
10947 {
10948 if (dwo_id != NULL)
10949 for (int i = 0; i < 8; i++)
10950 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
10951 }
10952 output_die (die);
10953
10954 /* Leave the marks on the main CU, so we can check them in
10955 output_pubnames. */
10956 if (oldsym)
10957 {
10958 unmark_dies (die);
10959 die->die_id.die_symbol = oldsym;
10960 }
10961 }
10962
10963 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
10964 and .debug_pubtypes. This is configured per-target, but can be
10965 overridden by the -gpubnames or -gno-pubnames options. */
10966
10967 static inline bool
10968 want_pubnames (void)
10969 {
10970 if (debug_info_level <= DINFO_LEVEL_TERSE)
10971 return false;
10972 if (debug_generate_pub_sections != -1)
10973 return debug_generate_pub_sections;
10974 return targetm.want_debug_pub_sections;
10975 }
10976
10977 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
10978
10979 static void
10980 add_AT_pubnames (dw_die_ref die)
10981 {
10982 if (want_pubnames ())
10983 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
10984 }
10985
10986 /* Add a string attribute value to a skeleton DIE. */
10987
10988 static inline void
10989 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
10990 const char *str)
10991 {
10992 dw_attr_node attr;
10993 struct indirect_string_node *node;
10994
10995 if (! skeleton_debug_str_hash)
10996 skeleton_debug_str_hash
10997 = hash_table<indirect_string_hasher>::create_ggc (10);
10998
10999 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11000 find_string_form (node);
11001 if (node->form == DW_FORM_GNU_str_index)
11002 node->form = DW_FORM_strp;
11003
11004 attr.dw_attr = attr_kind;
11005 attr.dw_attr_val.val_class = dw_val_class_str;
11006 attr.dw_attr_val.val_entry = NULL;
11007 attr.dw_attr_val.v.val_str = node;
11008 add_dwarf_attr (die, &attr);
11009 }
11010
11011 /* Helper function to generate top-level dies for skeleton debug_info and
11012 debug_types. */
11013
11014 static void
11015 add_top_level_skeleton_die_attrs (dw_die_ref die)
11016 {
11017 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11018 const char *comp_dir = comp_dir_string ();
11019
11020 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11021 if (comp_dir != NULL)
11022 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11023 add_AT_pubnames (die);
11024 add_AT_lineptr (die, DW_AT_GNU_addr_base, debug_addr_section_label);
11025 }
11026
11027 /* Output skeleton debug sections that point to the dwo file. */
11028
11029 static void
11030 output_skeleton_debug_sections (dw_die_ref comp_unit,
11031 const unsigned char *dwo_id)
11032 {
11033 /* These attributes will be found in the full debug_info section. */
11034 remove_AT (comp_unit, DW_AT_producer);
11035 remove_AT (comp_unit, DW_AT_language);
11036
11037 switch_to_section (debug_skeleton_info_section);
11038 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11039
11040 /* Produce the skeleton compilation-unit header. This one differs enough from
11041 a normal CU header that it's better not to call
11042 output_compilation_unit_header. */
11043 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11044 dw2_asm_output_data (4, 0xffffffff,
11045 "Initial length escape value indicating 64-bit "
11046 "DWARF extension");
11047
11048 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11049 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11050 - DWARF_INITIAL_LENGTH_SIZE
11051 + size_of_die (comp_unit),
11052 "Length of Compilation Unit Info");
11053 output_dwarf_version ();
11054 if (dwarf_version >= 5)
11055 {
11056 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11057 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11058 }
11059 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11060 debug_skeleton_abbrev_section,
11061 "Offset Into Abbrev. Section");
11062 if (dwarf_version < 5)
11063 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11064 else
11065 for (int i = 0; i < 8; i++)
11066 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11067
11068 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11069 output_die (comp_unit);
11070
11071 /* Build the skeleton debug_abbrev section. */
11072 switch_to_section (debug_skeleton_abbrev_section);
11073 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11074
11075 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11076
11077 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11078 }
11079
11080 /* Output a comdat type unit DIE and its children. */
11081
11082 static void
11083 output_comdat_type_unit (comdat_type_node *node)
11084 {
11085 const char *secname;
11086 char *tmp;
11087 int i;
11088 #if defined (OBJECT_FORMAT_ELF)
11089 tree comdat_key;
11090 #endif
11091
11092 /* First mark all the DIEs in this CU so we know which get local refs. */
11093 mark_dies (node->root_die);
11094
11095 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11096
11097 build_abbrev_table (node->root_die, extern_map);
11098
11099 delete extern_map;
11100 extern_map = NULL;
11101
11102 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11103 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11104 calc_die_sizes (node->root_die);
11105
11106 #if defined (OBJECT_FORMAT_ELF)
11107 if (dwarf_version >= 5)
11108 {
11109 if (!dwarf_split_debug_info)
11110 secname = ".debug_info";
11111 else
11112 secname = ".debug_info.dwo";
11113 }
11114 else if (!dwarf_split_debug_info)
11115 secname = ".debug_types";
11116 else
11117 secname = ".debug_types.dwo";
11118
11119 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11120 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11121 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11122 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
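/* For example, with the usual 8 byte type signature, signature bytes
01 23 45 67 89 ab cd ef produce the comdat key "wi.0123456789abcdef"
("wt." for the pre-DWARF-5 .debug_types variant), so identical type
units from different objects can be merged by the linker. */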
11123 comdat_key = get_identifier (tmp);
11124 targetm.asm_out.named_section (secname,
11125 SECTION_DEBUG | SECTION_LINKONCE,
11126 comdat_key);
11127 #else
11128 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11129 sprintf (tmp, (dwarf_version >= 5
11130 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11131 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11132 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11133 secname = tmp;
11134 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11135 #endif
11136
11137 /* Output debugging information. */
11138 output_compilation_unit_header (dwarf_split_debug_info
11139 ? DW_UT_split_type : DW_UT_type);
11140 output_signature (node->signature, "Type Signature");
11141 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11142 "Offset to Type DIE");
11143 output_die (node->root_die);
11144
11145 unmark_dies (node->root_die);
11146 }
11147
11148 /* Return the DWARF2/3 pubname associated with a decl. */
11149
11150 static const char *
11151 dwarf2_name (tree decl, int scope)
11152 {
11153 if (DECL_NAMELESS (decl))
11154 return NULL;
11155 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11156 }
11157
11158 /* Add a new entry to .debug_pubnames if appropriate. */
11159
11160 static void
11161 add_pubname_string (const char *str, dw_die_ref die)
11162 {
11163 pubname_entry e;
11164
11165 e.die = die;
11166 e.name = xstrdup (str);
11167 vec_safe_push (pubname_table, e);
11168 }
11169
11170 static void
11171 add_pubname (tree decl, dw_die_ref die)
11172 {
11173 if (!want_pubnames ())
11174 return;
11175
11176 /* Don't add items to the table when we expect that the consumer will have
11177 just read the enclosing die. For example, if the consumer is looking at a
11178 class_member, it will either be inside the class already, or will have just
11179 looked up the class to find the member. Either way, searching the class is
11180 faster than searching the index. */
11181 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11182 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11183 {
11184 const char *name = dwarf2_name (decl, 1);
11185
11186 if (name)
11187 add_pubname_string (name, die);
11188 }
11189 }
11190
11191 /* Add an enumerator to the pubnames section. */
11192
11193 static void
11194 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11195 {
11196 pubname_entry e;
11197
11198 gcc_assert (scope_name);
11199 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11200 e.die = die;
11201 vec_safe_push (pubname_table, e);
11202 }
11203
11204 /* Add a new entry to .debug_pubtypes if appropriate. */
11205
11206 static void
11207 add_pubtype (tree decl, dw_die_ref die)
11208 {
11209 pubname_entry e;
11210
11211 if (!want_pubnames ())
11212 return;
11213
11214 if ((TREE_PUBLIC (decl)
11215 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11216 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11217 {
11218 tree scope = NULL;
11219 const char *scope_name = "";
11220 const char *sep = is_cxx () ? "::" : ".";
11221 const char *name;
11222
11223 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11224 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11225 {
11226 scope_name = lang_hooks.dwarf_name (scope, 1);
11227 if (scope_name != NULL && scope_name[0] != '\0')
11228 scope_name = concat (scope_name, sep, NULL);
11229 else
11230 scope_name = "";
11231 }
11232
11233 if (TYPE_P (decl))
11234 name = type_tag (decl);
11235 else
11236 name = lang_hooks.dwarf_name (decl, 1);
11237
11238 /* If we don't have a name for the type, there's no point in adding
11239 it to the table. */
11240 if (name != NULL && name[0] != '\0')
11241 {
11242 e.die = die;
11243 e.name = concat (scope_name, name, NULL);
11244 vec_safe_push (pubtype_table, e);
11245 }
11246
11247 /* Although it might be more consistent to add the pubinfo for the
11248 enumerators as their dies are created, they should only be added if the
11249 enum type meets the criteria above. So rather than re-check the parent
11250 enum type whenever an enumerator die is created, just output them all
11251 here. This isn't protected by the name conditional because anonymous
11252 enums don't have names. */
11253 if (die->die_tag == DW_TAG_enumeration_type)
11254 {
11255 dw_die_ref c;
11256
11257 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11258 }
11259 }
11260 }
11261
11262 /* Output a single entry in the pubnames table. */
11263
11264 static void
11265 output_pubname (dw_offset die_offset, pubname_entry *entry)
11266 {
11267 dw_die_ref die = entry->die;
11268 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11269
11270 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11271
11272 if (debug_generate_pub_sections == 2)
11273 {
11274 /* This logic follows gdb's method for determining the value of the flag
11275 byte. */
11276 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11277 switch (die->die_tag)
11278 {
11279 case DW_TAG_typedef:
11280 case DW_TAG_base_type:
11281 case DW_TAG_subrange_type:
11282 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11283 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11284 break;
11285 case DW_TAG_enumerator:
11286 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11287 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11288 if (!is_cxx ())
11289 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11290 break;
11291 case DW_TAG_subprogram:
11292 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11293 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11294 if (!is_ada ())
11295 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11296 break;
11297 case DW_TAG_constant:
11298 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11299 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11300 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11301 break;
11302 case DW_TAG_variable:
11303 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11304 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11305 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11306 break;
11307 case DW_TAG_namespace:
11308 case DW_TAG_imported_declaration:
11309 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11310 break;
11311 case DW_TAG_class_type:
11312 case DW_TAG_interface_type:
11313 case DW_TAG_structure_type:
11314 case DW_TAG_union_type:
11315 case DW_TAG_enumeration_type:
11316 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11317 if (!is_cxx ())
11318 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11319 break;
11320 default:
11321 /* An unusual tag. Leave the flag-byte empty. */
11322 break;
11323 }
11324 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11325 "GDB-index flags");
11326 }
11327
11328 dw2_asm_output_nstring (entry->name, -1, "external name");
11329 }
11330
11331
11332 /* Output the public names table used to speed up access to externally
11333 visible names; or the public types table used to find type definitions. */
11334
11335 static void
11336 output_pubnames (vec<pubname_entry, va_gc> *names)
11337 {
11338 unsigned i;
11339 unsigned long pubnames_length = size_of_pubnames (names);
11340 pubname_entry *pub;
11341
11342 if (!XCOFF_DEBUGGING_INFO)
11343 {
11344 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11345 dw2_asm_output_data (4, 0xffffffff,
11346 "Initial length escape value indicating 64-bit DWARF extension");
11347 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11348 "Pub Info Length");
11349 }
11350
11351 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11352 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11353
11354 if (dwarf_split_debug_info)
11355 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11356 debug_skeleton_info_section,
11357 "Offset of Compilation Unit Info");
11358 else
11359 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11360 debug_info_section,
11361 "Offset of Compilation Unit Info");
11362 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11363 "Compilation Unit Length");
11364
11365 FOR_EACH_VEC_ELT (*names, i, pub)
11366 {
11367 if (include_pubname_in_output (names, pub))
11368 {
11369 dw_offset die_offset = pub->die->die_offset;
11370
11371 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11372 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11373 gcc_assert (pub->die->die_mark);
11374
11375 /* If we're putting types in their own .debug_types sections,
11376 the .debug_pubtypes table will still point to the compile
11377 unit (not the type unit), so we want to use the offset of
11378 the skeleton DIE (if there is one). */
11379 if (pub->die->comdat_type_p && names == pubtype_table)
11380 {
11381 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11382
11383 if (type_node != NULL)
11384 die_offset = (type_node->skeleton_die != NULL
11385 ? type_node->skeleton_die->die_offset
11386 : comp_unit_die ()->die_offset);
11387 }
11388
11389 output_pubname (die_offset, pub);
11390 }
11391 }
11392
11393 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11394 }
11395
11396 /* Output public names and types tables if necessary. */
11397
11398 static void
11399 output_pubtables (void)
11400 {
11401 if (!want_pubnames () || !info_section_emitted)
11402 return;
11403
11404 switch_to_section (debug_pubnames_section);
11405 output_pubnames (pubname_table);
11406 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11407 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11408 simply won't look for the section. */
11409 switch_to_section (debug_pubtypes_section);
11410 output_pubnames (pubtype_table);
11411 }
11412
11413
11414 /* Output the information that goes into the .debug_aranges table.
11415 Namely, define the beginning and ending address range of the
11416 text section generated for this compilation unit. */
11417
11418 static void
11419 output_aranges (void)
11420 {
11421 unsigned i;
11422 unsigned long aranges_length = size_of_aranges ();
11423
11424 if (!XCOFF_DEBUGGING_INFO)
11425 {
11426 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11427 dw2_asm_output_data (4, 0xffffffff,
11428 "Initial length escape value indicating 64-bit DWARF extension");
11429 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11430 "Length of Address Ranges Info");
11431 }
11432
11433 /* Version number for aranges is still 2, even up to DWARF5. */
11434 dw2_asm_output_data (2, 2, "DWARF aranges version");
11435 if (dwarf_split_debug_info)
11436 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11437 debug_skeleton_info_section,
11438 "Offset of Compilation Unit Info");
11439 else
11440 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11441 debug_info_section,
11442 "Offset of Compilation Unit Info");
11443 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11444 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11445
11446 /* We need to align to twice the pointer size here. */
11447 if (DWARF_ARANGES_PAD_SIZE)
11448 {
11449 /* Pad using 2 byte words so that the padding is correct for any
11450 pointer size. */
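/* For example, with 32-bit DWARF offsets and 8 byte addresses the
header emitted so far is 4 + 2 + 4 + 1 + 1 = 12 bytes, so two 2 byte
zero words bring the address pairs to the required 16 byte boundary. */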
11451 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11452 2 * DWARF2_ADDR_SIZE);
11453 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11454 dw2_asm_output_data (2, 0, NULL);
11455 }
11456
11457 /* It is necessary not to output these entries if the sections were
11458 not used; in that case the length will be 0 and the address may
11459 end up as 0 if the section is discarded by ld --gc-sections,
11460 leaving an invalid (0, 0) entry that can be confused with the
11461 terminator. */
11462 if (text_section_used)
11463 {
11464 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11465 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11466 text_section_label, "Length");
11467 }
11468 if (cold_text_section_used)
11469 {
11470 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11471 "Address");
11472 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11473 cold_text_section_label, "Length");
11474 }
11475
11476 if (have_multiple_function_sections)
11477 {
11478 unsigned fde_idx;
11479 dw_fde_ref fde;
11480
11481 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11482 {
11483 if (DECL_IGNORED_P (fde->decl))
11484 continue;
11485 if (!fde->in_std_section)
11486 {
11487 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11488 "Address");
11489 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11490 fde->dw_fde_begin, "Length");
11491 }
11492 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11493 {
11494 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11495 "Address");
11496 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11497 fde->dw_fde_second_begin, "Length");
11498 }
11499 }
11500 }
11501
11502 /* Output the terminator words. */
11503 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11504 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11505 }
11506
11507 /* Add a new entry to .debug_ranges. Return its index into
11508 ranges_table vector. */
11509
11510 static unsigned int
11511 add_ranges_num (int num, bool maybe_new_sec)
11512 {
11513 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11514 vec_safe_push (ranges_table, r);
11515 return vec_safe_length (ranges_table) - 1;
11516 }
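/* The stored NUM is interpreted by output_ranges and output_rnglists
as follows: a positive value is a BLOCK_NUMBER whose
BLOCK_BEGIN_LABEL/BLOCK_END_LABEL labels delimit the range, 0 is a
range list terminator, and a negative value -N-1 refers to entry N of
ranges_by_label (see add_ranges_by_labels below). */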
11517
11518 /* Add a new entry to .debug_ranges corresponding to a block, or a
11519 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11520 this entry might be in a different section from previous range. */
11521
11522 static unsigned int
11523 add_ranges (const_tree block, bool maybe_new_sec)
11524 {
11525 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11526 }
11527
11528 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11529 chain, or a middle entry of a chain that will be directly referred to. */
11530
11531 static void
11532 note_rnglist_head (unsigned int offset)
11533 {
11534 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11535 return;
11536 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11537 }
11538
11539 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11540 When using dwarf_split_debug_info, address attributes in dies destined
11541 for the final executable should be direct references--setting the
11542 parameter force_direct ensures this behavior. */
11543
11544 static void
11545 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11546 bool *added, bool force_direct)
11547 {
11548 unsigned int in_use = vec_safe_length (ranges_by_label);
11549 unsigned int offset;
11550 dw_ranges_by_label rbl = { begin, end };
11551 vec_safe_push (ranges_by_label, rbl);
11552 offset = add_ranges_num (-(int)in_use - 1, true);
11553 if (!*added)
11554 {
11555 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11556 *added = true;
11557 note_rnglist_head (offset);
11558 }
11559 }
11560
11561 /* Emit .debug_ranges section. */
11562
11563 static void
11564 output_ranges (void)
11565 {
11566 unsigned i;
11567 static const char *const start_fmt = "Offset %#x";
11568 const char *fmt = start_fmt;
11569 dw_ranges *r;
11570
11571 switch_to_section (debug_ranges_section);
11572 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11573 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11574 {
11575 int block_num = r->num;
11576
11577 if (block_num > 0)
11578 {
11579 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11580 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11581
11582 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11583 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11584
11585 /* If all code is in the text section, then the compilation
11586 unit base address defaults to DW_AT_low_pc, which is the
11587 base of the text section. */
11588 if (!have_multiple_function_sections)
11589 {
11590 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11591 text_section_label,
11592 fmt, i * 2 * DWARF2_ADDR_SIZE);
11593 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11594 text_section_label, NULL);
11595 }
11596
11597 /* Otherwise, the compilation unit base address is zero,
11598 which allows us to use absolute addresses, and not worry
11599 about whether the target supports cross-section
11600 arithmetic. */
11601 else
11602 {
11603 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11604 fmt, i * 2 * DWARF2_ADDR_SIZE);
11605 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11606 }
11607
11608 fmt = NULL;
11609 }
11610
11611 /* Negative block_num stands for an index into ranges_by_label. */
11612 else if (block_num < 0)
11613 {
11614 int lab_idx = - block_num - 1;
11615
11616 if (!have_multiple_function_sections)
11617 {
11618 gcc_unreachable ();
11619 #if 0
11620 /* If we ever use add_ranges_by_labels () for a single
11621 function section, all we have to do is to take out
11622 the #if 0 above. */
11623 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11624 (*ranges_by_label)[lab_idx].begin,
11625 text_section_label,
11626 fmt, i * 2 * DWARF2_ADDR_SIZE);
11627 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11628 (*ranges_by_label)[lab_idx].end,
11629 text_section_label, NULL);
11630 #endif
11631 }
11632 else
11633 {
11634 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11635 (*ranges_by_label)[lab_idx].begin,
11636 fmt, i * 2 * DWARF2_ADDR_SIZE);
11637 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11638 (*ranges_by_label)[lab_idx].end,
11639 NULL);
11640 }
11641 }
11642 else
11643 {
11644 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11645 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11646 fmt = start_fmt;
11647 }
11648 }
11649 }
11650
11651 /* Non-zero if .debug_line_str should be used for .debug_line section
11652 strings or strings that are likely shareable with those. */
11653 #define DWARF5_USE_DEBUG_LINE_STR \
11654 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11655 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11656 /* FIXME: there is no .debug_line_str.dwo section, \
11657 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11658 && !dwarf_split_debug_info)
11659
11660 /* Assign .debug_rnglists indexes. */
11661
11662 static void
11663 index_rnglists (void)
11664 {
11665 unsigned i;
11666 dw_ranges *r;
11667
11668 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11669 if (r->label)
11670 r->idx = rnglist_idx++;
11671 }
11672
11673 /* Emit .debug_rnglists section. */
11674
11675 static void
11676 output_rnglists (unsigned generation)
11677 {
11678 unsigned i;
11679 dw_ranges *r;
11680 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11681 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11682 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11683
11684 switch_to_section (debug_ranges_section);
11685 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11686 /* There are up to 4 unique ranges labels per generation.
11687 See also init_sections_and_labels. */
11688 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11689 2 + generation * 4);
11690 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11691 3 + generation * 4);
11692 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11693 dw2_asm_output_data (4, 0xffffffff,
11694 "Initial length escape value indicating "
11695 "64-bit DWARF extension");
11696 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11697 "Length of Range Lists");
11698 ASM_OUTPUT_LABEL (asm_out_file, l1);
11699 output_dwarf_version ();
11700 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11701 dw2_asm_output_data (1, 0, "Segment Size");
11702 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11703 about relocation sizes and primarily care about the size of .debug*
11704 sections in linked shared libraries and executables, then
11705 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11706 into it are usually larger than just DW_FORM_sec_offset offsets
11707 into the .debug_rnglists section. */
11708 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11709 "Offset Entry Count");
11710 if (dwarf_split_debug_info)
11711 {
11712 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11713 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11714 if (r->label)
11715 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11716 ranges_base_label, NULL);
11717 }
11718
11719 const char *lab = "";
11720 unsigned int len = vec_safe_length (ranges_table);
11721 const char *base = NULL;
11722 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11723 {
11724 int block_num = r->num;
11725
11726 if (r->label)
11727 {
11728 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11729 lab = r->label;
11730 }
11731 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11732 base = NULL;
11733 if (block_num > 0)
11734 {
11735 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11736 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11737
11738 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11739 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11740
11741 if (HAVE_AS_LEB128)
11742 {
11743 /* If all code is in the text section, then the compilation
11744 unit base address defaults to DW_AT_low_pc, which is the
11745 base of the text section. */
11746 if (!have_multiple_function_sections)
11747 {
11748 dw2_asm_output_data (1, DW_RLE_offset_pair,
11749 "DW_RLE_offset_pair (%s)", lab);
11750 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11751 "Range begin address (%s)", lab);
11752 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11753 "Range end address (%s)", lab);
11754 continue;
11755 }
11756 if (base == NULL)
11757 {
11758 dw_ranges *r2 = NULL;
11759 if (i < len - 1)
11760 r2 = &(*ranges_table)[i + 1];
11761 if (r2
11762 && r2->num != 0
11763 && r2->label == NULL
11764 && !r2->maybe_new_sec)
11765 {
11766 dw2_asm_output_data (1, DW_RLE_base_address,
11767 "DW_RLE_base_address (%s)", lab);
11768 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11769 "Base address (%s)", lab);
11770 strcpy (basebuf, blabel);
11771 base = basebuf;
11772 }
11773 }
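/* At this point BASE, when set, names a label against which following
entries in the same section are expressed. E.g. once a
DW_RLE_base_address naming some block-begin label has been emitted, a
later block in that section becomes a DW_RLE_offset_pair holding the
uleb128 deltas of its begin and end labels from that base, which the
assembler folds to constants (hence the HAVE_AS_LEB128 guard above). */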
11774 if (base)
11775 {
11776 dw2_asm_output_data (1, DW_RLE_offset_pair,
11777 "DW_RLE_offset_pair (%s)", lab);
11778 dw2_asm_output_delta_uleb128 (blabel, base,
11779 "Range begin address (%s)", lab);
11780 dw2_asm_output_delta_uleb128 (elabel, base,
11781 "Range end address (%s)", lab);
11782 continue;
11783 }
11784 dw2_asm_output_data (1, DW_RLE_start_length,
11785 "DW_RLE_start_length (%s)", lab);
11786 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11787 "Range begin address (%s)", lab);
11788 dw2_asm_output_delta_uleb128 (elabel, blabel,
11789 "Range length (%s)", lab);
11790 }
11791 else
11792 {
11793 dw2_asm_output_data (1, DW_RLE_start_end,
11794 "DW_RLE_start_end (%s)", lab);
11795 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11796 "Range begin address (%s)", lab);
11797 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11798 "Range end address (%s)", lab);
11799 }
11800 }
11801
11802 /* Negative block_num stands for an index into ranges_by_label. */
11803 else if (block_num < 0)
11804 {
11805 int lab_idx = - block_num - 1;
11806 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11807 const char *elabel = (*ranges_by_label)[lab_idx].end;
11808
11809 if (!have_multiple_function_sections)
11810 gcc_unreachable ();
11811 if (HAVE_AS_LEB128)
11812 {
11813 dw2_asm_output_data (1, DW_RLE_start_length,
11814 "DW_RLE_start_length (%s)", lab);
11815 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11816 "Range begin address (%s)", lab);
11817 dw2_asm_output_delta_uleb128 (elabel, blabel,
11818 "Range length (%s)", lab);
11819 }
11820 else
11821 {
11822 dw2_asm_output_data (1, DW_RLE_start_end,
11823 "DW_RLE_start_end (%s)", lab);
11824 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11825 "Range begin address (%s)", lab);
11826 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11827 "Range end address (%s)", lab);
11828 }
11829 }
11830 else
11831 dw2_asm_output_data (1, DW_RLE_end_of_list,
11832 "DW_RLE_end_of_list (%s)", lab);
11833 }
11834 ASM_OUTPUT_LABEL (asm_out_file, l2);
11835 }
11836
11837 /* Data structure containing information about input files. */
11838 struct file_info
11839 {
11840 const char *path; /* Complete file name. */
11841 const char *fname; /* File name part. */
11842 int length; /* Length of entire string. */
11843 struct dwarf_file_data * file_idx; /* Index in input file table. */
11844 int dir_idx; /* Index in directory table. */
11845 };
11846
11847 /* Data structure containing information about directories with source
11848 files. */
11849 struct dir_info
11850 {
11851 const char *path; /* Path including directory name. */
11852 int length; /* Path length. */
11853 int prefix; /* Index of directory entry which is a prefix. */
11854 int count; /* Number of files in this directory. */
11855 int dir_idx; /* Index of directory used as base. */
11856 };
11857
11858 /* Callback function for file_info comparison. We sort by looking at
11859 the directories in the path. */
11860
11861 static int
11862 file_info_cmp (const void *p1, const void *p2)
11863 {
11864 const struct file_info *const s1 = (const struct file_info *) p1;
11865 const struct file_info *const s2 = (const struct file_info *) p2;
11866 const unsigned char *cp1;
11867 const unsigned char *cp2;
11868
11869 /* Take care of file names without directories. We need to make sure that
11870 we return consistent values to qsort since some implementations will get confused if
11871 we return the same value when identical operands are passed in opposite
11872 orders. So if neither has a directory, return 0 and otherwise return
11873 1 or -1 depending on which one has the directory. */
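/* For example, a file with no directory such as "bar.c" sorts before
"dir/foo.c", while "a/b/x.c" vs. "a/c/x.c" is decided by the first
differing path character, 'b' vs. 'c'. */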
11874 if ((s1->path == s1->fname || s2->path == s2->fname))
11875 return (s2->path == s2->fname) - (s1->path == s1->fname);
11876
11877 cp1 = (const unsigned char *) s1->path;
11878 cp2 = (const unsigned char *) s2->path;
11879
11880 while (1)
11881 {
11882 ++cp1;
11883 ++cp2;
11884 /* Reached the end of the first path? If so, handle like above. */
11885 if ((cp1 == (const unsigned char *) s1->fname)
11886 || (cp2 == (const unsigned char *) s2->fname))
11887 return ((cp2 == (const unsigned char *) s2->fname)
11888 - (cp1 == (const unsigned char *) s1->fname));
11889
11890 /* Character of current path component the same? */
11891 else if (*cp1 != *cp2)
11892 return *cp1 - *cp2;
11893 }
11894 }
11895
11896 struct file_name_acquire_data
11897 {
11898 struct file_info *files;
11899 int used_files;
11900 int max_files;
11901 };
11902
11903 /* Traversal function for the hash table. */
11904
11905 int
11906 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
11907 {
11908 struct dwarf_file_data *d = *slot;
11909 struct file_info *fi;
11910 const char *f;
11911
11912 gcc_assert (fnad->max_files >= d->emitted_number);
11913
11914 if (! d->emitted_number)
11915 return 1;
11916
11917 gcc_assert (fnad->max_files != fnad->used_files);
11918
11919 fi = fnad->files + fnad->used_files++;
11920
11921 /* Skip all leading "./". */
11922 f = d->filename;
11923 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
11924 f += 2;
11925
11926 /* Create a new array entry. */
11927 fi->path = f;
11928 fi->length = strlen (f);
11929 fi->file_idx = d;
11930
11931 /* Search for the file name part. */
11932 f = strrchr (f, DIR_SEPARATOR);
11933 #if defined (DIR_SEPARATOR_2)
11934 {
11935 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
11936
11937 if (g != NULL)
11938 {
11939 if (f == NULL || f < g)
11940 f = g;
11941 }
11942 }
11943 #endif
11944
11945 fi->fname = f == NULL ? fi->path : f + 1;
11946 return 1;
11947 }
11948
11949 /* Helper function for output_file_names. Emit a FORM encoded
11950 string STR, with assembly comment start ENTRY_KIND and
11951 index IDX. */
11952
11953 static void
11954 output_line_string (enum dwarf_form form, const char *str,
11955 const char *entry_kind, unsigned int idx)
11956 {
11957 switch (form)
11958 {
11959 case DW_FORM_string:
11960 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
11961 break;
11962 case DW_FORM_line_strp:
11963 if (!debug_line_str_hash)
11964 debug_line_str_hash
11965 = hash_table<indirect_string_hasher>::create_ggc (10);
11966
11967 struct indirect_string_node *node;
11968 node = find_AT_string_in_table (str, debug_line_str_hash);
11969 set_indirect_string (node);
11970 node->form = form;
11971 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
11972 debug_line_str_section, "%s: %#x: \"%s\"",
11973 entry_kind, 0, node->str);
11974 break;
11975 default:
11976 gcc_unreachable ();
11977 }
11978 }
11979
11980 /* Output the directory table and the file name table. We try to minimize
11981 the total amount of memory needed. A heuristic is used to avoid large
11982 slowdowns with many input files. */
11983
11984 static void
11985 output_file_names (void)
11986 {
11987 struct file_name_acquire_data fnad;
11988 int numfiles;
11989 struct file_info *files;
11990 struct dir_info *dirs;
11991 int *saved;
11992 int *savehere;
11993 int *backmap;
11994 int ndirs;
11995 int idx_offset;
11996 int i;
11997
11998 if (!last_emitted_file)
11999 {
12000 if (dwarf_version >= 5)
12001 {
12002 dw2_asm_output_data (1, 0, "Directory entry format count");
12003 dw2_asm_output_data_uleb128 (0, "Directories count");
12004 dw2_asm_output_data (1, 0, "File name entry format count");
12005 dw2_asm_output_data_uleb128 (0, "File names count");
12006 }
12007 else
12008 {
12009 dw2_asm_output_data (1, 0, "End directory table");
12010 dw2_asm_output_data (1, 0, "End file name table");
12011 }
12012 return;
12013 }
12014
12015 numfiles = last_emitted_file->emitted_number;
12016
12017 /* Allocate the various arrays we need. */
12018 files = XALLOCAVEC (struct file_info, numfiles);
12019 dirs = XALLOCAVEC (struct dir_info, numfiles);
12020
12021 fnad.files = files;
12022 fnad.used_files = 0;
12023 fnad.max_files = numfiles;
12024 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12025 gcc_assert (fnad.used_files == fnad.max_files);
12026
12027 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12028
12029 /* Find all the different directories used. */
12030 dirs[0].path = files[0].path;
12031 dirs[0].length = files[0].fname - files[0].path;
12032 dirs[0].prefix = -1;
12033 dirs[0].count = 1;
12034 dirs[0].dir_idx = 0;
12035 files[0].dir_idx = 0;
12036 ndirs = 1;
12037
12038 for (i = 1; i < numfiles; i++)
12039 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12040 && memcmp (dirs[ndirs - 1].path, files[i].path,
12041 dirs[ndirs - 1].length) == 0)
12042 {
12043 /* Same directory as last entry. */
12044 files[i].dir_idx = ndirs - 1;
12045 ++dirs[ndirs - 1].count;
12046 }
12047 else
12048 {
12049 int j;
12050
12051 /* This is a new directory. */
12052 dirs[ndirs].path = files[i].path;
12053 dirs[ndirs].length = files[i].fname - files[i].path;
12054 dirs[ndirs].count = 1;
12055 dirs[ndirs].dir_idx = ndirs;
12056 files[i].dir_idx = ndirs;
12057
12058 /* Search for a prefix. */
12059 dirs[ndirs].prefix = -1;
12060 for (j = 0; j < ndirs; j++)
12061 if (dirs[j].length < dirs[ndirs].length
12062 && dirs[j].length > 1
12063 && (dirs[ndirs].prefix == -1
12064 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12065 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12066 dirs[ndirs].prefix = j;
12067
12068 ++ndirs;
12069 }
12070
12071 /* Now to the actual work. We have to find a subset of the directories that
12072 allows expressing the file names using references to the directory table
12073 with the fewest characters. We do not do an exhaustive search, where we
12074 would have to check every combination of every single possible prefix.
12075 Instead we use a heuristic which provides nearly optimal results in most
12076 cases and is never far off. */
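/* Illustration: suppose a prefix directory of length 12 was already
adopted for 5 files and a longer directory of length 17 containing the
same files is now considered. The additional saving is
(17 - 12) * 5 = 25 characters, which exceeds the 17 + 1 bytes its own
table entry would cost, so the longer directory is adopted as well. */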
12077 saved = XALLOCAVEC (int, ndirs);
12078 savehere = XALLOCAVEC (int, ndirs);
12079
12080 memset (saved, '\0', ndirs * sizeof (saved[0]));
12081 for (i = 0; i < ndirs; i++)
12082 {
12083 int j;
12084 int total;
12085
12086 /* We can always save some space for the current directory. But this
12087 does not mean it will be enough to justify adding the directory. */
12088 savehere[i] = dirs[i].length;
12089 total = (savehere[i] - saved[i]) * dirs[i].count;
12090
12091 for (j = i + 1; j < ndirs; j++)
12092 {
12093 savehere[j] = 0;
12094 if (saved[j] < dirs[i].length)
12095 {
12096 /* Determine whether the dirs[i] path is a prefix of the
12097 dirs[j] path. */
12098 int k;
12099
12100 k = dirs[j].prefix;
12101 while (k != -1 && k != (int) i)
12102 k = dirs[k].prefix;
12103
12104 if (k == (int) i)
12105 {
12106 /* Yes it is. We can possibly save some memory by
12107 writing the filenames in dirs[j] relative to
12108 dirs[i]. */
12109 savehere[j] = dirs[i].length;
12110 total += (savehere[j] - saved[j]) * dirs[j].count;
12111 }
12112 }
12113 }
12114
12115 /* Check whether we can save enough to justify adding the dirs[i]
12116 directory. */
12117 if (total > dirs[i].length + 1)
12118 {
12119 /* It's worthwhile adding. */
12120 for (j = i; j < ndirs; j++)
12121 if (savehere[j] > 0)
12122 {
12123 /* Remember how much we saved for this directory so far. */
12124 saved[j] = savehere[j];
12125
12126 /* Remember the prefix directory. */
12127 dirs[j].dir_idx = i;
12128 }
12129 }
12130 }
12131
12132 /* Emit the directory name table. */
12133 idx_offset = dirs[0].length > 0 ? 1 : 0;
12134 enum dwarf_form str_form = DW_FORM_string;
12135 enum dwarf_form idx_form = DW_FORM_udata;
12136 if (dwarf_version >= 5)
12137 {
12138 const char *comp_dir = comp_dir_string ();
12139 if (comp_dir == NULL)
12140 comp_dir = "";
12141 dw2_asm_output_data (1, 1, "Directory entry format count");
12142 if (DWARF5_USE_DEBUG_LINE_STR)
12143 str_form = DW_FORM_line_strp;
12144 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12145 dw2_asm_output_data_uleb128 (str_form, "%s",
12146 get_DW_FORM_name (str_form));
12147 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12148 if (str_form == DW_FORM_string)
12149 {
12150 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12151 for (i = 1 - idx_offset; i < ndirs; i++)
12152 dw2_asm_output_nstring (dirs[i].path,
12153 dirs[i].length
12154 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12155 "Directory Entry: %#x", i + idx_offset);
12156 }
12157 else
12158 {
12159 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12160 for (i = 1 - idx_offset; i < ndirs; i++)
12161 {
12162 const char *str
12163 = ggc_alloc_string (dirs[i].path,
12164 dirs[i].length
12165 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12166 output_line_string (str_form, str, "Directory Entry",
12167 (unsigned) i + idx_offset);
12168 }
12169 }
12170 }
12171 else
12172 {
12173 for (i = 1 - idx_offset; i < ndirs; i++)
12174 dw2_asm_output_nstring (dirs[i].path,
12175 dirs[i].length
12176 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12177 "Directory Entry: %#x", i + idx_offset);
12178
12179 dw2_asm_output_data (1, 0, "End directory table");
12180 }
12181
12182 /* We have to emit them in the order of emitted_number since that's
12183 used in the debug info generation. To do this efficiently we
12184 generate a back-mapping of the indices first. */
12185 backmap = XALLOCAVEC (int, numfiles);
12186 for (i = 0; i < numfiles; i++)
12187 backmap[files[i].file_idx->emitted_number - 1] = i;
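/* E.g. if the file that was assigned emitted_number 1 ended up at
position 7 after sorting, backmap[0] is 7, so the emission loop below
can walk the files in emitted_number order without searching. */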
12188
12189 if (dwarf_version >= 5)
12190 {
12191 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12192 if (filename0 == NULL)
12193 filename0 = "";
12194 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12195 DW_FORM_data2. Choose one based on the number of directories
12196 and how much space they would occupy in each encoding.
12197 If we have at most 256 directories, all indexes fit into
12198 a single byte, so DW_FORM_data1 is most compact (if there
12199 are at most 128 directories, DW_FORM_udata would be just as
12200 compact, but no shorter, and slower to decode). */
12201 if (ndirs + idx_offset <= 256)
12202 idx_form = DW_FORM_data1;
12203 /* If there are more than 65536 directories, we have to use
12204 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12205 Otherwise, compute how much space the indexes would occupy if they
12206 all used DW_FORM_udata - sum - and compare that to how large the
12207 DW_FORM_data2 encoding would be, and pick the more efficient one. */
12208 else if (ndirs + idx_offset <= 65536)
12209 {
12210 unsigned HOST_WIDE_INT sum = 1;
12211 for (i = 0; i < numfiles; i++)
12212 {
12213 int file_idx = backmap[i];
12214 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12215 sum += size_of_uleb128 (dir_idx);
12216 }
12217 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12218 idx_form = DW_FORM_data2;
12219 }
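/* Worked example: with 300 directories and 1000 files, DW_FORM_data2
spends 2 * (1000 + 1) = 2002 bytes on indexes, while DW_FORM_udata
spends 1 byte per index below 128 and 2 bytes for the rest, so udata
only wins if most files live in directories with index below 128. */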
12220 #ifdef VMS_DEBUGGING_INFO
12221 dw2_asm_output_data (1, 4, "File name entry format count");
12222 #else
12223 dw2_asm_output_data (1, 2, "File name entry format count");
12224 #endif
12225 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12226 dw2_asm_output_data_uleb128 (str_form, "%s",
12227 get_DW_FORM_name (str_form));
12228 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12229 "DW_LNCT_directory_index");
12230 dw2_asm_output_data_uleb128 (idx_form, "%s",
12231 get_DW_FORM_name (idx_form));
12232 #ifdef VMS_DEBUGGING_INFO
12233 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12234 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12235 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12236 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12237 #endif
12238 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12239
12240 output_line_string (str_form, filename0, "File Entry", 0);
12241
12242 /* Include directory index. */
12243 if (idx_form != DW_FORM_udata)
12244 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12245 0, NULL);
12246 else
12247 dw2_asm_output_data_uleb128 (0, NULL);
12248
12249 #ifdef VMS_DEBUGGING_INFO
12250 dw2_asm_output_data_uleb128 (0, NULL);
12251 dw2_asm_output_data_uleb128 (0, NULL);
12252 #endif
12253 }
12254
12255 /* Now write all the file names. */
12256 for (i = 0; i < numfiles; i++)
12257 {
12258 int file_idx = backmap[i];
12259 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12260
12261 #ifdef VMS_DEBUGGING_INFO
12262 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12263
12264 /* Setting these fields can lead to debugger miscomparisons,
12265 but VMS Debug requires them to be set correctly. */
12266
12267 int ver;
12268 long long cdt;
12269 long siz;
12270 int maxfilelen = (strlen (files[file_idx].path)
12271 + dirs[dir_idx].length
12272 + MAX_VMS_VERSION_LEN + 1);
12273 char *filebuf = XALLOCAVEC (char, maxfilelen);
12274
12275 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12276 snprintf (filebuf, maxfilelen, "%s;%d",
12277 files[file_idx].path + dirs[dir_idx].length, ver);
12278
12279 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12280
12281 /* Include directory index. */
12282 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12283 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12284 dir_idx + idx_offset, NULL);
12285 else
12286 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12287
12288 /* Modification time. */
12289 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12290 &cdt, 0, 0, 0) == 0)
12291 ? cdt : 0, NULL);
12292
12293 /* File length in bytes. */
12294 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12295 0, &siz, 0, 0) == 0)
12296 ? siz : 0, NULL);
12297 #else
12298 output_line_string (str_form,
12299 files[file_idx].path + dirs[dir_idx].length,
12300 "File Entry", (unsigned) i + 1);
12301
12302 /* Include directory index. */
12303 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12304 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12305 dir_idx + idx_offset, NULL);
12306 else
12307 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12308
12309 if (dwarf_version >= 5)
12310 continue;
12311
12312 /* Modification time. */
12313 dw2_asm_output_data_uleb128 (0, NULL);
12314
12315 /* File length in bytes. */
12316 dw2_asm_output_data_uleb128 (0, NULL);
12317 #endif /* VMS_DEBUGGING_INFO */
12318 }
12319
12320 if (dwarf_version < 5)
12321 dw2_asm_output_data (1, 0, "End file name table");
12322 }
12323
12324
12325 /* Output one line number table into the .debug_line section. */
12326
12327 static void
12328 output_one_line_info_table (dw_line_info_table *table)
12329 {
12330 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12331 unsigned int current_line = 1;
12332 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12333 dw_line_info_entry *ent, *prev_addr;
12334 size_t i;
12335 unsigned int view;
12336
12337 view = 0;
12338
12339 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12340 {
12341 switch (ent->opcode)
12342 {
12343 case LI_set_address:
12344 /* ??? Unfortunately, we have little choice here currently, and
12345 must always use the most general form. GCC does not know the
12346 address delta itself, so we can't use DW_LNS_advance_pc. Many
12347 ports do have length attributes which will give an upper bound
12348 on the address range. We could perhaps use length attributes
12349 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12350 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12351
12352 view = 0;
12353
12354 /* This can handle any delta. This takes
12355 3 + DWARF2_ADDR_SIZE bytes. */
12356 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12357 debug_variable_location_views
12358 ? ", reset view to 0" : "");
12359 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12360 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12361 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12362
12363 prev_addr = ent;
12364 break;
12365
12366 case LI_adv_address:
12367 {
12368 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12369 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12370 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12371
12372 view++;
12373
12374 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12375 dw2_asm_output_delta (2, line_label, prev_label,
12376 "from %s to %s", prev_label, line_label);
12377
12378 prev_addr = ent;
12379 break;
12380 }
12381
12382 case LI_set_line:
12383 if (ent->val == current_line)
12384 {
12385 /* We still need to start a new row, so output a copy insn. */
12386 dw2_asm_output_data (1, DW_LNS_copy,
12387 "copy line %u", current_line);
12388 }
12389 else
12390 {
12391 int line_offset = ent->val - current_line;
12392 int line_delta = line_offset - DWARF_LINE_BASE;
12393
12394 current_line = ent->val;
12395 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12396 {
12397 /* This can handle deltas from -10 to 234, using the current
12398 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12399 This takes 1 byte. */
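/* For instance, stepping from line 7 to line 9 gives line_offset 2 and,
with the -10 DWARF_LINE_BASE mentioned above, line_delta 12, so the
single byte DWARF_LINE_OPCODE_BASE + 12 both advances the line by 2
and emits the row. */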
12400 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12401 "line %u", current_line);
12402 }
12403 else
12404 {
12405 /* This can handle any delta. This takes at least 4 bytes,
12406 depending on the value being encoded. */
12407 dw2_asm_output_data (1, DW_LNS_advance_line,
12408 "advance to line %u", current_line);
12409 dw2_asm_output_data_sleb128 (line_offset, NULL);
12410 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12411 }
12412 }
12413 break;
12414
12415 case LI_set_file:
12416 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12417 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12418 break;
12419
12420 case LI_set_column:
12421 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12422 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12423 break;
12424
12425 case LI_negate_stmt:
12426 current_is_stmt = !current_is_stmt;
12427 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12428 "is_stmt %d", current_is_stmt);
12429 break;
12430
12431 case LI_set_prologue_end:
12432 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12433 "set prologue end");
12434 break;
12435
12436 case LI_set_epilogue_begin:
12437 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12438 "set epilogue begin");
12439 break;
12440
12441 case LI_set_discriminator:
12442 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12443 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12444 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12445 dw2_asm_output_data_uleb128 (ent->val, NULL);
12446 break;
12447 }
12448 }
12449
12450 /* Emit debug info for the address of the end of the table. */
12451 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12452 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12453 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12454 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12455
12456 dw2_asm_output_data (1, 0, "end sequence");
12457 dw2_asm_output_data_uleb128 (1, NULL);
12458 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12459 }
12460
12461 /* Output the source line number correspondence information. This
12462 information goes into the .debug_line section. */
12463
12464 static void
12465 output_line_info (bool prologue_only)
12466 {
12467 static unsigned int generation;
12468 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12469 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12470 bool saw_one = false;
12471 int opc;
12472
12473 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12474 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12475 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12476 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12477
12478 if (!XCOFF_DEBUGGING_INFO)
12479 {
12480 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12481 dw2_asm_output_data (4, 0xffffffff,
12482 "Initial length escape value indicating 64-bit DWARF extension");
12483 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12484 "Length of Source Line Info");
12485 }
12486
12487 ASM_OUTPUT_LABEL (asm_out_file, l1);
12488
12489 output_dwarf_version ();
12490 if (dwarf_version >= 5)
12491 {
12492 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12493 dw2_asm_output_data (1, 0, "Segment Size");
12494 }
12495 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12496 ASM_OUTPUT_LABEL (asm_out_file, p1);
12497
12498 /* Define the architecture-dependent minimum instruction length (in bytes).
12499 In this implementation of DWARF, this field is used for information
12500 purposes only. Since GCC generates assembly language, we have no
12501 a priori knowledge of how many instruction bytes are generated for each
12502 source line, and therefore can use only the DW_LNE_set_address and
12503 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12504 this as '1', which is "correct enough" for all architectures,
12505 and don't let the target override. */
12506 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12507
12508 if (dwarf_version >= 4)
12509 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12510 "Maximum Operations Per Instruction");
12511 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12512 "Default is_stmt_start flag");
12513 dw2_asm_output_data (1, DWARF_LINE_BASE,
12514 "Line Base Value (Special Opcodes)");
12515 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12516 "Line Range Value (Special Opcodes)");
12517 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12518 "Special Opcode Base");
12519
12520 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12521 {
12522 int n_op_args;
12523 switch (opc)
12524 {
12525 case DW_LNS_advance_pc:
12526 case DW_LNS_advance_line:
12527 case DW_LNS_set_file:
12528 case DW_LNS_set_column:
12529 case DW_LNS_fixed_advance_pc:
12530 case DW_LNS_set_isa:
12531 n_op_args = 1;
12532 break;
12533 default:
12534 n_op_args = 0;
12535 break;
12536 }
12537
12538 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12539 opc, n_op_args);
12540 }
12541
12542 /* Write out the information about the files we use. */
12543 output_file_names ();
12544 ASM_OUTPUT_LABEL (asm_out_file, p2);
12545 if (prologue_only)
12546 {
12547 /* Output the marker for the end of the line number info. */
12548 ASM_OUTPUT_LABEL (asm_out_file, l2);
12549 return;
12550 }
12551
12552 if (separate_line_info)
12553 {
12554 dw_line_info_table *table;
12555 size_t i;
12556
12557 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12558 if (table->in_use)
12559 {
12560 output_one_line_info_table (table);
12561 saw_one = true;
12562 }
12563 }
12564 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12565 {
12566 output_one_line_info_table (cold_text_section_line_info);
12567 saw_one = true;
12568 }
12569
12570 /* ??? Some Darwin linkers crash on a .debug_line section with no
12571 sequences. Further, merely a DW_LNE_end_sequence entry is not
12572 sufficient -- the address column must also be initialized.
12573 Make sure to output at least one set_address/end_sequence pair,
12574 choosing .text since that section is always present. */
12575 if (text_section_line_info->in_use || !saw_one)
12576 output_one_line_info_table (text_section_line_info);
12577
12578 /* Output the marker for the end of the line number info. */
12579 ASM_OUTPUT_LABEL (asm_out_file, l2);
12580 }
12581 \f
12582 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12583
12584 static inline bool
12585 need_endianity_attribute_p (bool reverse)
12586 {
12587 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12588 }
12589
12590 /* Given a pointer to a tree node for some base type, return a pointer to
12591 a DIE that describes the given type. REVERSE is true if the type is
12592 to be interpreted in the reverse storage order wrt the target order.
12593
12594 This routine must only be called for GCC type nodes that correspond to
12595 Dwarf base (fundamental) types. */
12596
12597 static dw_die_ref
12598 base_type_die (tree type, bool reverse)
12599 {
12600 dw_die_ref base_type_result;
12601 enum dwarf_type encoding;
12602 bool fpt_used = false;
12603 struct fixed_point_type_info fpt_info;
12604 tree type_bias = NULL_TREE;
12605
12606 /* If this is a subtype that should not be emitted as a subrange type,
12607 use the base type. See subrange_type_for_debug_p. */
12608 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12609 type = TREE_TYPE (type);
12610
12611 switch (TREE_CODE (type))
12612 {
12613 case INTEGER_TYPE:
12614 if ((dwarf_version >= 4 || !dwarf_strict)
12615 && TYPE_NAME (type)
12616 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12617 && DECL_IS_BUILTIN (TYPE_NAME (type))
12618 && DECL_NAME (TYPE_NAME (type)))
12619 {
12620 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12621 if (strcmp (name, "char16_t") == 0
12622 || strcmp (name, "char32_t") == 0)
12623 {
12624 encoding = DW_ATE_UTF;
12625 break;
12626 }
12627 }
12628 if ((dwarf_version >= 3 || !dwarf_strict)
12629 && lang_hooks.types.get_fixed_point_type_info)
12630 {
12631 memset (&fpt_info, 0, sizeof (fpt_info));
12632 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12633 {
12634 fpt_used = true;
12635 encoding = ((TYPE_UNSIGNED (type))
12636 ? DW_ATE_unsigned_fixed
12637 : DW_ATE_signed_fixed);
12638 break;
12639 }
12640 }
12641 if (TYPE_STRING_FLAG (type))
12642 {
12643 if (TYPE_UNSIGNED (type))
12644 encoding = DW_ATE_unsigned_char;
12645 else
12646 encoding = DW_ATE_signed_char;
12647 }
12648 else if (TYPE_UNSIGNED (type))
12649 encoding = DW_ATE_unsigned;
12650 else
12651 encoding = DW_ATE_signed;
12652
12653 if (!dwarf_strict
12654 && lang_hooks.types.get_type_bias)
12655 type_bias = lang_hooks.types.get_type_bias (type);
12656 break;
12657
12658 case REAL_TYPE:
12659 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12660 {
12661 if (dwarf_version >= 3 || !dwarf_strict)
12662 encoding = DW_ATE_decimal_float;
12663 else
12664 encoding = DW_ATE_lo_user;
12665 }
12666 else
12667 encoding = DW_ATE_float;
12668 break;
12669
12670 case FIXED_POINT_TYPE:
12671 if (!(dwarf_version >= 3 || !dwarf_strict))
12672 encoding = DW_ATE_lo_user;
12673 else if (TYPE_UNSIGNED (type))
12674 encoding = DW_ATE_unsigned_fixed;
12675 else
12676 encoding = DW_ATE_signed_fixed;
12677 break;
12678
12679 /* Dwarf2 doesn't know anything about complex ints, so use
12680 a user defined type for it. */
12681 case COMPLEX_TYPE:
12682 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12683 encoding = DW_ATE_complex_float;
12684 else
12685 encoding = DW_ATE_lo_user;
12686 break;
12687
12688 case BOOLEAN_TYPE:
12689 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12690 encoding = DW_ATE_boolean;
12691 break;
12692
12693 default:
12694 /* No other TREE_CODEs are Dwarf fundamental types. */
12695 gcc_unreachable ();
12696 }
12697
12698 base_type_result = new_die_raw (DW_TAG_base_type);
12699
12700 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12701 int_size_in_bytes (type));
12702 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12703
12704 if (need_endianity_attribute_p (reverse))
12705 add_AT_unsigned (base_type_result, DW_AT_endianity,
12706 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12707
12708 add_alignment_attribute (base_type_result, type);
12709
12710 if (fpt_used)
12711 {
12712 switch (fpt_info.scale_factor_kind)
12713 {
12714 case fixed_point_scale_factor_binary:
12715 add_AT_int (base_type_result, DW_AT_binary_scale,
12716 fpt_info.scale_factor.binary);
12717 break;
12718
12719 case fixed_point_scale_factor_decimal:
12720 add_AT_int (base_type_result, DW_AT_decimal_scale,
12721 fpt_info.scale_factor.decimal);
12722 break;
12723
12724 case fixed_point_scale_factor_arbitrary:
12725 /* Arbitrary scale factors cannot be described in standard DWARF,
12726 yet. */
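/* A purely illustrative (Ada-flavoured) example: a fixed-point type
   whose small is 1.0/3.0 fits neither DW_AT_binary_scale nor
   DW_AT_decimal_scale, so the code below emits a DW_TAG_constant
   carrying DW_AT_GNU_numerator 1 and DW_AT_GNU_denominator 3,
   referenced from the base type via DW_AT_small. */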
12727 if (!dwarf_strict)
12728 {
12729 /* Describe the scale factor as a rational constant. */
12730 const dw_die_ref scale_factor
12731 = new_die (DW_TAG_constant, comp_unit_die (), type);
12732
12733 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12734 fpt_info.scale_factor.arbitrary.numerator);
12735 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12736 fpt_info.scale_factor.arbitrary.denominator);
12737
12738 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12739 }
12740 break;
12741
12742 default:
12743 gcc_unreachable ();
12744 }
12745 }
12746
12747 if (type_bias)
12748 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12749 dw_scalar_form_constant
12750 | dw_scalar_form_exprloc
12751 | dw_scalar_form_reference,
12752 NULL);
12753
12754 return base_type_result;
12755 }
12756
12757 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12758 named 'auto' in its type: return true for it, false otherwise. */
12759
12760 static inline bool
12761 is_cxx_auto (tree type)
12762 {
12763 if (is_cxx ())
12764 {
12765 tree name = TYPE_IDENTIFIER (type);
12766 if (name == get_identifier ("auto")
12767 || name == get_identifier ("decltype(auto)"))
12768 return true;
12769 }
12770 return false;
12771 }
12772
12773 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12774 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12775
12776 static inline int
12777 is_base_type (tree type)
12778 {
12779 switch (TREE_CODE (type))
12780 {
12781 case INTEGER_TYPE:
12782 case REAL_TYPE:
12783 case FIXED_POINT_TYPE:
12784 case COMPLEX_TYPE:
12785 case BOOLEAN_TYPE:
12786 case POINTER_BOUNDS_TYPE:
12787 return 1;
12788
12789 case VOID_TYPE:
12790 case ARRAY_TYPE:
12791 case RECORD_TYPE:
12792 case UNION_TYPE:
12793 case QUAL_UNION_TYPE:
12794 case ENUMERAL_TYPE:
12795 case FUNCTION_TYPE:
12796 case METHOD_TYPE:
12797 case POINTER_TYPE:
12798 case REFERENCE_TYPE:
12799 case NULLPTR_TYPE:
12800 case OFFSET_TYPE:
12801 case LANG_TYPE:
12802 case VECTOR_TYPE:
12803 return 0;
12804
12805 default:
12806 if (is_cxx_auto (type))
12807 return 0;
12808 gcc_unreachable ();
12809 }
12810
12811 return 0;
12812 }
12813
12814 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12815 node, return the size in bits for the type if it is a constant, or else
12816 return the alignment for the type if the type's size is not constant, or
12817 else return BITS_PER_WORD if the type actually turns out to be an
12818 ERROR_MARK node. Return zero if the type has no TYPE_SIZE. */
12819
12820 static inline unsigned HOST_WIDE_INT
12821 simple_type_size_in_bits (const_tree type)
12822 {
12823 if (TREE_CODE (type) == ERROR_MARK)
12824 return BITS_PER_WORD;
12825 else if (TYPE_SIZE (type) == NULL_TREE)
12826 return 0;
12827 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12828 return tree_to_uhwi (TYPE_SIZE (type));
12829 else
12830 return TYPE_ALIGN (type);
12831 }
12832
12833 /* Similarly, but return an offset_int instead of UHWI. */
12834
12835 static inline offset_int
12836 offset_int_type_size_in_bits (const_tree type)
12837 {
12838 if (TREE_CODE (type) == ERROR_MARK)
12839 return BITS_PER_WORD;
12840 else if (TYPE_SIZE (type) == NULL_TREE)
12841 return 0;
12842 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12843 return wi::to_offset (TYPE_SIZE (type));
12844 else
12845 return TYPE_ALIGN (type);
12846 }
12847
12848 /* Given a pointer to a tree node for a subrange type, return a pointer
12849 to a DIE that describes the given type. */
12850
12851 static dw_die_ref
12852 subrange_type_die (tree type, tree low, tree high, tree bias,
12853 dw_die_ref context_die)
12854 {
12855 dw_die_ref subrange_die;
12856 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12857
12858 if (context_die == NULL)
12859 context_die = comp_unit_die ();
12860
12861 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12862
12863 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12864 {
12865 /* The size of the subrange type and its base type do not match,
12866 so we need to generate a size attribute for the subrange type. */
12867 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12868 }
12869
12870 add_alignment_attribute (subrange_die, type);
12871
12872 if (low)
12873 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12874 if (high)
12875 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12876 if (bias && !dwarf_strict)
12877 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12878 dw_scalar_form_constant
12879 | dw_scalar_form_exprloc
12880 | dw_scalar_form_reference,
12881 NULL);
12882
12883 return subrange_die;
12884 }
12885
12886 /* Returns the (const and/or volatile) cv_qualifiers associated with
12887 the decl node. This will normally be augmented with the
12888 cv_qualifiers of the underlying type in add_type_attribute. */
12889
12890 static int
12891 decl_quals (const_tree decl)
12892 {
12893 return ((TREE_READONLY (decl)
12894 /* The C++ front-end correctly marks reference-typed
12895 variables as readonly, but from a language (and debug
12896 info) standpoint they are not const-qualified. */
12897 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12898 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12899 | (TREE_THIS_VOLATILE (decl)
12900 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12901 }
12902
12903 /* Determine the TYPE whose qualifiers match the largest strict subset
12904 of the given TYPE_QUALS, and return its qualifiers. Ignore all
12905 qualifiers outside QUAL_MASK. */
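/* For example, if TYPE is a "const volatile int" and only a "const int"
   variant exists, the largest strict subset of the requested qualifiers
   is TYPE_QUAL_CONST, so the caller (modified_type_die) only needs to
   add a DW_TAG_volatile_type wrapper on top of the existing const
   DIE. */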
12906
12907 static int
12908 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
12909 {
12910 tree t;
12911 int best_rank = 0, best_qual = 0, max_rank;
12912
12913 type_quals &= qual_mask;
12914 max_rank = popcount_hwi (type_quals) - 1;
12915
12916 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
12917 t = TYPE_NEXT_VARIANT (t))
12918 {
12919 int q = TYPE_QUALS (t) & qual_mask;
12920
12921 if ((q & type_quals) == q && q != type_quals
12922 && check_base_type (t, type))
12923 {
12924 int rank = popcount_hwi (q);
12925
12926 if (rank > best_rank)
12927 {
12928 best_rank = rank;
12929 best_qual = q;
12930 }
12931 }
12932 }
12933
12934 return best_qual;
12935 }
12936
12937 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
12938 static const dwarf_qual_info_t dwarf_qual_info[] =
12939 {
12940 { TYPE_QUAL_CONST, DW_TAG_const_type },
12941 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
12942 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
12943 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
12944 };
12945 static const unsigned int dwarf_qual_info_size
12946 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
12947
12948 /* If DIE is a qualified DIE of some base DIE with the same parent,
12949 return the base DIE, otherwise return NULL. Set MASK to the
12950 qualifiers added compared to the returned DIE. */
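/* E.g. if DIE is a DW_TAG_const_type whose DW_AT_type refers to a
   DW_TAG_volatile_type DIE under the same parent, which in turn refers
   to the base DIE, the recursion returns that base DIE and MASK
   accumulates TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE. */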
12951
12952 static dw_die_ref
12953 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
12954 {
12955 unsigned int i;
12956 for (i = 0; i < dwarf_qual_info_size; i++)
12957 if (die->die_tag == dwarf_qual_info[i].t)
12958 break;
12959 if (i == dwarf_qual_info_size)
12960 return NULL;
12961 if (vec_safe_length (die->die_attr) != 1)
12962 return NULL;
12963 dw_die_ref type = get_AT_ref (die, DW_AT_type);
12964 if (type == NULL || type->die_parent != die->die_parent)
12965 return NULL;
12966 *mask |= dwarf_qual_info[i].q;
12967 if (depth)
12968 {
12969 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
12970 if (ret)
12971 return ret;
12972 }
12973 return type;
12974 }
12975
12976 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
12977 entry that chains the modifiers specified by CV_QUALS in front of the
12978 given type. REVERSE is true if the type is to be interpreted in the
12979 reverse storage order wrt the target order. */
12980
12981 static dw_die_ref
12982 modified_type_die (tree type, int cv_quals, bool reverse,
12983 dw_die_ref context_die)
12984 {
12985 enum tree_code code = TREE_CODE (type);
12986 dw_die_ref mod_type_die;
12987 dw_die_ref sub_die = NULL;
12988 tree item_type = NULL;
12989 tree qualified_type;
12990 tree name, low, high;
12991 dw_die_ref mod_scope;
12992 /* Only these cv-qualifiers are currently handled. */
12993 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
12994 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
12995 ENCODE_QUAL_ADDR_SPACE(~0U));
12996 const bool reverse_base_type
12997 = need_endianity_attribute_p (reverse) && is_base_type (type);
12998
12999 if (code == ERROR_MARK)
13000 return NULL;
13001
13002 if (lang_hooks.types.get_debug_type)
13003 {
13004 tree debug_type = lang_hooks.types.get_debug_type (type);
13005
13006 if (debug_type != NULL_TREE && debug_type != type)
13007 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13008 }
13009
13010 cv_quals &= cv_qual_mask;
13011
13012 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13013 tag modifier (and not an attribute) that old consumers won't be
13014 able to handle. */
13015 if (dwarf_version < 3)
13016 cv_quals &= ~TYPE_QUAL_RESTRICT;
13017
13018 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13019 if (dwarf_version < 5)
13020 cv_quals &= ~TYPE_QUAL_ATOMIC;
13021
13022 /* See if we already have the appropriately qualified variant of
13023 this type. */
13024 qualified_type = get_qualified_type (type, cv_quals);
13025
13026 if (qualified_type == sizetype)
13027 {
13028 /* Try not to expose the internal sizetype type's name. */
13029 if (TYPE_NAME (qualified_type)
13030 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13031 {
13032 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13033
13034 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13035 && (TYPE_PRECISION (t)
13036 == TYPE_PRECISION (qualified_type))
13037 && (TYPE_UNSIGNED (t)
13038 == TYPE_UNSIGNED (qualified_type)));
13039 qualified_type = t;
13040 }
13041 else if (qualified_type == sizetype
13042 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13043 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13044 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13045 qualified_type = size_type_node;
13046 }
13047
13048 /* If we do, then we can just use its DIE, if it exists. */
13049 if (qualified_type)
13050 {
13051 mod_type_die = lookup_type_die (qualified_type);
13052
13053 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13054 dealt with specially: the DIE with the attribute, if it exists, is
13055 placed immediately after the regular DIE for the same base type. */
13056 if (mod_type_die
13057 && (!reverse_base_type
13058 || ((mod_type_die = mod_type_die->die_sib) != NULL
13059 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13060 return mod_type_die;
13061 }
13062
13063 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13064
13065 /* Handle C typedef types. */
13066 if (name
13067 && TREE_CODE (name) == TYPE_DECL
13068 && DECL_ORIGINAL_TYPE (name)
13069 && !DECL_ARTIFICIAL (name))
13070 {
13071 tree dtype = TREE_TYPE (name);
13072
13073 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13074 if (qualified_type == dtype && !reverse_base_type)
13075 {
13076 tree origin = decl_ultimate_origin (name);
13077
13078 /* Typedef variants that have an abstract origin don't get their own
13079 type DIE (see gen_typedef_die), so fall back on the ultimate
13080 abstract origin instead. */
13081 if (origin != NULL && origin != name)
13082 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13083 context_die);
13084
13085 /* For a named type, use the typedef. */
13086 gen_type_die (qualified_type, context_die);
13087 return lookup_type_die (qualified_type);
13088 }
13089 else
13090 {
13091 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13092 dquals &= cv_qual_mask;
13093 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13094 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13095 /* cv-unqualified version of named type. Just use
13096 the unnamed type to which it refers. */
13097 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13098 reverse, context_die);
13099 /* Else cv-qualified version of named type; fall through. */
13100 }
13101 }
13102
13103 mod_scope = scope_die_for (type, context_die);
13104
13105 if (cv_quals)
13106 {
13107 int sub_quals = 0, first_quals = 0;
13108 unsigned i;
13109 dw_die_ref first = NULL, last = NULL;
13110
13111 /* Determine a lesser qualified type that most closely matches
13112 this one. Then generate DW_TAG_* entries for the remaining
13113 qualifiers. */
13114 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13115 cv_qual_mask);
13116 if (sub_quals && use_debug_types)
13117 {
13118 bool needed = false;
13119 /* If emitting type units, make sure the order of qualifiers
13120 is canonical. Thus, start from unqualified type if
13121 an earlier qualifier is missing in sub_quals, but some later
13122 one is present there. */
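/* Concretely: if cv_quals is const|volatile but the best existing
   variant is just "volatile T" (sub_quals == TYPE_QUAL_VOLATILE),
   building on it would produce const(volatile(T)), whereas the
   canonical nesting built from scratch is volatile(const(T)), so
   sub_quals is reset to 0 here. */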
13123 for (i = 0; i < dwarf_qual_info_size; i++)
13124 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13125 needed = true;
13126 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13127 {
13128 sub_quals = 0;
13129 break;
13130 }
13131 }
13132 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13133 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13134 {
13135 /* As not all intermediate qualified DIEs have corresponding
13136 tree types, ensure that qualified DIEs in the same scope
13137 as their DW_AT_type are emitted after their DW_AT_type,
13138 only with other qualified DIEs for the same type possibly
13139 in between them. Determine the range of such qualified
13140 DIEs now (first being the base type, last being corresponding
13141 last qualified DIE for it). */
13142 unsigned int count = 0;
13143 first = qualified_die_p (mod_type_die, &first_quals,
13144 dwarf_qual_info_size);
13145 if (first == NULL)
13146 first = mod_type_die;
13147 gcc_assert ((first_quals & ~sub_quals) == 0);
13148 for (count = 0, last = first;
13149 count < (1U << dwarf_qual_info_size);
13150 count++, last = last->die_sib)
13151 {
13152 int quals = 0;
13153 if (last == mod_scope->die_child)
13154 break;
13155 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13156 != first)
13157 break;
13158 }
13159 }
13160
13161 for (i = 0; i < dwarf_qual_info_size; i++)
13162 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13163 {
13164 dw_die_ref d;
13165 if (first && first != last)
13166 {
13167 for (d = first->die_sib; ; d = d->die_sib)
13168 {
13169 int quals = 0;
13170 qualified_die_p (d, &quals, dwarf_qual_info_size);
13171 if (quals == (first_quals | dwarf_qual_info[i].q))
13172 break;
13173 if (d == last)
13174 {
13175 d = NULL;
13176 break;
13177 }
13178 }
13179 if (d)
13180 {
13181 mod_type_die = d;
13182 continue;
13183 }
13184 }
13185 if (first)
13186 {
13187 d = new_die_raw (dwarf_qual_info[i].t);
13188 add_child_die_after (mod_scope, d, last);
13189 last = d;
13190 }
13191 else
13192 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13193 if (mod_type_die)
13194 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13195 mod_type_die = d;
13196 first_quals |= dwarf_qual_info[i].q;
13197 }
13198 }
13199 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13200 {
13201 dwarf_tag tag = DW_TAG_pointer_type;
13202 if (code == REFERENCE_TYPE)
13203 {
13204 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13205 tag = DW_TAG_rvalue_reference_type;
13206 else
13207 tag = DW_TAG_reference_type;
13208 }
13209 mod_type_die = new_die (tag, mod_scope, type);
13210
13211 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13212 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13213 add_alignment_attribute (mod_type_die, type);
13214 item_type = TREE_TYPE (type);
13215
13216 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13217 if (!ADDR_SPACE_GENERIC_P (as))
13218 {
13219 int action = targetm.addr_space.debug (as);
13220 if (action >= 0)
13221 {
13222 /* Positive values indicate an address_class. */
13223 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13224 }
13225 else
13226 {
13227 /* Negative values indicate an (inverted) segment base reg. */
13228 dw_loc_descr_ref d
13229 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13230 add_AT_loc (mod_type_die, DW_AT_segment, d);
13231 }
13232 }
13233 }
13234 else if (code == INTEGER_TYPE
13235 && TREE_TYPE (type) != NULL_TREE
13236 && subrange_type_for_debug_p (type, &low, &high))
13237 {
13238 tree bias = NULL_TREE;
13239 if (lang_hooks.types.get_type_bias)
13240 bias = lang_hooks.types.get_type_bias (type);
13241 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13242 item_type = TREE_TYPE (type);
13243 }
13244 else if (is_base_type (type))
13245 {
13246 mod_type_die = base_type_die (type, reverse);
13247
13248 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13249 if (reverse_base_type)
13250 {
13251 dw_die_ref after_die
13252 = modified_type_die (type, cv_quals, false, context_die);
13253 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13254 }
13255 else
13256 add_child_die (comp_unit_die (), mod_type_die);
13257
13258 add_pubtype (type, mod_type_die);
13259 }
13260 else
13261 {
13262 gen_type_die (type, context_die);
13263
13264 /* We have to get the type_main_variant here (and pass that to the
13265 `lookup_type_die' routine) because the ..._TYPE node we have
13266 might simply be a *copy* of some original type node (where the
13267 copy was created to help us keep track of typedef names) and
13268 that copy might have a different TYPE_UID from the original
13269 ..._TYPE node. */
13270 if (TREE_CODE (type) == FUNCTION_TYPE
13271 || TREE_CODE (type) == METHOD_TYPE)
13272 {
13273 /* For function/method types, can't just use type_main_variant here,
13274 because that can have different ref-qualifiers for C++,
13275 but try to canonicalize. */
13276 tree main = TYPE_MAIN_VARIANT (type);
13277 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13278 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13279 && check_base_type (t, main)
13280 && check_lang_type (t, type))
13281 return lookup_type_die (t);
13282 return lookup_type_die (type);
13283 }
13284 else if (TREE_CODE (type) != VECTOR_TYPE
13285 && TREE_CODE (type) != ARRAY_TYPE)
13286 return lookup_type_die (type_main_variant (type));
13287 else
13288 /* Vectors have the debugging information in the type,
13289 not the main variant. */
13290 return lookup_type_die (type);
13291 }
13292
13293 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13294 don't output a DW_TAG_typedef, since there isn't one in the
13295 user's program; just attach a DW_AT_name to the type.
13296 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13297 if the base type already has the same name. */
13298 if (name
13299 && ((TREE_CODE (name) != TYPE_DECL
13300 && (qualified_type == TYPE_MAIN_VARIANT (type)
13301 || (cv_quals == TYPE_UNQUALIFIED)))
13302 || (TREE_CODE (name) == TYPE_DECL
13303 && TREE_TYPE (name) == qualified_type
13304 && DECL_NAME (name))))
13305 {
13306 if (TREE_CODE (name) == TYPE_DECL)
13307 /* Could just call add_name_and_src_coords_attributes here,
13308 but since this is a builtin type it doesn't have any
13309 useful source coordinates anyway. */
13310 name = DECL_NAME (name);
13311 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13312 }
13313 /* This probably indicates a bug. */
13314 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13315 {
13316 name = TYPE_IDENTIFIER (type);
13317 add_name_attribute (mod_type_die,
13318 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13319 }
13320
13321 if (qualified_type && !reverse_base_type)
13322 equate_type_number_to_die (qualified_type, mod_type_die);
13323
13324 if (item_type)
13325 /* We must do this after the equate_type_number_to_die call, in case
13326 this is a recursive type. This ensures that the modified_type_die
13327 recursion will terminate even if the type is recursive. Recursive
13328 types are possible in Ada. */
13329 sub_die = modified_type_die (item_type,
13330 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13331 reverse,
13332 context_die);
13333
13334 if (sub_die != NULL)
13335 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13336
13337 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13338 if (TYPE_ARTIFICIAL (type))
13339 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13340
13341 return mod_type_die;
13342 }
13343
13344 /* Generate DIEs for the generic parameters of T.
13345 T must be either a generic type or a generic function.
13346 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13347
13348 static void
13349 gen_generic_params_dies (tree t)
13350 {
13351 tree parms, args;
13352 int parms_num, i;
13353 dw_die_ref die = NULL;
13354 int non_default;
13355
13356 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13357 return;
13358
13359 if (TYPE_P (t))
13360 die = lookup_type_die (t);
13361 else if (DECL_P (t))
13362 die = lookup_decl_die (t);
13363
13364 gcc_assert (die);
13365
13366 parms = lang_hooks.get_innermost_generic_parms (t);
13367 if (!parms)
13368 /* T has no generic parameter. It means T is neither a generic type
13369 nor a generic function. End of story. */
13370 return;
13371
13372 parms_num = TREE_VEC_LENGTH (parms);
13373 args = lang_hooks.get_innermost_generic_args (t);
13374 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13375 non_default = int_cst_value (TREE_CHAIN (args));
13376 else
13377 non_default = TREE_VEC_LENGTH (args);
13378 for (i = 0; i < parms_num; i++)
13379 {
13380 tree parm, arg, arg_pack_elems;
13381 dw_die_ref parm_die;
13382
13383 parm = TREE_VEC_ELT (parms, i);
13384 arg = TREE_VEC_ELT (args, i);
13385 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13386 gcc_assert (parm && TREE_VALUE (parm) && arg);
13387
13388 if (parm && TREE_VALUE (parm) && arg)
13389 {
13390 /* If PARM represents a template parameter pack,
13391 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13392 by DW_TAG_template_*_parameter DIEs for the argument
13393 pack elements of ARG. Note that ARG would then be
13394 an argument pack. */
13395 if (arg_pack_elems)
13396 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13397 arg_pack_elems,
13398 die);
13399 else
13400 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13401 true /* emit name */, die);
13402 if (i >= non_default)
13403 add_AT_flag (parm_die, DW_AT_default_value, 1);
13404 }
13405 }
13406 }
13407
13408 /* Create and return a DIE for PARM which should be
13409 the representation of a generic type parameter.
13410 For instance, in the C++ front end, PARM would be a template parameter.
13411 ARG is the argument to PARM.
13412 EMIT_NAME_P, if true, means the DIE will have its DW_AT_name attribute
13413 set to the name of PARM.
13414 PARENT_DIE is the parent DIE to which the newly created DIE should be
13415 added as a child node. */
13416
13417 static dw_die_ref
13418 generic_parameter_die (tree parm, tree arg,
13419 bool emit_name_p,
13420 dw_die_ref parent_die)
13421 {
13422 dw_die_ref tmpl_die = NULL;
13423 const char *name = NULL;
13424
13425 if (!parm || !DECL_NAME (parm) || !arg)
13426 return NULL;
13427
13428 /* We support non-type generic parameters and arguments,
13429 type generic parameters and arguments, as well as
13430 generic generic parameters (a.k.a. template template parameters in C++)
13431 and arguments. */
13432 if (TREE_CODE (parm) == PARM_DECL)
13433 /* PARM is a nontype generic parameter */
13434 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13435 else if (TREE_CODE (parm) == TYPE_DECL)
13436 /* PARM is a type generic parameter. */
13437 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13438 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13439 /* PARM is a generic generic parameter.
13440 Its DIE is a GNU extension. It shall have a
13441 DW_AT_name attribute to represent the name of the template template
13442 parameter, and a DW_AT_GNU_template_name attribute to represent the
13443 name of the template template argument. */
13444 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13445 parent_die, parm);
13446 else
13447 gcc_unreachable ();
13448
13449 if (tmpl_die)
13450 {
13451 tree tmpl_type;
13452
13453 /* If PARM is a generic parameter pack, it means we are
13454 emitting debug info for a template argument pack element.
13455 In other words, ARG is a template argument pack element.
13456 In that case, we don't emit any DW_AT_name attribute for
13457 the die. */
13458 if (emit_name_p)
13459 {
13460 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13461 gcc_assert (name);
13462 add_AT_string (tmpl_die, DW_AT_name, name);
13463 }
13464
13465 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13466 {
13467 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13468 TMPL_DIE should have a child DW_AT_type attribute that is set
13469 to the type of the argument to PARM, which is ARG.
13470 If PARM is a type generic parameter, TMPL_DIE should have a
13471 child DW_AT_type that is set to ARG. */
13472 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13473 add_type_attribute (tmpl_die, tmpl_type,
13474 (TREE_THIS_VOLATILE (tmpl_type)
13475 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13476 false, parent_die);
13477 }
13478 else
13479 {
13480 /* So TMPL_DIE is a DIE representing a generic generic
13481 parameter, a.k.a. a template template parameter in C++,
13482 and ARG is a template. */
13483
13484 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13485 to the name of the argument. */
13486 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13487 if (name)
13488 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13489 }
13490
13491 if (TREE_CODE (parm) == PARM_DECL)
13492 /* So PARM is a non-type generic parameter.
13493 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13494 attribute of TMPL_DIE whose value represents the value
13495 of ARG.
13496 We must be careful here:
13497 the value of ARG might reference some function decls.
13498 We might currently be emitting debug info for a generic
13499 type, and since types are emitted before function decls, we
13500 don't know whether the function decls referenced by ARG will
13501 actually be emitted after cgraph computations.
13502 So we must defer the generation of the DW_AT_const_value
13503 until after cgraph is ready. */
13504 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13505 }
13506
13507 return tmpl_die;
13508 }
13509
13510 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13511 PARM_PACK, which must be a template parameter pack; PARM_PACK_ARGS is its
13512 argument pack. The returned DIE will be a child DIE of PARENT_DIE. */
13513
13514 static dw_die_ref
13515 template_parameter_pack_die (tree parm_pack,
13516 tree parm_pack_args,
13517 dw_die_ref parent_die)
13518 {
13519 dw_die_ref die;
13520 int j;
13521
13522 gcc_assert (parent_die && parm_pack);
13523
13524 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13525 add_name_and_src_coords_attributes (die, parm_pack);
13526 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13527 generic_parameter_die (parm_pack,
13528 TREE_VEC_ELT (parm_pack_args, j),
13529 false /* Don't emit DW_AT_name */,
13530 die);
13531 return die;
13532 }
13533
13534 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13535 an enumerated type. */
13536
13537 static inline int
13538 type_is_enum (const_tree type)
13539 {
13540 return TREE_CODE (type) == ENUMERAL_TYPE;
13541 }
13542
13543 /* Return the DBX register number described by a given RTL node. */
13544
13545 static unsigned int
13546 dbx_reg_number (const_rtx rtl)
13547 {
13548 unsigned regno = REGNO (rtl);
13549
13550 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13551
13552 #ifdef LEAF_REG_REMAP
13553 if (crtl->uses_only_leaf_regs)
13554 {
13555 int leaf_reg = LEAF_REG_REMAP (regno);
13556 if (leaf_reg != -1)
13557 regno = (unsigned) leaf_reg;
13558 }
13559 #endif
13560
13561 regno = DBX_REGISTER_NUMBER (regno);
13562 gcc_assert (regno != INVALID_REGNUM);
13563 return regno;
13564 }
13565
13566 /* Optionally add a DW_OP_piece term to a location description expression.
13567 DW_OP_piece is only added if the location description expression doesn't
13568 already end with DW_OP_piece. */
13569
13570 static void
13571 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13572 {
13573 dw_loc_descr_ref loc;
13574
13575 if (*list_head != NULL)
13576 {
13577 /* Find the end of the chain. */
13578 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13579 ;
13580
13581 if (loc->dw_loc_opc != DW_OP_piece)
13582 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13583 }
13584 }
13585
13586 /* Return a location descriptor that designates a machine register or
13587 zero if there is none. */
13588
13589 static dw_loc_descr_ref
13590 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13591 {
13592 rtx regs;
13593
13594 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13595 return 0;
13596
13597 /* We only use "frame base" when we're sure we're talking about the
13598 post-prologue local stack frame. We do this by *not* running
13599 register elimination until this point, and recognizing the special
13600 argument pointer and soft frame pointer rtx's.
13601 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13602 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13603 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13604 {
13605 dw_loc_descr_ref result = NULL;
13606
13607 if (dwarf_version >= 4 || !dwarf_strict)
13608 {
13609 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13610 initialized);
13611 if (result)
13612 add_loc_descr (&result,
13613 new_loc_descr (DW_OP_stack_value, 0, 0));
13614 }
13615 return result;
13616 }
13617
13618 regs = targetm.dwarf_register_span (rtl);
13619
13620 if (REG_NREGS (rtl) > 1 || regs)
13621 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13622 else
13623 {
13624 unsigned int dbx_regnum = dbx_reg_number (rtl);
13625 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13626 return 0;
13627 return one_reg_loc_descriptor (dbx_regnum, initialized);
13628 }
13629 }
13630
13631 /* Return a location descriptor that designates a machine register for
13632 a given hard register number. */
13633
13634 static dw_loc_descr_ref
13635 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13636 {
13637 dw_loc_descr_ref reg_loc_descr;
13638
13639 if (regno <= 31)
13640 reg_loc_descr
13641 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13642 else
13643 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13644
13645 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13646 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13647
13648 return reg_loc_descr;
13649 }
13650
13651 /* Given an RTL of a register, return a location descriptor that
13652 designates a value that spans more than one register. */
13653
13654 static dw_loc_descr_ref
13655 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13656 enum var_init_status initialized)
13657 {
13658 int size, i;
13659 dw_loc_descr_ref loc_result = NULL;
13660
13661 /* Simple, contiguous registers. */
13662 if (regs == NULL_RTX)
13663 {
13664 unsigned reg = REGNO (rtl);
13665 int nregs;
13666
13667 #ifdef LEAF_REG_REMAP
13668 if (crtl->uses_only_leaf_regs)
13669 {
13670 int leaf_reg = LEAF_REG_REMAP (reg);
13671 if (leaf_reg != -1)
13672 reg = (unsigned) leaf_reg;
13673 }
13674 #endif
13675
13676 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13677 nregs = REG_NREGS (rtl);
13678
13679 /* At present we only track constant-sized pieces. */
13680 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13681 return NULL;
13682 size /= nregs;
13683
13684 loc_result = NULL;
13685 while (nregs--)
13686 {
13687 dw_loc_descr_ref t;
13688
13689 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13690 VAR_INIT_STATUS_INITIALIZED);
13691 add_loc_descr (&loc_result, t);
13692 add_loc_descr_op_piece (&loc_result, size);
13693 ++reg;
13694 }
13695 return loc_result;
13696 }
13697
13698 /* Now onto stupid register sets in non-contiguous locations. */
13699
13700 gcc_assert (GET_CODE (regs) == PARALLEL);
13701
13702 /* At present we only track constant-sized pieces. */
13703 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13704 return NULL;
13705 loc_result = NULL;
13706
13707 for (i = 0; i < XVECLEN (regs, 0); ++i)
13708 {
13709 dw_loc_descr_ref t;
13710
13711 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13712 VAR_INIT_STATUS_INITIALIZED);
13713 add_loc_descr (&loc_result, t);
13714 add_loc_descr_op_piece (&loc_result, size);
13715 }
13716
13717 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13718 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13719 return loc_result;
13720 }
13721
13722 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13723
13724 /* Return a location descriptor that designates a constant i,
13725 as a compound operation from constant (i >> shift), constant shift
13726 and DW_OP_shl. */
13727
13728 static dw_loc_descr_ref
13729 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13730 {
13731 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13732 add_loc_descr (&ret, int_loc_descriptor (shift));
13733 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13734 return ret;
13735 }
13736
13737 /* Return a location descriptor that designates constant POLY_I. */
13738
13739 static dw_loc_descr_ref
13740 int_loc_descriptor (poly_int64 poly_i)
13741 {
13742 enum dwarf_location_atom op;
13743
13744 HOST_WIDE_INT i;
13745 if (!poly_i.is_constant (&i))
13746 {
13747 /* Create location descriptions for the non-constant part and
13748 add any constant offset at the end. */
13749 dw_loc_descr_ref ret = NULL;
13750 HOST_WIDE_INT constant = poly_i.coeffs[0];
13751 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13752 {
13753 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13754 if (coeff != 0)
13755 {
13756 dw_loc_descr_ref start = ret;
13757 unsigned int factor;
13758 int bias;
13759 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13760 (j, &factor, &bias);
13761
13762 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13763 add COEFF * (REGNO / FACTOR) now and subtract
13764 COEFF * BIAS from the final constant part. */
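/* Illustrative only -- FACTOR, BIAS and the register are supplied by
   the target hook: with FACTOR == 2, BIAS == 2 and the indeterminate
   in DWARF register R, a poly_int64 of 16 + 8*x is emitted as
   DW_OP_bregR 0; DW_OP_lit4; DW_OP_mul, i.e. 8 * (R / 2 - 2) + 16
   == 4 * R, the "- 8 * 2" having been folded into the trailing
   constant below. */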
13765 constant -= coeff * bias;
13766 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13767 if (coeff % factor == 0)
13768 coeff /= factor;
13769 else
13770 {
13771 int amount = exact_log2 (factor);
13772 gcc_assert (amount >= 0);
13773 add_loc_descr (&ret, int_loc_descriptor (amount));
13774 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13775 }
13776 if (coeff != 1)
13777 {
13778 add_loc_descr (&ret, int_loc_descriptor (coeff));
13779 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13780 }
13781 if (start)
13782 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13783 }
13784 }
13785 loc_descr_plus_const (&ret, constant);
13786 return ret;
13787 }
13788
13789 /* Pick the smallest representation of a constant, rather than just
13790 defaulting to the LEB encoding. */
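/* For instance, on a host with 64-bit HOST_WIDE_INT, i == 0x40000000
   (1 << 30) would cost 5 bytes as DW_OP_const4u; the shift-based case
   below emits DW_OP_lit16; DW_OP_lit26; DW_OP_shl (16 << 26 == 2**30)
   in only 3 bytes. */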
13791 if (i >= 0)
13792 {
13793 int clz = clz_hwi (i);
13794 int ctz = ctz_hwi (i);
13795 if (i <= 31)
13796 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13797 else if (i <= 0xff)
13798 op = DW_OP_const1u;
13799 else if (i <= 0xffff)
13800 op = DW_OP_const2u;
13801 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13802 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13803 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13804 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13805 while DW_OP_const4u is 5 bytes. */
13806 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13807 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13808 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13809 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13810 while DW_OP_const4u is 5 bytes. */
13811 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13812
13813 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13814 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13815 <= 4)
13816 {
13817 /* As i >= 2**31, the double cast above will yield a negative number.
13818 Since wrapping is defined in DWARF expressions we can output big
13819 positive integers as small negative ones, regardless of the size
13820 of host wide ints.
13821
13822 Here, since the evaluator will handle 32-bit values and since i >=
13823 2**31, we know it's going to be interpreted as a negative literal:
13824 store it this way if we can do better than 5 bytes this way. */
13825 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13826 }
13827 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13828 op = DW_OP_const4u;
13829
13830 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13831 least 6 bytes: see if we can do better before falling back to it. */
13832 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13833 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13834 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13835 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13836 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13837 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13838 >= HOST_BITS_PER_WIDE_INT)
13839 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13840 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13841 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13842 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13843 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13844 && size_of_uleb128 (i) > 6)
13845 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13846 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13847 else
13848 op = DW_OP_constu;
13849 }
13850 else
13851 {
13852 if (i >= -0x80)
13853 op = DW_OP_const1s;
13854 else if (i >= -0x8000)
13855 op = DW_OP_const2s;
13856 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13857 {
13858 if (size_of_int_loc_descriptor (i) < 5)
13859 {
13860 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13861 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13862 return ret;
13863 }
13864 op = DW_OP_const4s;
13865 }
13866 else
13867 {
13868 if (size_of_int_loc_descriptor (i)
13869 < (unsigned long) 1 + size_of_sleb128 (i))
13870 {
13871 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13872 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13873 return ret;
13874 }
13875 op = DW_OP_consts;
13876 }
13877 }
13878
13879 return new_loc_descr (op, i, 0);
13880 }
13881
13882 /* Likewise, for unsigned constants. */
13883
13884 static dw_loc_descr_ref
13885 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13886 {
13887 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13888 const unsigned HOST_WIDE_INT max_uint
13889 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13890
13891 /* If possible, use the clever signed constants handling. */
13892 if (i <= max_int)
13893 return int_loc_descriptor ((HOST_WIDE_INT) i);
13894
13895 /* Here, we are left with positive numbers that cannot be represented as
13896 HOST_WIDE_INT, i.e.:
13897 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13898
13899 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
13900 bytes, whereas it may be better to output a negative integer: thanks to
13901 integer wrapping, we know that:
13902 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13903 = x - 2 * (max (HOST_WIDE_INT) + 1)
13904 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13905 small negative integers. Let's try that in cases where it will clearly
13906 improve the encoding: there is no gain turning DW_OP_const4u into
13907 DW_OP_const4s. */
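/* For example, with DWARF2_ADDR_SIZE == 8 and a 64-bit HOST_WIDE_INT,
   i == 0xfffffffffffffff8 would take 9 bytes as DW_OP_const8u; the
   shifts below reduce it to int_loc_descriptor (-8), i.e. the 2-byte
   DW_OP_const1s -8, which a DWARF evaluator wraps back to the intended
   value. */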
13908 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13909 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13910 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13911 {
13912 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13913
13914 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
13915 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
13916 const HOST_WIDE_INT second_shift
13917 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
13918
13919 /* So we finally have:
13920 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
13921 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
13922 return int_loc_descriptor (second_shift);
13923 }
13924
13925 /* Last chance: fallback to a simple constant operation. */
13926 return new_loc_descr
13927 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13928 ? DW_OP_const4u
13929 : DW_OP_const8u,
13930 i, 0);
13931 }
13932
13933 /* Generate and return a location description that computes the unsigned
13934 comparison of the two stack top entries (a OP b where b is the top-most
13935 entry and a is the second one). The KIND of comparison can be LT_EXPR,
13936 LE_EXPR, GT_EXPR or GE_EXPR. */
13937
13938 static dw_loc_descr_ref
13939 uint_comparison_loc_list (enum tree_code kind)
13940 {
13941 enum dwarf_location_atom op, flip_op;
13942 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
13943
13944 switch (kind)
13945 {
13946 case LT_EXPR:
13947 op = DW_OP_lt;
13948 break;
13949 case LE_EXPR:
13950 op = DW_OP_le;
13951 break;
13952 case GT_EXPR:
13953 op = DW_OP_gt;
13954 break;
13955 case GE_EXPR:
13956 op = DW_OP_ge;
13957 break;
13958 default:
13959 gcc_unreachable ();
13960 }
13961
13962 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
13963 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
13964
13965 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
13966 possible to perform unsigned comparisons: we just have to distinguish
13967 three cases:
13968
13969 1. when a and b have the same sign (as signed integers); then we should
13970 return: a OP(signed) b;
13971
13972 2. when a is a negative signed integer while b is a positive one, then a
13973 is a greater unsigned integer than b; likewise when a and b's roles
13974 are flipped.
13975
13976 So first, compare the sign of the two operands. */
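/* Sanity check of the flip (for LT_EXPR): with a == 0xffffffff and
   b == 1 as 32-bit operands, the signs differ, so the flipped DW_OP_gt
   is used; as signed values that is -1 > 1, i.e. false, which matches
   the unsigned truth of 0xffffffff < 1. */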
13977 ret = new_loc_descr (DW_OP_over, 0, 0);
13978 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
13979 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
13980 /* If they have different signs (i.e. they have different sign bits), then
13981 the stack top value has now the sign bit set and thus it's smaller than
13982 zero. */
13983 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
13984 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
13985 add_loc_descr (&ret, bra_node);
13986
13987 /* We are in case 1. At this point, we know both operands have the same
13988 sign, so it's safe to use the built-in signed comparison. */
13989 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
13990 add_loc_descr (&ret, jmp_node);
13991
13992 /* We are in case 2. Here, we know both operands do not have the same sign,
13993 so we have to flip the signed comparison. */
13994 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
13995 tmp = new_loc_descr (flip_op, 0, 0);
13996 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
13997 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
13998 add_loc_descr (&ret, tmp);
13999
14000 /* This dummy operation is necessary to make the two branches join. */
14001 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14002 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14003 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14004 add_loc_descr (&ret, tmp);
14005
14006 return ret;
14007 }
14008
14009 /* Likewise, but takes the location description lists (might be destructive on
14010 them). Return NULL if either is NULL or if concatenation fails. */
14011
14012 static dw_loc_list_ref
14013 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14014 enum tree_code kind)
14015 {
14016 if (left == NULL || right == NULL)
14017 return NULL;
14018
14019 add_loc_list (&left, right);
14020 if (left == NULL)
14021 return NULL;
14022
14023 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14024 return left;
14025 }
14026
14027 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14028 without actually allocating it. */
14029
14030 static unsigned long
14031 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14032 {
14033 return size_of_int_loc_descriptor (i >> shift)
14034 + size_of_int_loc_descriptor (shift)
14035 + 1;
14036 }
14037
14038 /* Return size_of_locs (int_loc_descriptor (i)) without
14039 actually allocating it. */
14040
14041 static unsigned long
14042 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14043 {
14044 unsigned long s;
14045
14046 if (i >= 0)
14047 {
14048 int clz, ctz;
14049 if (i <= 31)
14050 return 1;
14051 else if (i <= 0xff)
14052 return 2;
14053 else if (i <= 0xffff)
14054 return 3;
14055 clz = clz_hwi (i);
14056 ctz = ctz_hwi (i);
14057 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14058 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14059 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14060 - clz - 5);
14061 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14062 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14063 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14064 - clz - 8);
14065 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14066 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14067 <= 4)
14068 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14069 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14070 return 5;
14071 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14072 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14073 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14074 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14075 - clz - 8);
14076 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14077 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14078 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14079 - clz - 16);
14080 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14081 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14082 && s > 6)
14083 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14084 - clz - 32);
14085 else
14086 return 1 + s;
14087 }
14088 else
14089 {
14090 if (i >= -0x80)
14091 return 2;
14092 else if (i >= -0x8000)
14093 return 3;
14094 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14095 {
14096 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14097 {
14098 s = size_of_int_loc_descriptor (-i) + 1;
14099 if (s < 5)
14100 return s;
14101 }
14102 return 5;
14103 }
14104 else
14105 {
14106 unsigned long r = 1 + size_of_sleb128 (i);
14107 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14108 {
14109 s = size_of_int_loc_descriptor (-i) + 1;
14110 if (s < r)
14111 return s;
14112 }
14113 return r;
14114 }
14115 }
14116 }
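/* E.g. 25 is sized as DW_OP_lit25 (1 byte), 200 as DW_OP_const1u (2 bytes),
70000 as DW_OP_const4u (5 bytes) and -3 as DW_OP_const1s (2 bytes), matching
what int_loc_descriptor emits for those values. */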
14117
14118 /* Return a location description representing the "address" of an integer
14119 value. This can appear only as a top-level expression. */
14120
14121 static dw_loc_descr_ref
14122 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14123 {
14124 int litsize;
14125 dw_loc_descr_ref loc_result = NULL;
14126
14127 if (!(dwarf_version >= 4 || !dwarf_strict))
14128 return NULL;
14129
14130 litsize = size_of_int_loc_descriptor (i);
14131 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14132 is more compact. For DW_OP_stack_value we need:
14133 litsize + 1 (DW_OP_stack_value)
14134 and for DW_OP_implicit_value:
14135 1 (DW_OP_implicit_value) + 1 (length) + size. */
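/* E.g. on a target with DWARF2_ADDR_SIZE 8, SIZE 8 and I 5 that is 2 bytes
(DW_OP_lit5 DW_OP_stack_value) versus 10 bytes (DW_OP_implicit_value with an
8-byte blob), so the DW_OP_stack_value form is chosen. */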
14136 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14137 {
14138 loc_result = int_loc_descriptor (i);
14139 add_loc_descr (&loc_result,
14140 new_loc_descr (DW_OP_stack_value, 0, 0));
14141 return loc_result;
14142 }
14143
14144 loc_result = new_loc_descr (DW_OP_implicit_value,
14145 size, 0);
14146 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14147 loc_result->dw_loc_oprnd2.v.val_int = i;
14148 return loc_result;
14149 }
14150
14151 /* Return a location descriptor that designates a base+offset location. */
14152
14153 static dw_loc_descr_ref
14154 based_loc_descr (rtx reg, poly_int64 offset,
14155 enum var_init_status initialized)
14156 {
14157 unsigned int regno;
14158 dw_loc_descr_ref result;
14159 dw_fde_ref fde = cfun->fde;
14160
14161 /* We only use "frame base" when we're sure we're talking about the
14162 post-prologue local stack frame. We do this by *not* running
14163 register elimination until this point, and recognizing the special
14164 argument pointer and soft frame pointer rtx's. */
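/* For example, a variable at a constant offset of -24 from the frame base
then simply becomes DW_OP_fbreg -24 below. */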
14165 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14166 {
14167 rtx elim = (ira_use_lra_p
14168 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14169 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14170
14171 if (elim != reg)
14172 {
14173 elim = strip_offset_and_add (elim, &offset);
14174 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14175 && (elim == hard_frame_pointer_rtx
14176 || elim == stack_pointer_rtx))
14177 || elim == (frame_pointer_needed
14178 ? hard_frame_pointer_rtx
14179 : stack_pointer_rtx));
14180
14181 /* If drap register is used to align stack, use frame
14182 pointer + offset to access stack variables. If stack
14183 is aligned without drap, use stack pointer + offset to
14184 access stack variables. */
14185 if (crtl->stack_realign_tried
14186 && reg == frame_pointer_rtx)
14187 {
14188 int base_reg
14189 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14190 ? HARD_FRAME_POINTER_REGNUM
14191 : REGNO (elim));
14192 return new_reg_loc_descr (base_reg, offset);
14193 }
14194
14195 gcc_assert (frame_pointer_fb_offset_valid);
14196 offset += frame_pointer_fb_offset;
14197 HOST_WIDE_INT const_offset;
14198 if (offset.is_constant (&const_offset))
14199 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14200 else
14201 {
14202 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14203 loc_descr_plus_const (&ret, offset);
14204 return ret;
14205 }
14206 }
14207 }
14208
14209 regno = REGNO (reg);
14210 #ifdef LEAF_REG_REMAP
14211 if (crtl->uses_only_leaf_regs)
14212 {
14213 int leaf_reg = LEAF_REG_REMAP (regno);
14214 if (leaf_reg != -1)
14215 regno = (unsigned) leaf_reg;
14216 }
14217 #endif
14218 regno = DWARF_FRAME_REGNUM (regno);
14219
14220 HOST_WIDE_INT const_offset;
14221 if (!optimize && fde
14222 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14223 && offset.is_constant (&const_offset))
14224 {
14225 /* Use cfa+offset to represent the location of arguments passed
14226 on the stack when the drap register is used to align the stack.
14227 Only do this when not optimizing: for optimized code var-tracking
14228 is supposed to track where the arguments live, and the register
14229 used as vdrap or drap in some spot might be used for something
14230 else in other parts of the routine. */
14231 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14232 }
14233
14234 result = new_reg_loc_descr (regno, offset);
14235
14236 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14237 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14238
14239 return result;
14240 }
14241
14242 /* Return true if this RTL expression describes a base+offset calculation. */
14243
14244 static inline int
14245 is_based_loc (const_rtx rtl)
14246 {
14247 return (GET_CODE (rtl) == PLUS
14248 && ((REG_P (XEXP (rtl, 0))
14249 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14250 && CONST_INT_P (XEXP (rtl, 1)))));
14251 }
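/* For example, (plus (reg fp) (const_int -8)) is a based location when the
register is a hard register, while a PLUS involving a pseudo register or a
non-constant offset is not. */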
14252
14253 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14254 failed. */
14255
14256 static dw_loc_descr_ref
14257 tls_mem_loc_descriptor (rtx mem)
14258 {
14259 tree base;
14260 dw_loc_descr_ref loc_result;
14261
14262 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14263 return NULL;
14264
14265 base = get_base_address (MEM_EXPR (mem));
14266 if (base == NULL
14267 || !VAR_P (base)
14268 || !DECL_THREAD_LOCAL_P (base))
14269 return NULL;
14270
14271 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14272 if (loc_result == NULL)
14273 return NULL;
14274
14275 if (maybe_ne (MEM_OFFSET (mem), 0))
14276 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14277
14278 return loc_result;
14279 }
14280
14281 /* Output debug info about the reason why we failed to expand an expression
14282 as a dwarf expression. */
14283
14284 static void
14285 expansion_failed (tree expr, rtx rtl, char const *reason)
14286 {
14287 if (dump_file && (dump_flags & TDF_DETAILS))
14288 {
14289 fprintf (dump_file, "Failed to expand as dwarf: ");
14290 if (expr)
14291 print_generic_expr (dump_file, expr, dump_flags);
14292 if (rtl)
14293 {
14294 fprintf (dump_file, "\n");
14295 print_rtl (dump_file, rtl);
14296 }
14297 fprintf (dump_file, "\nReason: %s\n", reason);
14298 }
14299 }
14300
14301 /* Helper function for const_ok_for_output. */
14302
14303 static bool
14304 const_ok_for_output_1 (rtx rtl)
14305 {
14306 if (targetm.const_not_ok_for_debug_p (rtl))
14307 {
14308 if (GET_CODE (rtl) != UNSPEC)
14309 {
14310 expansion_failed (NULL_TREE, rtl,
14311 "Expression rejected for debug by the backend.\n");
14312 return false;
14313 }
14314
14315 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14316 the target hook doesn't explicitly allow it in debug info, assume
14317 we can't express it in the debug info. */
14318 /* Don't complain about TLS UNSPECs, those are just too hard to
14319 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14320 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14321 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14322 if (flag_checking
14323 && (XVECLEN (rtl, 0) == 0
14324 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14325 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14326 inform (current_function_decl
14327 ? DECL_SOURCE_LOCATION (current_function_decl)
14328 : UNKNOWN_LOCATION,
14329 #if NUM_UNSPEC_VALUES > 0
14330 "non-delegitimized UNSPEC %s (%d) found in variable location",
14331 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14332 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14333 XINT (rtl, 1));
14334 #else
14335 "non-delegitimized UNSPEC %d found in variable location",
14336 XINT (rtl, 1));
14337 #endif
14338 expansion_failed (NULL_TREE, rtl,
14339 "UNSPEC hasn't been delegitimized.\n");
14340 return false;
14341 }
14342
14343 if (CONST_POLY_INT_P (rtl))
14344 return false;
14345
14346 if (targetm.const_not_ok_for_debug_p (rtl))
14347 {
14348 expansion_failed (NULL_TREE, rtl,
14349 "Expression rejected for debug by the backend.\n");
14350 return false;
14351 }
14352
14353 /* FIXME: Refer to PR60655. It is possible for simplification
14354 of rtl expressions in var tracking to produce such expressions.
14355 We should really identify / validate expressions
14356 enclosed in CONST that can be handled by assemblers on various
14357 targets and only handle legitimate cases here. */
14358 switch (GET_CODE (rtl))
14359 {
14360 case SYMBOL_REF:
14361 break;
14362 case NOT:
14363 case NEG:
14364 return false;
14365 default:
14366 return true;
14367 }
14368
14369 if (CONSTANT_POOL_ADDRESS_P (rtl))
14370 {
14371 bool marked;
14372 get_pool_constant_mark (rtl, &marked);
14373 /* If all references to this pool constant were optimized away,
14374 it was not output and thus we can't represent it. */
14375 if (!marked)
14376 {
14377 expansion_failed (NULL_TREE, rtl,
14378 "Constant was removed from constant pool.\n");
14379 return false;
14380 }
14381 }
14382
14383 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14384 return false;
14385
14386 /* Avoid references to external symbols in debug info: on several targets
14387 the linker might even refuse to link when linking a shared library, and
14388 in many other cases the relocations for .debug_info/.debug_loc are dropped,
14389 so the address becomes zero anyway. Hidden symbols, which are guaranteed
14390 to be defined within the same shared library or executable, are fine. */
14391 if (SYMBOL_REF_EXTERNAL_P (rtl))
14392 {
14393 tree decl = SYMBOL_REF_DECL (rtl);
14394
14395 if (decl == NULL || !targetm.binds_local_p (decl))
14396 {
14397 expansion_failed (NULL_TREE, rtl,
14398 "Symbol not defined in current TU.\n");
14399 return false;
14400 }
14401 }
14402
14403 return true;
14404 }
14405
14406 /* Return true if constant RTL can be emitted in DW_OP_addr or
14407 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14408 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
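/* For example, the address of a function-local static variable can be used,
while a TLS symbol, a symbol that binds outside the current TU, or a constant
pool entry whose references were all optimized away cannot. */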
14409
14410 static bool
14411 const_ok_for_output (rtx rtl)
14412 {
14413 if (GET_CODE (rtl) == SYMBOL_REF)
14414 return const_ok_for_output_1 (rtl);
14415
14416 if (GET_CODE (rtl) == CONST)
14417 {
14418 subrtx_var_iterator::array_type array;
14419 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14420 if (!const_ok_for_output_1 (*iter))
14421 return false;
14422 return true;
14423 }
14424
14425 return true;
14426 }
14427
14428 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14429 if possible, NULL otherwise. */
14430
14431 static dw_die_ref
14432 base_type_for_mode (machine_mode mode, bool unsignedp)
14433 {
14434 dw_die_ref type_die;
14435 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14436
14437 if (type == NULL)
14438 return NULL;
14439 switch (TREE_CODE (type))
14440 {
14441 case INTEGER_TYPE:
14442 case REAL_TYPE:
14443 break;
14444 default:
14445 return NULL;
14446 }
14447 type_die = lookup_type_die (type);
14448 if (!type_die)
14449 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14450 comp_unit_die ());
14451 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14452 return NULL;
14453 return type_die;
14454 }
14455
14456 /* For OP, a descriptor assumed to be in unsigned MODE, convert it to an
14457 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14458 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14459 possible. */
14460
14461 static dw_loc_descr_ref
14462 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14463 {
14464 machine_mode outer_mode = mode;
14465 dw_die_ref type_die;
14466 dw_loc_descr_ref cvt;
14467
14468 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14469 {
14470 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14471 return op;
14472 }
14473 type_die = base_type_for_mode (outer_mode, 1);
14474 if (type_die == NULL)
14475 return NULL;
14476 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14477 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14478 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14479 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14480 add_loc_descr (&op, cvt);
14481 return op;
14482 }
14483
14484 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14485
14486 static dw_loc_descr_ref
14487 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14488 dw_loc_descr_ref op1)
14489 {
14490 dw_loc_descr_ref ret = op0;
14491 add_loc_descr (&ret, op1);
14492 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
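/* DW_OP_lt and friends push 1 for true and 0 for false; if the target's
comparison patterns instead yield STORE_FLAG_VALUE (e.g. -1) for true,
scale the result accordingly. */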
14493 if (STORE_FLAG_VALUE != 1)
14494 {
14495 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14496 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14497 }
14498 return ret;
14499 }
14500
14501 /* Subroutine of scompare_loc_descriptor for the case in which we're
14502 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14503 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14504
14505 static dw_loc_descr_ref
14506 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14507 scalar_int_mode op_mode,
14508 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14509 {
14510 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14511 dw_loc_descr_ref cvt;
14512
14513 if (type_die == NULL)
14514 return NULL;
14515 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14516 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14517 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14518 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14519 add_loc_descr (&op0, cvt);
14520 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14521 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14522 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14523 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14524 add_loc_descr (&op1, cvt);
14525 return compare_loc_descriptor (op, op0, op1);
14526 }
14527
14528 /* Subroutine of scompare_loc_descriptor for the case in which we're
14529 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14530 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14531
14532 static dw_loc_descr_ref
14533 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14534 scalar_int_mode op_mode,
14535 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14536 {
14537 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14538 /* For eq/ne, if the operands are known to be zero-extended,
14539 there is no need to do the fancy shifting up. */
14540 if (op == DW_OP_eq || op == DW_OP_ne)
14541 {
14542 dw_loc_descr_ref last0, last1;
14543 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14544 ;
14545 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14546 ;
14547 /* deref_size zero extends, and for constants we can check
14548 whether they are zero extended or not. */
14549 if (((last0->dw_loc_opc == DW_OP_deref_size
14550 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14551 || (CONST_INT_P (XEXP (rtl, 0))
14552 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14553 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14554 && ((last1->dw_loc_opc == DW_OP_deref_size
14555 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14556 || (CONST_INT_P (XEXP (rtl, 1))
14557 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14558 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14559 return compare_loc_descriptor (op, op0, op1);
14560
14561 /* EQ/NE comparison against constant in narrower type than
14562 DWARF2_ADDR_SIZE can be performed either as
14563 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14564 DW_OP_{eq,ne}
14565 or
14566 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14567 DW_OP_{eq,ne}. Pick whatever is shorter. */
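/* E.g. comparing a QImode value against 5 with DWARF2_ADDR_SIZE 8 costs
7 bytes in the shift form but only 4 in the mask form (DW_OP_const1u 0xff
DW_OP_and DW_OP_lit5), so the mask form is chosen here. */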
14568 if (CONST_INT_P (XEXP (rtl, 1))
14569 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14570 && (size_of_int_loc_descriptor (shift) + 1
14571 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14572 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14573 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14574 & GET_MODE_MASK (op_mode))))
14575 {
14576 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14577 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14578 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14579 & GET_MODE_MASK (op_mode));
14580 return compare_loc_descriptor (op, op0, op1);
14581 }
14582 }
14583 add_loc_descr (&op0, int_loc_descriptor (shift));
14584 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14585 if (CONST_INT_P (XEXP (rtl, 1)))
14586 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14587 else
14588 {
14589 add_loc_descr (&op1, int_loc_descriptor (shift));
14590 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14591 }
14592 return compare_loc_descriptor (op, op0, op1);
14593 }
14594
14595 /* Return location descriptor for signed comparison OP RTL. */
14596
14597 static dw_loc_descr_ref
14598 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14599 machine_mode mem_mode)
14600 {
14601 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14602 dw_loc_descr_ref op0, op1;
14603
14604 if (op_mode == VOIDmode)
14605 op_mode = GET_MODE (XEXP (rtl, 1));
14606 if (op_mode == VOIDmode)
14607 return NULL;
14608
14609 scalar_int_mode int_op_mode;
14610 if (dwarf_strict
14611 && dwarf_version < 5
14612 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14613 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14614 return NULL;
14615
14616 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14617 VAR_INIT_STATUS_INITIALIZED);
14618 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14619 VAR_INIT_STATUS_INITIALIZED);
14620
14621 if (op0 == NULL || op1 == NULL)
14622 return NULL;
14623
14624 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14625 {
14626 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14627 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14628
14629 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14630 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14631 }
14632 return compare_loc_descriptor (op, op0, op1);
14633 }
14634
14635 /* Return location descriptor for unsigned comparison OP RTL. */
14636
14637 static dw_loc_descr_ref
14638 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14639 machine_mode mem_mode)
14640 {
14641 dw_loc_descr_ref op0, op1;
14642
14643 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14644 if (test_op_mode == VOIDmode)
14645 test_op_mode = GET_MODE (XEXP (rtl, 1));
14646
14647 scalar_int_mode op_mode;
14648 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14649 return NULL;
14650
14651 if (dwarf_strict
14652 && dwarf_version < 5
14653 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14654 return NULL;
14655
14656 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14657 VAR_INIT_STATUS_INITIALIZED);
14658 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14659 VAR_INIT_STATUS_INITIALIZED);
14660
14661 if (op0 == NULL || op1 == NULL)
14662 return NULL;
14663
14664 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14665 {
14666 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14667 dw_loc_descr_ref last0, last1;
14668 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14669 ;
14670 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14671 ;
14672 if (CONST_INT_P (XEXP (rtl, 0)))
14673 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14674 /* deref_size zero extends, so no need to mask it again. */
14675 else if (last0->dw_loc_opc != DW_OP_deref_size
14676 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14677 {
14678 add_loc_descr (&op0, int_loc_descriptor (mask));
14679 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14680 }
14681 if (CONST_INT_P (XEXP (rtl, 1)))
14682 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14683 /* deref_size zero extends, so no need to mask it again. */
14684 else if (last1->dw_loc_opc != DW_OP_deref_size
14685 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14686 {
14687 add_loc_descr (&op1, int_loc_descriptor (mask));
14688 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14689 }
14690 }
14691 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14692 {
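/* DW_OP_lt and friends compare as signed values, so for operands exactly
DWARF2_ADDR_SIZE wide flip the sign bit of both sides by adding 2**(bits-1)
(which wraps); the signed order of the biased values then matches the
unsigned order of the originals. */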
14693 HOST_WIDE_INT bias = 1;
14694 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14695 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14696 if (CONST_INT_P (XEXP (rtl, 1)))
14697 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14698 + INTVAL (XEXP (rtl, 1)));
14699 else
14700 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14701 bias, 0));
14702 }
14703 return compare_loc_descriptor (op, op0, op1);
14704 }
14705
14706 /* Return location descriptor for {U,S}{MIN,MAX}. */
14707
14708 static dw_loc_descr_ref
14709 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14710 machine_mode mem_mode)
14711 {
14712 enum dwarf_location_atom op;
14713 dw_loc_descr_ref op0, op1, ret;
14714 dw_loc_descr_ref bra_node, drop_node;
14715
14716 scalar_int_mode int_mode;
14717 if (dwarf_strict
14718 && dwarf_version < 5
14719 && (!is_a <scalar_int_mode> (mode, &int_mode)
14720 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14721 return NULL;
14722
14723 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14724 VAR_INIT_STATUS_INITIALIZED);
14725 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14726 VAR_INIT_STATUS_INITIALIZED);
14727
14728 if (op0 == NULL || op1 == NULL)
14729 return NULL;
14730
14731 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14732 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14733 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14734 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14735 {
14736 /* Checked by the caller. */
14737 int_mode = as_a <scalar_int_mode> (mode);
14738 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14739 {
14740 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14741 add_loc_descr (&op0, int_loc_descriptor (mask));
14742 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14743 add_loc_descr (&op1, int_loc_descriptor (mask));
14744 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14745 }
14746 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14747 {
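/* As in ucompare_loc_descriptor: bias both values so that the signed
DW_OP_lt/DW_OP_gt below implements an unsigned comparison. */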
14748 HOST_WIDE_INT bias = 1;
14749 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14750 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14751 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14752 }
14753 }
14754 else if (is_a <scalar_int_mode> (mode, &int_mode)
14755 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14756 {
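/* For narrower signed operands, shift both values up so that their sign
bits land in the sign bit of the address-sized stack slot; the signed
comparison below then orders them correctly. */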
14757 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14758 add_loc_descr (&op0, int_loc_descriptor (shift));
14759 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14760 add_loc_descr (&op1, int_loc_descriptor (shift));
14761 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14762 }
14763 else if (is_a <scalar_int_mode> (mode, &int_mode)
14764 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14765 {
14766 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14767 dw_loc_descr_ref cvt;
14768 if (type_die == NULL)
14769 return NULL;
14770 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14771 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14772 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14773 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14774 add_loc_descr (&op0, cvt);
14775 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14776 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14777 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14778 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14779 add_loc_descr (&op1, cvt);
14780 }
14781
14782 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14783 op = DW_OP_lt;
14784 else
14785 op = DW_OP_gt;
14786 ret = op0;
14787 add_loc_descr (&ret, op1);
14788 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14789 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14790 add_loc_descr (&ret, bra_node);
14791 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14792 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14793 add_loc_descr (&ret, drop_node);
14794 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14795 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14796 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14797 && is_a <scalar_int_mode> (mode, &int_mode)
14798 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14799 ret = convert_descriptor_to_mode (int_mode, ret);
14800 return ret;
14801 }
14802
14803 /* Helper function for mem_loc_descriptor. Perform the binary operation OP,
14804 but first convert the arguments to TYPE_DIE, and afterwards convert the
14805 result back to unsigned. */
14806
14807 static dw_loc_descr_ref
14808 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14809 scalar_int_mode mode, machine_mode mem_mode)
14810 {
14811 dw_loc_descr_ref cvt, op0, op1;
14812
14813 if (type_die == NULL)
14814 return NULL;
14815 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14816 VAR_INIT_STATUS_INITIALIZED);
14817 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14818 VAR_INIT_STATUS_INITIALIZED);
14819 if (op0 == NULL || op1 == NULL)
14820 return NULL;
14821 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14822 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14823 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14824 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14825 add_loc_descr (&op0, cvt);
14826 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14827 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14828 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14829 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14830 add_loc_descr (&op1, cvt);
14831 add_loc_descr (&op0, op1);
14832 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14833 return convert_descriptor_to_mode (mode, op0);
14834 }
14835
14836 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14837 const0 is DW_OP_lit0 or corresponding typed constant,
14838 const1 is DW_OP_lit1 or corresponding typed constant
14839 and constMSB is constant with just the MSB bit set
14840 for the mode):
14841 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14842 L1: const0 DW_OP_swap
14843 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14844 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14845 L3: DW_OP_drop
14846 L4: DW_OP_nop
14847
14848 CTZ is similar:
14849 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14850 L1: const0 DW_OP_swap
14851 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14852 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14853 L3: DW_OP_drop
14854 L4: DW_OP_nop
14855
14856 FFS is similar:
14857 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14858 L1: const1 DW_OP_swap
14859 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14860 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14861 L3: DW_OP_drop
14862 L4: DW_OP_nop */
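/* Roughly, the CLZ sequence above corresponds to this illustrative C:
if (x == 0) return constV;
count = 0;
while (!(x & MSB)) { x <<= 1; count++; }
return count;
CTZ tests the low bit and shifts right instead, and FFS additionally starts
the count at one and yields zero for a zero input. */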
14863
14864 static dw_loc_descr_ref
14865 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14866 machine_mode mem_mode)
14867 {
14868 dw_loc_descr_ref op0, ret, tmp;
14869 HOST_WIDE_INT valv;
14870 dw_loc_descr_ref l1jump, l1label;
14871 dw_loc_descr_ref l2jump, l2label;
14872 dw_loc_descr_ref l3jump, l3label;
14873 dw_loc_descr_ref l4jump, l4label;
14874 rtx msb;
14875
14876 if (GET_MODE (XEXP (rtl, 0)) != mode)
14877 return NULL;
14878
14879 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14880 VAR_INIT_STATUS_INITIALIZED);
14881 if (op0 == NULL)
14882 return NULL;
14883 ret = op0;
14884 if (GET_CODE (rtl) == CLZ)
14885 {
14886 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14887 valv = GET_MODE_BITSIZE (mode);
14888 }
14889 else if (GET_CODE (rtl) == FFS)
14890 valv = 0;
14891 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14892 valv = GET_MODE_BITSIZE (mode);
14893 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14894 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14895 add_loc_descr (&ret, l1jump);
14896 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14897 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14898 VAR_INIT_STATUS_INITIALIZED);
14899 if (tmp == NULL)
14900 return NULL;
14901 add_loc_descr (&ret, tmp);
14902 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14903 add_loc_descr (&ret, l4jump);
14904 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14905 ? const1_rtx : const0_rtx,
14906 mode, mem_mode,
14907 VAR_INIT_STATUS_INITIALIZED);
14908 if (l1label == NULL)
14909 return NULL;
14910 add_loc_descr (&ret, l1label);
14911 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14912 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14913 add_loc_descr (&ret, l2label);
14914 if (GET_CODE (rtl) != CLZ)
14915 msb = const1_rtx;
14916 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
14917 msb = GEN_INT (HOST_WIDE_INT_1U
14918 << (GET_MODE_BITSIZE (mode) - 1));
14919 else
14920 msb = immed_wide_int_const
14921 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
14922 GET_MODE_PRECISION (mode)), mode);
14923 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
14924 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
14925 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
14926 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
14927 else
14928 tmp = mem_loc_descriptor (msb, mode, mem_mode,
14929 VAR_INIT_STATUS_INITIALIZED);
14930 if (tmp == NULL)
14931 return NULL;
14932 add_loc_descr (&ret, tmp);
14933 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
14934 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
14935 add_loc_descr (&ret, l3jump);
14936 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
14937 VAR_INIT_STATUS_INITIALIZED);
14938 if (tmp == NULL)
14939 return NULL;
14940 add_loc_descr (&ret, tmp);
14941 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
14942 ? DW_OP_shl : DW_OP_shr, 0, 0));
14943 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14944 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
14945 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14946 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
14947 add_loc_descr (&ret, l2jump);
14948 l3label = new_loc_descr (DW_OP_drop, 0, 0);
14949 add_loc_descr (&ret, l3label);
14950 l4label = new_loc_descr (DW_OP_nop, 0, 0);
14951 add_loc_descr (&ret, l4label);
14952 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14953 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
14954 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14955 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
14956 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14957 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
14958 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
14959 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
14960 return ret;
14961 }
14962
14963 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
14964 const1 is DW_OP_lit1 or corresponding typed constant):
14965 const0 DW_OP_swap
14966 L1: DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1
14967 DW_OP_and DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14968 L2: DW_OP_drop
14969
14970 PARITY is similar:
14971 L1: DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1
14972 DW_OP_and DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
14973 L2: DW_OP_drop */
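/* In C terms (illustrative only), both loops amount to:
count = 0;
while (x != 0) { count OP= x & 1; x >>= 1; }
with OP being + for POPCOUNT and ^ for PARITY. */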
14974
14975 static dw_loc_descr_ref
14976 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
14977 machine_mode mem_mode)
14978 {
14979 dw_loc_descr_ref op0, ret, tmp;
14980 dw_loc_descr_ref l1jump, l1label;
14981 dw_loc_descr_ref l2jump, l2label;
14982
14983 if (GET_MODE (XEXP (rtl, 0)) != mode)
14984 return NULL;
14985
14986 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14987 VAR_INIT_STATUS_INITIALIZED);
14988 if (op0 == NULL)
14989 return NULL;
14990 ret = op0;
14991 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
14992 VAR_INIT_STATUS_INITIALIZED);
14993 if (tmp == NULL)
14994 return NULL;
14995 add_loc_descr (&ret, tmp);
14996 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14997 l1label = new_loc_descr (DW_OP_dup, 0, 0);
14998 add_loc_descr (&ret, l1label);
/* DW_OP_bra branches when the popped value is non-zero, so compare the
remaining value against zero first: the loop must exit (branch to the
final DW_OP_drop) only once nothing is left to count. */
tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
VAR_INIT_STATUS_INITIALIZED);
if (tmp == NULL)
return NULL;
add_loc_descr (&ret, tmp);
add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
14999 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15000 add_loc_descr (&ret, l2jump);
15001 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15002 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15003 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15004 VAR_INIT_STATUS_INITIALIZED);
15005 if (tmp == NULL)
15006 return NULL;
15007 add_loc_descr (&ret, tmp);
15008 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15009 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15010 ? DW_OP_plus : DW_OP_xor, 0, 0));
15011 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15012 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15013 VAR_INIT_STATUS_INITIALIZED);
15014 add_loc_descr (&ret, tmp);
15015 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15016 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15017 add_loc_descr (&ret, l1jump);
15018 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15019 add_loc_descr (&ret, l2label);
15020 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15021 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15022 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15023 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15024 return ret;
15025 }
15026
15027 /* BSWAP (constS is initial shift count, either 56 or 24):
15028 constS const0
15029 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15030 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15031 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15032 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15033 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15034
15035 static dw_loc_descr_ref
15036 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15037 machine_mode mem_mode)
15038 {
15039 dw_loc_descr_ref op0, ret, tmp;
15040 dw_loc_descr_ref l1jump, l1label;
15041 dw_loc_descr_ref l2jump, l2label;
15042
15043 if (BITS_PER_UNIT != 8
15044 || (GET_MODE_BITSIZE (mode) != 32
15045 && GET_MODE_BITSIZE (mode) != 64))
15046 return NULL;
15047
15048 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15049 VAR_INIT_STATUS_INITIALIZED);
15050 if (op0 == NULL)
15051 return NULL;
15052
15053 ret = op0;
15054 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15055 mode, mem_mode,
15056 VAR_INIT_STATUS_INITIALIZED);
15057 if (tmp == NULL)
15058 return NULL;
15059 add_loc_descr (&ret, tmp);
15060 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15061 VAR_INIT_STATUS_INITIALIZED);
15062 if (tmp == NULL)
15063 return NULL;
15064 add_loc_descr (&ret, tmp);
15065 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15066 add_loc_descr (&ret, l1label);
15067 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15068 mode, mem_mode,
15069 VAR_INIT_STATUS_INITIALIZED);
15070 add_loc_descr (&ret, tmp);
15071 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15072 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15073 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15074 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15075 VAR_INIT_STATUS_INITIALIZED);
15076 if (tmp == NULL)
15077 return NULL;
15078 add_loc_descr (&ret, tmp);
15079 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15080 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15081 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15082 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15083 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15084 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15085 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15086 VAR_INIT_STATUS_INITIALIZED);
15087 add_loc_descr (&ret, tmp);
15088 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15089 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15090 add_loc_descr (&ret, l2jump);
15091 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15092 VAR_INIT_STATUS_INITIALIZED);
15093 add_loc_descr (&ret, tmp);
15094 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15095 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15096 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15097 add_loc_descr (&ret, l1jump);
15098 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15099 add_loc_descr (&ret, l2label);
15100 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15101 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15102 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15103 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15104 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15105 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15106 return ret;
15107 }
15108
15109 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15110 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15111 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15112 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15113
15114 ROTATERT is similar:
15115 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15116 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15117 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
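/* I.e. the usual (x << n) | (x >> (BITSIZE - n)) identity (and the mirrored
one for ROTATERT), with the shifted values masked to the mode when the mode
is narrower than the DWARF address size. */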
15118
15119 static dw_loc_descr_ref
15120 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15121 machine_mode mem_mode)
15122 {
15123 rtx rtlop1 = XEXP (rtl, 1);
15124 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15125 int i;
15126
15127 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15128 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15129 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15130 VAR_INIT_STATUS_INITIALIZED);
15131 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15132 VAR_INIT_STATUS_INITIALIZED);
15133 if (op0 == NULL || op1 == NULL)
15134 return NULL;
15135 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15136 for (i = 0; i < 2; i++)
15137 {
15138 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15139 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15140 mode, mem_mode,
15141 VAR_INIT_STATUS_INITIALIZED);
15142 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15143 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15144 ? DW_OP_const4u
15145 : HOST_BITS_PER_WIDE_INT == 64
15146 ? DW_OP_const8u : DW_OP_constu,
15147 GET_MODE_MASK (mode), 0);
15148 else
15149 mask[i] = NULL;
15150 if (mask[i] == NULL)
15151 return NULL;
15152 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15153 }
15154 ret = op0;
15155 add_loc_descr (&ret, op1);
15156 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15157 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15158 if (GET_CODE (rtl) == ROTATERT)
15159 {
15160 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15161 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15162 GET_MODE_BITSIZE (mode), 0));
15163 }
15164 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15165 if (mask[0] != NULL)
15166 add_loc_descr (&ret, mask[0]);
15167 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15168 if (mask[1] != NULL)
15169 {
15170 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15171 add_loc_descr (&ret, mask[1]);
15172 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15173 }
15174 if (GET_CODE (rtl) == ROTATE)
15175 {
15176 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15177 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15178 GET_MODE_BITSIZE (mode), 0));
15179 }
15180 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15181 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15182 return ret;
15183 }
15184
15185 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15186 for DEBUG_PARAMETER_REF RTL. */
15187
15188 static dw_loc_descr_ref
15189 parameter_ref_descriptor (rtx rtl)
15190 {
15191 dw_loc_descr_ref ret;
15192 dw_die_ref ref;
15193
15194 if (dwarf_strict)
15195 return NULL;
15196 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15197 /* With LTO during LTRANS we get the late DIE that refers to the early
15198 DIE, thus we add another indirection here. This seems to confuse
15199 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15200 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15201 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15202 if (ref)
15203 {
15204 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15205 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15206 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15207 }
15208 else
15209 {
15210 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15211 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15212 }
15213 return ret;
15214 }
15215
15216 /* The following routine converts the RTL for a variable or parameter
15217 (resident in memory) into an equivalent Dwarf representation of a
15218 mechanism for getting the address of that same variable onto the top of a
15219 hypothetical "address evaluation" stack.
15220
15221 When creating memory location descriptors, we are effectively transforming
15222 the RTL for a memory-resident object into its Dwarf postfix expression
15223 equivalent. This routine recursively descends an RTL tree, turning
15224 it into Dwarf postfix code as it goes.
15225
15226 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15227
15228 MEM_MODE is the mode of the memory reference, needed to handle some
15229 autoincrement addressing modes.
15230
15231 Return 0 if we can't represent the location. */
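/* For example, an address such as (plus (reg fp) (const_int 16)) typically
becomes DW_OP_fbreg <n> via based_loc_descr below, and a MEM whose address
was expanded that way gets a DW_OP_deref appended. */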
15232
15233 dw_loc_descr_ref
15234 mem_loc_descriptor (rtx rtl, machine_mode mode,
15235 machine_mode mem_mode,
15236 enum var_init_status initialized)
15237 {
15238 dw_loc_descr_ref mem_loc_result = NULL;
15239 enum dwarf_location_atom op;
15240 dw_loc_descr_ref op0, op1;
15241 rtx inner = NULL_RTX;
15242 poly_int64 offset;
15243
15244 if (mode == VOIDmode)
15245 mode = GET_MODE (rtl);
15246
15247 /* Note that for a dynamically sized array, the location we will generate a
15248 description of here will be the lowest numbered location which is
15249 actually within the array. That's *not* necessarily the same as the
15250 zeroth element of the array. */
15251
15252 rtl = targetm.delegitimize_address (rtl);
15253
15254 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15255 return NULL;
15256
15257 scalar_int_mode int_mode, inner_mode, op1_mode;
15258 switch (GET_CODE (rtl))
15259 {
15260 case POST_INC:
15261 case POST_DEC:
15262 case POST_MODIFY:
15263 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15264
15265 case SUBREG:
15266 /* The case of a subreg may arise when we have a local (register)
15267 variable or a formal (register) parameter which doesn't quite fill
15268 up an entire register. For now, just assume that it is
15269 legitimate to make the Dwarf info refer to the whole register which
15270 contains the given subreg. */
15271 if (!subreg_lowpart_p (rtl))
15272 break;
15273 inner = SUBREG_REG (rtl);
15274 /* FALLTHRU */
15275 case TRUNCATE:
15276 if (inner == NULL_RTX)
15277 inner = XEXP (rtl, 0);
15278 if (is_a <scalar_int_mode> (mode, &int_mode)
15279 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15280 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15281 #ifdef POINTERS_EXTEND_UNSIGNED
15282 || (int_mode == Pmode && mem_mode != VOIDmode)
15283 #endif
15284 )
15285 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15286 {
15287 mem_loc_result = mem_loc_descriptor (inner,
15288 inner_mode,
15289 mem_mode, initialized);
15290 break;
15291 }
15292 if (dwarf_strict && dwarf_version < 5)
15293 break;
15294 if (is_a <scalar_int_mode> (mode, &int_mode)
15295 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15296 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15297 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15298 {
15299 dw_die_ref type_die;
15300 dw_loc_descr_ref cvt;
15301
15302 mem_loc_result = mem_loc_descriptor (inner,
15303 GET_MODE (inner),
15304 mem_mode, initialized);
15305 if (mem_loc_result == NULL)
15306 break;
15307 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15308 if (type_die == NULL)
15309 {
15310 mem_loc_result = NULL;
15311 break;
15312 }
15313 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15314 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15315 else
15316 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15317 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15318 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15319 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15320 add_loc_descr (&mem_loc_result, cvt);
15321 if (is_a <scalar_int_mode> (mode, &int_mode)
15322 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15323 {
15324 /* Convert it to untyped afterwards. */
15325 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15326 add_loc_descr (&mem_loc_result, cvt);
15327 }
15328 }
15329 break;
15330
15331 case REG:
15332 if (!is_a <scalar_int_mode> (mode, &int_mode)
15333 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15334 && rtl != arg_pointer_rtx
15335 && rtl != frame_pointer_rtx
15336 #ifdef POINTERS_EXTEND_UNSIGNED
15337 && (int_mode != Pmode || mem_mode == VOIDmode)
15338 #endif
15339 ))
15340 {
15341 dw_die_ref type_die;
15342 unsigned int dbx_regnum;
15343
15344 if (dwarf_strict && dwarf_version < 5)
15345 break;
15346 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15347 break;
15348 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15349 if (type_die == NULL)
15350 break;
15351
15352 dbx_regnum = dbx_reg_number (rtl);
15353 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15354 break;
15355 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15356 dbx_regnum, 0);
15357 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15358 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15359 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15360 break;
15361 }
15362 /* Whenever a register number forms a part of the description of the
15363 method for calculating the (dynamic) address of a memory resident
15364 object, DWARF rules require the register number be referred to as
15365 a "base register". This distinction is not based in any way upon
15366 what category of register the hardware believes the given register
15367 belongs to. This is strictly DWARF terminology we're dealing with
15368 here. Note that in cases where the location of a memory-resident
15369 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15370 OP_CONST (0)) the actual DWARF location descriptor that we generate
15371 may just be OP_BASEREG (basereg). This may look deceptively like
15372 the object in question was allocated to a register (rather than in
15373 memory) so DWARF consumers need to be aware of the subtle
15374 distinction between OP_REG and OP_BASEREG. */
15375 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15376 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15377 else if (stack_realign_drap
15378 && crtl->drap_reg
15379 && crtl->args.internal_arg_pointer == rtl
15380 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15381 {
15382 /* If RTL is internal_arg_pointer, which has been optimized
15383 out, use DRAP instead. */
15384 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15385 VAR_INIT_STATUS_INITIALIZED);
15386 }
15387 break;
15388
15389 case SIGN_EXTEND:
15390 case ZERO_EXTEND:
15391 if (!is_a <scalar_int_mode> (mode, &int_mode)
15392 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15393 break;
15394 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15395 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15396 if (op0 == 0)
15397 break;
15398 else if (GET_CODE (rtl) == ZERO_EXTEND
15399 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15400 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15401 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15402 to expand zero extend as two shifts instead of
15403 masking. */
15404 && GET_MODE_SIZE (inner_mode) <= 4)
15405 {
15406 mem_loc_result = op0;
15407 add_loc_descr (&mem_loc_result,
15408 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15409 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15410 }
15411 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15412 {
15413 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15414 shift *= BITS_PER_UNIT;
15415 if (GET_CODE (rtl) == SIGN_EXTEND)
15416 op = DW_OP_shra;
15417 else
15418 op = DW_OP_shr;
15419 mem_loc_result = op0;
15420 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15421 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15422 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15423 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15424 }
15425 else if (!dwarf_strict || dwarf_version >= 5)
15426 {
15427 dw_die_ref type_die1, type_die2;
15428 dw_loc_descr_ref cvt;
15429
15430 type_die1 = base_type_for_mode (inner_mode,
15431 GET_CODE (rtl) == ZERO_EXTEND);
15432 if (type_die1 == NULL)
15433 break;
15434 type_die2 = base_type_for_mode (int_mode, 1);
15435 if (type_die2 == NULL)
15436 break;
15437 mem_loc_result = op0;
15438 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15439 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15440 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15441 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15442 add_loc_descr (&mem_loc_result, cvt);
15443 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15444 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15445 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15446 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15447 add_loc_descr (&mem_loc_result, cvt);
15448 }
15449 break;
15450
15451 case MEM:
15452 {
15453 rtx new_rtl = avoid_constant_pool_reference (rtl);
15454 if (new_rtl != rtl)
15455 {
15456 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15457 initialized);
15458 if (mem_loc_result != NULL)
15459 return mem_loc_result;
15460 }
15461 }
15462 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15463 get_address_mode (rtl), mode,
15464 VAR_INIT_STATUS_INITIALIZED);
15465 if (mem_loc_result == NULL)
15466 mem_loc_result = tls_mem_loc_descriptor (rtl);
15467 if (mem_loc_result != NULL)
15468 {
15469 if (!is_a <scalar_int_mode> (mode, &int_mode)
15470 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15471 {
15472 dw_die_ref type_die;
15473 dw_loc_descr_ref deref;
15474 HOST_WIDE_INT size;
15475
15476 if (dwarf_strict && dwarf_version < 5)
15477 return NULL;
15478 if (!GET_MODE_SIZE (mode).is_constant (&size))
15479 return NULL;
15480 type_die
15481 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15482 if (type_die == NULL)
15483 return NULL;
15484 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15485 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15486 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15487 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15488 add_loc_descr (&mem_loc_result, deref);
15489 }
15490 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15491 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15492 else
15493 add_loc_descr (&mem_loc_result,
15494 new_loc_descr (DW_OP_deref_size,
15495 GET_MODE_SIZE (int_mode), 0));
15496 }
15497 break;
15498
15499 case LO_SUM:
15500 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15501
15502 case LABEL_REF:
15503 /* Some ports can transform a symbol ref into a label ref, because
15504 the symbol ref is too far away and has to be dumped into a constant
15505 pool. */
15506 case CONST:
15507 case SYMBOL_REF:
15508 if (!is_a <scalar_int_mode> (mode, &int_mode)
15509 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15510 #ifdef POINTERS_EXTEND_UNSIGNED
15511 && (int_mode != Pmode || mem_mode == VOIDmode)
15512 #endif
15513 ))
15514 break;
15515 if (GET_CODE (rtl) == SYMBOL_REF
15516 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15517 {
15518 dw_loc_descr_ref temp;
15519
15520 /* If this is not defined, we have no way to emit the data. */
15521 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15522 break;
15523
15524 temp = new_addr_loc_descr (rtl, dtprel_true);
15525
15526 /* We check for DWARF 5 here because gdb did not implement
15527 DW_OP_form_tls_address until after 7.12. */
15528 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15529 ? DW_OP_form_tls_address
15530 : DW_OP_GNU_push_tls_address),
15531 0, 0);
15532 add_loc_descr (&mem_loc_result, temp);
15533
15534 break;
15535 }
15536
15537 if (!const_ok_for_output (rtl))
15538 {
15539 if (GET_CODE (rtl) == CONST)
15540 switch (GET_CODE (XEXP (rtl, 0)))
15541 {
15542 case NOT:
15543 op = DW_OP_not;
15544 goto try_const_unop;
15545 case NEG:
15546 op = DW_OP_neg;
15547 goto try_const_unop;
15548 try_const_unop:
15549 rtx arg;
15550 arg = XEXP (XEXP (rtl, 0), 0);
15551 if (!CONSTANT_P (arg))
15552 arg = gen_rtx_CONST (int_mode, arg);
15553 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15554 initialized);
15555 if (op0)
15556 {
15557 mem_loc_result = op0;
15558 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15559 }
15560 break;
15561 default:
15562 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15563 mem_mode, initialized);
15564 break;
15565 }
15566 break;
15567 }
15568
15569 symref:
15570 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15571 vec_safe_push (used_rtx_array, rtl);
15572 break;
15573
15574 case CONCAT:
15575 case CONCATN:
15576 case VAR_LOCATION:
15577 case DEBUG_IMPLICIT_PTR:
15578 expansion_failed (NULL_TREE, rtl,
15579 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15580 return 0;
15581
15582 case ENTRY_VALUE:
15583 if (dwarf_strict && dwarf_version < 5)
15584 return NULL;
15585 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15586 {
15587 if (!is_a <scalar_int_mode> (mode, &int_mode)
15588 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15589 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15590 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15591 else
15592 {
15593 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15594 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15595 return NULL;
15596 op0 = one_reg_loc_descriptor (dbx_regnum,
15597 VAR_INIT_STATUS_INITIALIZED);
15598 }
15599 }
15600 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15601 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15602 {
15603 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15604 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15605 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15606 return NULL;
15607 }
15608 else
15609 gcc_unreachable ();
15610 if (op0 == NULL)
15611 return NULL;
15612 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15613 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15614 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15615 break;
15616
15617 case DEBUG_PARAMETER_REF:
15618 mem_loc_result = parameter_ref_descriptor (rtl);
15619 break;
15620
15621 case PRE_MODIFY:
15622 /* Extract the PLUS expression nested inside and fall into
15623 PLUS code below. */
15624 rtl = XEXP (rtl, 1);
15625 goto plus;
15626
15627 case PRE_INC:
15628 case PRE_DEC:
15629 /* Turn these into a PLUS expression and fall into the PLUS code
15630 below. */
15631 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15632 gen_int_mode (GET_CODE (rtl) == PRE_INC
15633 ? GET_MODE_UNIT_SIZE (mem_mode)
15634 : -GET_MODE_UNIT_SIZE (mem_mode),
15635 mode));
15636
15637 /* fall through */
15638
15639 case PLUS:
15640 plus:
15641 if (is_based_loc (rtl)
15642 && is_a <scalar_int_mode> (mode, &int_mode)
15643 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15644 || XEXP (rtl, 0) == arg_pointer_rtx
15645 || XEXP (rtl, 0) == frame_pointer_rtx))
15646 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15647 INTVAL (XEXP (rtl, 1)),
15648 VAR_INIT_STATUS_INITIALIZED);
15649 else
15650 {
15651 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15652 VAR_INIT_STATUS_INITIALIZED);
15653 if (mem_loc_result == 0)
15654 break;
15655
15656 if (CONST_INT_P (XEXP (rtl, 1))
15657 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15658 <= DWARF2_ADDR_SIZE))
15659 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15660 else
15661 {
15662 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15663 VAR_INIT_STATUS_INITIALIZED);
15664 if (op1 == 0)
15665 return NULL;
15666 add_loc_descr (&mem_loc_result, op1);
15667 add_loc_descr (&mem_loc_result,
15668 new_loc_descr (DW_OP_plus, 0, 0));
15669 }
15670 }
15671 break;
15672
15673 /* If a pseudo-reg is optimized away, it is possible for it to
15674 be replaced with a MEM containing a multiply or shift. */
15675 case MINUS:
15676 op = DW_OP_minus;
15677 goto do_binop;
15678
15679 case MULT:
15680 op = DW_OP_mul;
15681 goto do_binop;
15682
15683 case DIV:
15684 if ((!dwarf_strict || dwarf_version >= 5)
15685 && is_a <scalar_int_mode> (mode, &int_mode)
15686 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15687 {
15688 mem_loc_result = typed_binop (DW_OP_div, rtl,
15689 base_type_for_mode (mode, 0),
15690 int_mode, mem_mode);
15691 break;
15692 }
15693 op = DW_OP_div;
15694 goto do_binop;
15695
15696 case UMOD:
15697 op = DW_OP_mod;
15698 goto do_binop;
15699
15700 case ASHIFT:
15701 op = DW_OP_shl;
15702 goto do_shift;
15703
15704 case ASHIFTRT:
15705 op = DW_OP_shra;
15706 goto do_shift;
15707
15708 case LSHIFTRT:
15709 op = DW_OP_shr;
15710 goto do_shift;
15711
15712 do_shift:
15713 if (!is_a <scalar_int_mode> (mode, &int_mode))
15714 break;
15715 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15716 VAR_INIT_STATUS_INITIALIZED);
15717 {
15718 rtx rtlop1 = XEXP (rtl, 1);
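	/* If the shift count is narrower than the shifted value, zero-extend
	   it so that both operands are described in INT_MODE (the count is
	   non-negative, so zero extension preserves its value).  */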
15719 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15720 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15721 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15722 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15723 VAR_INIT_STATUS_INITIALIZED);
15724 }
15725
15726 if (op0 == 0 || op1 == 0)
15727 break;
15728
15729 mem_loc_result = op0;
15730 add_loc_descr (&mem_loc_result, op1);
15731 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15732 break;
15733
15734 case AND:
15735 op = DW_OP_and;
15736 goto do_binop;
15737
15738 case IOR:
15739 op = DW_OP_or;
15740 goto do_binop;
15741
15742 case XOR:
15743 op = DW_OP_xor;
15744 goto do_binop;
15745
15746 do_binop:
15747 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15748 VAR_INIT_STATUS_INITIALIZED);
15749 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15750 VAR_INIT_STATUS_INITIALIZED);
15751
15752 if (op0 == 0 || op1 == 0)
15753 break;
15754
15755 mem_loc_result = op0;
15756 add_loc_descr (&mem_loc_result, op1);
15757 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15758 break;
15759
15760 case MOD:
15761 if ((!dwarf_strict || dwarf_version >= 5)
15762 && is_a <scalar_int_mode> (mode, &int_mode)
15763 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15764 {
15765 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15766 base_type_for_mode (mode, 0),
15767 int_mode, mem_mode);
15768 break;
15769 }
15770
15771 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15772 VAR_INIT_STATUS_INITIALIZED);
15773 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15774 VAR_INIT_STATUS_INITIALIZED);
15775
15776 if (op0 == 0 || op1 == 0)
15777 break;
15778
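      /* With op0 below op1 on the stack, the sequence over, over, div, mul,
	 minus leaves op0 - (op0 / op1) * op1, i.e. the signed modulus
	 expressed through DW_OP_div (signed division in DWARF).  */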
15779 mem_loc_result = op0;
15780 add_loc_descr (&mem_loc_result, op1);
15781 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15782 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15783 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15784 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15785 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15786 break;
15787
15788 case UDIV:
15789 if ((!dwarf_strict || dwarf_version >= 5)
15790 && is_a <scalar_int_mode> (mode, &int_mode))
15791 {
15792 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15793 {
15794 op = DW_OP_div;
15795 goto do_binop;
15796 }
15797 mem_loc_result = typed_binop (DW_OP_div, rtl,
15798 base_type_for_mode (int_mode, 1),
15799 int_mode, mem_mode);
15800 }
15801 break;
15802
15803 case NOT:
15804 op = DW_OP_not;
15805 goto do_unop;
15806
15807 case ABS:
15808 op = DW_OP_abs;
15809 goto do_unop;
15810
15811 case NEG:
15812 op = DW_OP_neg;
15813 goto do_unop;
15814
15815 do_unop:
15816 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15817 VAR_INIT_STATUS_INITIALIZED);
15818
15819 if (op0 == 0)
15820 break;
15821
15822 mem_loc_result = op0;
15823 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15824 break;
15825
15826 case CONST_INT:
15827 if (!is_a <scalar_int_mode> (mode, &int_mode)
15828 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15829 #ifdef POINTERS_EXTEND_UNSIGNED
15830 || (int_mode == Pmode
15831 && mem_mode != VOIDmode
15832 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15833 #endif
15834 )
15835 {
15836 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15837 break;
15838 }
15839 if ((!dwarf_strict || dwarf_version >= 5)
15840 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15841 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15842 {
15843 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15844 scalar_int_mode amode;
15845 if (type_die == NULL)
15846 return NULL;
15847 if (INTVAL (rtl) >= 0
15848 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15849 .exists (&amode))
15850 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15851 /* const DW_OP_convert <XXX> vs.
15852 DW_OP_const_type <XXX, 1, const>. */
15853 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15854 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15855 {
15856 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15857 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15858 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15859 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15860 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15861 add_loc_descr (&mem_loc_result, op0);
15862 return mem_loc_result;
15863 }
15864 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15865 INTVAL (rtl));
15866 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15867 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15868 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15869 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15870 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15871 else
15872 {
15873 mem_loc_result->dw_loc_oprnd2.val_class
15874 = dw_val_class_const_double;
15875 mem_loc_result->dw_loc_oprnd2.v.val_double
15876 = double_int::from_shwi (INTVAL (rtl));
15877 }
15878 }
15879 break;
15880
15881 case CONST_DOUBLE:
15882 if (!dwarf_strict || dwarf_version >= 5)
15883 {
15884 dw_die_ref type_die;
15885
15886 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15887 CONST_DOUBLE rtx could represent either a large integer
15888 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15889 the value is always a floating point constant.
15890
15891 When it is an integer, a CONST_DOUBLE is used whenever
15892 the constant requires 2 HWIs to be adequately represented.
15893 We output CONST_DOUBLEs as blocks. */
15894 if (mode == VOIDmode
15895 || (GET_MODE (rtl) == VOIDmode
15896 && maybe_ne (GET_MODE_BITSIZE (mode),
15897 HOST_BITS_PER_DOUBLE_INT)))
15898 break;
15899 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15900 if (type_die == NULL)
15901 return NULL;
15902 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15903 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15904 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15905 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15906 #if TARGET_SUPPORTS_WIDE_INT == 0
15907 if (!SCALAR_FLOAT_MODE_P (mode))
15908 {
15909 mem_loc_result->dw_loc_oprnd2.val_class
15910 = dw_val_class_const_double;
15911 mem_loc_result->dw_loc_oprnd2.v.val_double
15912 = rtx_to_double_int (rtl);
15913 }
15914 else
15915 #endif
15916 {
15917 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
15918 unsigned int length = GET_MODE_SIZE (float_mode);
15919 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
15920
15921 insert_float (rtl, array);
15922 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
15923 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
15924 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
15925 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
15926 }
15927 }
15928 break;
15929
15930 case CONST_WIDE_INT:
15931 if (!dwarf_strict || dwarf_version >= 5)
15932 {
15933 dw_die_ref type_die;
15934
15935 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15936 if (type_die == NULL)
15937 return NULL;
15938 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15939 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15940 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15941 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15942 mem_loc_result->dw_loc_oprnd2.val_class
15943 = dw_val_class_wide_int;
15944 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
15945 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
15946 }
15947 break;
15948
15949 case CONST_POLY_INT:
15950 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
15951 break;
15952
15953 case EQ:
15954 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
15955 break;
15956
15957 case GE:
15958 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15959 break;
15960
15961 case GT:
15962 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15963 break;
15964
15965 case LE:
15966 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15967 break;
15968
15969 case LT:
15970 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15971 break;
15972
15973 case NE:
15974 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
15975 break;
15976
15977 case GEU:
15978 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
15979 break;
15980
15981 case GTU:
15982 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
15983 break;
15984
15985 case LEU:
15986 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
15987 break;
15988
15989 case LTU:
15990 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
15991 break;
15992
15993 case UMIN:
15994 case UMAX:
15995 if (!SCALAR_INT_MODE_P (mode))
15996 break;
15997 /* FALLTHRU */
15998 case SMIN:
15999 case SMAX:
16000 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16001 break;
16002
16003 case ZERO_EXTRACT:
16004 case SIGN_EXTRACT:
16005 if (CONST_INT_P (XEXP (rtl, 1))
16006 && CONST_INT_P (XEXP (rtl, 2))
16007 && is_a <scalar_int_mode> (mode, &int_mode)
16008 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16009 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16010 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16011 && ((unsigned) INTVAL (XEXP (rtl, 1))
16012 + (unsigned) INTVAL (XEXP (rtl, 2))
16013 <= GET_MODE_BITSIZE (int_mode)))
16014 {
16015 int shift, size;
16016 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16017 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16018 if (op0 == 0)
16019 break;
16020 if (GET_CODE (rtl) == SIGN_EXTRACT)
16021 op = DW_OP_shra;
16022 else
16023 op = DW_OP_shr;
16024 mem_loc_result = op0;
16025 size = INTVAL (XEXP (rtl, 1));
16026 shift = INTVAL (XEXP (rtl, 2));
16027 if (BITS_BIG_ENDIAN)
16028 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
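	  /* Shift the field up so that its most significant bit becomes the
	     most significant bit of a DWARF2_ADDR_SIZE-wide value, then shift
	     it back down; DW_OP_shra sign-extends the result for SIGN_EXTRACT
	     while DW_OP_shr zero-extends it for ZERO_EXTRACT.  */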
16029 if (shift + size != (int) DWARF2_ADDR_SIZE)
16030 {
16031 add_loc_descr (&mem_loc_result,
16032 int_loc_descriptor (DWARF2_ADDR_SIZE
16033 - shift - size));
16034 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16035 }
16036 if (size != (int) DWARF2_ADDR_SIZE)
16037 {
16038 add_loc_descr (&mem_loc_result,
16039 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16040 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16041 }
16042 }
16043 break;
16044
16045 case IF_THEN_ELSE:
16046 {
16047 dw_loc_descr_ref op2, bra_node, drop_node;
16048 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16049 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16050 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16051 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16052 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16053 VAR_INIT_STATUS_INITIALIZED);
16054 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16055 VAR_INIT_STATUS_INITIALIZED);
16056 if (op0 == NULL || op1 == NULL || op2 == NULL)
16057 break;
16058
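	/* Push op1, then op2, then the condition.  DW_OP_bra consumes the
	   condition: when it is non-zero it branches straight to the
	   DW_OP_drop, which discards op2 and leaves op1; when it is zero the
	   DW_OP_swap followed by the DW_OP_drop discards op1 and leaves
	   op2.  */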
16059 mem_loc_result = op1;
16060 add_loc_descr (&mem_loc_result, op2);
16061 add_loc_descr (&mem_loc_result, op0);
16062 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16063 add_loc_descr (&mem_loc_result, bra_node);
16064 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16065 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16066 add_loc_descr (&mem_loc_result, drop_node);
16067 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16068 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16069 }
16070 break;
16071
16072 case FLOAT_EXTEND:
16073 case FLOAT_TRUNCATE:
16074 case FLOAT:
16075 case UNSIGNED_FLOAT:
16076 case FIX:
16077 case UNSIGNED_FIX:
16078 if (!dwarf_strict || dwarf_version >= 5)
16079 {
16080 dw_die_ref type_die;
16081 dw_loc_descr_ref cvt;
16082
16083 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16084 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16085 if (op0 == NULL)
16086 break;
16087 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16088 && (GET_CODE (rtl) == FLOAT
16089 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16090 {
16091 type_die = base_type_for_mode (int_mode,
16092 GET_CODE (rtl) == UNSIGNED_FLOAT);
16093 if (type_die == NULL)
16094 break;
16095 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16096 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16097 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16098 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16099 add_loc_descr (&op0, cvt);
16100 }
16101 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16102 if (type_die == NULL)
16103 break;
16104 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16105 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16106 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16107 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16108 add_loc_descr (&op0, cvt);
16109 if (is_a <scalar_int_mode> (mode, &int_mode)
16110 && (GET_CODE (rtl) == FIX
16111 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16112 {
16113 op0 = convert_descriptor_to_mode (int_mode, op0);
16114 if (op0 == NULL)
16115 break;
16116 }
16117 mem_loc_result = op0;
16118 }
16119 break;
16120
16121 case CLZ:
16122 case CTZ:
16123 case FFS:
16124 if (is_a <scalar_int_mode> (mode, &int_mode))
16125 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16126 break;
16127
16128 case POPCOUNT:
16129 case PARITY:
16130 if (is_a <scalar_int_mode> (mode, &int_mode))
16131 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16132 break;
16133
16134 case BSWAP:
16135 if (is_a <scalar_int_mode> (mode, &int_mode))
16136 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16137 break;
16138
16139 case ROTATE:
16140 case ROTATERT:
16141 if (is_a <scalar_int_mode> (mode, &int_mode))
16142 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16143 break;
16144
16145 case COMPARE:
16146 /* In theory, we could implement the above. */
16147 /* DWARF cannot represent the unsigned compare operations
16148 natively. */
16149 case SS_MULT:
16150 case US_MULT:
16151 case SS_DIV:
16152 case US_DIV:
16153 case SS_PLUS:
16154 case US_PLUS:
16155 case SS_MINUS:
16156 case US_MINUS:
16157 case SS_NEG:
16158 case US_NEG:
16159 case SS_ABS:
16160 case SS_ASHIFT:
16161 case US_ASHIFT:
16162 case SS_TRUNCATE:
16163 case US_TRUNCATE:
16164 case UNORDERED:
16165 case ORDERED:
16166 case UNEQ:
16167 case UNGE:
16168 case UNGT:
16169 case UNLE:
16170 case UNLT:
16171 case LTGT:
16172 case FRACT_CONVERT:
16173 case UNSIGNED_FRACT_CONVERT:
16174 case SAT_FRACT:
16175 case UNSIGNED_SAT_FRACT:
16176 case SQRT:
16177 case ASM_OPERANDS:
16178 case VEC_MERGE:
16179 case VEC_SELECT:
16180 case VEC_CONCAT:
16181 case VEC_DUPLICATE:
16182 case VEC_SERIES:
16183 case UNSPEC:
16184 case HIGH:
16185 case FMA:
16186 case STRICT_LOW_PART:
16187 case CONST_VECTOR:
16188 case CONST_FIXED:
16189 case CLRSB:
16190 case CLOBBER:
16191 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16192 can't express it in the debug info. This can happen e.g. with some
16193 TLS UNSPECs. */
16194 break;
16195
16196 case CONST_STRING:
16197 resolve_one_addr (&rtl);
16198 goto symref;
16199
16200 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16201 the expression. An UNSPEC rtx represents a raw DWARF operation;
16202 new_loc_descr is called for it to build the operation directly.
16203 Otherwise mem_loc_descriptor is called recursively. */
16204 case PARALLEL:
16205 {
16206 int index = 0;
16207 dw_loc_descr_ref exp_result = NULL;
16208
16209 for (; index < XVECLEN (rtl, 0); index++)
16210 {
16211 rtx elem = XVECEXP (rtl, 0, index);
16212 if (GET_CODE (elem) == UNSPEC)
16213 {
16214 /* Each DWARF operation UNSPEC contains two operands; if
16215 one operand is not used for the operation, const0_rtx is
16216 passed. */
16217 gcc_assert (XVECLEN (elem, 0) == 2);
16218
16219 HOST_WIDE_INT dw_op = XINT (elem, 1);
16220 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16221 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16222 exp_result
16223 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16224 oprnd2);
16225 }
16226 else
16227 exp_result
16228 = mem_loc_descriptor (elem, mode, mem_mode,
16229 VAR_INIT_STATUS_INITIALIZED);
16230
16231 if (!mem_loc_result)
16232 mem_loc_result = exp_result;
16233 else
16234 add_loc_descr (&mem_loc_result, exp_result);
16235 }
16236
16237 break;
16238 }
16239
16240 default:
16241 if (flag_checking)
16242 {
16243 print_rtl (stderr, rtl);
16244 gcc_unreachable ();
16245 }
16246 break;
16247 }
16248
16249 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16250 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16251
16252 return mem_loc_result;
16253 }
16254
16255 /* Return a descriptor that describes the concatenation of two locations.
16256 This is typically a complex variable. */
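/* For example, for a complex value whose real and imaginary parts live in two
   separate registers, this produces
   <location of X0> DW_OP_piece size0 <location of X1> DW_OP_piece size1.  */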
16257
16258 static dw_loc_descr_ref
16259 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16260 {
16261 /* At present we only track constant-sized pieces. */
16262 unsigned int size0, size1;
16263 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16264 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16265 return 0;
16266
16267 dw_loc_descr_ref cc_loc_result = NULL;
16268 dw_loc_descr_ref x0_ref
16269 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16270 dw_loc_descr_ref x1_ref
16271 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16272
16273 if (x0_ref == 0 || x1_ref == 0)
16274 return 0;
16275
16276 cc_loc_result = x0_ref;
16277 add_loc_descr_op_piece (&cc_loc_result, size0);
16278
16279 add_loc_descr (&cc_loc_result, x1_ref);
16280 add_loc_descr_op_piece (&cc_loc_result, size1);
16281
16282 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16283 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16284
16285 return cc_loc_result;
16286 }
16287
16288 /* Return a descriptor that describes the concatenation of N
16289 locations. */
16290
16291 static dw_loc_descr_ref
16292 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16293 {
16294 unsigned int i;
16295 dw_loc_descr_ref cc_loc_result = NULL;
16296 unsigned int n = XVECLEN (concatn, 0);
16297 unsigned int size;
16298
16299 for (i = 0; i < n; ++i)
16300 {
16301 dw_loc_descr_ref ref;
16302 rtx x = XVECEXP (concatn, 0, i);
16303
16304 /* At present we only track constant-sized pieces. */
16305 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16306 return NULL;
16307
16308 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16309 if (ref == NULL)
16310 return NULL;
16311
16312 add_loc_descr (&cc_loc_result, ref);
16313 add_loc_descr_op_piece (&cc_loc_result, size);
16314 }
16315
16316 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16317 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16318
16319 return cc_loc_result;
16320 }
16321
16322 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16323 for DEBUG_IMPLICIT_PTR RTL. */
16324
16325 static dw_loc_descr_ref
16326 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16327 {
16328 dw_loc_descr_ref ret;
16329 dw_die_ref ref;
16330
16331 if (dwarf_strict && dwarf_version < 5)
16332 return NULL;
16333 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16334 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16335 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16336 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16337 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16338 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16339 if (ref)
16340 {
16341 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16342 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16343 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16344 }
16345 else
16346 {
16347 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16348 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16349 }
16350 return ret;
16351 }
16352
16353 /* Output a proper Dwarf location descriptor for a variable or parameter
16354 which is either allocated in a register or in a memory location. For a
16355 register, we just generate an OP_REG and the register number. For a
16356 memory location we provide a Dwarf postfix expression describing how to
16357 generate the (dynamic) address of the object onto the address stack.
16358
16359 MODE is mode of the decl if this loc_descriptor is going to be used in
16360 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16361 allowed, VOIDmode otherwise.
16362
16363 If we don't know how to describe it, return 0. */
16364
16365 static dw_loc_descr_ref
16366 loc_descriptor (rtx rtl, machine_mode mode,
16367 enum var_init_status initialized)
16368 {
16369 dw_loc_descr_ref loc_result = NULL;
16370 scalar_int_mode int_mode;
16371
16372 switch (GET_CODE (rtl))
16373 {
16374 case SUBREG:
16375 /* The case of a subreg may arise when we have a local (register)
16376 variable or a formal (register) parameter which doesn't quite fill
16377 up an entire register. For now, just assume that it is
16378 legitimate to make the Dwarf info refer to the whole register which
16379 contains the given subreg. */
16380 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16381 loc_result = loc_descriptor (SUBREG_REG (rtl),
16382 GET_MODE (SUBREG_REG (rtl)), initialized);
16383 else
16384 goto do_default;
16385 break;
16386
16387 case REG:
16388 loc_result = reg_loc_descriptor (rtl, initialized);
16389 break;
16390
16391 case MEM:
16392 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16393 GET_MODE (rtl), initialized);
16394 if (loc_result == NULL)
16395 loc_result = tls_mem_loc_descriptor (rtl);
16396 if (loc_result == NULL)
16397 {
16398 rtx new_rtl = avoid_constant_pool_reference (rtl);
16399 if (new_rtl != rtl)
16400 loc_result = loc_descriptor (new_rtl, mode, initialized);
16401 }
16402 break;
16403
16404 case CONCAT:
16405 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16406 initialized);
16407 break;
16408
16409 case CONCATN:
16410 loc_result = concatn_loc_descriptor (rtl, initialized);
16411 break;
16412
16413 case VAR_LOCATION:
16414 /* Single part. */
16415 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16416 {
16417 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16418 if (GET_CODE (loc) == EXPR_LIST)
16419 loc = XEXP (loc, 0);
16420 loc_result = loc_descriptor (loc, mode, initialized);
16421 break;
16422 }
16423
16424 rtl = XEXP (rtl, 1);
16425 /* FALLTHRU */
16426
16427 case PARALLEL:
16428 {
16429 rtvec par_elems = XVEC (rtl, 0);
16430 int num_elem = GET_NUM_ELEM (par_elems);
16431 machine_mode mode;
16432 int i, size;
16433
16434 /* Create the first one, so we have something to add to. */
16435 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16436 VOIDmode, initialized);
16437 if (loc_result == NULL)
16438 return NULL;
16439 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16440 /* At present we only track constant-sized pieces. */
16441 if (!GET_MODE_SIZE (mode).is_constant (&size))
16442 return NULL;
16443 add_loc_descr_op_piece (&loc_result, size);
16444 for (i = 1; i < num_elem; i++)
16445 {
16446 dw_loc_descr_ref temp;
16447
16448 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16449 VOIDmode, initialized);
16450 if (temp == NULL)
16451 return NULL;
16452 add_loc_descr (&loc_result, temp);
16453 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16454 /* At present we only track constant-sized pieces. */
16455 if (!GET_MODE_SIZE (mode).is_constant (&size))
16456 return NULL;
16457 add_loc_descr_op_piece (&loc_result, size);
16458 }
16459 }
16460 break;
16461
16462 case CONST_INT:
16463 if (mode != VOIDmode && mode != BLKmode)
16464 {
16465 int_mode = as_a <scalar_int_mode> (mode);
16466 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16467 INTVAL (rtl));
16468 }
16469 break;
16470
16471 case CONST_DOUBLE:
16472 if (mode == VOIDmode)
16473 mode = GET_MODE (rtl);
16474
16475 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16476 {
16477 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16478
16479 /* Note that a CONST_DOUBLE rtx could represent either an integer
16480 or a floating-point constant. A CONST_DOUBLE is used whenever
16481 the constant requires more than one word in order to be
16482 adequately represented. We output CONST_DOUBLEs as blocks. */
16483 scalar_mode smode = as_a <scalar_mode> (mode);
16484 loc_result = new_loc_descr (DW_OP_implicit_value,
16485 GET_MODE_SIZE (smode), 0);
16486 #if TARGET_SUPPORTS_WIDE_INT == 0
16487 if (!SCALAR_FLOAT_MODE_P (smode))
16488 {
16489 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16490 loc_result->dw_loc_oprnd2.v.val_double
16491 = rtx_to_double_int (rtl);
16492 }
16493 else
16494 #endif
16495 {
16496 unsigned int length = GET_MODE_SIZE (smode);
16497 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16498
16499 insert_float (rtl, array);
16500 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16501 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16502 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16503 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16504 }
16505 }
16506 break;
16507
16508 case CONST_WIDE_INT:
16509 if (mode == VOIDmode)
16510 mode = GET_MODE (rtl);
16511
16512 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16513 {
16514 int_mode = as_a <scalar_int_mode> (mode);
16515 loc_result = new_loc_descr (DW_OP_implicit_value,
16516 GET_MODE_SIZE (int_mode), 0);
16517 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16518 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16519 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16520 }
16521 break;
16522
16523 case CONST_VECTOR:
16524 if (mode == VOIDmode)
16525 mode = GET_MODE (rtl);
16526
16527 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16528 {
16529 unsigned int length;
16530 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16531 return NULL;
16532
16533 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16534 unsigned char *array
16535 = ggc_vec_alloc<unsigned char> (length * elt_size);
16536 unsigned int i;
16537 unsigned char *p;
16538 machine_mode imode = GET_MODE_INNER (mode);
16539
16540 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16541 switch (GET_MODE_CLASS (mode))
16542 {
16543 case MODE_VECTOR_INT:
16544 for (i = 0, p = array; i < length; i++, p += elt_size)
16545 {
16546 rtx elt = CONST_VECTOR_ELT (rtl, i);
16547 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16548 }
16549 break;
16550
16551 case MODE_VECTOR_FLOAT:
16552 for (i = 0, p = array; i < length; i++, p += elt_size)
16553 {
16554 rtx elt = CONST_VECTOR_ELT (rtl, i);
16555 insert_float (elt, p);
16556 }
16557 break;
16558
16559 default:
16560 gcc_unreachable ();
16561 }
16562
16563 loc_result = new_loc_descr (DW_OP_implicit_value,
16564 length * elt_size, 0);
16565 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16566 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16567 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16568 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16569 }
16570 break;
16571
16572 case CONST:
16573 if (mode == VOIDmode
16574 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16575 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16576 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16577 {
16578 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16579 break;
16580 }
16581 /* FALLTHROUGH */
16582 case SYMBOL_REF:
16583 if (!const_ok_for_output (rtl))
16584 break;
16585 /* FALLTHROUGH */
16586 case LABEL_REF:
16587 if (is_a <scalar_int_mode> (mode, &int_mode)
16588 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16589 && (dwarf_version >= 4 || !dwarf_strict))
16590 {
16591 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16592 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16593 vec_safe_push (used_rtx_array, rtl);
16594 }
16595 break;
16596
16597 case DEBUG_IMPLICIT_PTR:
16598 loc_result = implicit_ptr_descriptor (rtl, 0);
16599 break;
16600
16601 case PLUS:
16602 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16603 && CONST_INT_P (XEXP (rtl, 1)))
16604 {
16605 loc_result
16606 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16607 break;
16608 }
16609 /* FALLTHRU */
16610 do_default:
16611 default:
16612 if ((is_a <scalar_int_mode> (mode, &int_mode)
16613 && GET_MODE (rtl) == int_mode
16614 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16615 && dwarf_version >= 4)
16616 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16617 {
16618 /* Value expression. */
16619 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16620 if (loc_result)
16621 add_loc_descr (&loc_result,
16622 new_loc_descr (DW_OP_stack_value, 0, 0));
16623 }
16624 break;
16625 }
16626
16627 return loc_result;
16628 }
16629
16630 /* We need to figure out what section we should use as the base for the
16631 address ranges where a given location is valid.
16632 1. If this particular DECL has a section associated with it, use that.
16633 2. If this function has a section associated with it, use that.
16634 3. Otherwise, use the text section.
16635 XXX: If you split a variable across multiple sections, we won't notice. */
16636
16637 static const char *
16638 secname_for_decl (const_tree decl)
16639 {
16640 const char *secname;
16641
16642 if (VAR_OR_FUNCTION_DECL_P (decl)
16643 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16644 && DECL_SECTION_NAME (decl))
16645 secname = DECL_SECTION_NAME (decl);
16646 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16647 secname = DECL_SECTION_NAME (current_function_decl);
16648 else if (cfun && in_cold_section_p)
16649 secname = crtl->subsections.cold_section_label;
16650 else
16651 secname = text_section_label;
16652
16653 return secname;
16654 }
16655
16656 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16657
16658 static bool
16659 decl_by_reference_p (tree decl)
16660 {
16661 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16662 || VAR_P (decl))
16663 && DECL_BY_REFERENCE (decl));
16664 }
16665
16666 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16667 for VARLOC. */
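/* (As used below, WANT_ADDRESS is 0 when the value of the object is wanted,
   1 when its address is required, and 2 when its address is preferred but an
   implicit value via DW_OP_stack_value is acceptable.)  */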
16668
16669 static dw_loc_descr_ref
16670 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16671 enum var_init_status initialized)
16672 {
16673 int have_address = 0;
16674 dw_loc_descr_ref descr;
16675 machine_mode mode;
16676
16677 if (want_address != 2)
16678 {
16679 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16680 /* Single part. */
16681 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16682 {
16683 varloc = PAT_VAR_LOCATION_LOC (varloc);
16684 if (GET_CODE (varloc) == EXPR_LIST)
16685 varloc = XEXP (varloc, 0);
16686 mode = GET_MODE (varloc);
16687 if (MEM_P (varloc))
16688 {
16689 rtx addr = XEXP (varloc, 0);
16690 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16691 mode, initialized);
16692 if (descr)
16693 have_address = 1;
16694 else
16695 {
16696 rtx x = avoid_constant_pool_reference (varloc);
16697 if (x != varloc)
16698 descr = mem_loc_descriptor (x, mode, VOIDmode,
16699 initialized);
16700 }
16701 }
16702 else
16703 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16704 }
16705 else
16706 return 0;
16707 }
16708 else
16709 {
16710 if (GET_CODE (varloc) == VAR_LOCATION)
16711 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16712 else
16713 mode = DECL_MODE (loc);
16714 descr = loc_descriptor (varloc, mode, initialized);
16715 have_address = 1;
16716 }
16717
16718 if (!descr)
16719 return 0;
16720
16721 if (want_address == 2 && !have_address
16722 && (dwarf_version >= 4 || !dwarf_strict))
16723 {
16724 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16725 {
16726 expansion_failed (loc, NULL_RTX,
16727 "DWARF address size mismatch");
16728 return 0;
16729 }
16730 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16731 have_address = 1;
16732 }
16733 /* Show if we can't fill the request for an address. */
16734 if (want_address && !have_address)
16735 {
16736 expansion_failed (loc, NULL_RTX,
16737 "Want address and only have value");
16738 return 0;
16739 }
16740
16741 /* If we've got an address and don't want one, dereference. */
16742 if (!want_address && have_address)
16743 {
16744 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16745 enum dwarf_location_atom op;
16746
16747 if (size > DWARF2_ADDR_SIZE || size == -1)
16748 {
16749 expansion_failed (loc, NULL_RTX,
16750 "DWARF address size mismatch");
16751 return 0;
16752 }
16753 else if (size == DWARF2_ADDR_SIZE)
16754 op = DW_OP_deref;
16755 else
16756 op = DW_OP_deref_size;
16757
16758 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16759 }
16760
16761 return descr;
16762 }
16763
16764 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16765 if it is not possible. */
16766
16767 static dw_loc_descr_ref
16768 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16769 {
16770 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16771 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16772 else if (dwarf_version >= 3 || !dwarf_strict)
16773 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16774 else
16775 return NULL;
16776 }
16777
16778 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16779 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16780
16781 static dw_loc_descr_ref
16782 dw_sra_loc_expr (tree decl, rtx loc)
16783 {
16784 rtx p;
16785 unsigned HOST_WIDE_INT padsize = 0;
16786 dw_loc_descr_ref descr, *descr_tail;
16787 unsigned HOST_WIDE_INT decl_size;
16788 rtx varloc;
16789 enum var_init_status initialized;
16790
16791 if (DECL_SIZE (decl) == NULL
16792 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16793 return NULL;
16794
16795 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16796 descr = NULL;
16797 descr_tail = &descr;
16798
16799 for (p = loc; p; p = XEXP (p, 1))
16800 {
16801 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16802 rtx loc_note = *decl_piece_varloc_ptr (p);
16803 dw_loc_descr_ref cur_descr;
16804 dw_loc_descr_ref *tail, last = NULL;
16805 unsigned HOST_WIDE_INT opsize = 0;
16806
16807 if (loc_note == NULL_RTX
16808 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16809 {
16810 padsize += bitsize;
16811 continue;
16812 }
16813 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16814 varloc = NOTE_VAR_LOCATION (loc_note);
16815 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16816 if (cur_descr == NULL)
16817 {
16818 padsize += bitsize;
16819 continue;
16820 }
16821
16822 /* Check that cur_descr either doesn't use
16823 DW_OP_*piece operations, or their sum is equal
16824 to bitsize. Otherwise we can't embed it. */
16825 for (tail = &cur_descr; *tail != NULL;
16826 tail = &(*tail)->dw_loc_next)
16827 if ((*tail)->dw_loc_opc == DW_OP_piece)
16828 {
16829 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16830 * BITS_PER_UNIT;
16831 last = *tail;
16832 }
16833 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16834 {
16835 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16836 last = *tail;
16837 }
16838
16839 if (last != NULL && opsize != bitsize)
16840 {
16841 padsize += bitsize;
16842 /* Discard the current piece of the descriptor and release any
16843 addr_table entries it uses. */
16844 remove_loc_list_addr_table_entries (cur_descr);
16845 continue;
16846 }
16847
16848 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16849 expression, which means that those bits are optimized out. */
16850 if (padsize)
16851 {
16852 if (padsize > decl_size)
16853 {
16854 remove_loc_list_addr_table_entries (cur_descr);
16855 goto discard_descr;
16856 }
16857 decl_size -= padsize;
16858 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16859 if (*descr_tail == NULL)
16860 {
16861 remove_loc_list_addr_table_entries (cur_descr);
16862 goto discard_descr;
16863 }
16864 descr_tail = &(*descr_tail)->dw_loc_next;
16865 padsize = 0;
16866 }
16867 *descr_tail = cur_descr;
16868 descr_tail = tail;
16869 if (bitsize > decl_size)
16870 goto discard_descr;
16871 decl_size -= bitsize;
16872 if (last == NULL)
16873 {
16874 HOST_WIDE_INT offset = 0;
16875 if (GET_CODE (varloc) == VAR_LOCATION
16876 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16877 {
16878 varloc = PAT_VAR_LOCATION_LOC (varloc);
16879 if (GET_CODE (varloc) == EXPR_LIST)
16880 varloc = XEXP (varloc, 0);
16881 }
16882 do
16883 {
16884 if (GET_CODE (varloc) == CONST
16885 || GET_CODE (varloc) == SIGN_EXTEND
16886 || GET_CODE (varloc) == ZERO_EXTEND)
16887 varloc = XEXP (varloc, 0);
16888 else if (GET_CODE (varloc) == SUBREG)
16889 varloc = SUBREG_REG (varloc);
16890 else
16891 break;
16892 }
16893 while (1);
16894 /* The DW_OP_bit_piece offset should be zero for register
16895 or implicit location descriptions and empty location
16896 descriptions, but for memory addresses it needs big-endian
16897 adjustment. */
16898 if (MEM_P (varloc))
16899 {
16900 unsigned HOST_WIDE_INT memsize;
16901 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16902 goto discard_descr;
16903 memsize *= BITS_PER_UNIT;
16904 if (memsize != bitsize)
16905 {
16906 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16907 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16908 goto discard_descr;
16909 if (memsize < bitsize)
16910 goto discard_descr;
16911 if (BITS_BIG_ENDIAN)
16912 offset = memsize - bitsize;
16913 }
16914 }
16915
16916 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
16917 if (*descr_tail == NULL)
16918 goto discard_descr;
16919 descr_tail = &(*descr_tail)->dw_loc_next;
16920 }
16921 }
16922
16923 /* If there were any non-empty expressions, add padding till the end of
16924 the decl. */
16925 if (descr != NULL && decl_size != 0)
16926 {
16927 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
16928 if (*descr_tail == NULL)
16929 goto discard_descr;
16930 }
16931 return descr;
16932
16933 discard_descr:
16934 /* Discard the descriptor and release any addr_table entries it uses. */
16935 remove_loc_list_addr_table_entries (descr);
16936 return NULL;
16937 }
16938
16939 /* Return the dwarf representation of the location list LOC_LIST of
16940 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
16941 function. */
16942
16943 static dw_loc_list_ref
16944 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
16945 {
16946 const char *endname, *secname;
16947 var_loc_view endview;
16948 rtx varloc;
16949 enum var_init_status initialized;
16950 struct var_loc_node *node;
16951 dw_loc_descr_ref descr;
16952 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
16953 dw_loc_list_ref list = NULL;
16954 dw_loc_list_ref *listp = &list;
16955
16956 /* Now that we know what section we are using for a base,
16957 actually construct the list of locations.
16958 The first location information is what is passed to the
16959 function that creates the location list, and the remaining
16960 locations just get added on to that list.
16961 Note that we only know the start address for a location
16962 (i.e. location changes), so to build the range, we use
16963 the range [current location start, next location start].
16964 This means we have to special case the last node, and generate
16965 a range of [last location start, end of function label]. */
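  /* For instance, if the variable's location changes at labels L1, L2 and L3,
     this produces the ranges [L1, L2), [L2, L3) and [L3, end of function);
     the label names here are only illustrative.  */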
16966
16967 if (cfun && crtl->has_bb_partition)
16968 {
16969 bool save_in_cold_section_p = in_cold_section_p;
16970 in_cold_section_p = first_function_block_is_cold;
16971 if (loc_list->last_before_switch == NULL)
16972 in_cold_section_p = !in_cold_section_p;
16973 secname = secname_for_decl (decl);
16974 in_cold_section_p = save_in_cold_section_p;
16975 }
16976 else
16977 secname = secname_for_decl (decl);
16978
16979 for (node = loc_list->first; node; node = node->next)
16980 {
16981 bool range_across_switch = false;
16982 if (GET_CODE (node->loc) == EXPR_LIST
16983 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
16984 {
16985 if (GET_CODE (node->loc) == EXPR_LIST)
16986 {
16987 descr = NULL;
16988 /* This requires DW_OP_{,bit_}piece, which is not usable
16989 inside DWARF expressions. */
16990 if (want_address == 2)
16991 descr = dw_sra_loc_expr (decl, node->loc);
16992 }
16993 else
16994 {
16995 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
16996 varloc = NOTE_VAR_LOCATION (node->loc);
16997 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
16998 }
16999 if (descr)
17000 {
17001 /* If section switch happens in between node->label
17002 and node->next->label (or end of function) and
17003 we can't emit it as a single entry list,
17004 emit two ranges, first one ending at the end
17005 of first partition and second one starting at the
17006 beginning of second partition. */
17007 if (node == loc_list->last_before_switch
17008 && (node != loc_list->first || loc_list->first->next)
17009 && current_function_decl)
17010 {
17011 endname = cfun->fde->dw_fde_end;
17012 endview = 0;
17013 range_across_switch = true;
17014 }
17015 /* The variable has a location between NODE->LABEL and
17016 NODE->NEXT->LABEL. */
17017 else if (node->next)
17018 endname = node->next->label, endview = node->next->view;
17019 /* If the variable has a location at the last label
17020 it keeps its location until the end of function. */
17021 else if (!current_function_decl)
17022 endname = text_end_label, endview = 0;
17023 else
17024 {
17025 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17026 current_function_funcdef_no);
17027 endname = ggc_strdup (label_id);
17028 endview = 0;
17029 }
17030
17031 *listp = new_loc_list (descr, node->label, node->view,
17032 endname, endview, secname);
17033 if (TREE_CODE (decl) == PARM_DECL
17034 && node == loc_list->first
17035 && NOTE_P (node->loc)
17036 && strcmp (node->label, endname) == 0)
17037 (*listp)->force = true;
17038 listp = &(*listp)->dw_loc_next;
17039 }
17040 }
17041
17042 if (cfun
17043 && crtl->has_bb_partition
17044 && node == loc_list->last_before_switch)
17045 {
17046 bool save_in_cold_section_p = in_cold_section_p;
17047 in_cold_section_p = !first_function_block_is_cold;
17048 secname = secname_for_decl (decl);
17049 in_cold_section_p = save_in_cold_section_p;
17050 }
17051
17052 if (range_across_switch)
17053 {
17054 if (GET_CODE (node->loc) == EXPR_LIST)
17055 descr = dw_sra_loc_expr (decl, node->loc);
17056 else
17057 {
17058 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17059 varloc = NOTE_VAR_LOCATION (node->loc);
17060 descr = dw_loc_list_1 (decl, varloc, want_address,
17061 initialized);
17062 }
17063 gcc_assert (descr);
17064 /* The variable has a location between NODE->LABEL and
17065 NODE->NEXT->LABEL. */
17066 if (node->next)
17067 endname = node->next->label, endview = node->next->view;
17068 else
17069 endname = cfun->fde->dw_fde_second_end, endview = 0;
17070 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17071 endname, endview, secname);
17072 listp = &(*listp)->dw_loc_next;
17073 }
17074 }
17075
17076 /* Try to avoid the overhead of a location list by emitting a location
17077 expression instead, but only if we didn't have more than one
17078 location entry in the first place. If some entries were not
17079 representable, we don't want to pretend that a single entry that was
17080 representable applies to the entire scope in which the variable is
17081 available. */
17082 maybe_gen_llsym (list);
17083
17084 return list;
17085 }
17086
17087 /* Return true if the loc_list has only a single element and thus can be
17088 represented as a location description. */
17089
17090 static bool
17091 single_element_loc_list_p (dw_loc_list_ref list)
17092 {
17093 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17094 return !list->ll_symbol;
17095 }
17096
17097 /* Duplicate a single element of location list. */
17098
17099 static inline dw_loc_descr_ref
17100 copy_loc_descr (dw_loc_descr_ref ref)
17101 {
17102 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17103 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17104 return copy;
17105 }
17106
17107 /* To each location in list LIST append loc descr REF. */
17108
17109 static void
17110 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17111 {
17112 dw_loc_descr_ref copy;
17113 add_loc_descr (&list->expr, ref);
17114 list = list->dw_loc_next;
17115 while (list)
17116 {
17117 copy = copy_loc_descr (ref);
17118 add_loc_descr (&list->expr, copy);
17119 while (copy->dw_loc_next)
17120 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17121 list = list->dw_loc_next;
17122 }
17123 }
17124
17125 /* To each location in list LIST prepend loc descr REF. */
17126
17127 static void
17128 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17129 {
17130 dw_loc_descr_ref copy;
17131 dw_loc_descr_ref ref_end = list->expr;
17132 add_loc_descr (&ref, list->expr);
17133 list->expr = ref;
17134 list = list->dw_loc_next;
17135 while (list)
17136 {
17137 dw_loc_descr_ref end = list->expr;
17138 list->expr = copy = copy_loc_descr (ref);
17139 while (copy->dw_loc_next != ref_end)
17140 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17141 copy->dw_loc_next = end;
17142 list = list->dw_loc_next;
17143 }
17144 }
17145
17146 /* Given two lists RET and LIST,
17147 produce the location list that results from adding the expression in LIST
17148 to the expression in RET at each position in the program.
17149 Might be destructive on both RET and LIST.
17150 
17151 TODO: We handle only the simple cases of RET or LIST having at most one
17152 element. The general case would involve sorting the lists in program order
17153 and merging them, which will need some additional work.
17154 Adding that will improve the quality of debug info, especially for SRA-ed
17155 structures. */
17156
17157 static void
17158 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17159 {
17160 if (!list)
17161 return;
17162 if (!*ret)
17163 {
17164 *ret = list;
17165 return;
17166 }
17167 if (!list->dw_loc_next)
17168 {
17169 add_loc_descr_to_each (*ret, list->expr);
17170 return;
17171 }
17172 if (!(*ret)->dw_loc_next)
17173 {
17174 prepend_loc_descr_to_each (list, (*ret)->expr);
17175 *ret = list;
17176 return;
17177 }
17178 expansion_failed (NULL_TREE, NULL_RTX,
17179 "Don't know how to merge two non-trivial"
17180 " location lists.\n");
17181 *ret = NULL;
17182 return;
17183 }
17184
17185 /* LOC is a constant expression. Try our luck: look it up in the constant
17186 pool and return a loc_descr for its address. */
17187
17188 static dw_loc_descr_ref
17189 cst_pool_loc_descr (tree loc)
17190 {
17191 /* Get an RTL for this, if something has been emitted. */
17192 rtx rtl = lookup_constant_def (loc);
17193
17194 if (!rtl || !MEM_P (rtl))
17195 {
17196 gcc_assert (!rtl);
17197 return 0;
17198 }
17199 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17200
17201 /* TODO: We might get more coverage if we were actually delaying expansion
17202 of all expressions until the end of compilation, when constant pools are
17203 fully populated. */
17204 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17205 {
17206 expansion_failed (loc, NULL_RTX,
17207 "CST value in contant pool but not marked.");
17208 return 0;
17209 }
17210 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17211 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17212 }
17213
17214 /* Return a dw_loc_list representing the address of addr_expr LOC
17215 by looking for an inner INDIRECT_REF expression and turning
17216 it into simple arithmetic.
17217
17218 See loc_list_from_tree for the meaning of CONTEXT. */
17219
17220 static dw_loc_list_ref
17221 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17222 loc_descr_context *context)
17223 {
17224 tree obj, offset;
17225 poly_int64 bitsize, bitpos, bytepos;
17226 machine_mode mode;
17227 int unsignedp, reversep, volatilep = 0;
17228 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17229
17230 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17231 &bitsize, &bitpos, &offset, &mode,
17232 &unsignedp, &reversep, &volatilep);
17233 STRIP_NOPS (obj);
17234 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17235 {
17236 expansion_failed (loc, NULL_RTX, "bitfield access");
17237 return 0;
17238 }
17239 if (!INDIRECT_REF_P (obj))
17240 {
17241 expansion_failed (obj,
17242 NULL_RTX, "no indirect ref in inner reference");
17243 return 0;
17244 }
17245 if (!offset && known_eq (bitpos, 0))
17246 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17247 context);
17248 else if (toplev
17249 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17250 && (dwarf_version >= 4 || !dwarf_strict))
17251 {
17252 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17253 if (!list_ret)
17254 return 0;
17255 if (offset)
17256 {
17257 /* Variable offset. */
17258 list_ret1 = loc_list_from_tree (offset, 0, context);
17259 if (list_ret1 == 0)
17260 return 0;
17261 add_loc_list (&list_ret, list_ret1);
17262 if (!list_ret)
17263 return 0;
17264 add_loc_descr_to_each (list_ret,
17265 new_loc_descr (DW_OP_plus, 0, 0));
17266 }
17267 HOST_WIDE_INT value;
17268 if (bytepos.is_constant (&value) && value > 0)
17269 add_loc_descr_to_each (list_ret,
17270 new_loc_descr (DW_OP_plus_uconst, value, 0));
17271 else if (maybe_ne (bytepos, 0))
17272 loc_list_plus_const (list_ret, bytepos);
17273 add_loc_descr_to_each (list_ret,
17274 new_loc_descr (DW_OP_stack_value, 0, 0));
17275 }
17276 return list_ret;
17277 }
17278
17279 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17280 all operations from LOC are nops, move to the last one. Insert into NOPS all
17281 operations that are skipped. */
17282
17283 static void
17284 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17285 hash_set<dw_loc_descr_ref> &nops)
17286 {
17287 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17288 {
17289 nops.add (loc);
17290 loc = loc->dw_loc_next;
17291 }
17292 }
17293
17294 /* Helper for loc_descr_without_nops: free the location description operation
17295 LOC. */
17296
17297 bool
17298 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17299 {
17300 ggc_free (loc);
17301 return true;
17302 }
17303
17304 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17305 finishes LOC. */
17306
17307 static void
17308 loc_descr_without_nops (dw_loc_descr_ref &loc)
17309 {
17310 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17311 return;
17312
17313 /* Set of all DW_OP_nop operations we remove. */
17314 hash_set<dw_loc_descr_ref> nops;
17315
17316 /* First, strip all prefix NOP operations in order to keep the head of the
17317 operations list. */
17318 loc_descr_to_next_no_nop (loc, nops);
17319
17320 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17321 {
17322 /* For control flow operations: strip "prefix" nops in destination
17323 labels. */
17324 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17325 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17326 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17327 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17328
17329 /* Do the same for the operations that follow, then move to the next
17330 iteration. */
17331 if (cur->dw_loc_next != NULL)
17332 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17333 cur = cur->dw_loc_next;
17334 }
17335
17336 nops.traverse<void *, free_loc_descr> (NULL);
17337 }
17338
17339
17340 struct dwarf_procedure_info;
17341
17342 /* Helper structure for location descriptions generation. */
17343 struct loc_descr_context
17344 {
17345 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17346 NULL_TREE if DW_OP_push_object_address is invalid for this location
17347 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17348 tree context_type;
17349 /* The ..._DECL node that should be translated as a
17350 DW_OP_push_object_address operation. */
17351 tree base_decl;
17352 /* Information about the DWARF procedure we are currently generating. NULL if
17353 we are not generating a DWARF procedure. */
17354 struct dwarf_procedure_info *dpi;
17355 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17356 by consumer. Used for DW_TAG_generic_subrange attributes. */
17357 bool placeholder_arg;
17358 /* True if PLACEHOLDER_EXPR has been seen. */
17359 bool placeholder_seen;
17360 };
17361
17362 /* DWARF procedures generation
17363
17364 DWARF expressions (a.k.a. location descriptions) are used to encode variable
17365 quantities such as sizes or offsets. Such computations can have redundant parts
17366 that can be factorized in order to reduce the size of the output debug
17367 information. This is the whole point of DWARF procedures.
17368
17369 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17370 already factorized into functions ("size functions") in order to handle very
17371 big and complex types. Such functions are quite simple: they have integral
17372 arguments, they return an integral result and their body contains only a
17373 return statement with arithmetic expressions. This is the only kind of
17374 function we are interested in translating into DWARF procedures here.
17375
17376 DWARF expressions and DWARF procedure are executed using a stack, so we have
17377 to define some calling convention for them to interact. Let's say that:
17378
17379 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17380 all arguments in reverse order (right-to-left) so that when the DWARF
17381 procedure execution starts, the first argument is the top of the stack.
17382
17383 - Then, when returning, the DWARF procedure must have consumed all arguments
17384 on the stack, must have pushed the result and touched nothing else.
17385
17386 - Each integral argument and the result are integral types can be hold in a
17387 single stack slot.
17388
17389 - We call "frame offset" the number of stack slots that are "under DWARF
17390 procedure control": it includes the arguments slots, the temporaries and
17391 the result slot. Thus, it is equal to the number of arguments when the
17392 procedure execution starts and must be equal to one (the result) when it
17393 returns. */
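/* As an illustration of this convention (not taken from the code below), a
   DWARF procedure returning twice its single integral argument could be just
   DW_OP_dup DW_OP_plus: the frame offset starts at 1 (the argument), becomes
   2 after the dup and is back to 1 (the result) after the plus.  */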
17394
17395 /* Helper structure used when generating operations for a DWARF procedure. */
17396 struct dwarf_procedure_info
17397 {
17398 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17399 currently translated. */
17400 tree fndecl;
17401 /* The number of arguments FNDECL takes. */
17402 unsigned args_count;
17403 };
17404
17405 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17406 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17407 equate it to this DIE. */
17408
17409 static dw_die_ref
17410 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17411 dw_die_ref parent_die)
17412 {
17413 dw_die_ref dwarf_proc_die;
17414
17415 if ((dwarf_version < 3 && dwarf_strict)
17416 || location == NULL)
17417 return NULL;
17418
17419 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17420 if (fndecl)
17421 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17422 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17423 return dwarf_proc_die;
17424 }
17425
17426 /* Return whether TYPE is a supported type as a DWARF procedure argument
17427 type or return type (we handle only scalar types and pointer types that
17428 aren't wider than the DWARF expression evaluation stack). */
17429
17430 static bool
17431 is_handled_procedure_type (tree type)
17432 {
17433 return ((INTEGRAL_TYPE_P (type)
17434 || TREE_CODE (type) == OFFSET_TYPE
17435 || TREE_CODE (type) == POINTER_TYPE)
17436 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17437 }
17438
17439 /* Helper for resolve_args_picking: do the same but stop when coming across
17440 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17441 offset *before* evaluating the corresponding operation. */
17442
17443 static bool
17444 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17445 struct dwarf_procedure_info *dpi,
17446 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17447 {
17448 /* The "frame_offset" identifier is already used to name a macro... */
17449 unsigned frame_offset_ = initial_frame_offset;
17450 dw_loc_descr_ref l;
17451
17452 for (l = loc; l != NULL;)
17453 {
17454 bool existed;
17455 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17456
17457 /* If we have already visited this node, there is nothing more to compute. */
17458 if (existed)
17459 {
17460 /* Make sure that the stack size is consistent wherever the execution
17461 flow comes from. */
17462 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17463 break;
17464 }
17465 l_frame_offset = frame_offset_;
17466
17467 /* If needed, relocate the picking offset with respect to the frame
17468 offset. */
17469 if (l->frame_offset_rel)
17470 {
17471 unsigned HOST_WIDE_INT off;
17472 switch (l->dw_loc_opc)
17473 {
17474 case DW_OP_pick:
17475 off = l->dw_loc_oprnd1.v.val_unsigned;
17476 break;
17477 case DW_OP_dup:
17478 off = 0;
17479 break;
17480 case DW_OP_over:
17481 off = 1;
17482 break;
17483 default:
17484 gcc_unreachable ();
17485 }
17486 /* frame_offset_ is the size of the current stack frame, including
17487 incoming arguments. Besides, the arguments are pushed
17488 right-to-left. Thus, in order to access the Nth argument from
17489 this operation node, the picking has to skip temporaries *plus*
17490 one stack slot per argument (0 for the first one, 1 for the second
17491 one, etc.).
17492
17493 The targeted argument number (N) is already set as the operand,
17494 and the number of temporaries can be computed with:
17495 frame_offset_ - dpi->args_count */
17496 off += frame_offset_ - dpi->args_count;
17497
17498 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17499 if (off > 255)
17500 return false;
17501
17502 if (off == 0)
17503 {
17504 l->dw_loc_opc = DW_OP_dup;
17505 l->dw_loc_oprnd1.v.val_unsigned = 0;
17506 }
17507 else if (off == 1)
17508 {
17509 l->dw_loc_opc = DW_OP_over;
17510 l->dw_loc_oprnd1.v.val_unsigned = 0;
17511 }
17512 else
17513 {
17514 l->dw_loc_opc = DW_OP_pick;
17515 l->dw_loc_oprnd1.v.val_unsigned = off;
17516 }
17517 }
17518
17519 /* Update frame_offset according to the effect the current operation has
17520 on the stack. */
17521 switch (l->dw_loc_opc)
17522 {
17523 case DW_OP_deref:
17524 case DW_OP_swap:
17525 case DW_OP_rot:
17526 case DW_OP_abs:
17527 case DW_OP_neg:
17528 case DW_OP_not:
17529 case DW_OP_plus_uconst:
17530 case DW_OP_skip:
17531 case DW_OP_reg0:
17532 case DW_OP_reg1:
17533 case DW_OP_reg2:
17534 case DW_OP_reg3:
17535 case DW_OP_reg4:
17536 case DW_OP_reg5:
17537 case DW_OP_reg6:
17538 case DW_OP_reg7:
17539 case DW_OP_reg8:
17540 case DW_OP_reg9:
17541 case DW_OP_reg10:
17542 case DW_OP_reg11:
17543 case DW_OP_reg12:
17544 case DW_OP_reg13:
17545 case DW_OP_reg14:
17546 case DW_OP_reg15:
17547 case DW_OP_reg16:
17548 case DW_OP_reg17:
17549 case DW_OP_reg18:
17550 case DW_OP_reg19:
17551 case DW_OP_reg20:
17552 case DW_OP_reg21:
17553 case DW_OP_reg22:
17554 case DW_OP_reg23:
17555 case DW_OP_reg24:
17556 case DW_OP_reg25:
17557 case DW_OP_reg26:
17558 case DW_OP_reg27:
17559 case DW_OP_reg28:
17560 case DW_OP_reg29:
17561 case DW_OP_reg30:
17562 case DW_OP_reg31:
17563 case DW_OP_bregx:
17564 case DW_OP_piece:
17565 case DW_OP_deref_size:
17566 case DW_OP_nop:
17567 case DW_OP_bit_piece:
17568 case DW_OP_implicit_value:
17569 case DW_OP_stack_value:
17570 break;
17571
17572 case DW_OP_addr:
17573 case DW_OP_const1u:
17574 case DW_OP_const1s:
17575 case DW_OP_const2u:
17576 case DW_OP_const2s:
17577 case DW_OP_const4u:
17578 case DW_OP_const4s:
17579 case DW_OP_const8u:
17580 case DW_OP_const8s:
17581 case DW_OP_constu:
17582 case DW_OP_consts:
17583 case DW_OP_dup:
17584 case DW_OP_over:
17585 case DW_OP_pick:
17586 case DW_OP_lit0:
17587 case DW_OP_lit1:
17588 case DW_OP_lit2:
17589 case DW_OP_lit3:
17590 case DW_OP_lit4:
17591 case DW_OP_lit5:
17592 case DW_OP_lit6:
17593 case DW_OP_lit7:
17594 case DW_OP_lit8:
17595 case DW_OP_lit9:
17596 case DW_OP_lit10:
17597 case DW_OP_lit11:
17598 case DW_OP_lit12:
17599 case DW_OP_lit13:
17600 case DW_OP_lit14:
17601 case DW_OP_lit15:
17602 case DW_OP_lit16:
17603 case DW_OP_lit17:
17604 case DW_OP_lit18:
17605 case DW_OP_lit19:
17606 case DW_OP_lit20:
17607 case DW_OP_lit21:
17608 case DW_OP_lit22:
17609 case DW_OP_lit23:
17610 case DW_OP_lit24:
17611 case DW_OP_lit25:
17612 case DW_OP_lit26:
17613 case DW_OP_lit27:
17614 case DW_OP_lit28:
17615 case DW_OP_lit29:
17616 case DW_OP_lit30:
17617 case DW_OP_lit31:
17618 case DW_OP_breg0:
17619 case DW_OP_breg1:
17620 case DW_OP_breg2:
17621 case DW_OP_breg3:
17622 case DW_OP_breg4:
17623 case DW_OP_breg5:
17624 case DW_OP_breg6:
17625 case DW_OP_breg7:
17626 case DW_OP_breg8:
17627 case DW_OP_breg9:
17628 case DW_OP_breg10:
17629 case DW_OP_breg11:
17630 case DW_OP_breg12:
17631 case DW_OP_breg13:
17632 case DW_OP_breg14:
17633 case DW_OP_breg15:
17634 case DW_OP_breg16:
17635 case DW_OP_breg17:
17636 case DW_OP_breg18:
17637 case DW_OP_breg19:
17638 case DW_OP_breg20:
17639 case DW_OP_breg21:
17640 case DW_OP_breg22:
17641 case DW_OP_breg23:
17642 case DW_OP_breg24:
17643 case DW_OP_breg25:
17644 case DW_OP_breg26:
17645 case DW_OP_breg27:
17646 case DW_OP_breg28:
17647 case DW_OP_breg29:
17648 case DW_OP_breg30:
17649 case DW_OP_breg31:
17650 case DW_OP_fbreg:
17651 case DW_OP_push_object_address:
17652 case DW_OP_call_frame_cfa:
17653 case DW_OP_GNU_variable_value:
17654 ++frame_offset_;
17655 break;
17656
17657 case DW_OP_drop:
17658 case DW_OP_xderef:
17659 case DW_OP_and:
17660 case DW_OP_div:
17661 case DW_OP_minus:
17662 case DW_OP_mod:
17663 case DW_OP_mul:
17664 case DW_OP_or:
17665 case DW_OP_plus:
17666 case DW_OP_shl:
17667 case DW_OP_shr:
17668 case DW_OP_shra:
17669 case DW_OP_xor:
17670 case DW_OP_bra:
17671 case DW_OP_eq:
17672 case DW_OP_ge:
17673 case DW_OP_gt:
17674 case DW_OP_le:
17675 case DW_OP_lt:
17676 case DW_OP_ne:
17677 case DW_OP_regx:
17678 case DW_OP_xderef_size:
17679 --frame_offset_;
17680 break;
17681
17682 case DW_OP_call2:
17683 case DW_OP_call4:
17684 case DW_OP_call_ref:
17685 {
17686 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17687 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17688
17689 if (stack_usage == NULL)
17690 return false;
17691 frame_offset_ += *stack_usage;
17692 break;
17693 }
17694
17695 case DW_OP_implicit_pointer:
17696 case DW_OP_entry_value:
17697 case DW_OP_const_type:
17698 case DW_OP_regval_type:
17699 case DW_OP_deref_type:
17700 case DW_OP_convert:
17701 case DW_OP_reinterpret:
17702 case DW_OP_form_tls_address:
17703 case DW_OP_GNU_push_tls_address:
17704 case DW_OP_GNU_uninit:
17705 case DW_OP_GNU_encoded_addr:
17706 case DW_OP_GNU_implicit_pointer:
17707 case DW_OP_GNU_entry_value:
17708 case DW_OP_GNU_const_type:
17709 case DW_OP_GNU_regval_type:
17710 case DW_OP_GNU_deref_type:
17711 case DW_OP_GNU_convert:
17712 case DW_OP_GNU_reinterpret:
17713 case DW_OP_GNU_parameter_ref:
17714 /* loc_list_from_tree will probably not output these operations for
17715 size functions, so assume they will not appear here. */
17716 /* Fall through... */
17717
17718 default:
17719 gcc_unreachable ();
17720 }
17721
17722 /* Now, follow the control flow (except subroutine calls). */
17723 switch (l->dw_loc_opc)
17724 {
17725 case DW_OP_bra:
17726 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17727 frame_offsets))
17728 return false;
17729 /* Fall through. */
17730
17731 case DW_OP_skip:
17732 l = l->dw_loc_oprnd1.v.val_loc;
17733 break;
17734
17735 case DW_OP_stack_value:
17736 return true;
17737
17738 default:
17739 l = l->dw_loc_next;
17740 break;
17741 }
17742 }
17743
17744 return true;
17745 }
17746
17747 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17748 operations) in order to resolve the operand of DW_OP_pick operations that
17749 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17750 offset *before* LOC is executed. Return whether all relocations were
17751 successful. */
17752
17753 static bool
17754 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17755 struct dwarf_procedure_info *dpi)
17756 {
17757 /* Associate to all visited operations the frame offset *before* evaluating
17758 this operation. */
17759 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17760
17761 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17762 frame_offsets);
17763 }
17764
17765 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17766 Return NULL if it is not possible. */
17767
17768 static dw_die_ref
17769 function_to_dwarf_procedure (tree fndecl)
17770 {
17771 struct loc_descr_context ctx;
17772 struct dwarf_procedure_info dpi;
17773 dw_die_ref dwarf_proc_die;
17774 tree tree_body = DECL_SAVED_TREE (fndecl);
17775 dw_loc_descr_ref loc_body, epilogue;
17776
17777 tree cursor;
17778 unsigned i;
17779
17780 /* Do not generate multiple DWARF procedures for the same function
17781 declaration. */
17782 dwarf_proc_die = lookup_decl_die (fndecl);
17783 if (dwarf_proc_die != NULL)
17784 return dwarf_proc_die;
17785
17786 /* DWARF procedures are available starting with the DWARFv3 standard. */
17787 if (dwarf_version < 3 && dwarf_strict)
17788 return NULL;
17789
17790 /* We handle only functions for which we still have a body, that return a
17791 supported type and that take arguments of supported types. Note that
17792 there is no point translating functions that return nothing. */
17793 if (tree_body == NULL_TREE
17794 || DECL_RESULT (fndecl) == NULL_TREE
17795 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17796 return NULL;
17797
17798 for (cursor = DECL_ARGUMENTS (fndecl);
17799 cursor != NULL_TREE;
17800 cursor = TREE_CHAIN (cursor))
17801 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17802 return NULL;
17803
17804 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17805 if (TREE_CODE (tree_body) != RETURN_EXPR)
17806 return NULL;
17807 tree_body = TREE_OPERAND (tree_body, 0);
17808 if (TREE_CODE (tree_body) != MODIFY_EXPR
17809 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17810 return NULL;
17811 tree_body = TREE_OPERAND (tree_body, 1);
17812
17813 /* Try to translate the body expression itself. Note that this will probably
17814 cause an infinite recursion if its call graph has a cycle. This is very
17815 unlikely for size functions, however, so don't bother with such things at
17816 the moment. */
17817 ctx.context_type = NULL_TREE;
17818 ctx.base_decl = NULL_TREE;
17819 ctx.dpi = &dpi;
17820 ctx.placeholder_arg = false;
17821 ctx.placeholder_seen = false;
17822 dpi.fndecl = fndecl;
17823 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17824 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17825 if (!loc_body)
17826 return NULL;
17827
17828 /* After evaluating all operands in "loc_body", we should still have on the
17829 stack all arguments plus the desired function result (top of the stack).
17830 Generate code in order to keep only the result in our stack frame. */
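  /* Each DW_OP_swap/DW_OP_drop pair built below removes one argument slot
     that, by then, sits just under the result at the top of the stack.  */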
17831 epilogue = NULL;
17832 for (i = 0; i < dpi.args_count; ++i)
17833 {
17834 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17835 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17836 op_couple->dw_loc_next->dw_loc_next = epilogue;
17837 epilogue = op_couple;
17838 }
17839 add_loc_descr (&loc_body, epilogue);
17840 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17841 return NULL;
17842
17843 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
17844 earlier because they were considered useful. Now that there is an epilogue,
17845 they no longer are, so give it another try. */
17846 loc_descr_without_nops (loc_body);
17847
17848 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17849 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17850 though, given that size functions do not come from source, so they should
17851 not have a dedicated DW_TAG_subprogram DIE. */
17852 dwarf_proc_die
17853 = new_dwarf_proc_die (loc_body, fndecl,
17854 get_context_die (DECL_CONTEXT (fndecl)));
17855
17856 /* The called DWARF procedure consumes one stack slot per argument and
17857 returns one stack slot. */
17858 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17859
17860 return dwarf_proc_die;
17861 }
17862
17863
17864 /* Generate Dwarf location list representing LOC.
17865 If WANT_ADDRESS is false, an expression computing LOC will be returned.
17866 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
17867 If WANT_ADDRESS is 2, an expression computing an address usable in a location
17868 will be returned (i.e. DW_OP_reg can be used
17869 to refer to register values).
17870
17871 CONTEXT provides information to customize the generation of location
17872 descriptions. Its context_type field specifies what type is implicitly
17873 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17874 will not be generated.
17875
17876 Its DPI field determines whether we are generating a DWARF expression for a
17877 DWARF procedure, in which case PARM_DECL references are processed specially.
17878
17879 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17880 and dpi fields were null. */
17881
17882 static dw_loc_list_ref
17883 loc_list_from_tree_1 (tree loc, int want_address,
17884 struct loc_descr_context *context)
17885 {
17886 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17887 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17888 int have_address = 0;
17889 enum dwarf_location_atom op;
17890
17891 /* ??? Most of the time we do not take proper care of sign/zero
17892 extending the values. Hopefully this won't be a real
17893 problem... */
17894
17895 if (context != NULL
17896 && context->base_decl == loc
17897 && want_address == 0)
17898 {
17899 if (dwarf_version >= 3 || !dwarf_strict)
17900 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17901 NULL, 0, NULL, 0, NULL);
17902 else
17903 return NULL;
17904 }
17905
17906 switch (TREE_CODE (loc))
17907 {
17908 case ERROR_MARK:
17909 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
17910 return 0;
17911
17912 case PLACEHOLDER_EXPR:
17913 /* This case involves extracting fields from an object to determine the
17914 position of other fields. It is supposed to appear only as the first
17915 operand of COMPONENT_REF nodes and to reference precisely the type
17916 that the context allows. */
17917 if (context != NULL
17918 && TREE_TYPE (loc) == context->context_type
17919 && want_address >= 1)
17920 {
17921 if (dwarf_version >= 3 || !dwarf_strict)
17922 {
17923 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
17924 have_address = 1;
17925 break;
17926 }
17927 else
17928 return NULL;
17929 }
17930 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
17931 the single argument passed by the consumer. */
17932 else if (context != NULL
17933 && context->placeholder_arg
17934 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
17935 && want_address == 0)
17936 {
17937 ret = new_loc_descr (DW_OP_pick, 0, 0);
17938 ret->frame_offset_rel = 1;
17939 context->placeholder_seen = true;
17940 break;
17941 }
17942 else
17943 expansion_failed (loc, NULL_RTX,
17944 "PLACEHOLDER_EXPR for an unexpected type");
17945 break;
17946
17947 case CALL_EXPR:
17948 {
17949 const int nargs = call_expr_nargs (loc);
17950 tree callee = get_callee_fndecl (loc);
17951 int i;
17952 dw_die_ref dwarf_proc;
17953
17954 if (callee == NULL_TREE)
17955 goto call_expansion_failed;
17956
17957 /* We handle only functions that return a supported scalar type. */
17958 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
17959 goto call_expansion_failed;
17960
17961 dwarf_proc = function_to_dwarf_procedure (callee);
17962 if (dwarf_proc == NULL)
17963 goto call_expansion_failed;
17964
17965 /* Evaluate arguments right-to-left so that the first argument will
17966 be the top-most one on the stack. */
17967 for (i = nargs - 1; i >= 0; --i)
17968 {
17969 dw_loc_descr_ref loc_descr
17970 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
17971 context);
17972
17973 if (loc_descr == NULL)
17974 goto call_expansion_failed;
17975
17976 add_loc_descr (&ret, loc_descr);
17977 }
17978
17979 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
17980 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
17981 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
17982 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
17983 add_loc_descr (&ret, ret1);
17984 break;
17985
17986 call_expansion_failed:
17987 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
17988 /* We could not translate the call into a DWARF procedure invocation. */
17989 return 0;
17990 }
17991
17992 case PREINCREMENT_EXPR:
17993 case PREDECREMENT_EXPR:
17994 case POSTINCREMENT_EXPR:
17995 case POSTDECREMENT_EXPR:
17996 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
17997 /* There are no opcodes for these operations. */
17998 return 0;
17999
18000 case ADDR_EXPR:
18001 /* If we already want an address, see if there is an INDIRECT_REF inside,
18002 e.g. for &this->field. */
18003 if (want_address)
18004 {
18005 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18006 (loc, want_address == 2, context);
18007 if (list_ret)
18008 have_address = 1;
18009 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18010 && (ret = cst_pool_loc_descr (loc)))
18011 have_address = 1;
18012 }
18013 /* Otherwise, process the argument and look for the address. */
18014 if (!list_ret && !ret)
18015 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18016 else
18017 {
18018 if (want_address)
18019 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18020 return NULL;
18021 }
18022 break;
18023
18024 case VAR_DECL:
18025 if (DECL_THREAD_LOCAL_P (loc))
18026 {
18027 rtx rtl;
18028 enum dwarf_location_atom tls_op;
18029 enum dtprel_bool dtprel = dtprel_false;
18030
18031 if (targetm.have_tls)
18032 {
18033 /* If this is not defined, we have no way to emit the
18034 data. */
18035 if (!targetm.asm_out.output_dwarf_dtprel)
18036 return 0;
18037
18038 /* The way DW_OP_GNU_push_tls_address is specified, we
18039 can only look up addresses of objects in the current
18040 module. We used DW_OP_addr as first op, but that's
18041 wrong, because DW_OP_addr is relocated by the debug
18042 info consumer, while DW_OP_GNU_push_tls_address
18043 operand shouldn't be. */
18044 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18045 return 0;
18046 dtprel = dtprel_true;
18047 /* We check for DWARF 5 here because gdb did not implement
18048 DW_OP_form_tls_address until after 7.12. */
18049 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18050 : DW_OP_GNU_push_tls_address);
18051 }
18052 else
18053 {
18054 if (!targetm.emutls.debug_form_tls_address
18055 || !(dwarf_version >= 3 || !dwarf_strict))
18056 return 0;
18057 /* We stuffed the control variable into the DECL_VALUE_EXPR
18058 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18059 no longer appear in gimple code. We used the control
18060 variable specifically so that we could pick it up here. */
18061 loc = DECL_VALUE_EXPR (loc);
18062 tls_op = DW_OP_form_tls_address;
18063 }
18064
18065 rtl = rtl_for_decl_location (loc);
18066 if (rtl == NULL_RTX)
18067 return 0;
18068
18069 if (!MEM_P (rtl))
18070 return 0;
18071 rtl = XEXP (rtl, 0);
18072 if (! CONSTANT_P (rtl))
18073 return 0;
18074
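	  /* The expression built below therefore has the shape
	     "DW_OP_addr <variable>; DW_OP_form_tls_address" (or the
	     DW_OP_GNU_push_tls_address variant for strict/older DWARF),
	     with the address operand emitted as a dtprel value in the
	     native TLS case.  */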
18075 ret = new_addr_loc_descr (rtl, dtprel);
18076 ret1 = new_loc_descr (tls_op, 0, 0);
18077 add_loc_descr (&ret, ret1);
18078
18079 have_address = 1;
18080 break;
18081 }
18082 /* FALLTHRU */
18083
18084 case PARM_DECL:
18085 if (context != NULL && context->dpi != NULL
18086 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18087 {
18088 /* We are generating code for a DWARF procedure and we want to access
18089 one of its arguments: find the appropriate argument offset and let
18090 the resolve_args_picking pass compute the offset that complies
18091 with the stack frame size. */
18092 unsigned i = 0;
18093 tree cursor;
18094
18095 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18096 cursor != NULL_TREE && cursor != loc;
18097 cursor = TREE_CHAIN (cursor), ++i)
18098 ;
18099 /* If we are translating a DWARF procedure, all referenced parameters
18100 must belong to the current function. */
18101 gcc_assert (cursor != NULL_TREE);
18102
18103 ret = new_loc_descr (DW_OP_pick, i, 0);
18104 ret->frame_offset_rel = 1;
18105 break;
18106 }
18107 /* FALLTHRU */
18108
18109 case RESULT_DECL:
18110 if (DECL_HAS_VALUE_EXPR_P (loc))
18111 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18112 want_address, context);
18113 /* FALLTHRU */
18114
18115 case FUNCTION_DECL:
18116 {
18117 rtx rtl;
18118 var_loc_list *loc_list = lookup_decl_loc (loc);
18119
18120 if (loc_list && loc_list->first)
18121 {
18122 list_ret = dw_loc_list (loc_list, loc, want_address);
18123 have_address = want_address != 0;
18124 break;
18125 }
18126 rtl = rtl_for_decl_location (loc);
18127 if (rtl == NULL_RTX)
18128 {
18129 if (TREE_CODE (loc) != FUNCTION_DECL
18130 && early_dwarf
18131 && current_function_decl
18132 && want_address != 1
18133 && ! DECL_IGNORED_P (loc)
18134 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18135 || POINTER_TYPE_P (TREE_TYPE (loc)))
18136 && DECL_CONTEXT (loc) == current_function_decl
18137 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18138 <= DWARF2_ADDR_SIZE))
18139 {
18140 dw_die_ref ref = lookup_decl_die (loc);
18141 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18142 if (ref)
18143 {
18144 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18145 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18146 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18147 }
18148 else
18149 {
18150 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18151 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18152 }
18153 break;
18154 }
18155 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18156 return 0;
18157 }
18158 else if (CONST_INT_P (rtl))
18159 {
18160 HOST_WIDE_INT val = INTVAL (rtl);
18161 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18162 val &= GET_MODE_MASK (DECL_MODE (loc));
18163 ret = int_loc_descriptor (val);
18164 }
18165 else if (GET_CODE (rtl) == CONST_STRING)
18166 {
18167 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18168 return 0;
18169 }
18170 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18171 ret = new_addr_loc_descr (rtl, dtprel_false);
18172 else
18173 {
18174 machine_mode mode, mem_mode;
18175
18176 /* Certain constructs can only be represented at top-level. */
18177 if (want_address == 2)
18178 {
18179 ret = loc_descriptor (rtl, VOIDmode,
18180 VAR_INIT_STATUS_INITIALIZED);
18181 have_address = 1;
18182 }
18183 else
18184 {
18185 mode = GET_MODE (rtl);
18186 mem_mode = VOIDmode;
18187 if (MEM_P (rtl))
18188 {
18189 mem_mode = mode;
18190 mode = get_address_mode (rtl);
18191 rtl = XEXP (rtl, 0);
18192 have_address = 1;
18193 }
18194 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18195 VAR_INIT_STATUS_INITIALIZED);
18196 }
18197 if (!ret)
18198 expansion_failed (loc, rtl,
18199 "failed to produce loc descriptor for rtl");
18200 }
18201 }
18202 break;
18203
18204 case MEM_REF:
18205 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18206 {
18207 have_address = 1;
18208 goto do_plus;
18209 }
18210 /* Fallthru. */
18211 case INDIRECT_REF:
18212 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18213 have_address = 1;
18214 break;
18215
18216 case TARGET_MEM_REF:
18217 case SSA_NAME:
18218 case DEBUG_EXPR_DECL:
18219 return NULL;
18220
18221 case COMPOUND_EXPR:
18222 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18223 context);
18224
18225 CASE_CONVERT:
18226 case VIEW_CONVERT_EXPR:
18227 case SAVE_EXPR:
18228 case MODIFY_EXPR:
18229 case NON_LVALUE_EXPR:
18230 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18231 context);
18232
18233 case COMPONENT_REF:
18234 case BIT_FIELD_REF:
18235 case ARRAY_REF:
18236 case ARRAY_RANGE_REF:
18237 case REALPART_EXPR:
18238 case IMAGPART_EXPR:
18239 {
18240 tree obj, offset;
18241 poly_int64 bitsize, bitpos, bytepos;
18242 machine_mode mode;
18243 int unsignedp, reversep, volatilep = 0;
18244
18245 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18246 &unsignedp, &reversep, &volatilep);
18247
18248 gcc_assert (obj != loc);
18249
18250 list_ret = loc_list_from_tree_1 (obj,
18251 want_address == 2
18252 && known_eq (bitpos, 0)
18253 && !offset ? 2 : 1,
18254 context);
18255 /* TODO: We can extract the value of a small expression via shifting even
18256 for a nonzero bitpos. */
18257 if (list_ret == 0)
18258 return 0;
18259 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18260 || !multiple_p (bitsize, BITS_PER_UNIT))
18261 {
18262 expansion_failed (loc, NULL_RTX,
18263 "bitfield access");
18264 return 0;
18265 }
18266
18267 if (offset != NULL_TREE)
18268 {
18269 /* Variable offset. */
18270 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18271 if (list_ret1 == 0)
18272 return 0;
18273 add_loc_list (&list_ret, list_ret1);
18274 if (!list_ret)
18275 return 0;
18276 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18277 }
18278
18279 HOST_WIDE_INT value;
18280 if (bytepos.is_constant (&value) && value > 0)
18281 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18282 value, 0));
18283 else if (maybe_ne (bytepos, 0))
18284 loc_list_plus_const (list_ret, bytepos);
18285
18286 have_address = 1;
18287 break;
18288 }
18289
18290 case INTEGER_CST:
18291 if ((want_address || !tree_fits_shwi_p (loc))
18292 && (ret = cst_pool_loc_descr (loc)))
18293 have_address = 1;
18294 else if (want_address == 2
18295 && tree_fits_shwi_p (loc)
18296 && (ret = address_of_int_loc_descriptor
18297 (int_size_in_bytes (TREE_TYPE (loc)),
18298 tree_to_shwi (loc))))
18299 have_address = 1;
18300 else if (tree_fits_shwi_p (loc))
18301 ret = int_loc_descriptor (tree_to_shwi (loc));
18302 else if (tree_fits_uhwi_p (loc))
18303 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18304 else
18305 {
18306 expansion_failed (loc, NULL_RTX,
18307 "Integer operand is not host integer");
18308 return 0;
18309 }
18310 break;
18311
18312 case CONSTRUCTOR:
18313 case REAL_CST:
18314 case STRING_CST:
18315 case COMPLEX_CST:
18316 if ((ret = cst_pool_loc_descr (loc)))
18317 have_address = 1;
18318 else if (TREE_CODE (loc) == CONSTRUCTOR)
18319 {
18320 tree type = TREE_TYPE (loc);
18321 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18322 unsigned HOST_WIDE_INT offset = 0;
18323 unsigned HOST_WIDE_INT cnt;
18324 constructor_elt *ce;
18325
18326 if (TREE_CODE (type) == RECORD_TYPE)
18327 {
18328 /* This is very limited, but it's enough to output
18329 pointers to member functions, as long as the
18330 referenced function is defined in the current
18331 translation unit. */
18332 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18333 {
18334 tree val = ce->value;
18335
18336 tree field = ce->index;
18337
18338 if (val)
18339 STRIP_NOPS (val);
18340
18341 if (!field || DECL_BIT_FIELD (field))
18342 {
18343 expansion_failed (loc, NULL_RTX,
18344 "bitfield in record type constructor");
18345 size = offset = (unsigned HOST_WIDE_INT)-1;
18346 ret = NULL;
18347 break;
18348 }
18349
18350 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18351 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18352 gcc_assert (pos + fieldsize <= size);
18353 if (pos < offset)
18354 {
18355 expansion_failed (loc, NULL_RTX,
18356 "out-of-order fields in record constructor");
18357 size = offset = (unsigned HOST_WIDE_INT)-1;
18358 ret = NULL;
18359 break;
18360 }
18361 if (pos > offset)
18362 {
18363 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18364 add_loc_descr (&ret, ret1);
18365 offset = pos;
18366 }
18367 if (val && fieldsize != 0)
18368 {
18369 ret1 = loc_descriptor_from_tree (val, want_address, context);
18370 if (!ret1)
18371 {
18372 expansion_failed (loc, NULL_RTX,
18373 "unsupported expression in field");
18374 size = offset = (unsigned HOST_WIDE_INT)-1;
18375 ret = NULL;
18376 break;
18377 }
18378 add_loc_descr (&ret, ret1);
18379 }
18380 if (fieldsize)
18381 {
18382 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18383 add_loc_descr (&ret, ret1);
18384 offset = pos + fieldsize;
18385 }
18386 }
18387
18388 if (offset != size)
18389 {
18390 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18391 add_loc_descr (&ret, ret1);
18392 offset = size;
18393 }
18394
18395 have_address = !!want_address;
18396 }
18397 else
18398 expansion_failed (loc, NULL_RTX,
18399 "constructor of non-record type");
18400 }
18401 else
18402 /* We can construct small constants here using int_loc_descriptor. */
18403 expansion_failed (loc, NULL_RTX,
18404 "constructor or constant not in constant pool");
18405 break;
18406
18407 case TRUTH_AND_EXPR:
18408 case TRUTH_ANDIF_EXPR:
18409 case BIT_AND_EXPR:
18410 op = DW_OP_and;
18411 goto do_binop;
18412
18413 case TRUTH_XOR_EXPR:
18414 case BIT_XOR_EXPR:
18415 op = DW_OP_xor;
18416 goto do_binop;
18417
18418 case TRUTH_OR_EXPR:
18419 case TRUTH_ORIF_EXPR:
18420 case BIT_IOR_EXPR:
18421 op = DW_OP_or;
18422 goto do_binop;
18423
18424 case FLOOR_DIV_EXPR:
18425 case CEIL_DIV_EXPR:
18426 case ROUND_DIV_EXPR:
18427 case TRUNC_DIV_EXPR:
18428 case EXACT_DIV_EXPR:
18429 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18430 return 0;
18431 op = DW_OP_div;
18432 goto do_binop;
18433
18434 case MINUS_EXPR:
18435 op = DW_OP_minus;
18436 goto do_binop;
18437
18438 case FLOOR_MOD_EXPR:
18439 case CEIL_MOD_EXPR:
18440 case ROUND_MOD_EXPR:
18441 case TRUNC_MOD_EXPR:
18442 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18443 {
18444 op = DW_OP_mod;
18445 goto do_binop;
18446 }
18447 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18448 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18449 if (list_ret == 0 || list_ret1 == 0)
18450 return 0;
18451
18452 add_loc_list (&list_ret, list_ret1);
18453 if (list_ret == 0)
18454 return 0;
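      /* Stack evolution for the sequence below, which computes the signed
	 modulo as A - (A / B) * B:
	   ... A B   --over,over-->  ... A B A B
	             --div-->        ... A B A/B
	             --mul-->        ... A B*(A/B)
	             --minus-->      ... A-B*(A/B)  */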
18455 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18456 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18457 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18458 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18459 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18460 break;
18461
18462 case MULT_EXPR:
18463 op = DW_OP_mul;
18464 goto do_binop;
18465
18466 case LSHIFT_EXPR:
18467 op = DW_OP_shl;
18468 goto do_binop;
18469
18470 case RSHIFT_EXPR:
18471 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18472 goto do_binop;
18473
18474 case POINTER_PLUS_EXPR:
18475 case PLUS_EXPR:
18476 do_plus:
18477 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18478 {
18479 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18480 smarter to encode their opposite. The DW_OP_plus_uconst operation
18481 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18482 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18483 bytes, Y being the size of the operation that pushes the opposite
18484 of the addend. So let's choose the smallest representation. */
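	  /* For instance, with a 64-bit address size, an addend of -1 would
	     require a 10-byte ULEB128 as the DW_OP_plus_uconst operand,
	     whereas "DW_OP_lit1; DW_OP_minus" costs only 2 bytes.  */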
18485 const tree tree_addend = TREE_OPERAND (loc, 1);
18486 offset_int wi_addend;
18487 HOST_WIDE_INT shwi_addend;
18488 dw_loc_descr_ref loc_naddend;
18489
18490 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18491 if (list_ret == 0)
18492 return 0;
18493
18494 /* Try to get the literal to push. It is the opposite of the addend,
18495 so as we rely on wrapping during DWARF evaluation, first decode
18496 the literal as a "DWARF-sized" signed number. */
18497 wi_addend = wi::to_offset (tree_addend);
18498 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18499 shwi_addend = wi_addend.to_shwi ();
18500 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18501 ? int_loc_descriptor (-shwi_addend)
18502 : NULL;
18503
18504 if (loc_naddend != NULL
18505 && ((unsigned) size_of_uleb128 (shwi_addend)
18506 > size_of_loc_descr (loc_naddend)))
18507 {
18508 add_loc_descr_to_each (list_ret, loc_naddend);
18509 add_loc_descr_to_each (list_ret,
18510 new_loc_descr (DW_OP_minus, 0, 0));
18511 }
18512 else
18513 {
18514 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18515 {
18516 loc_naddend = loc_cur;
18517 loc_cur = loc_cur->dw_loc_next;
18518 ggc_free (loc_naddend);
18519 }
18520 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18521 }
18522 break;
18523 }
18524
18525 op = DW_OP_plus;
18526 goto do_binop;
18527
18528 case LE_EXPR:
18529 op = DW_OP_le;
18530 goto do_comp_binop;
18531
18532 case GE_EXPR:
18533 op = DW_OP_ge;
18534 goto do_comp_binop;
18535
18536 case LT_EXPR:
18537 op = DW_OP_lt;
18538 goto do_comp_binop;
18539
18540 case GT_EXPR:
18541 op = DW_OP_gt;
18542 goto do_comp_binop;
18543
18544 do_comp_binop:
18545 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18546 {
18547 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18548 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18549 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18550 TREE_CODE (loc));
18551 break;
18552 }
18553 else
18554 goto do_binop;
18555
18556 case EQ_EXPR:
18557 op = DW_OP_eq;
18558 goto do_binop;
18559
18560 case NE_EXPR:
18561 op = DW_OP_ne;
18562 goto do_binop;
18563
18564 do_binop:
18565 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18566 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18567 if (list_ret == 0 || list_ret1 == 0)
18568 return 0;
18569
18570 add_loc_list (&list_ret, list_ret1);
18571 if (list_ret == 0)
18572 return 0;
18573 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18574 break;
18575
18576 case TRUTH_NOT_EXPR:
18577 case BIT_NOT_EXPR:
18578 op = DW_OP_not;
18579 goto do_unop;
18580
18581 case ABS_EXPR:
18582 op = DW_OP_abs;
18583 goto do_unop;
18584
18585 case NEGATE_EXPR:
18586 op = DW_OP_neg;
18587 goto do_unop;
18588
18589 do_unop:
18590 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18591 if (list_ret == 0)
18592 return 0;
18593
18594 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18595 break;
18596
18597 case MIN_EXPR:
18598 case MAX_EXPR:
18599 {
18600 const enum tree_code code =
18601 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18602
18603 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18604 build2 (code, integer_type_node,
18605 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18606 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18607 }
18608
18609 /* fall through */
18610
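    /* The expression built for COND_EXPR below has the following shape
       (labels are symbolic):
	 <condition>; DW_OP_bra -> THEN; <else-value>; DW_OP_skip -> END;
	 THEN: <then-value>; END: DW_OP_nop  */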
18611 case COND_EXPR:
18612 {
18613 dw_loc_descr_ref lhs
18614 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18615 dw_loc_list_ref rhs
18616 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18617 dw_loc_descr_ref bra_node, jump_node, tmp;
18618
18619 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18620 if (list_ret == 0 || lhs == 0 || rhs == 0)
18621 return 0;
18622
18623 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18624 add_loc_descr_to_each (list_ret, bra_node);
18625
18626 add_loc_list (&list_ret, rhs);
18627 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18628 add_loc_descr_to_each (list_ret, jump_node);
18629
18630 add_loc_descr_to_each (list_ret, lhs);
18631 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18632 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18633
18634 /* ??? Need a node to point the skip at. Use a nop. */
18635 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18636 add_loc_descr_to_each (list_ret, tmp);
18637 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18638 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18639 }
18640 break;
18641
18642 case FIX_TRUNC_EXPR:
18643 return 0;
18644
18645 default:
18646 /* Leave front-end specific codes as simply unknown. This comes
18647 up, for instance, with the C STMT_EXPR. */
18648 if ((unsigned int) TREE_CODE (loc)
18649 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18650 {
18651 expansion_failed (loc, NULL_RTX,
18652 "language specific tree node");
18653 return 0;
18654 }
18655
18656 /* Otherwise this is a generic code; we should just list all of
18657 these explicitly. We forgot one. */
18658 if (flag_checking)
18659 gcc_unreachable ();
18660
18661 /* In a release build, we want to degrade gracefully: better to
18662 generate incomplete debugging information than to crash. */
18663 return NULL;
18664 }
18665
18666 if (!ret && !list_ret)
18667 return 0;
18668
18669 if (want_address == 2 && !have_address
18670 && (dwarf_version >= 4 || !dwarf_strict))
18671 {
18672 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18673 {
18674 expansion_failed (loc, NULL_RTX,
18675 "DWARF address size mismatch");
18676 return 0;
18677 }
18678 if (ret)
18679 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18680 else
18681 add_loc_descr_to_each (list_ret,
18682 new_loc_descr (DW_OP_stack_value, 0, 0));
18683 have_address = 1;
18684 }
18685 /* Show if we can't fill the request for an address. */
18686 if (want_address && !have_address)
18687 {
18688 expansion_failed (loc, NULL_RTX,
18689 "Want address and only have value");
18690 return 0;
18691 }
18692
18693 gcc_assert (!ret || !list_ret);
18694
18695 /* If we've got an address and don't want one, dereference. */
18696 if (!want_address && have_address)
18697 {
18698 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18699
18700 if (size > DWARF2_ADDR_SIZE || size == -1)
18701 {
18702 expansion_failed (loc, NULL_RTX,
18703 "DWARF address size mismatch");
18704 return 0;
18705 }
18706 else if (size == DWARF2_ADDR_SIZE)
18707 op = DW_OP_deref;
18708 else
18709 op = DW_OP_deref_size;
18710
18711 if (ret)
18712 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18713 else
18714 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18715 }
18716 if (ret)
18717 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18718
18719 return list_ret;
18720 }
18721
18722 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18723 expressions. */
18724
18725 static dw_loc_list_ref
18726 loc_list_from_tree (tree loc, int want_address,
18727 struct loc_descr_context *context)
18728 {
18729 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18730
18731 for (dw_loc_list_ref loc_cur = result;
18732 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18733 loc_descr_without_nops (loc_cur->expr);
18734 return result;
18735 }
18736
18737 /* Same as above but return only a single location expression. */
18738 static dw_loc_descr_ref
18739 loc_descriptor_from_tree (tree loc, int want_address,
18740 struct loc_descr_context *context)
18741 {
18742 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18743 if (!ret)
18744 return NULL;
18745 if (ret->dw_loc_next)
18746 {
18747 expansion_failed (loc, NULL_RTX,
18748 "Location list where only loc descriptor needed");
18749 return NULL;
18750 }
18751 return ret->expr;
18752 }
18753
18754 /* Given a value, round it up to the lowest multiple of `boundary'
18755 which is not less than the value itself. */
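/* For example, ceiling (10, 8) and ceiling (16, 8) both yield 16.  */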
18756
18757 static inline HOST_WIDE_INT
18758 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18759 {
18760 return (((value + boundary - 1) / boundary) * boundary);
18761 }
18762
18763 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18764 pointer to the declared type for the relevant field variable, or return
18765 `integer_type_node' if the given node turns out to be an
18766 ERROR_MARK node. */
18767
18768 static inline tree
18769 field_type (const_tree decl)
18770 {
18771 tree type;
18772
18773 if (TREE_CODE (decl) == ERROR_MARK)
18774 return integer_type_node;
18775
18776 type = DECL_BIT_FIELD_TYPE (decl);
18777 if (type == NULL_TREE)
18778 type = TREE_TYPE (decl);
18779
18780 return type;
18781 }
18782
18783 /* Given a pointer to a tree node, return the alignment in bits for
18784 it, or else return BITS_PER_WORD if the node actually turns out to
18785 be an ERROR_MARK node. */
18786
18787 static inline unsigned
18788 simple_type_align_in_bits (const_tree type)
18789 {
18790 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18791 }
18792
18793 static inline unsigned
18794 simple_decl_align_in_bits (const_tree decl)
18795 {
18796 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18797 }
18798
18799 /* Return the result of rounding T up to ALIGN. */
18800
18801 static inline offset_int
18802 round_up_to_align (const offset_int &t, unsigned int align)
18803 {
18804 return wi::udiv_trunc (t + align - 1, align) * align;
18805 }
18806
18807 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18808 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18809 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18810 if we fail to return the size in one of these two forms. */
18811
18812 static dw_loc_descr_ref
18813 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18814 {
18815 tree tree_size;
18816 struct loc_descr_context ctx;
18817
18818 /* Prefer to return a constant integer, when possible. */
18819 *cst_size = int_size_in_bytes (type);
18820 if (*cst_size != -1)
18821 return NULL;
18822
18823 ctx.context_type = const_cast<tree> (type);
18824 ctx.base_decl = NULL_TREE;
18825 ctx.dpi = NULL;
18826 ctx.placeholder_arg = false;
18827 ctx.placeholder_seen = false;
18828
18829 type = TYPE_MAIN_VARIANT (type);
18830 tree_size = TYPE_SIZE_UNIT (type);
18831 return ((tree_size != NULL_TREE)
18832 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18833 : NULL);
18834 }
18835
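/* An illustrative (hypothetical) use of type_byte_size when emitting a byte
   size attribute would be:

     HOST_WIDE_INT cst_size;
     dw_loc_descr_ref size_expr = type_byte_size (type, &cst_size);

     if (size_expr != NULL)
       add_AT_loc (die, DW_AT_byte_size, size_expr);
     else if (cst_size != -1)
       add_AT_unsigned (die, DW_AT_byte_size, cst_size);

   i.e. use whichever of the two forms the function managed to produce.  */
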
18836 /* Helper structure for RECORD_TYPE processing. */
18837 struct vlr_context
18838 {
18839 /* Root RECORD_TYPE. It is needed to generate data member location
18840 descriptions in variable-length records (VLR), but also to cope with
18841 variants, which are composed of nested structures multiplexed with
18842 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18843 function processing a FIELD_DECL, it is required to be non null. */
18844 tree struct_type;
18845 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18846 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18847 this variant part as part of the root record (in storage units). For
18848 regular records, it must be NULL_TREE. */
18849 tree variant_part_offset;
18850 };
18851
18852 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18853 addressed byte of the "containing object" for the given FIELD_DECL. If
18854 possible, return a native constant through CST_OFFSET (in which case NULL is
18855 returned); otherwise return a DWARF expression that computes the offset.
18856
18857 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18858 that offset is, either because the argument turns out to be a pointer to an
18859 ERROR_MARK node, or because the offset expression is too complex for us.
18860
18861 CTX is required: see the comment for VLR_CONTEXT. */
18862
18863 static dw_loc_descr_ref
18864 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18865 HOST_WIDE_INT *cst_offset)
18866 {
18867 tree tree_result;
18868 dw_loc_list_ref loc_result;
18869
18870 *cst_offset = 0;
18871
18872 if (TREE_CODE (decl) == ERROR_MARK)
18873 return NULL;
18874 else
18875 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18876
18877 /* We cannot handle variable bit offsets at the moment, so abort if it's the
18878 case. */
18879 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18880 return NULL;
18881
18882 #ifdef PCC_BITFIELD_TYPE_MATTERS
18883 /* We used to handle only constant offsets in all cases. Now, we handle
18884 dynamic byte offsets properly only when the PCC bitfield layout rules
18885 (PCC_BITFIELD_TYPE_MATTERS) do not apply. */
18886 if (PCC_BITFIELD_TYPE_MATTERS
18887 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18888 {
18889 offset_int object_offset_in_bits;
18890 offset_int object_offset_in_bytes;
18891 offset_int bitpos_int;
18892 tree type;
18893 tree field_size_tree;
18894 offset_int deepest_bitpos;
18895 offset_int field_size_in_bits;
18896 unsigned int type_align_in_bits;
18897 unsigned int decl_align_in_bits;
18898 offset_int type_size_in_bits;
18899
18900 bitpos_int = wi::to_offset (bit_position (decl));
18901 type = field_type (decl);
18902 type_size_in_bits = offset_int_type_size_in_bits (type);
18903 type_align_in_bits = simple_type_align_in_bits (type);
18904
18905 field_size_tree = DECL_SIZE (decl);
18906
18907 /* The size could be unspecified if there was an error, or for
18908 a flexible array member. */
18909 if (!field_size_tree)
18910 field_size_tree = bitsize_zero_node;
18911
18912 /* If the size of the field is not constant, use the type size. */
18913 if (TREE_CODE (field_size_tree) == INTEGER_CST)
18914 field_size_in_bits = wi::to_offset (field_size_tree);
18915 else
18916 field_size_in_bits = type_size_in_bits;
18917
18918 decl_align_in_bits = simple_decl_align_in_bits (decl);
18919
18920 /* The GCC front-end doesn't make any attempt to keep track of the
18921 starting bit offset (relative to the start of the containing
18922 structure type) of the hypothetical "containing object" for a
18923 bit-field. Thus, when computing the byte offset value for the
18924 start of the "containing object" of a bit-field, we must deduce
18925 this information on our own. This can be rather tricky to do in
18926 some cases. For example, handling the following structure type
18927 definition when compiling for an i386/i486 target (which only
18928 aligns long long's to 32-bit boundaries) can be very tricky:
18929
18930 struct S { int field1; long long field2:31; };
18931
18932 Fortunately, there is a simple rule-of-thumb which can be used
18933 in such cases. When compiling for an i386/i486, GCC will
18934 allocate 8 bytes for the structure shown above. It decides to
18935 do this based upon one simple rule for bit-field allocation.
18936 GCC allocates each "containing object" for each bit-field at
18937 the first (i.e. lowest addressed) legitimate alignment boundary
18938 (based upon the required minimum alignment for the declared
18939 type of the field) which it can possibly use, subject to the
18940 condition that there is still enough available space remaining
18941 in the containing object (when allocated at the selected point)
18942 to fully accommodate all of the bits of the bit-field itself.
18943
18944 This simple rule makes it obvious why GCC allocates 8 bytes for
18945 each object of the structure type shown above. When looking
18946 for a place to allocate the "containing object" for `field2',
18947 the compiler simply tries to allocate a 64-bit "containing
18948 object" at each successive 32-bit boundary (starting at zero)
18949 until it finds a place to allocate that 64-bit field such that
18950 at least 31 contiguous (and previously unallocated) bits remain
18951 within that selected 64 bit field. (As it turns out, for the
18952 example above, the compiler finds it is OK to allocate the
18953 "containing object" 64-bit field at bit-offset zero within the
18954 structure type.)
18955
18956 Here we attempt to work backwards from the limited set of facts
18957 we're given, and we try to deduce from those facts, where GCC
18958 must have believed that the containing object started (within
18959 the structure type). The value we deduce is then used (by the
18960 callers of this routine) to generate DW_AT_location and
18961 DW_AT_bit_offset attributes for fields (both bit-fields and, in
18962 the case of DW_AT_location, regular fields as well). */
18963
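      /* Worked numbers for the `struct S' example above, assuming a 64-bit
	 long long aligned to 32 bits: field2 starts at bit 32 and is 31 bits
	 wide, so its deepest bit is 63; subtracting the 64-bit type size
	 yields -1, rounding up to the 32-bit type alignment yields 0, and
	 since 0 does not exceed bit 32 the containing object is deduced to
	 start at byte 0.  */
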
18964 /* Figure out the bit-distance from the start of the structure to
18965 the "deepest" bit of the bit-field. */
18966 deepest_bitpos = bitpos_int + field_size_in_bits;
18967
18968 /* This is the tricky part. Use some fancy footwork to deduce
18969 where the lowest addressed bit of the containing object must
18970 be. */
18971 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18972
18973 /* Round up to type_align by default. This works best for
18974 bitfields. */
18975 object_offset_in_bits
18976 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
18977
18978 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
18979 {
18980 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
18981
18982 /* Round up to decl_align instead. */
18983 object_offset_in_bits
18984 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
18985 }
18986
18987 object_offset_in_bytes
18988 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
18989 if (ctx->variant_part_offset == NULL_TREE)
18990 {
18991 *cst_offset = object_offset_in_bytes.to_shwi ();
18992 return NULL;
18993 }
18994 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
18995 }
18996 else
18997 #endif /* PCC_BITFIELD_TYPE_MATTERS */
18998 tree_result = byte_position (decl);
18999
19000 if (ctx->variant_part_offset != NULL_TREE)
19001 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19002 ctx->variant_part_offset, tree_result);
19003
19004 /* If the byte offset is a constant, it's simpler to handle a native
19005 constant than a DWARF expression. */
19006 if (TREE_CODE (tree_result) == INTEGER_CST)
19007 {
19008 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19009 return NULL;
19010 }
19011 struct loc_descr_context loc_ctx = {
19012 ctx->struct_type, /* context_type */
19013 NULL_TREE, /* base_decl */
19014 NULL, /* dpi */
19015 false, /* placeholder_arg */
19016 false /* placeholder_seen */
19017 };
19018 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19019
19020 /* We want a DWARF expression: abort if we only have a location list with
19021 multiple elements. */
19022 if (!loc_result || !single_element_loc_list_p (loc_result))
19023 return NULL;
19024 else
19025 return loc_result->expr;
19026 }
19027 \f
19028 /* The following routines define various Dwarf attributes and any data
19029 associated with them. */
19030
19031 /* Add a location description attribute value to a DIE.
19032
19033 This emits location attributes suitable for whole variables and
19034 whole parameters. Note that the location attributes for struct fields are
19035 generated by the routine `data_member_location_attribute' below. */
19036
19037 static inline void
19038 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19039 dw_loc_list_ref descr)
19040 {
19041 bool check_no_locviews = true;
19042 if (descr == 0)
19043 return;
19044 if (single_element_loc_list_p (descr))
19045 add_AT_loc (die, attr_kind, descr->expr);
19046 else
19047 {
19048 add_AT_loc_list (die, attr_kind, descr);
19049 gcc_assert (descr->ll_symbol);
19050 if (attr_kind == DW_AT_location && descr->vl_symbol
19051 && dwarf2out_locviews_in_attribute ())
19052 {
19053 add_AT_view_list (die, DW_AT_GNU_locviews);
19054 check_no_locviews = false;
19055 }
19056 }
19057
19058 if (check_no_locviews)
19059 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19060 }
19061
19062 /* Add DW_AT_accessibility attribute to DIE if needed. */
19063
19064 static void
19065 add_accessibility_attribute (dw_die_ref die, tree decl)
19066 {
19067 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19068 children, otherwise the default is DW_ACCESS_public. In DWARF2
19069 the default has always been DW_ACCESS_public. */
19070 if (TREE_PROTECTED (decl))
19071 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19072 else if (TREE_PRIVATE (decl))
19073 {
19074 if (dwarf_version == 2
19075 || die->die_parent == NULL
19076 || die->die_parent->die_tag != DW_TAG_class_type)
19077 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19078 }
19079 else if (dwarf_version > 2
19080 && die->die_parent
19081 && die->die_parent->die_tag == DW_TAG_class_type)
19082 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19083 }
19084
19085 /* Attach the specialized form of location attribute used for data members of
19086 struct and union types. In the special case of a FIELD_DECL node which
19087 represents a bit-field, the "offset" part of this special location
19088 descriptor must indicate the distance in bytes from the lowest-addressed
19089 byte of the containing struct or union type to the lowest-addressed byte of
19090 the "containing object" for the bit-field. (See the `field_byte_offset'
19091 function above).
19092
19093 For any given bit-field, the "containing object" is a hypothetical object
19094 (of some integral or enum type) within which the given bit-field lives. The
19095 type of this hypothetical "containing object" is always the same as the
19096 declared type of the individual bit-field itself (for GCC anyway... the
19097 DWARF spec doesn't actually mandate this). Note that it is the size (in
19098 bytes) of the hypothetical "containing object" which will be given in the
19099 DW_AT_byte_size attribute for this bit-field. (See the
19100 `byte_size_attribute' function below.) It is also used when calculating the
19101 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19102 function below.)
19103
19104 CTX is required: see the comment for VLR_CONTEXT. */
19105
19106 static void
19107 add_data_member_location_attribute (dw_die_ref die,
19108 tree decl,
19109 struct vlr_context *ctx)
19110 {
19111 HOST_WIDE_INT offset;
19112 dw_loc_descr_ref loc_descr = 0;
19113
19114 if (TREE_CODE (decl) == TREE_BINFO)
19115 {
19116 /* We're working on the TAG_inheritance for a base class. */
19117 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19118 {
19119 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19120 aren't at a fixed offset from all (sub)objects of the same
19121 type. We need to extract the appropriate offset from our
19122 vtable. The following dwarf expression means
19123
19124 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19125
19126 This is specific to the V3 ABI, of course. */
19127
19128 dw_loc_descr_ref tmp;
19129
19130 /* Make a copy of the object address. */
19131 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19132 add_loc_descr (&loc_descr, tmp);
19133
19134 /* Extract the vtable address. */
19135 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19136 add_loc_descr (&loc_descr, tmp);
19137
19138 /* Calculate the address of the offset. */
19139 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19140 gcc_assert (offset < 0);
19141
19142 tmp = int_loc_descriptor (-offset);
19143 add_loc_descr (&loc_descr, tmp);
19144 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19145 add_loc_descr (&loc_descr, tmp);
19146
19147 /* Extract the offset. */
19148 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19149 add_loc_descr (&loc_descr, tmp);
19150
19151 /* Add it to the object address. */
19152 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19153 add_loc_descr (&loc_descr, tmp);
19154 }
19155 else
19156 offset = tree_to_shwi (BINFO_OFFSET (decl));
19157 }
19158 else
19159 {
19160 loc_descr = field_byte_offset (decl, ctx, &offset);
19161
19162 /* If loc_descr is available then we know the field offset is dynamic.
19163 However, GDB does not handle dynamic field offsets very well at the
19164 moment. */
19165 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19166 {
19167 loc_descr = NULL;
19168 offset = 0;
19169 }
19170
19171 /* Data member location evaluation starts with the base address on the
19172 stack. Compute the field offset and add it to this base address. */
19173 else if (loc_descr != NULL)
19174 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19175 }
19176
19177 if (! loc_descr)
19178 {
19179 /* While DW_AT_data_bit_offset was added back in DWARF 4, consumer
19180 support lagged behind; GDB, for example, only added it in November 2016.
19181 For DWARF 5 we need newer debug info consumers anyway. We might change
19182 this to dwarf_version >= 4 once most consumers have caught up. */
19183 if (dwarf_version >= 5
19184 && TREE_CODE (decl) == FIELD_DECL
19185 && DECL_BIT_FIELD_TYPE (decl))
19186 {
19187 tree off = bit_position (decl);
19188 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19189 {
19190 remove_AT (die, DW_AT_byte_size);
19191 remove_AT (die, DW_AT_bit_offset);
19192 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19193 return;
19194 }
19195 }
19196 if (dwarf_version > 2)
19197 {
19198 /* Don't need to output a location expression, just the constant. */
19199 if (offset < 0)
19200 add_AT_int (die, DW_AT_data_member_location, offset);
19201 else
19202 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19203 return;
19204 }
19205 else
19206 {
19207 enum dwarf_location_atom op;
19208
19209 /* The DWARF2 standard says that we should assume that the structure
19210 address is already on the stack, so we can specify a structure
19211 field address by using DW_OP_plus_uconst. */
19212 op = DW_OP_plus_uconst;
19213 loc_descr = new_loc_descr (op, offset, 0);
19214 }
19215 }
19216
19217 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19218 }
19219
19220 /* Writes integer values to dw_vec_const array. */
19221
19222 static void
19223 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19224 {
19225 while (size != 0)
19226 {
19227 *dest++ = val & 0xff;
19228 val >>= 8;
19229 --size;
19230 }
19231 }
19232
19233 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19234
19235 static HOST_WIDE_INT
19236 extract_int (const unsigned char *src, unsigned int size)
19237 {
19238 HOST_WIDE_INT val = 0;
19239
19240 src += size;
19241 while (size != 0)
19242 {
19243 val <<= 8;
19244 val |= *--src & 0xff;
19245 --size;
19246 }
19247 return val;
19248 }
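/* For example, regardless of host byte order,

     unsigned char buf[2];
     insert_int (0x1234, 2, buf);

   stores the least significant byte first, so buf[0] == 0x34 and
   buf[1] == 0x12, and extract_int (buf, 2) recovers 0x1234.  */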
19249
19250 /* Writes wide_int values to dw_vec_const array. */
19251
19252 static void
19253 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19254 {
19255 int i;
19256
19257 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19258 {
19259 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19260 return;
19261 }
19262
19263 /* We'd have to extend this code to support odd sizes. */
19264 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19265
19266 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19267
19268 if (WORDS_BIG_ENDIAN)
19269 for (i = n - 1; i >= 0; i--)
19270 {
19271 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19272 dest += sizeof (HOST_WIDE_INT);
19273 }
19274 else
19275 for (i = 0; i < n; i++)
19276 {
19277 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19278 dest += sizeof (HOST_WIDE_INT);
19279 }
19280 }
19281
19282 /* Writes floating point values to dw_vec_const array. */
19283
19284 static void
19285 insert_float (const_rtx rtl, unsigned char *array)
19286 {
19287 long val[4];
19288 int i;
19289 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19290
19291 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19292
19293 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19294 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19295 {
19296 insert_int (val[i], 4, array);
19297 array += 4;
19298 }
19299 }
19300
19301 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19302 does not have a "location" either in memory or in a register. These
19303 things can arise in GNU C when a constant is passed as an actual parameter
19304 to an inlined function. They can also arise in C++ where declared
19305 constants do not necessarily get memory "homes". */
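/* For instance, if a call such as f (42) is inlined and the argument's value
   ends up as a plain CONST_INT, the formal parameter's DIE may carry
   DW_AT_const_value 42 instead of a DW_AT_location, via the CONST_INT case
   below.  */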
19306
19307 static bool
19308 add_const_value_attribute (dw_die_ref die, rtx rtl)
19309 {
19310 switch (GET_CODE (rtl))
19311 {
19312 case CONST_INT:
19313 {
19314 HOST_WIDE_INT val = INTVAL (rtl);
19315
19316 if (val < 0)
19317 add_AT_int (die, DW_AT_const_value, val);
19318 else
19319 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19320 }
19321 return true;
19322
19323 case CONST_WIDE_INT:
19324 {
19325 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19326 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19327 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19328 wide_int w = wi::zext (w1, prec);
19329 add_AT_wide (die, DW_AT_const_value, w);
19330 }
19331 return true;
19332
19333 case CONST_DOUBLE:
19334 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19335 floating-point constant. A CONST_DOUBLE is used whenever the
19336 constant requires more than one word in order to be adequately
19337 represented. */
19338 if (TARGET_SUPPORTS_WIDE_INT == 0
19339 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19340 add_AT_double (die, DW_AT_const_value,
19341 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19342 else
19343 {
19344 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19345 unsigned int length = GET_MODE_SIZE (mode);
19346 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19347
19348 insert_float (rtl, array);
19349 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19350 }
19351 return true;
19352
19353 case CONST_VECTOR:
19354 {
19355 unsigned int length;
19356 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19357 return false;
19358
19359 machine_mode mode = GET_MODE (rtl);
19360 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19361 unsigned char *array
19362 = ggc_vec_alloc<unsigned char> (length * elt_size);
19363 unsigned int i;
19364 unsigned char *p;
19365 machine_mode imode = GET_MODE_INNER (mode);
19366
19367 switch (GET_MODE_CLASS (mode))
19368 {
19369 case MODE_VECTOR_INT:
19370 for (i = 0, p = array; i < length; i++, p += elt_size)
19371 {
19372 rtx elt = CONST_VECTOR_ELT (rtl, i);
19373 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19374 }
19375 break;
19376
19377 case MODE_VECTOR_FLOAT:
19378 for (i = 0, p = array; i < length; i++, p += elt_size)
19379 {
19380 rtx elt = CONST_VECTOR_ELT (rtl, i);
19381 insert_float (elt, p);
19382 }
19383 break;
19384
19385 default:
19386 gcc_unreachable ();
19387 }
19388
19389 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19390 }
19391 return true;
19392
19393 case CONST_STRING:
19394 if (dwarf_version >= 4 || !dwarf_strict)
19395 {
19396 dw_loc_descr_ref loc_result;
19397 resolve_one_addr (&rtl);
19398 rtl_addr:
19399 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19400 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19401 add_AT_loc (die, DW_AT_location, loc_result);
19402 vec_safe_push (used_rtx_array, rtl);
19403 return true;
19404 }
19405 return false;
19406
19407 case CONST:
19408 if (CONSTANT_P (XEXP (rtl, 0)))
19409 return add_const_value_attribute (die, XEXP (rtl, 0));
19410 /* FALLTHROUGH */
19411 case SYMBOL_REF:
19412 if (!const_ok_for_output (rtl))
19413 return false;
19414 /* FALLTHROUGH */
19415 case LABEL_REF:
19416 if (dwarf_version >= 4 || !dwarf_strict)
19417 goto rtl_addr;
19418 return false;
19419
19420 case PLUS:
19421 /* In cases where an inlined instance of an inline function is passed
19422 the address of an `auto' variable (which is local to the caller) we
19423 can get a situation where the DECL_RTL of the artificial local
19424 variable (for the inlining) which acts as a stand-in for the
19425 corresponding formal parameter (of the inline function) will look
19426 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19427 exactly a compile-time constant expression, but it isn't the address
19428 of the (artificial) local variable either. Rather, it represents the
19429 *value* which the artificial local variable always has during its
19430 lifetime. We currently have no way to represent such quasi-constant
19431 values in Dwarf, so for now we just punt and generate nothing. */
19432 return false;
19433
19434 case HIGH:
19435 case CONST_FIXED:
19436 return false;
19437
19438 case MEM:
19439 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19440 && MEM_READONLY_P (rtl)
19441 && GET_MODE (rtl) == BLKmode)
19442 {
19443 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19444 return true;
19445 }
19446 return false;
19447
19448 default:
19449 /* No other kinds of rtx should be possible here. */
19450 gcc_unreachable ();
19451 }
19452 return false;
19453 }
19454
19455 /* Determine whether the evaluation of EXPR references any variables
19456 or functions which aren't otherwise used (and therefore may not be
19457 output). */
19458 static tree
19459 reference_to_unused (tree * tp, int * walk_subtrees,
19460 void * data ATTRIBUTE_UNUSED)
19461 {
19462 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19463 *walk_subtrees = 0;
19464
19465 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19466 && ! TREE_ASM_WRITTEN (*tp))
19467 return *tp;
19468 /* ??? The C++ FE emits debug information for using decls, so
19469 putting gcc_unreachable here falls over. See PR31899. For now
19470 be conservative. */
19471 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19472 return *tp;
19473 else if (VAR_P (*tp))
19474 {
19475 varpool_node *node = varpool_node::get (*tp);
19476 if (!node || !node->definition)
19477 return *tp;
19478 }
19479 else if (TREE_CODE (*tp) == FUNCTION_DECL
19480 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19481 {
19482 /* The call graph machinery must have finished analyzing,
19483 optimizing and gimplifying the CU by now.
19484 So if *TP has no call graph node associated
19485 to it, it means *TP will not be emitted. */
19486 if (!cgraph_node::get (*tp))
19487 return *tp;
19488 }
19489 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19490 return *tp;
19491
19492 return NULL_TREE;
19493 }
19494
19495 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19496 for use in a later add_const_value_attribute call. */
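/* For example, for a declaration such as

     static const char msg[] = "hi";

   the initializer is a STRING_CST with no embedded zeros, so the code below
   would build a read-only BLKmode MEM wrapping a CONST_STRING; the MEM case
   in add_const_value_attribute can then emit DW_AT_const_value "hi".  */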
19497
19498 static rtx
19499 rtl_for_decl_init (tree init, tree type)
19500 {
19501 rtx rtl = NULL_RTX;
19502
19503 STRIP_NOPS (init);
19504
19505 /* If a variable is initialized with a string constant without embedded
19506 zeros, build CONST_STRING. */
19507 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19508 {
19509 tree enttype = TREE_TYPE (type);
19510 tree domain = TYPE_DOMAIN (type);
19511 scalar_int_mode mode;
19512
19513 if (is_int_mode (TYPE_MODE (enttype), &mode)
19514 && GET_MODE_SIZE (mode) == 1
19515 && domain
19516 && integer_zerop (TYPE_MIN_VALUE (domain))
19517 && compare_tree_int (TYPE_MAX_VALUE (domain),
19518 TREE_STRING_LENGTH (init) - 1) == 0
19519 && ((size_t) TREE_STRING_LENGTH (init)
19520 == strlen (TREE_STRING_POINTER (init)) + 1))
19521 {
19522 rtl = gen_rtx_CONST_STRING (VOIDmode,
19523 ggc_strdup (TREE_STRING_POINTER (init)));
19524 rtl = gen_rtx_MEM (BLKmode, rtl);
19525 MEM_READONLY_P (rtl) = 1;
19526 }
19527 }
19528 /* Other aggregates, and complex values, could be represented using
19529 CONCAT: FIXME! */
19530 else if (AGGREGATE_TYPE_P (type)
19531 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19532 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19533 || TREE_CODE (type) == COMPLEX_TYPE)
19534 ;
19535 /* Vectors only work if their mode is supported by the target.
19536 FIXME: generic vectors ought to work too. */
19537 else if (TREE_CODE (type) == VECTOR_TYPE
19538 && !VECTOR_MODE_P (TYPE_MODE (type)))
19539 ;
19540 /* If the initializer is something that we know will expand into an
19541 immediate RTL constant, expand it now. We must be careful not to
19542 reference variables which won't be output. */
19543 else if (initializer_constant_valid_p (init, type)
19544 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19545 {
19546 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19547 possible. */
19548 if (TREE_CODE (type) == VECTOR_TYPE)
19549 switch (TREE_CODE (init))
19550 {
19551 case VECTOR_CST:
19552 break;
19553 case CONSTRUCTOR:
19554 if (TREE_CONSTANT (init))
19555 {
19556 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19557 bool constant_p = true;
19558 tree value;
19559 unsigned HOST_WIDE_INT ix;
19560
19561 /* Even when ctor is constant, it might contain non-*_CST
19562 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19563 belong in VECTOR_CST nodes. */
19564 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19565 if (!CONSTANT_CLASS_P (value))
19566 {
19567 constant_p = false;
19568 break;
19569 }
19570
19571 if (constant_p)
19572 {
19573 init = build_vector_from_ctor (type, elts);
19574 break;
19575 }
19576 }
19577 /* FALLTHRU */
19578
19579 default:
19580 return NULL;
19581 }
19582
19583 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19584
19585 /* If expand_expr returns a MEM, it wasn't immediate. */
19586 gcc_assert (!rtl || !MEM_P (rtl));
19587 }
19588
19589 return rtl;
19590 }
19591
19592 /* Generate RTL for the variable DECL to represent its location. */
19593
19594 static rtx
19595 rtl_for_decl_location (tree decl)
19596 {
19597 rtx rtl;
19598
19599 /* Here we have to decide where we are going to say the parameter "lives"
19600 (as far as the debugger is concerned). We only have a couple of
19601 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19602
19603 DECL_RTL normally indicates where the parameter lives during most of the
19604 activation of the function. If optimization is enabled however, this
19605 could be either NULL or else a pseudo-reg. Both of those cases indicate
19606 that the parameter doesn't really live anywhere (as far as the code
19607 generation parts of GCC are concerned) during most of the function's
19608 activation. That will happen (for example) if the parameter is never
19609 referenced within the function.
19610
19611 We could just generate a location descriptor here for all non-NULL
19612 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19613 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19614 where DECL_RTL is NULL or is a pseudo-reg.
19615
19616 Note however that we can only get away with using DECL_INCOMING_RTL as
19617 a backup substitute for DECL_RTL in certain limited cases. In cases
19618 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19619 we can be sure that the parameter was passed using the same type as it is
19620 declared to have within the function, and that its DECL_INCOMING_RTL
19621 points us to a place where a value of that type is passed.
19622
19623 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19624 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19625 because in these cases DECL_INCOMING_RTL points us to a value of some
19626 type which is *different* from the type of the parameter itself. Thus,
19627 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19628 such cases, the debugger would end up (for example) trying to fetch a
19629 `float' from a place which actually contains the first part of a
19630 `double'. That would lead to really incorrect and confusing
19631 output at debug-time.
19632
19633 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19634 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19635 are a couple of exceptions however. On little-endian machines we can
19636 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19637 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19638 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19639 when (on a little-endian machine) a non-prototyped function has a
19640 parameter declared to be of type `short' or `char'. In such cases,
19641 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19642 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19643 passed `int' value. If the debugger then uses that address to fetch
19644 a `short' or a `char' (on a little-endian machine) the result will be
19645 the correct data, so we allow for such exceptional cases below.
19646
19647 Note that our goal here is to describe the place where the given formal
19648 parameter lives during most of the function's activation (i.e. between the
19649 end of the prologue and the start of the epilogue). We'll do that as best
19650 we can. Note however that if the given formal parameter is modified
19651 sometime during the execution of the function, then a stack backtrace (at
19652 debug-time) will show the function as having been called with the *new*
19653 value rather than the value which was originally passed in. This happens
19654 rarely enough that it is not a major problem, but it *is* a problem, and
19655 I'd like to fix it.
19656
19657 A future version of dwarf2out.c may generate two additional attributes for
19658 any given DW_TAG_formal_parameter DIE which will describe the "passed
19659 type" and the "passed location" for the given formal parameter in addition
19660 to the attributes we now generate to indicate the "declared type" and the
19661 "active location" for each parameter. This additional set of attributes
19662 could be used by debuggers for stack backtraces. Separately, note that
19663 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19664 This happens (for example) for inlined-instances of inline function formal
19665 parameters which are never referenced. This really shouldn't be
19666 happening. All PARM_DECL nodes should get valid non-NULL
19667 DECL_INCOMING_RTL values. FIXME. */
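  /* As a concrete example of the little-endian exception above, consider a
     non-prototyped function

       int f (c) char c; { ... }

     Here TREE_TYPE (decl) is `char' while DECL_ARG_TYPE (decl) is `int'
     (say 4 bytes).  On a little-endian target the char value occupies the
     lowest-addressed byte of the passed int, so DECL_INCOMING_RTL can stand
     in for DECL_RTL directly; on a big-endian target the code below instead
     offsets a MEM by GET_MODE_SIZE (pmode) - GET_MODE_SIZE (dmode), here 3
     bytes, to reach the char.  */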
19668
19669 /* Use DECL_RTL as the "location" unless we find something better. */
19670 rtl = DECL_RTL_IF_SET (decl);
19671
19672 /* When generating abstract instances, ignore everything except
19673 constants, symbols living in memory, and symbols living in
19674 fixed registers. */
19675 if (! reload_completed)
19676 {
19677 if (rtl
19678 && (CONSTANT_P (rtl)
19679 || (MEM_P (rtl)
19680 && CONSTANT_P (XEXP (rtl, 0)))
19681 || (REG_P (rtl)
19682 && VAR_P (decl)
19683 && TREE_STATIC (decl))))
19684 {
19685 rtl = targetm.delegitimize_address (rtl);
19686 return rtl;
19687 }
19688 rtl = NULL_RTX;
19689 }
19690 else if (TREE_CODE (decl) == PARM_DECL)
19691 {
19692 if (rtl == NULL_RTX
19693 || is_pseudo_reg (rtl)
19694 || (MEM_P (rtl)
19695 && is_pseudo_reg (XEXP (rtl, 0))
19696 && DECL_INCOMING_RTL (decl)
19697 && MEM_P (DECL_INCOMING_RTL (decl))
19698 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19699 {
19700 tree declared_type = TREE_TYPE (decl);
19701 tree passed_type = DECL_ARG_TYPE (decl);
19702 machine_mode dmode = TYPE_MODE (declared_type);
19703 machine_mode pmode = TYPE_MODE (passed_type);
19704
19705 /* This decl represents a formal parameter which was optimized out.
19706 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19707 all cases where (rtl == NULL_RTX) just below. */
19708 if (dmode == pmode)
19709 rtl = DECL_INCOMING_RTL (decl);
19710 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19711 && SCALAR_INT_MODE_P (dmode)
19712 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19713 && DECL_INCOMING_RTL (decl))
19714 {
19715 rtx inc = DECL_INCOMING_RTL (decl);
19716 if (REG_P (inc))
19717 rtl = inc;
19718 else if (MEM_P (inc))
19719 {
19720 if (BYTES_BIG_ENDIAN)
19721 rtl = adjust_address_nv (inc, dmode,
19722 GET_MODE_SIZE (pmode)
19723 - GET_MODE_SIZE (dmode));
19724 else
19725 rtl = inc;
19726 }
19727 }
19728 }
19729
19730 /* If the parm was passed in registers, but lives on the stack, then
19731 make a big endian correction if the mode of the type of the
19732 parameter is not the same as the mode of the rtl. */
19733 /* ??? This is the same series of checks that are made in dbxout.c before
19734 we reach the big endian correction code there. It isn't clear if all
19735 of these checks are necessary here, but keeping them all is the safe
19736 thing to do. */
19737 else if (MEM_P (rtl)
19738 && XEXP (rtl, 0) != const0_rtx
19739 && ! CONSTANT_P (XEXP (rtl, 0))
19740 /* Not passed in memory. */
19741 && !MEM_P (DECL_INCOMING_RTL (decl))
19742 /* Not passed by invisible reference. */
19743 && (!REG_P (XEXP (rtl, 0))
19744 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19745 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19746 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19747 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19748 #endif
19749 )
19750 /* Big endian correction check. */
19751 && BYTES_BIG_ENDIAN
19752 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19753 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19754 UNITS_PER_WORD))
19755 {
19756 machine_mode addr_mode = get_address_mode (rtl);
19757 poly_int64 offset = (UNITS_PER_WORD
19758 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19759
19760 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19761 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19762 }
19763 }
19764 else if (VAR_P (decl)
19765 && rtl
19766 && MEM_P (rtl)
19767 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19768 {
19769 machine_mode addr_mode = get_address_mode (rtl);
19770 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19771 GET_MODE (rtl));
19772
19773 /* If a variable is declared "register" yet is smaller than
19774 a register, then if we store the variable to memory, it
19775 looks like we're storing a register-sized value, when in
19776 fact we are not. We need to adjust the offset of the
19777 storage location to reflect the actual value's bytes,
19778 else gdb will not be able to display it. */
19779 if (maybe_ne (offset, 0))
19780 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19781 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19782 }
19783
19784 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19785 and will have been substituted directly into all expressions that use it.
19786 C does not have such a concept, but C++ and other languages do. */
19787 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19788 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19789
19790 if (rtl)
19791 rtl = targetm.delegitimize_address (rtl);
19792
19793 /* If we don't look past the constant pool, we risk emitting a
19794 reference to a constant pool entry that isn't referenced from
19795 code, and thus is not emitted. */
19796 if (rtl)
19797 rtl = avoid_constant_pool_reference (rtl);
19798
19799 /* Try harder to get a rtl. If this symbol ends up not being emitted
19800 in the current CU, resolve_addr will remove the expression referencing
19801 it. */
19802 if (rtl == NULL_RTX
19803 && VAR_P (decl)
19804 && !DECL_EXTERNAL (decl)
19805 && TREE_STATIC (decl)
19806 && DECL_NAME (decl)
19807 && !DECL_HARD_REGISTER (decl)
19808 && DECL_MODE (decl) != VOIDmode)
19809 {
19810 rtl = make_decl_rtl_for_debug (decl);
19811 if (!MEM_P (rtl)
19812 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19813 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19814 rtl = NULL_RTX;
19815 }
19816
19817 return rtl;
19818 }
19819
19820 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19821 returned. If so, the decl for the COMMON block is returned, and the
19822 value is the offset into the common block for the symbol. */
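/* For example, given the Fortran code

     REAL*8 X, Y
     COMMON /BLK/ X, Y

   the decl for Y has a DECL_VALUE_EXPR that is a COMPONENT_REF into the
   variable representing /BLK/, so this function would return that
   variable's decl and set *VALUE to 8, Y's byte offset within the block.  */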
19823
19824 static tree
19825 fortran_common (tree decl, HOST_WIDE_INT *value)
19826 {
19827 tree val_expr, cvar;
19828 machine_mode mode;
19829 poly_int64 bitsize, bitpos;
19830 tree offset;
19831 HOST_WIDE_INT cbitpos;
19832 int unsignedp, reversep, volatilep = 0;
19833
19834 /* If the decl isn't a VAR_DECL, or if it isn't static, or if it
19835 does not have a value expression (giving the offset into the common
19836 area), or if we aren't compiling Fortran, then it isn't common and
19837 shouldn't be handled as such. */
19838 if (!VAR_P (decl)
19839 || !TREE_STATIC (decl)
19840 || !DECL_HAS_VALUE_EXPR_P (decl)
19841 || !is_fortran ())
19842 return NULL_TREE;
19843
19844 val_expr = DECL_VALUE_EXPR (decl);
19845 if (TREE_CODE (val_expr) != COMPONENT_REF)
19846 return NULL_TREE;
19847
19848 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19849 &unsignedp, &reversep, &volatilep);
19850
19851 if (cvar == NULL_TREE
19852 || !VAR_P (cvar)
19853 || DECL_ARTIFICIAL (cvar)
19854 || !TREE_PUBLIC (cvar)
19855 /* We don't expect to have to cope with variable offsets,
19856 since at present all static data must have a constant size. */
19857 || !bitpos.is_constant (&cbitpos))
19858 return NULL_TREE;
19859
19860 *value = 0;
19861 if (offset != NULL)
19862 {
19863 if (!tree_fits_shwi_p (offset))
19864 return NULL_TREE;
19865 *value = tree_to_shwi (offset);
19866 }
19867 if (cbitpos != 0)
19868 *value += cbitpos / BITS_PER_UNIT;
19869
19870 return cvar;
19871 }
19872
19873 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19874 data attribute for a variable or a parameter. We generate the
19875 DW_AT_const_value attribute only in those cases where the given variable
19876 or parameter does not have a true "location" either in memory or in a
19877 register. This can happen (for example) when a constant is passed as an
19878 actual argument in a call to an inline function. (It's possible that
19879 these things can crop up in other ways also.) Note that one type of
19880 constant value which can be passed into an inlined function is a constant
19881 pointer. This can happen for example if an actual argument in an inlined
19882 function call evaluates to a compile-time constant address.
19883
19884 CACHE_P is true if it is worth caching the location list for DECL,
19885 so that future calls can reuse it rather than regenerate it from scratch.
19886 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19887 since we will need to refer to them each time the function is inlined. */
19888
19889 static bool
19890 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19891 {
19892 rtx rtl;
19893 dw_loc_list_ref list;
19894 var_loc_list *loc_list;
19895 cached_dw_loc_list *cache;
19896
19897 if (early_dwarf)
19898 return false;
19899
19900 if (TREE_CODE (decl) == ERROR_MARK)
19901 return false;
19902
19903 if (get_AT (die, DW_AT_location)
19904 || get_AT (die, DW_AT_const_value))
19905 return true;
19906
19907 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19908 || TREE_CODE (decl) == RESULT_DECL);
19909
19910 /* Try to get some constant RTL for this decl, and use that as the value of
19911 the location. */
19912
19913 rtl = rtl_for_decl_location (decl);
19914 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19915 && add_const_value_attribute (die, rtl))
19916 return true;
19917
19918 /* See if we have a single-element location list that is equivalent to a
19919 constant value. In that case we are better off using add_const_value_attribute
19920 rather than expanding the equivalent constant value. */
19921 loc_list = lookup_decl_loc (decl);
19922 if (loc_list
19923 && loc_list->first
19924 && loc_list->first->next == NULL
19925 && NOTE_P (loc_list->first->loc)
19926 && NOTE_VAR_LOCATION (loc_list->first->loc)
19927 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
19928 {
19929 struct var_loc_node *node;
19930
19931 node = loc_list->first;
19932 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
19933 if (GET_CODE (rtl) == EXPR_LIST)
19934 rtl = XEXP (rtl, 0);
19935 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
19936 && add_const_value_attribute (die, rtl))
19937 return true;
19938 }
19939 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
19940 list several times. See if we've already cached the contents. */
19941 list = NULL;
19942 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
19943 cache_p = false;
19944 if (cache_p)
19945 {
19946 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
19947 if (cache)
19948 list = cache->loc_list;
19949 }
19950 if (list == NULL)
19951 {
19952 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
19953 NULL);
19954 /* It is usually worth caching this result if the decl is from
19955 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
19956 if (cache_p && list && list->dw_loc_next)
19957 {
19958 cached_dw_loc_list **slot
19959 = cached_dw_loc_list_table->find_slot_with_hash (decl,
19960 DECL_UID (decl),
19961 INSERT);
19962 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
19963 cache->decl_id = DECL_UID (decl);
19964 cache->loc_list = list;
19965 *slot = cache;
19966 }
19967 }
19968 if (list)
19969 {
19970 add_AT_location_description (die, DW_AT_location, list);
19971 return true;
19972 }
19973 /* None of that worked, so it must not really have a location;
19974 try adding a constant value attribute from the DECL_INITIAL. */
19975 return tree_add_const_value_attribute_for_decl (die, decl);
19976 }
19977
19978 /* Helper function for tree_add_const_value_attribute. Natively encode
19979 initializer INIT into an array. Return true if successful. */
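/* For example, assuming a target with 4-byte little-endian int, the
   initializer of

     static const int tbl[3] = { 1, 2, 3 };

   would be encoded into a 12-byte array as

     01 00 00 00  02 00 00 00  03 00 00 00

   which tree_add_const_value_attribute then emits as a DW_AT_const_value
   block.  */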
19980
19981 static bool
19982 native_encode_initializer (tree init, unsigned char *array, int size)
19983 {
19984 tree type;
19985
19986 if (init == NULL_TREE)
19987 return false;
19988
19989 STRIP_NOPS (init);
19990 switch (TREE_CODE (init))
19991 {
19992 case STRING_CST:
19993 type = TREE_TYPE (init);
19994 if (TREE_CODE (type) == ARRAY_TYPE)
19995 {
19996 tree enttype = TREE_TYPE (type);
19997 scalar_int_mode mode;
19998
19999 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20000 || GET_MODE_SIZE (mode) != 1)
20001 return false;
20002 if (int_size_in_bytes (type) != size)
20003 return false;
20004 if (size > TREE_STRING_LENGTH (init))
20005 {
20006 memcpy (array, TREE_STRING_POINTER (init),
20007 TREE_STRING_LENGTH (init));
20008 memset (array + TREE_STRING_LENGTH (init),
20009 '\0', size - TREE_STRING_LENGTH (init));
20010 }
20011 else
20012 memcpy (array, TREE_STRING_POINTER (init), size);
20013 return true;
20014 }
20015 return false;
20016 case CONSTRUCTOR:
20017 type = TREE_TYPE (init);
20018 if (int_size_in_bytes (type) != size)
20019 return false;
20020 if (TREE_CODE (type) == ARRAY_TYPE)
20021 {
20022 HOST_WIDE_INT min_index;
20023 unsigned HOST_WIDE_INT cnt;
20024 int curpos = 0, fieldsize;
20025 constructor_elt *ce;
20026
20027 if (TYPE_DOMAIN (type) == NULL_TREE
20028 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20029 return false;
20030
20031 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20032 if (fieldsize <= 0)
20033 return false;
20034
20035 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20036 memset (array, '\0', size);
20037 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20038 {
20039 tree val = ce->value;
20040 tree index = ce->index;
20041 int pos = curpos;
20042 if (index && TREE_CODE (index) == RANGE_EXPR)
20043 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20044 * fieldsize;
20045 else if (index)
20046 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20047
20048 if (val)
20049 {
20050 STRIP_NOPS (val);
20051 if (!native_encode_initializer (val, array + pos, fieldsize))
20052 return false;
20053 }
20054 curpos = pos + fieldsize;
20055 if (index && TREE_CODE (index) == RANGE_EXPR)
20056 {
20057 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20058 - tree_to_shwi (TREE_OPERAND (index, 0));
20059 while (count-- > 0)
20060 {
20061 if (val)
20062 memcpy (array + curpos, array + pos, fieldsize);
20063 curpos += fieldsize;
20064 }
20065 }
20066 gcc_assert (curpos <= size);
20067 }
20068 return true;
20069 }
20070 else if (TREE_CODE (type) == RECORD_TYPE
20071 || TREE_CODE (type) == UNION_TYPE)
20072 {
20073 tree field = NULL_TREE;
20074 unsigned HOST_WIDE_INT cnt;
20075 constructor_elt *ce;
20076
20077 if (int_size_in_bytes (type) != size)
20078 return false;
20079
20080 if (TREE_CODE (type) == RECORD_TYPE)
20081 field = TYPE_FIELDS (type);
20082
20083 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20084 {
20085 tree val = ce->value;
20086 int pos, fieldsize;
20087
20088 if (ce->index != 0)
20089 field = ce->index;
20090
20091 if (val)
20092 STRIP_NOPS (val);
20093
20094 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20095 return false;
20096
20097 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20098 && TYPE_DOMAIN (TREE_TYPE (field))
20099 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20100 return false;
20101 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20102 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20103 return false;
20104 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20105 pos = int_byte_position (field);
20106 gcc_assert (pos + fieldsize <= size);
20107 if (val && fieldsize != 0
20108 && !native_encode_initializer (val, array + pos, fieldsize))
20109 return false;
20110 }
20111 return true;
20112 }
20113 return false;
20114 case VIEW_CONVERT_EXPR:
20115 case NON_LVALUE_EXPR:
20116 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20117 default:
20118 return native_encode_expr (init, array, size) == size;
20119 }
20120 }
20121
20122 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20123 attribute is the const value T. */
20124
20125 static bool
20126 tree_add_const_value_attribute (dw_die_ref die, tree t)
20127 {
20128 tree init;
20129 tree type = TREE_TYPE (t);
20130 rtx rtl;
20131
20132 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20133 return false;
20134
20135 init = t;
20136 gcc_assert (!DECL_P (init));
20137
20138 if (TREE_CODE (init) == INTEGER_CST)
20139 {
20140 if (tree_fits_uhwi_p (init))
20141 {
20142 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20143 return true;
20144 }
20145 if (tree_fits_shwi_p (init))
20146 {
20147 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20148 return true;
20149 }
20150 }
20151 if (! early_dwarf)
20152 {
20153 rtl = rtl_for_decl_init (init, type);
20154 if (rtl)
20155 return add_const_value_attribute (die, rtl);
20156 }
20157 /* If the host and target are sane, try harder. */
20158 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20159 && initializer_constant_valid_p (init, type))
20160 {
20161 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20162 if (size > 0 && (int) size == size)
20163 {
20164 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20165
20166 if (native_encode_initializer (init, array, size))
20167 {
20168 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20169 return true;
20170 }
20171 ggc_free (array);
20172 }
20173 }
20174 return false;
20175 }
20176
20177 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20178 attribute is the const value of T, where T is an integral constant
20179 variable with static storage duration
20180 (so it can't be a PARM_DECL or a RESULT_DECL). */
20181
20182 static bool
20183 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20184 {
20185
20186 if (!decl
20187 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20188 || (VAR_P (decl) && !TREE_STATIC (decl)))
20189 return false;
20190
20191 if (TREE_READONLY (decl)
20192 && ! TREE_THIS_VOLATILE (decl)
20193 && DECL_INITIAL (decl))
20194 /* OK */;
20195 else
20196 return false;
20197
20198 /* Don't add DW_AT_const_value if abstract origin already has one. */
20199 if (get_AT (var_die, DW_AT_const_value))
20200 return false;
20201
20202 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20203 }
20204
20205 /* Convert the CFI instructions for the current function into a
20206 location list. This is used for DW_AT_frame_base when we are targeting
20207 a dwarf2 consumer that does not support the dwarf3
20208 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20209 expressions. */
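/* The result is a location list with one entry per range over which the CFA
   computation stays the same.  For illustration, a function whose CFA is
   sp + 16 on entry and fp + 16 after the prologue might yield (labels and
   register names purely illustrative)

     [.Ltext0, .LCFI0)  DW_OP_breg<sp> 16
     [.LCFI0, .Lend)    DW_OP_breg<fp> 16

   with OFFSET added to each displacement.  */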
20210
20211 static dw_loc_list_ref
20212 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20213 {
20214 int ix;
20215 dw_fde_ref fde;
20216 dw_loc_list_ref list, *list_tail;
20217 dw_cfi_ref cfi;
20218 dw_cfa_location last_cfa, next_cfa;
20219 const char *start_label, *last_label, *section;
20220 dw_cfa_location remember;
20221
20222 fde = cfun->fde;
20223 gcc_assert (fde != NULL);
20224
20225 section = secname_for_decl (current_function_decl);
20226 list_tail = &list;
20227 list = NULL;
20228
20229 memset (&next_cfa, 0, sizeof (next_cfa));
20230 next_cfa.reg = INVALID_REGNUM;
20231 remember = next_cfa;
20232
20233 start_label = fde->dw_fde_begin;
20234
20235 /* ??? Bald assumption that the CIE opcode list does not contain
20236 advance opcodes. */
20237 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20238 lookup_cfa_1 (cfi, &next_cfa, &remember);
20239
20240 last_cfa = next_cfa;
20241 last_label = start_label;
20242
20243 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20244 {
20245 /* If the first partition contained no CFI adjustments, the
20246 CIE opcodes apply to the whole first partition. */
20247 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20248 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20249 list_tail = &(*list_tail)->dw_loc_next;
20250 start_label = last_label = fde->dw_fde_second_begin;
20251 }
20252
20253 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20254 {
20255 switch (cfi->dw_cfi_opc)
20256 {
20257 case DW_CFA_set_loc:
20258 case DW_CFA_advance_loc1:
20259 case DW_CFA_advance_loc2:
20260 case DW_CFA_advance_loc4:
20261 if (!cfa_equal_p (&last_cfa, &next_cfa))
20262 {
20263 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20264 start_label, 0, last_label, 0, section);
20265
20266 list_tail = &(*list_tail)->dw_loc_next;
20267 last_cfa = next_cfa;
20268 start_label = last_label;
20269 }
20270 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20271 break;
20272
20273 case DW_CFA_advance_loc:
20274 /* The encoding is complex enough that we should never emit this. */
20275 gcc_unreachable ();
20276
20277 default:
20278 lookup_cfa_1 (cfi, &next_cfa, &remember);
20279 break;
20280 }
20281 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20282 {
20283 if (!cfa_equal_p (&last_cfa, &next_cfa))
20284 {
20285 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20286 start_label, 0, last_label, 0, section);
20287
20288 list_tail = &(*list_tail)->dw_loc_next;
20289 last_cfa = next_cfa;
20290 start_label = last_label;
20291 }
20292 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20293 start_label, 0, fde->dw_fde_end, 0, section);
20294 list_tail = &(*list_tail)->dw_loc_next;
20295 start_label = last_label = fde->dw_fde_second_begin;
20296 }
20297 }
20298
20299 if (!cfa_equal_p (&last_cfa, &next_cfa))
20300 {
20301 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20302 start_label, 0, last_label, 0, section);
20303 list_tail = &(*list_tail)->dw_loc_next;
20304 start_label = last_label;
20305 }
20306
20307 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20308 start_label, 0,
20309 fde->dw_fde_second_begin
20310 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20311 section);
20312
20313 maybe_gen_llsym (list);
20314
20315 return list;
20316 }
20317
20318 /* Compute a displacement from the "steady-state frame pointer" to the
20319 frame base (often the same as the CFA), and store it in
20320 frame_pointer_fb_offset. OFFSET is added to the displacement
20321 before the latter is negated. */
20322
20323 static void
20324 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20325 {
20326 rtx reg, elim;
20327
20328 #ifdef FRAME_POINTER_CFA_OFFSET
20329 reg = frame_pointer_rtx;
20330 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20331 #else
20332 reg = arg_pointer_rtx;
20333 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20334 #endif
20335
20336 elim = (ira_use_lra_p
20337 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20338 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20339 elim = strip_offset_and_add (elim, &offset);
20340
20341 frame_pointer_fb_offset = -offset;
20342
20343 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20344 in which to eliminate. This is because its stack pointer isn't
20345 directly accessible as a register within the ISA. To work around
20346 this, assume that while we cannot provide a proper value for
20347 frame_pointer_fb_offset, we won't need one either. */
20348 frame_pointer_fb_offset_valid
20349 = ((SUPPORTS_STACK_ALIGNMENT
20350 && (elim == hard_frame_pointer_rtx
20351 || elim == stack_pointer_rtx))
20352 || elim == (frame_pointer_needed
20353 ? hard_frame_pointer_rtx
20354 : stack_pointer_rtx));
20355 }
20356
20357 /* Generate a DW_AT_name attribute given some string value to be included as
20358 the value of the attribute. */
20359
20360 static void
20361 add_name_attribute (dw_die_ref die, const char *name_string)
20362 {
20363 if (name_string != NULL && *name_string != 0)
20364 {
20365 if (demangle_name_func)
20366 name_string = (*demangle_name_func) (name_string);
20367
20368 add_AT_string (die, DW_AT_name, name_string);
20369 }
20370 }
20371
20372 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20373 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20374 of TYPE accordingly.
20375
20376 ??? This is a temporary measure until after we're able to generate
20377 regular DWARF for the complex Ada type system. */
20378
20379 static void
20380 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20381 dw_die_ref context_die)
20382 {
20383 tree dtype;
20384 dw_die_ref dtype_die;
20385
20386 if (!lang_hooks.types.descriptive_type)
20387 return;
20388
20389 dtype = lang_hooks.types.descriptive_type (type);
20390 if (!dtype)
20391 return;
20392
20393 dtype_die = lookup_type_die (dtype);
20394 if (!dtype_die)
20395 {
20396 gen_type_die (dtype, context_die);
20397 dtype_die = lookup_type_die (dtype);
20398 gcc_assert (dtype_die);
20399 }
20400
20401 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20402 }
20403
20404 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20405
20406 static const char *
20407 comp_dir_string (void)
20408 {
20409 const char *wd;
20410 char *wd1;
20411 static const char *cached_wd = NULL;
20412
20413 if (cached_wd != NULL)
20414 return cached_wd;
20415
20416 wd = get_src_pwd ();
20417 if (wd == NULL)
20418 return NULL;
20419
20420 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20421 {
20422 int wdlen;
20423
20424 wdlen = strlen (wd);
20425 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20426 strcpy (wd1, wd);
20427 wd1 [wdlen] = DIR_SEPARATOR;
20428 wd1 [wdlen + 1] = 0;
20429 wd = wd1;
20430 }
20431
20432 cached_wd = remap_debug_filename (wd);
20433 return cached_wd;
20434 }
20435
20436 /* Generate a DW_AT_comp_dir attribute for DIE. */
20437
20438 static void
20439 add_comp_dir_attribute (dw_die_ref die)
20440 {
20441 const char * wd = comp_dir_string ();
20442 if (wd != NULL)
20443 add_AT_string (die, DW_AT_comp_dir, wd);
20444 }
20445
20446 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20447 pointer computation, ...), output a representation for that value according
20448 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20449 loc_list_from_tree for the meaning of CONTEXT. */
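/* For instance, an Ada array whose upper bound is a discriminant of an
   enclosing record may be described in three ways, depending on FORMS: as a
   plain constant (DW_AT_upper_bound 9, say), as a reference to the DIE of
   the discriminant FIELD_DECL, or as a DWARF expression computed by
   loc_list_from_tree.  The code below tries those forms in that order.  */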
20450
20451 static void
20452 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20453 int forms, struct loc_descr_context *context)
20454 {
20455 dw_die_ref context_die, decl_die;
20456 dw_loc_list_ref list;
20457 bool strip_conversions = true;
20458 bool placeholder_seen = false;
20459
20460 while (strip_conversions)
20461 switch (TREE_CODE (value))
20462 {
20463 case ERROR_MARK:
20464 case SAVE_EXPR:
20465 return;
20466
20467 CASE_CONVERT:
20468 case VIEW_CONVERT_EXPR:
20469 value = TREE_OPERAND (value, 0);
20470 break;
20471
20472 default:
20473 strip_conversions = false;
20474 break;
20475 }
20476
20477 /* If possible and permitted, output the attribute as a constant. */
20478 if ((forms & dw_scalar_form_constant) != 0
20479 && TREE_CODE (value) == INTEGER_CST)
20480 {
20481 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20482
20483 /* If HOST_WIDE_INT is big enough then represent the bound as
20484 a constant value. We need to choose a form based on
20485 whether the type is signed or unsigned. We cannot just
20486 call add_AT_unsigned if the value itself is positive
20487 (add_AT_unsigned might add the unsigned value encoded as
20488 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20489 bounds type and then sign extend any unsigned values found
20490 for signed types. This is needed only for
20491 DW_AT_{lower,upper}_bound, since for most other attributes,
20492 consumers will treat DW_FORM_data[1248] as unsigned values,
20493 regardless of the underlying type. */
20494 if (prec <= HOST_BITS_PER_WIDE_INT
20495 || tree_fits_uhwi_p (value))
20496 {
20497 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20498 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20499 else
20500 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20501 }
20502 else
20503 /* Otherwise represent the bound as an unsigned value with
20504 the precision of its type. The precision and signedness
20505 of the type will be necessary to re-interpret it
20506 unambiguously. */
20507 add_AT_wide (die, attr, wi::to_wide (value));
20508 return;
20509 }
20510
20511 /* Otherwise, if it's possible and permitted too, output a reference to
20512 another DIE. */
20513 if ((forms & dw_scalar_form_reference) != 0)
20514 {
20515 tree decl = NULL_TREE;
20516
20517 /* Some type attributes reference an outer type. For instance, the upper
20518 bound of an array may reference an embedding record (this happens in
20519 Ada). */
20520 if (TREE_CODE (value) == COMPONENT_REF
20521 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20522 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20523 decl = TREE_OPERAND (value, 1);
20524
20525 else if (VAR_P (value)
20526 || TREE_CODE (value) == PARM_DECL
20527 || TREE_CODE (value) == RESULT_DECL)
20528 decl = value;
20529
20530 if (decl != NULL_TREE)
20531 {
20532 dw_die_ref decl_die = lookup_decl_die (decl);
20533
20534 /* ??? Can this happen, or should the variable have been bound
20535 first? Probably it can, since I imagine that we try to create
20536 the types of parameters in the order in which they exist in
20537 the list, and won't have created a forward reference to a
20538 later parameter. */
20539 if (decl_die != NULL)
20540 {
20541 add_AT_die_ref (die, attr, decl_die);
20542 return;
20543 }
20544 }
20545 }
20546
20547 /* Last chance: try to create a stack operation procedure to evaluate the
20548 value. Do nothing if even that is not possible or permitted. */
20549 if ((forms & dw_scalar_form_exprloc) == 0)
20550 return;
20551
20552 list = loc_list_from_tree (value, 2, context);
20553 if (context && context->placeholder_arg)
20554 {
20555 placeholder_seen = context->placeholder_seen;
20556 context->placeholder_seen = false;
20557 }
20558 if (list == NULL || single_element_loc_list_p (list))
20559 {
20560 /* If this attribute is neither a reference nor a constant, it is
20561 a DWARF expression rather than a location description. For that,
20562 loc_list_from_tree (value, 0, &context) is needed. */
20563 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20564 if (list2 && single_element_loc_list_p (list2))
20565 {
20566 if (placeholder_seen)
20567 {
20568 struct dwarf_procedure_info dpi;
20569 dpi.fndecl = NULL_TREE;
20570 dpi.args_count = 1;
20571 if (!resolve_args_picking (list2->expr, 1, &dpi))
20572 return;
20573 }
20574 add_AT_loc (die, attr, list2->expr);
20575 return;
20576 }
20577 }
20578
20579 /* If that failed to give a single element location list, fall back to
20580 outputting this as a reference... still if permitted. */
20581 if (list == NULL
20582 || (forms & dw_scalar_form_reference) == 0
20583 || placeholder_seen)
20584 return;
20585
20586 if (current_function_decl == 0)
20587 context_die = comp_unit_die ();
20588 else
20589 context_die = lookup_decl_die (current_function_decl);
20590
20591 decl_die = new_die (DW_TAG_variable, context_die, value);
20592 add_AT_flag (decl_die, DW_AT_artificial, 1);
20593 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20594 context_die);
20595 add_AT_location_description (decl_die, DW_AT_location, list);
20596 add_AT_die_ref (die, attr, decl_die);
20597 }
20598
20599 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20600 default. */
20601
20602 static int
20603 lower_bound_default (void)
20604 {
20605 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20606 {
20607 case DW_LANG_C:
20608 case DW_LANG_C89:
20609 case DW_LANG_C99:
20610 case DW_LANG_C11:
20611 case DW_LANG_C_plus_plus:
20612 case DW_LANG_C_plus_plus_11:
20613 case DW_LANG_C_plus_plus_14:
20614 case DW_LANG_ObjC:
20615 case DW_LANG_ObjC_plus_plus:
20616 return 0;
20617 case DW_LANG_Fortran77:
20618 case DW_LANG_Fortran90:
20619 case DW_LANG_Fortran95:
20620 case DW_LANG_Fortran03:
20621 case DW_LANG_Fortran08:
20622 return 1;
20623 case DW_LANG_UPC:
20624 case DW_LANG_D:
20625 case DW_LANG_Python:
20626 return dwarf_version >= 4 ? 0 : -1;
20627 case DW_LANG_Ada95:
20628 case DW_LANG_Ada83:
20629 case DW_LANG_Cobol74:
20630 case DW_LANG_Cobol85:
20631 case DW_LANG_Modula2:
20632 case DW_LANG_PLI:
20633 return dwarf_version >= 4 ? 1 : -1;
20634 default:
20635 return -1;
20636 }
20637 }
20638
20639 /* Given a tree node describing an array bound (either lower or upper) output
20640 a representation for that bound. */
20641
20642 static void
20643 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20644 tree bound, struct loc_descr_context *context)
20645 {
20646 int dflt;
20647
20648 while (1)
20649 switch (TREE_CODE (bound))
20650 {
20651 /* Strip all conversions. */
20652 CASE_CONVERT:
20653 case VIEW_CONVERT_EXPR:
20654 bound = TREE_OPERAND (bound, 0);
20655 break;
20656
20657 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20658 are even omitted when they are the default. */
20659 case INTEGER_CST:
20660 /* If the value for this bound is the default one, we can even omit the
20661 attribute. */
20662 if (bound_attr == DW_AT_lower_bound
20663 && tree_fits_shwi_p (bound)
20664 && (dflt = lower_bound_default ()) != -1
20665 && tree_to_shwi (bound) == dflt)
20666 return;
20667
20668 /* FALLTHRU */
20669
20670 default:
20671 /* Because of the complex interactions there can be with other GNAT
20672 encodings, GDB isn't yet ready to handle a proper DWARF description
20673 of self-referential subrange bounds: let GNAT encodings do the
20674 magic in such a case. */
20675 if (is_ada ()
20676 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20677 && contains_placeholder_p (bound))
20678 return;
20679
20680 add_scalar_info (subrange_die, bound_attr, bound,
20681 dw_scalar_form_constant
20682 | dw_scalar_form_exprloc
20683 | dw_scalar_form_reference,
20684 context);
20685 return;
20686 }
20687 }
20688
20689 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20690 possibly nested array subscripts into a flat sequence if COLLAPSE_P is true.
20691 Note that the block of subscript information for an array type also
20692 includes information about the element type of the given array type.
20693
20694 This function reuses previously set type and bound information if
20695 available. */
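/* For example, with COLLAPSE_P true, a C array such as

     int a[2][3];

   is described by a single DW_TAG_array_type DIE carrying two
   DW_TAG_subrange_type children, one per dimension, with DW_AT_upper_bound
   1 and 2 respectively (the default C lower bound of 0 is omitted, see
   add_bound_info and lower_bound_default).  */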
20696
20697 static void
20698 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20699 {
20700 unsigned dimension_number;
20701 tree lower, upper;
20702 dw_die_ref child = type_die->die_child;
20703
20704 for (dimension_number = 0;
20705 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20706 type = TREE_TYPE (type), dimension_number++)
20707 {
20708 tree domain = TYPE_DOMAIN (type);
20709
20710 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20711 break;
20712
20713 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20714 and (in GNU C only) variable bounds. Handle all three forms
20715 here. */
20716
20717 /* Find and reuse a previously generated DW_TAG_subrange_type if
20718 available.
20719
20720 For multi-dimensional arrays, as we iterate through the
20721 various dimensions in the enclosing for loop above, we also
20722 iterate through the DIE children and pick at each
20723 DW_TAG_subrange_type previously generated (if available).
20724 Each child DW_TAG_subrange_type DIE describes the range of
20725 the current dimension. At this point we should have as many
20726 DW_TAG_subrange_type's as we have dimensions in the
20727 array. */
20728 dw_die_ref subrange_die = NULL;
20729 if (child)
20730 while (1)
20731 {
20732 child = child->die_sib;
20733 if (child->die_tag == DW_TAG_subrange_type)
20734 subrange_die = child;
20735 if (child == type_die->die_child)
20736 {
20737 /* If we wrapped around, stop looking next time. */
20738 child = NULL;
20739 break;
20740 }
20741 if (child->die_tag == DW_TAG_subrange_type)
20742 break;
20743 }
20744 if (!subrange_die)
20745 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20746
20747 if (domain)
20748 {
20749 /* We have an array type with specified bounds. */
20750 lower = TYPE_MIN_VALUE (domain);
20751 upper = TYPE_MAX_VALUE (domain);
20752
20753 /* Define the index type. */
20754 if (TREE_TYPE (domain)
20755 && !get_AT (subrange_die, DW_AT_type))
20756 {
20757 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20758 TREE_TYPE field. We can't emit debug info for this
20759 because it is an unnamed integral type. */
20760 if (TREE_CODE (domain) == INTEGER_TYPE
20761 && TYPE_NAME (domain) == NULL_TREE
20762 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20763 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20764 ;
20765 else
20766 add_type_attribute (subrange_die, TREE_TYPE (domain),
20767 TYPE_UNQUALIFIED, false, type_die);
20768 }
20769
20770 /* ??? If upper is NULL, the array has unspecified length,
20771 but it does have a lower bound. This happens with Fortran
20772 dimension arr(N:*).
20773 Since the debugger is definitely going to need to know N
20774 to produce useful results, go ahead and output the lower
20775 bound solo, and hope the debugger can cope. */
20776
20777 if (!get_AT (subrange_die, DW_AT_lower_bound))
20778 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20779 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20780 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20781 }
20782
20783 /* Otherwise we have an array type with an unspecified length. The
20784 DWARF-2 spec does not say how to handle this; let's just leave out the
20785 bounds. */
20786 }
20787 }
20788
20789 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20790
20791 static void
20792 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20793 {
20794 dw_die_ref decl_die;
20795 HOST_WIDE_INT size;
20796 dw_loc_descr_ref size_expr = NULL;
20797
20798 switch (TREE_CODE (tree_node))
20799 {
20800 case ERROR_MARK:
20801 size = 0;
20802 break;
20803 case ENUMERAL_TYPE:
20804 case RECORD_TYPE:
20805 case UNION_TYPE:
20806 case QUAL_UNION_TYPE:
20807 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20808 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20809 {
20810 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20811 return;
20812 }
20813 size_expr = type_byte_size (tree_node, &size);
20814 break;
20815 case FIELD_DECL:
20816 /* For a data member of a struct or union, the DW_AT_byte_size is
20817 generally given as the number of bytes normally allocated for an
20818 object of the *declared* type of the member itself. This is true
20819 even for bit-fields. */
20820 size = int_size_in_bytes (field_type (tree_node));
20821 break;
20822 default:
20823 gcc_unreachable ();
20824 }
20825
20826 /* Support for dynamically-sized objects was introduced by DWARFv3.
20827 At the moment, GDB does not handle variable byte sizes very well,
20828 though. */
20829 if ((dwarf_version >= 3 || !dwarf_strict)
20830 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20831 && size_expr != NULL)
20832 add_AT_loc (die, DW_AT_byte_size, size_expr);
20833
20834 /* Note that `size' might be -1 when we get to this point. If it is, that
20835 indicates that the byte size of the entity in question is variable and
20836 that we could not generate a DWARF expression that computes it. */
20837 if (size >= 0)
20838 add_AT_unsigned (die, DW_AT_byte_size, size);
20839 }
20840
20841 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20842 alignment. */
20843
20844 static void
20845 add_alignment_attribute (dw_die_ref die, tree tree_node)
20846 {
20847 if (dwarf_version < 5 && dwarf_strict)
20848 return;
20849
20850 unsigned align;
20851
20852 if (DECL_P (tree_node))
20853 {
20854 if (!DECL_USER_ALIGN (tree_node))
20855 return;
20856
20857 align = DECL_ALIGN_UNIT (tree_node);
20858 }
20859 else if (TYPE_P (tree_node))
20860 {
20861 if (!TYPE_USER_ALIGN (tree_node))
20862 return;
20863
20864 align = TYPE_ALIGN_UNIT (tree_node);
20865 }
20866 else
20867 gcc_unreachable ();
20868
20869 add_AT_unsigned (die, DW_AT_alignment, align);
20870 }
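
/* For illustration: DW_AT_alignment is only emitted for alignment the
   user asked for explicitly, e.g.

     struct S { char c; } __attribute__ ((aligned (16)));

   sets TYPE_USER_ALIGN, so the DIE for S would carry DW_AT_alignment 16
   (when DWARF 5 or non-strict DWARF is in use), while a type with only
   its natural alignment gets no such attribute.  */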
20871
20872 /* For a FIELD_DECL node which represents a bit-field, output an attribute
20873 which specifies the distance in bits from the highest order bit of the
20874 "containing object" for the bit-field to the highest order bit of the
20875 bit-field itself.
20876
20877 For any given bit-field, the "containing object" is a hypothetical object
20878 (of some integral or enum type) within which the given bit-field lives. The
20879 type of this hypothetical "containing object" is always the same as the
20880 declared type of the individual bit-field itself. The determination of the
20881 exact location of the "containing object" for a bit-field is rather
20882 complicated. It's handled by the `field_byte_offset' function (above).
20883
20884 CTX is required: see the comment for VLR_CONTEXT.
20885
20886 Note that it is the size (in bytes) of the hypothetical "containing object"
20887 which will be given in the DW_AT_byte_size attribute for this bit-field.
20888 (See `add_byte_size_attribute' above). */
20889
20890 static inline void
20891 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
20892 {
20893 HOST_WIDE_INT object_offset_in_bytes;
20894 tree original_type = DECL_BIT_FIELD_TYPE (decl);
20895 HOST_WIDE_INT bitpos_int;
20896 HOST_WIDE_INT highest_order_object_bit_offset;
20897 HOST_WIDE_INT highest_order_field_bit_offset;
20898 HOST_WIDE_INT bit_offset;
20899
20900 field_byte_offset (decl, ctx, &object_offset_in_bytes);
20901
20902 /* Must be a field and a bit field. */
20903 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
20904
20905 /* We can't yet handle bit-fields whose offsets are variable, so if we
20906 encounter such things, just return without generating any attribute
20907 whatsoever. Likewise if the size is variable or too large. */
20908 if (! tree_fits_shwi_p (bit_position (decl))
20909 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
20910 return;
20911
20912 bitpos_int = int_bit_position (decl);
20913
20914 /* Note that the bit offset is always the distance (in bits) from the
20915 highest-order bit of the "containing object" to the highest-order bit of
20916 the bit-field itself. Since the "high-order end" of any object or field
20917 is different on big-endian and little-endian machines, the computation
20918 below must take account of these differences. */
20919 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
20920 highest_order_field_bit_offset = bitpos_int;
20921
20922 if (! BYTES_BIG_ENDIAN)
20923 {
20924 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
20925 highest_order_object_bit_offset +=
20926 simple_type_size_in_bits (original_type);
20927 }
20928
20929 bit_offset
20930 = (! BYTES_BIG_ENDIAN
20931 ? highest_order_object_bit_offset - highest_order_field_bit_offset
20932 : highest_order_field_bit_offset - highest_order_object_bit_offset);
20933
20934 if (bit_offset < 0)
20935 add_AT_int (die, DW_AT_bit_offset, bit_offset);
20936 else
20937 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
20938 }
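
/* Worked example (illustrative, assuming the usual layout): given

     struct s { int a : 3; int b : 5; };

   the containing object for B is the declared 4-byte int starting at
   byte 0 of the struct.  On a little-endian target int_bit_position (b)
   is 3 and DECL_SIZE (b) is 5, so highest_order_field_bit_offset is
   3 + 5 = 8, highest_order_object_bit_offset is 0 * 8 + 32 = 32, and
   DW_AT_bit_offset is 32 - 8 = 24.  On a big-endian target the same
   field would get DW_AT_bit_offset 3.  */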
20939
20940 /* For a FIELD_DECL node which represents a bit field, output an attribute
20941 which specifies the length in bits of the given field. */
20942
20943 static inline void
20944 add_bit_size_attribute (dw_die_ref die, tree decl)
20945 {
20946 /* Must be a field and a bit field. */
20947 gcc_assert (TREE_CODE (decl) == FIELD_DECL
20948 && DECL_BIT_FIELD_TYPE (decl));
20949
20950 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
20951 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
20952 }
20953
20954 /* If the compiled language is ANSI C, add a 'prototyped' attribute
20955 when argument types are given for the function's parameters. */
20956
20957 static inline void
20958 add_prototyped_attribute (dw_die_ref die, tree func_type)
20959 {
20960 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20961 {
20962 case DW_LANG_C:
20963 case DW_LANG_C89:
20964 case DW_LANG_C99:
20965 case DW_LANG_C11:
20966 case DW_LANG_ObjC:
20967 if (prototype_p (func_type))
20968 add_AT_flag (die, DW_AT_prototyped, 1);
20969 break;
20970 default:
20971 break;
20972 }
20973 }
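
/* For example, when compiling C a declaration such as

     int f (void);

   is prototyped and gets DW_AT_prototyped, whereas an old-style (K&R)
   declaration such as "int g ();" is not and gets no such flag.
   Languages other than those listed above fall through the default
   case and never get the attribute here.  */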
20974
20975 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
20976 by looking in the type declaration, the object declaration equate table or
20977 the block mapping. */
20978
20979 static inline dw_die_ref
20980 add_abstract_origin_attribute (dw_die_ref die, tree origin)
20981 {
20982 dw_die_ref origin_die = NULL;
20983
20984 if (DECL_P (origin))
20985 {
20986 dw_die_ref c;
20987 origin_die = lookup_decl_die (origin);
20988 /* "Unwrap" the decl's DIE which we put in the imported unit context.
20989 We are looking for the abstract copy here. */
20990 if (in_lto_p
20991 && origin_die
20992 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
20993 /* ??? Identify this better. */
20994 && c->with_offset)
20995 origin_die = c;
20996 }
20997 else if (TYPE_P (origin))
20998 origin_die = lookup_type_die (origin);
20999 else if (TREE_CODE (origin) == BLOCK)
21000 origin_die = BLOCK_DIE (origin);
21001
21002 /* XXX: Functions that are never lowered don't always have correct block
21003 trees (in the case of Java, they simply have no block tree, as in some
21004 other languages). For these functions, there is nothing we can really do to
21005 output correct debug info for inlined functions in all cases. Rather
21006 than die, we'll just produce deficient debug info now, in that we will
21007 have variables without a proper abstract origin. In the future, when all
21008 functions are lowered, we should re-add a gcc_assert (origin_die)
21009 here. */
21010
21011 if (origin_die)
21012 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21013 return origin_die;
21014 }
21015
21016 /* We do not currently support the pure_virtual attribute. */
21017
21018 static inline void
21019 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21020 {
21021 if (DECL_VINDEX (func_decl))
21022 {
21023 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21024
21025 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21026 add_AT_loc (die, DW_AT_vtable_elem_location,
21027 new_loc_descr (DW_OP_constu,
21028 tree_to_shwi (DECL_VINDEX (func_decl)),
21029 0));
21030
21031 /* GNU extension: Record what type this method came from originally. */
21032 if (debug_info_level > DINFO_LEVEL_TERSE
21033 && DECL_CONTEXT (func_decl))
21034 add_AT_die_ref (die, DW_AT_containing_type,
21035 lookup_type_die (DECL_CONTEXT (func_decl)));
21036 }
21037 }
21038 \f
21039 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21040 given decl. This used to be a vendor extension until DWARF 4
21041 standardized it. */
21042
21043 static void
21044 add_linkage_attr (dw_die_ref die, tree decl)
21045 {
21046 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21047
21048 /* Mimic what assemble_name_raw does with a leading '*'. */
21049 if (name[0] == '*')
21050 name = &name[1];
21051
21052 if (dwarf_version >= 4)
21053 add_AT_string (die, DW_AT_linkage_name, name);
21054 else
21055 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21056 }
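
/* Rough example: for a C++ function "void foo ()" the assembler name is
   the mangled "_Z3foov", which differs from DECL_NAME, so the callers
   below end up attaching DW_AT_linkage_name (or DW_AT_MIPS_linkage_name
   before DWARF 4) with the value "_Z3foov" to the subprogram DIE.  */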
21057
21058 /* Add source coordinate attributes for the given decl. */
21059
21060 static void
21061 add_src_coords_attributes (dw_die_ref die, tree decl)
21062 {
21063 expanded_location s;
21064
21065 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21066 return;
21067 s = expand_location (DECL_SOURCE_LOCATION (decl));
21068 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21069 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21070 if (debug_column_info && s.column)
21071 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21072 }
21073
21074 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21075
21076 static void
21077 add_linkage_name_raw (dw_die_ref die, tree decl)
21078 {
21079 /* Defer until we have an assembler name set. */
21080 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21081 {
21082 limbo_die_node *asm_name;
21083
21084 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21085 asm_name->die = die;
21086 asm_name->created_for = decl;
21087 asm_name->next = deferred_asm_name;
21088 deferred_asm_name = asm_name;
21089 }
21090 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21091 add_linkage_attr (die, decl);
21092 }
21093
21094 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21095
21096 static void
21097 add_linkage_name (dw_die_ref die, tree decl)
21098 {
21099 if (debug_info_level > DINFO_LEVEL_NONE
21100 && VAR_OR_FUNCTION_DECL_P (decl)
21101 && TREE_PUBLIC (decl)
21102 && !(VAR_P (decl) && DECL_REGISTER (decl))
21103 && die->die_tag != DW_TAG_member)
21104 add_linkage_name_raw (die, decl);
21105 }
21106
21107 /* Add a DW_AT_name attribute and source coordinate attribute for the
21108 given decl, but only if it actually has a name. */
21109
21110 static void
21111 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21112 bool no_linkage_name)
21113 {
21114 tree decl_name;
21115
21116 decl_name = DECL_NAME (decl);
21117 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21118 {
21119 const char *name = dwarf2_name (decl, 0);
21120 if (name)
21121 add_name_attribute (die, name);
21122 if (! DECL_ARTIFICIAL (decl))
21123 add_src_coords_attributes (die, decl);
21124
21125 if (!no_linkage_name)
21126 add_linkage_name (die, decl);
21127 }
21128
21129 #ifdef VMS_DEBUGGING_INFO
21130 /* Get the function's name, as described by its RTL. This may be different
21131 from the DECL_NAME name used in the source file. */
21132 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21133 {
21134 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21135 XEXP (DECL_RTL (decl), 0), false);
21136 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21137 }
21138 #endif /* VMS_DEBUGGING_INFO */
21139 }
21140
21141 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21142
21143 static void
21144 add_discr_value (dw_die_ref die, dw_discr_value *value)
21145 {
21146 dw_attr_node attr;
21147
21148 attr.dw_attr = DW_AT_discr_value;
21149 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21150 attr.dw_attr_val.val_entry = NULL;
21151 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21152 if (value->pos)
21153 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21154 else
21155 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21156 add_dwarf_attr (die, &attr);
21157 }
21158
21159 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21160
21161 static void
21162 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21163 {
21164 dw_attr_node attr;
21165
21166 attr.dw_attr = DW_AT_discr_list;
21167 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21168 attr.dw_attr_val.val_entry = NULL;
21169 attr.dw_attr_val.v.val_discr_list = discr_list;
21170 add_dwarf_attr (die, &attr);
21171 }
21172
21173 static inline dw_discr_list_ref
21174 AT_discr_list (dw_attr_node *attr)
21175 {
21176 return attr->dw_attr_val.v.val_discr_list;
21177 }
21178
21179 #ifdef VMS_DEBUGGING_INFO
21180 /* Output the debug main pointer DIE for VMS. */
21181
21182 void
21183 dwarf2out_vms_debug_main_pointer (void)
21184 {
21185 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21186 dw_die_ref die;
21187
21188 /* Allocate the VMS debug main subprogram die. */
21189 die = new_die_raw (DW_TAG_subprogram);
21190 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21191 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21192 current_function_funcdef_no);
21193 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21194
21195 /* Make it the first child of comp_unit_die (). */
21196 die->die_parent = comp_unit_die ();
21197 if (comp_unit_die ()->die_child)
21198 {
21199 die->die_sib = comp_unit_die ()->die_child->die_sib;
21200 comp_unit_die ()->die_child->die_sib = die;
21201 }
21202 else
21203 {
21204 die->die_sib = die;
21205 comp_unit_die ()->die_child = die;
21206 }
21207 }
21208 #endif /* VMS_DEBUGGING_INFO */
21209
21210 /* Push a new declaration scope. */
21211
21212 static void
21213 push_decl_scope (tree scope)
21214 {
21215 vec_safe_push (decl_scope_table, scope);
21216 }
21217
21218 /* Pop a declaration scope. */
21219
21220 static inline void
21221 pop_decl_scope (void)
21222 {
21223 decl_scope_table->pop ();
21224 }
21225
21226 /* walk_tree helper function for uses_local_type, below. */
21227
21228 static tree
21229 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21230 {
21231 if (!TYPE_P (*tp))
21232 *walk_subtrees = 0;
21233 else
21234 {
21235 tree name = TYPE_NAME (*tp);
21236 if (name && DECL_P (name) && decl_function_context (name))
21237 return *tp;
21238 }
21239 return NULL_TREE;
21240 }
21241
21242 /* If TYPE involves a function-local type (including a local typedef to a
21243 non-local type), returns that type; otherwise returns NULL_TREE. */
21244
21245 static tree
21246 uses_local_type (tree type)
21247 {
21248 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21249 return used;
21250 }
21251
21252 /* Return the DIE for the scope that immediately contains this type.
21253 Non-named types that do not involve a function-local type get global
21254 scope. Named types nested in namespaces or other types get their
21255 containing scope. All other types (i.e. function-local named types) get
21256 the current active scope. */
21257
21258 static dw_die_ref
21259 scope_die_for (tree t, dw_die_ref context_die)
21260 {
21261 dw_die_ref scope_die = NULL;
21262 tree containing_scope;
21263
21264 /* Non-types always go in the current scope. */
21265 gcc_assert (TYPE_P (t));
21266
21267 /* Use the scope of the typedef, rather than the scope of the type
21268 it refers to. */
21269 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21270 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21271 else
21272 containing_scope = TYPE_CONTEXT (t);
21273
21274 /* Use the containing namespace if there is one. */
21275 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21276 {
21277 if (context_die == lookup_decl_die (containing_scope))
21278 /* OK */;
21279 else if (debug_info_level > DINFO_LEVEL_TERSE)
21280 context_die = get_context_die (containing_scope);
21281 else
21282 containing_scope = NULL_TREE;
21283 }
21284
21285 /* Ignore function type "scopes" from the C frontend. They mean that
21286 a tagged type is local to a parmlist of a function declarator, but
21287 that isn't useful to DWARF. */
21288 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21289 containing_scope = NULL_TREE;
21290
21291 if (SCOPE_FILE_SCOPE_P (containing_scope))
21292 {
21293 /* If T uses a local type keep it local as well, to avoid references
21294 to function-local DIEs from outside the function. */
21295 if (current_function_decl && uses_local_type (t))
21296 scope_die = context_die;
21297 else
21298 scope_die = comp_unit_die ();
21299 }
21300 else if (TYPE_P (containing_scope))
21301 {
21302 /* For types, we can just look up the appropriate DIE. */
21303 if (debug_info_level > DINFO_LEVEL_TERSE)
21304 scope_die = get_context_die (containing_scope);
21305 else
21306 {
21307 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21308 if (scope_die == NULL)
21309 scope_die = comp_unit_die ();
21310 }
21311 }
21312 else
21313 scope_die = context_die;
21314
21315 return scope_die;
21316 }
21317
21318 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21319
21320 static inline int
21321 local_scope_p (dw_die_ref context_die)
21322 {
21323 for (; context_die; context_die = context_die->die_parent)
21324 if (context_die->die_tag == DW_TAG_inlined_subroutine
21325 || context_die->die_tag == DW_TAG_subprogram)
21326 return 1;
21327
21328 return 0;
21329 }
21330
21331 /* Returns nonzero if CONTEXT_DIE is a class. */
21332
21333 static inline int
21334 class_scope_p (dw_die_ref context_die)
21335 {
21336 return (context_die
21337 && (context_die->die_tag == DW_TAG_structure_type
21338 || context_die->die_tag == DW_TAG_class_type
21339 || context_die->die_tag == DW_TAG_interface_type
21340 || context_die->die_tag == DW_TAG_union_type));
21341 }
21342
21343 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21344 whether or not to treat a DIE in this context as a declaration. */
21345
21346 static inline int
21347 class_or_namespace_scope_p (dw_die_ref context_die)
21348 {
21349 return (class_scope_p (context_die)
21350 || (context_die && context_die->die_tag == DW_TAG_namespace));
21351 }
21352
21353 /* Many forms of DIEs require a "type description" attribute. This
21354 routine locates the proper "type descriptor" die for the type given
21355 by 'type' plus any additional qualifiers given by 'cv_quals', and
21356 adds a DW_AT_type attribute below the given die. */
21357
21358 static void
21359 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21360 bool reverse, dw_die_ref context_die)
21361 {
21362 enum tree_code code = TREE_CODE (type);
21363 dw_die_ref type_die = NULL;
21364
21365 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21366 or fixed-point type, use the inner type. This is because we have no
21367 support for unnamed types in base_type_die. This can happen if this is
21368 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21369 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21370 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21371 type = TREE_TYPE (type), code = TREE_CODE (type);
21372
21373 if (code == ERROR_MARK
21374 /* Handle a special case. For functions whose return type is void, we
21375 generate *no* type attribute. (Note that no object may have type
21376 `void', so this only applies to function return types). */
21377 || code == VOID_TYPE)
21378 return;
21379
21380 type_die = modified_type_die (type,
21381 cv_quals | TYPE_QUALS (type),
21382 reverse,
21383 context_die);
21384
21385 if (type_die != NULL)
21386 add_AT_die_ref (object_die, DW_AT_type, type_die);
21387 }
21388
21389 /* Given an object die, add the calling convention attribute for the
21390 function call type. */
21391 static void
21392 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21393 {
21394 enum dwarf_calling_convention value = DW_CC_normal;
21395
21396 value = ((enum dwarf_calling_convention)
21397 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21398
21399 if (is_fortran ()
21400 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21401 {
21402 /* DWARF 2 doesn't provide a way to identify a program's source-level
21403 entry point. DW_AT_calling_convention attributes are only meant
21404 to describe functions' calling conventions. However, lacking a
21405 better way to signal the Fortran main program, we used this for
21406 a long time, following existing custom. Now, DWARF 4 has
21407 DW_AT_main_subprogram, which we add below, but some tools still
21408 rely on the old way, which we thus keep. */
21409 value = DW_CC_program;
21410
21411 if (dwarf_version >= 4 || !dwarf_strict)
21412 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21413 }
21414
21415 /* Only add the attribute if the backend requests it, and the value
21416 is not DW_CC_normal. */
21417 if (value && (value != DW_CC_normal))
21418 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21419 }
21420
21421 /* Given a tree pointer to a struct, class, union, or enum type node, return
21422 a pointer to the (string) tag name for the given type, or zero if the type
21423 was declared without a tag. */
21424
21425 static const char *
21426 type_tag (const_tree type)
21427 {
21428 const char *name = 0;
21429
21430 if (TYPE_NAME (type) != 0)
21431 {
21432 tree t = 0;
21433
21434 /* Find the IDENTIFIER_NODE for the type name. */
21435 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21436 && !TYPE_NAMELESS (type))
21437 t = TYPE_NAME (type);
21438
21439 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21440 a TYPE_DECL node, regardless of whether or not a `typedef' was
21441 involved. */
21442 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21443 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21444 {
21445 /* We want to be extra verbose. Don't call dwarf_name if
21446 DECL_NAME isn't set. The default hook for decl_printable_name
21447 doesn't like that, and in this context it's correct to return
21448 0, instead of "<anonymous>" or the like. */
21449 if (DECL_NAME (TYPE_NAME (type))
21450 && !DECL_NAMELESS (TYPE_NAME (type)))
21451 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21452 }
21453
21454 /* Now get the name as a string, or invent one. */
21455 if (!name && t != 0)
21456 name = IDENTIFIER_POINTER (t);
21457 }
21458
21459 return (name == 0 || *name == '\0') ? 0 : name;
21460 }
21461
21462 /* Return the type associated with a data member, make a special check
21463 for bit field types. */
21464
21465 static inline tree
21466 member_declared_type (const_tree member)
21467 {
21468 return (DECL_BIT_FIELD_TYPE (member)
21469 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21470 }
21471
21472 /* Get the decl's label, as described by its RTL. This may be different
21473 from the DECL_NAME name used in the source file. */
21474
21475 #if 0
21476 static const char *
21477 decl_start_label (tree decl)
21478 {
21479 rtx x;
21480 const char *fnname;
21481
21482 x = DECL_RTL (decl);
21483 gcc_assert (MEM_P (x));
21484
21485 x = XEXP (x, 0);
21486 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21487
21488 fnname = XSTR (x, 0);
21489 return fnname;
21490 }
21491 #endif
21492 \f
21493 /* For variable-length arrays that have been previously generated, but
21494 may be incomplete due to missing subscript info, fill the subscript
21495 info. Return TRUE if this is one of those cases. */
21496 static bool
21497 fill_variable_array_bounds (tree type)
21498 {
21499 if (TREE_ASM_WRITTEN (type)
21500 && TREE_CODE (type) == ARRAY_TYPE
21501 && variably_modified_type_p (type, NULL))
21502 {
21503 dw_die_ref array_die = lookup_type_die (type);
21504 if (!array_die)
21505 return false;
21506 add_subscript_info (array_die, type, !is_ada ());
21507 return true;
21508 }
21509 return false;
21510 }
21511
21512 /* These routines generate the internal representation of the DIE's for
21513 the compilation unit. Debugging information is collected by walking
21514 the declaration trees passed in from dwarf2out_decl(). */
21515
21516 static void
21517 gen_array_type_die (tree type, dw_die_ref context_die)
21518 {
21519 dw_die_ref array_die;
21520
21521 /* GNU compilers represent multidimensional array types as sequences of one
21522 dimensional array types whose element types are themselves array types.
21523 We sometimes squish that down to a single array_type DIE with multiple
21524 subscripts in the Dwarf debugging info. The draft Dwarf specification
21525 says that we are allowed to do this kind of compression in C, because
21526 there is no difference between an array of arrays and a multidimensional
21527 array. We don't do this for Ada to remain as close as possible to the
21528 actual representation, which is especially important given the language's
21529 flexibility wrt arrays of variable size. */
21530
21531 bool collapse_nested_arrays = !is_ada ();
21532
21533 if (fill_variable_array_bounds (type))
21534 return;
21535
21536 dw_die_ref scope_die = scope_die_for (type, context_die);
21537 tree element_type;
21538
21539 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21540 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21541 if (TYPE_STRING_FLAG (type)
21542 && TREE_CODE (type) == ARRAY_TYPE
21543 && is_fortran ()
21544 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21545 {
21546 HOST_WIDE_INT size;
21547
21548 array_die = new_die (DW_TAG_string_type, scope_die, type);
21549 add_name_attribute (array_die, type_tag (type));
21550 equate_type_number_to_die (type, array_die);
21551 size = int_size_in_bytes (type);
21552 if (size >= 0)
21553 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21554 /* ??? We can't annotate types late, but for LTO we may not
21555 generate a location early either (gfortran.dg/save_6.f90). */
21556 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21557 && TYPE_DOMAIN (type) != NULL_TREE
21558 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21559 {
21560 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21561 tree rszdecl = szdecl;
21562
21563 size = int_size_in_bytes (TREE_TYPE (szdecl));
21564 if (!DECL_P (szdecl))
21565 {
21566 if (TREE_CODE (szdecl) == INDIRECT_REF
21567 && DECL_P (TREE_OPERAND (szdecl, 0)))
21568 {
21569 rszdecl = TREE_OPERAND (szdecl, 0);
21570 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21571 != DWARF2_ADDR_SIZE)
21572 size = 0;
21573 }
21574 else
21575 size = 0;
21576 }
21577 if (size > 0)
21578 {
21579 dw_loc_list_ref loc
21580 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21581 NULL);
21582 if (loc)
21583 {
21584 add_AT_location_description (array_die, DW_AT_string_length,
21585 loc);
21586 if (size != DWARF2_ADDR_SIZE)
21587 add_AT_unsigned (array_die, dwarf_version >= 5
21588 ? DW_AT_string_length_byte_size
21589 : DW_AT_byte_size, size);
21590 }
21591 }
21592 }
21593 return;
21594 }
21595
21596 array_die = new_die (DW_TAG_array_type, scope_die, type);
21597 add_name_attribute (array_die, type_tag (type));
21598 equate_type_number_to_die (type, array_die);
21599
21600 if (TREE_CODE (type) == VECTOR_TYPE)
21601 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21602
21603 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21604 if (is_fortran ()
21605 && TREE_CODE (type) == ARRAY_TYPE
21606 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21607 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21608 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21609
21610 #if 0
21611 /* We default the array ordering. Debuggers will probably do the right
21612 things even if DW_AT_ordering is not present. It's not even an issue
21613 until we start to get into multidimensional arrays anyway. If a debugger
21614 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21615 then we'll have to put the DW_AT_ordering attribute back in. (But if
21616 and when we find out that we need to put these in, we will only do so
21617 for multidimensional arrays.) */
21618 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21619 #endif
21620
21621 if (TREE_CODE (type) == VECTOR_TYPE)
21622 {
21623 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21624 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21625 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21626 add_bound_info (subrange_die, DW_AT_upper_bound,
21627 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21628 }
21629 else
21630 add_subscript_info (array_die, type, collapse_nested_arrays);
21631
21632 /* Add representation of the type of the elements of this array type and
21633 emit the corresponding DIE if we haven't done it already. */
21634 element_type = TREE_TYPE (type);
21635 if (collapse_nested_arrays)
21636 while (TREE_CODE (element_type) == ARRAY_TYPE)
21637 {
21638 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21639 break;
21640 element_type = TREE_TYPE (element_type);
21641 }
21642
21643 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21644 TREE_CODE (type) == ARRAY_TYPE
21645 && TYPE_REVERSE_STORAGE_ORDER (type),
21646 context_die);
21647
21648 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21649 if (TYPE_ARTIFICIAL (type))
21650 add_AT_flag (array_die, DW_AT_artificial, 1);
21651
21652 if (get_AT (array_die, DW_AT_name))
21653 add_pubtype (type, array_die);
21654
21655 add_alignment_attribute (array_die, type);
21656 }
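
/* Illustrative sketch of the collapsing described above: for the C
   declaration

     int a[2][3];

   the front end builds an array of arrays, but since
   collapse_nested_arrays is true for C a single DW_TAG_array_type is
   emitted whose element type is int and which has two
   DW_TAG_subrange_type children, roughly for the ranges 0..1 and 0..2.
   For Ada the nested representation is kept instead.  */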
21657
21658 /* This routine generates a DIE for an array with a hidden descriptor;
21659 the details are filled into *info by a langhook. */
21660
21661 static void
21662 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21663 dw_die_ref context_die)
21664 {
21665 const dw_die_ref scope_die = scope_die_for (type, context_die);
21666 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21667 struct loc_descr_context context = { type, info->base_decl, NULL,
21668 false, false };
21669 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21670 int dim;
21671
21672 add_name_attribute (array_die, type_tag (type));
21673 equate_type_number_to_die (type, array_die);
21674
21675 if (info->ndimensions > 1)
21676 switch (info->ordering)
21677 {
21678 case array_descr_ordering_row_major:
21679 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21680 break;
21681 case array_descr_ordering_column_major:
21682 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21683 break;
21684 default:
21685 break;
21686 }
21687
21688 if (dwarf_version >= 3 || !dwarf_strict)
21689 {
21690 if (info->data_location)
21691 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21692 dw_scalar_form_exprloc, &context);
21693 if (info->associated)
21694 add_scalar_info (array_die, DW_AT_associated, info->associated,
21695 dw_scalar_form_constant
21696 | dw_scalar_form_exprloc
21697 | dw_scalar_form_reference, &context);
21698 if (info->allocated)
21699 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21700 dw_scalar_form_constant
21701 | dw_scalar_form_exprloc
21702 | dw_scalar_form_reference, &context);
21703 if (info->stride)
21704 {
21705 const enum dwarf_attribute attr
21706 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21707 const int forms
21708 = (info->stride_in_bits)
21709 ? dw_scalar_form_constant
21710 : (dw_scalar_form_constant
21711 | dw_scalar_form_exprloc
21712 | dw_scalar_form_reference);
21713
21714 add_scalar_info (array_die, attr, info->stride, forms, &context);
21715 }
21716 }
21717 if (dwarf_version >= 5)
21718 {
21719 if (info->rank)
21720 {
21721 add_scalar_info (array_die, DW_AT_rank, info->rank,
21722 dw_scalar_form_constant
21723 | dw_scalar_form_exprloc, &context);
21724 subrange_tag = DW_TAG_generic_subrange;
21725 context.placeholder_arg = true;
21726 }
21727 }
21728
21729 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21730
21731 for (dim = 0; dim < info->ndimensions; dim++)
21732 {
21733 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21734
21735 if (info->dimen[dim].bounds_type)
21736 add_type_attribute (subrange_die,
21737 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21738 false, context_die);
21739 if (info->dimen[dim].lower_bound)
21740 add_bound_info (subrange_die, DW_AT_lower_bound,
21741 info->dimen[dim].lower_bound, &context);
21742 if (info->dimen[dim].upper_bound)
21743 add_bound_info (subrange_die, DW_AT_upper_bound,
21744 info->dimen[dim].upper_bound, &context);
21745 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21746 add_scalar_info (subrange_die, DW_AT_byte_stride,
21747 info->dimen[dim].stride,
21748 dw_scalar_form_constant
21749 | dw_scalar_form_exprloc
21750 | dw_scalar_form_reference,
21751 &context);
21752 }
21753
21754 gen_type_die (info->element_type, context_die);
21755 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21756 TREE_CODE (type) == ARRAY_TYPE
21757 && TYPE_REVERSE_STORAGE_ORDER (type),
21758 context_die);
21759
21760 if (get_AT (array_die, DW_AT_name))
21761 add_pubtype (type, array_die);
21762
21763 add_alignment_attribute (array_die, type);
21764 }
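
/* Hedged example: for a Fortran allocatable array like
   "real, allocatable :: a(:,:)" the langhook typically fills *info with
   a data-location expression, an "allocated" predicate and
   per-dimension bounds and strides read from the hidden descriptor, so
   the DW_TAG_array_type above ends up with DW_AT_data_location,
   DW_AT_allocated and two DW_TAG_subrange_type children whose bounds
   are location expressions rather than constants.  */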
21765
21766 #if 0
21767 static void
21768 gen_entry_point_die (tree decl, dw_die_ref context_die)
21769 {
21770 tree origin = decl_ultimate_origin (decl);
21771 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21772
21773 if (origin != NULL)
21774 add_abstract_origin_attribute (decl_die, origin);
21775 else
21776 {
21777 add_name_and_src_coords_attributes (decl_die, decl);
21778 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21779 TYPE_UNQUALIFIED, false, context_die);
21780 }
21781
21782 if (DECL_ABSTRACT_P (decl))
21783 equate_decl_number_to_die (decl, decl_die);
21784 else
21785 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21786 }
21787 #endif
21788
21789 /* Walk through the list of incomplete types again, trying once more to
21790 emit full debugging info for them. */
21791
21792 static void
21793 retry_incomplete_types (void)
21794 {
21795 set_early_dwarf s;
21796 int i;
21797
21798 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21799 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21800 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21801 vec_safe_truncate (incomplete_types, 0);
21802 }
21803
21804 /* Determine what tag to use for a record type. */
21805
21806 static enum dwarf_tag
21807 record_type_tag (tree type)
21808 {
21809 if (! lang_hooks.types.classify_record)
21810 return DW_TAG_structure_type;
21811
21812 switch (lang_hooks.types.classify_record (type))
21813 {
21814 case RECORD_IS_STRUCT:
21815 return DW_TAG_structure_type;
21816
21817 case RECORD_IS_CLASS:
21818 return DW_TAG_class_type;
21819
21820 case RECORD_IS_INTERFACE:
21821 if (dwarf_version >= 3 || !dwarf_strict)
21822 return DW_TAG_interface_type;
21823 return DW_TAG_structure_type;
21824
21825 default:
21826 gcc_unreachable ();
21827 }
21828 }
21829
21830 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21831 include all of the information about the enumeration values also. Each
21832 enumerated type name/value is listed as a child of the enumerated type
21833 DIE. */
21834
21835 static dw_die_ref
21836 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21837 {
21838 dw_die_ref type_die = lookup_type_die (type);
21839
21840 if (type_die == NULL)
21841 {
21842 type_die = new_die (DW_TAG_enumeration_type,
21843 scope_die_for (type, context_die), type);
21844 equate_type_number_to_die (type, type_die);
21845 add_name_attribute (type_die, type_tag (type));
21846 if (dwarf_version >= 4 || !dwarf_strict)
21847 {
21848 if (ENUM_IS_SCOPED (type))
21849 add_AT_flag (type_die, DW_AT_enum_class, 1);
21850 if (ENUM_IS_OPAQUE (type))
21851 add_AT_flag (type_die, DW_AT_declaration, 1);
21852 }
21853 if (!dwarf_strict)
21854 add_AT_unsigned (type_die, DW_AT_encoding,
21855 TYPE_UNSIGNED (type)
21856 ? DW_ATE_unsigned
21857 : DW_ATE_signed);
21858 }
21859 else if (! TYPE_SIZE (type))
21860 return type_die;
21861 else
21862 remove_AT (type_die, DW_AT_declaration);
21863
21864 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21865 given enum type is incomplete, do not generate the DW_AT_byte_size
21866 attribute or the DW_AT_element_list attribute. */
21867 if (TYPE_SIZE (type))
21868 {
21869 tree link;
21870
21871 TREE_ASM_WRITTEN (type) = 1;
21872 add_byte_size_attribute (type_die, type);
21873 add_alignment_attribute (type_die, type);
21874 if (dwarf_version >= 3 || !dwarf_strict)
21875 {
21876 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
21877 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
21878 context_die);
21879 }
21880 if (TYPE_STUB_DECL (type) != NULL_TREE)
21881 {
21882 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
21883 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
21884 }
21885
21886 /* If the first reference to this type was as the return type of an
21887 inline function, then it may not have a parent. Fix this now. */
21888 if (type_die->die_parent == NULL)
21889 add_child_die (scope_die_for (type, context_die), type_die);
21890
21891 for (link = TYPE_VALUES (type);
21892 link != NULL; link = TREE_CHAIN (link))
21893 {
21894 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
21895 tree value = TREE_VALUE (link);
21896
21897 add_name_attribute (enum_die,
21898 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
21899
21900 if (TREE_CODE (value) == CONST_DECL)
21901 value = DECL_INITIAL (value);
21902
21903 if (simple_type_size_in_bits (TREE_TYPE (value))
21904 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
21905 {
21906 /* For constant forms created by add_AT_unsigned, DWARF
21907 consumers (GDB, elfutils, etc.) always zero extend
21908 the value. Only when the actual value is negative
21909 do we need to use add_AT_int to generate a constant
21910 form that can represent negative values. */
21911 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
21912 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
21913 add_AT_unsigned (enum_die, DW_AT_const_value,
21914 (unsigned HOST_WIDE_INT) val);
21915 else
21916 add_AT_int (enum_die, DW_AT_const_value, val);
21917 }
21918 else
21919 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
21920 that here. TODO: This should be re-worked to use correct
21921 signed/unsigned double tags for all cases. */
21922 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
21923 }
21924
21925 add_gnat_descriptive_type_attribute (type_die, type, context_die);
21926 if (TYPE_ARTIFICIAL (type))
21927 add_AT_flag (type_die, DW_AT_artificial, 1);
21928 }
21929 else
21930 add_AT_flag (type_die, DW_AT_declaration, 1);
21931
21932 add_pubtype (type, type_die);
21933
21934 return type_die;
21935 }
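
/* For illustration, a C enum such as

     enum color { RED = 1, BLUE = 2 };

   produces a DW_TAG_enumeration_type DIE carrying DW_AT_byte_size (and,
   for DWARF 3+ or non-strict DWARF, the underlying type), plus one
   DW_TAG_enumerator child per value, each with DW_AT_name and
   DW_AT_const_value (1 and 2 here).  */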
21936
21937 /* Generate a DIE to represent either a real live formal parameter decl or to
21938 represent just the type of some formal parameter position in some function
21939 type.
21940
21941 Note that this routine is a bit unusual because its argument may be a
21942 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
21943 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
21944 node. If it's the former then this function is being called to output a
21945 DIE to represent a formal parameter object (or some inlining thereof). If
21946 it's the latter, then this function is only being called to output a
21947 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
21948 argument type of some subprogram type.
21949 If EMIT_NAME_P is true, name and source coordinate attributes
21950 are emitted. */
21951
21952 static dw_die_ref
21953 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
21954 dw_die_ref context_die)
21955 {
21956 tree node_or_origin = node ? node : origin;
21957 tree ultimate_origin;
21958 dw_die_ref parm_die = NULL;
21959
21960 if (DECL_P (node_or_origin))
21961 {
21962 parm_die = lookup_decl_die (node);
21963
21964 /* If the contexts differ, we may not be talking about the same
21965 thing.
21966 ??? When in LTO the DIE parent is the "abstract" copy and the
21967 context_die is the specification "copy". But this whole block
21968 should eventually be no longer needed. */
21969 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
21970 {
21971 if (!DECL_ABSTRACT_P (node))
21972 {
21973 /* This can happen when creating an inlined instance, in
21974 which case we need to create a new DIE that will get
21975 annotated with DW_AT_abstract_origin. */
21976 parm_die = NULL;
21977 }
21978 else
21979 gcc_unreachable ();
21980 }
21981
21982 if (parm_die && parm_die->die_parent == NULL)
21983 {
21984 /* Check that parm_die already has the right attributes that
21985 we would have added below. If any attributes are
21986 missing, fall through to add them. */
21987 if (! DECL_ABSTRACT_P (node_or_origin)
21988 && !get_AT (parm_die, DW_AT_location)
21989 && !get_AT (parm_die, DW_AT_const_value))
21990 /* We are missing location info, and are about to add it. */
21991 ;
21992 else
21993 {
21994 add_child_die (context_die, parm_die);
21995 return parm_die;
21996 }
21997 }
21998 }
21999
22000 /* If we have a previously generated DIE, use it, unless this is a
22001 concrete instance (origin != NULL), in which case we need a new
22002 DIE with a corresponding DW_AT_abstract_origin. */
22003 bool reusing_die;
22004 if (parm_die && origin == NULL)
22005 reusing_die = true;
22006 else
22007 {
22008 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22009 reusing_die = false;
22010 }
22011
22012 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22013 {
22014 case tcc_declaration:
22015 ultimate_origin = decl_ultimate_origin (node_or_origin);
22016 if (node || ultimate_origin)
22017 origin = ultimate_origin;
22018
22019 if (reusing_die)
22020 goto add_location;
22021
22022 if (origin != NULL)
22023 add_abstract_origin_attribute (parm_die, origin);
22024 else if (emit_name_p)
22025 add_name_and_src_coords_attributes (parm_die, node);
22026 if (origin == NULL
22027 || (! DECL_ABSTRACT_P (node_or_origin)
22028 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22029 decl_function_context
22030 (node_or_origin))))
22031 {
22032 tree type = TREE_TYPE (node_or_origin);
22033 if (decl_by_reference_p (node_or_origin))
22034 add_type_attribute (parm_die, TREE_TYPE (type),
22035 TYPE_UNQUALIFIED,
22036 false, context_die);
22037 else
22038 add_type_attribute (parm_die, type,
22039 decl_quals (node_or_origin),
22040 false, context_die);
22041 }
22042 if (origin == NULL && DECL_ARTIFICIAL (node))
22043 add_AT_flag (parm_die, DW_AT_artificial, 1);
22044 add_location:
22045 if (node && node != origin)
22046 equate_decl_number_to_die (node, parm_die);
22047 if (! DECL_ABSTRACT_P (node_or_origin))
22048 add_location_or_const_value_attribute (parm_die, node_or_origin,
22049 node == NULL);
22050
22051 break;
22052
22053 case tcc_type:
22054 /* We were called with some kind of a ..._TYPE node. */
22055 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22056 context_die);
22057 break;
22058
22059 default:
22060 gcc_unreachable ();
22061 }
22062
22063 return parm_die;
22064 }
22065
22066 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22067 children DW_TAG_formal_parameter DIEs representing the arguments of the
22068 parameter pack.
22069
22070 PARM_PACK must be a function parameter pack.
22071 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22072 must point to the subsequent arguments of the function PACK_ARG belongs to.
22073 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22074 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22075 following the last one for which a DIE was generated. */
22076
22077 static dw_die_ref
22078 gen_formal_parameter_pack_die (tree parm_pack,
22079 tree pack_arg,
22080 dw_die_ref subr_die,
22081 tree *next_arg)
22082 {
22083 tree arg;
22084 dw_die_ref parm_pack_die;
22085
22086 gcc_assert (parm_pack
22087 && lang_hooks.function_parameter_pack_p (parm_pack)
22088 && subr_die);
22089
22090 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22091 add_src_coords_attributes (parm_pack_die, parm_pack);
22092
22093 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22094 {
22095 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22096 parm_pack))
22097 break;
22098 gen_formal_parameter_die (arg, NULL,
22099 false /* Don't emit name attribute. */,
22100 parm_pack_die);
22101 }
22102 if (next_arg)
22103 *next_arg = arg;
22104 return parm_pack_die;
22105 }
22106
22107 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22108 at the end of an (ANSI prototyped) formal parameters list. */
22109
22110 static void
22111 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22112 {
22113 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22114 }
22115
22116 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22117 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22118 parameters as specified in some function type specification (except for
22119 those which appear as part of a function *definition*). */
22120
22121 static void
22122 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22123 {
22124 tree link;
22125 tree formal_type = NULL;
22126 tree first_parm_type;
22127 tree arg;
22128
22129 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22130 {
22131 arg = DECL_ARGUMENTS (function_or_method_type);
22132 function_or_method_type = TREE_TYPE (function_or_method_type);
22133 }
22134 else
22135 arg = NULL_TREE;
22136
22137 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22138
22139 /* Make our first pass over the list of formal parameter types and output a
22140 DW_TAG_formal_parameter DIE for each one. */
22141 for (link = first_parm_type; link; )
22142 {
22143 dw_die_ref parm_die;
22144
22145 formal_type = TREE_VALUE (link);
22146 if (formal_type == void_type_node)
22147 break;
22148
22149 /* Output a (nameless) DIE to represent the formal parameter itself. */
22150 if (!POINTER_BOUNDS_TYPE_P (formal_type))
22151 {
22152 parm_die = gen_formal_parameter_die (formal_type, NULL,
22153 true /* Emit name attribute. */,
22154 context_die);
22155 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22156 && link == first_parm_type)
22157 {
22158 add_AT_flag (parm_die, DW_AT_artificial, 1);
22159 if (dwarf_version >= 3 || !dwarf_strict)
22160 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22161 }
22162 else if (arg && DECL_ARTIFICIAL (arg))
22163 add_AT_flag (parm_die, DW_AT_artificial, 1);
22164 }
22165
22166 link = TREE_CHAIN (link);
22167 if (arg)
22168 arg = DECL_CHAIN (arg);
22169 }
22170
22171 /* If this function type has an ellipsis, add a
22172 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22173 if (formal_type != void_type_node)
22174 gen_unspecified_parameters_die (function_or_method_type, context_die);
22175
22176 /* Make our second (and final) pass over the list of formal parameter types
22177 and output DIEs to represent those types (as necessary). */
22178 for (link = TYPE_ARG_TYPES (function_or_method_type);
22179 link && TREE_VALUE (link);
22180 link = TREE_CHAIN (link))
22181 gen_type_die (TREE_VALUE (link), context_die);
22182 }
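
/* Sketch of the output for a function type (not a definition): for

     int (*fp) (int, char, ...);

   the pointed-to function type gets two nameless
   DW_TAG_formal_parameter children describing int and char, followed by
   a DW_TAG_unspecified_parameters child for the trailing ellipsis.  A
   METHOD_TYPE would additionally get an artificial first parameter for
   the "this" pointer.  */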
22183
22184 /* We want to generate the DIE for TYPE so that we can generate the
22185 die for MEMBER, which has been defined; we will need to refer back
22186 to the member declaration nested within TYPE. If we're trying to
22187 generate minimal debug info for TYPE, processing TYPE won't do the
22188 trick; we need to attach the member declaration by hand. */
22189
22190 static void
22191 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22192 {
22193 gen_type_die (type, context_die);
22194
22195 /* If we're trying to avoid duplicate debug info, we may not have
22196 emitted the member decl for this function. Emit it now. */
22197 if (TYPE_STUB_DECL (type)
22198 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22199 && ! lookup_decl_die (member))
22200 {
22201 dw_die_ref type_die;
22202 gcc_assert (!decl_ultimate_origin (member));
22203
22204 push_decl_scope (type);
22205 type_die = lookup_type_die_strip_naming_typedef (type);
22206 if (TREE_CODE (member) == FUNCTION_DECL)
22207 gen_subprogram_die (member, type_die);
22208 else if (TREE_CODE (member) == FIELD_DECL)
22209 {
22210 /* Ignore the nameless fields that are used to skip bits, but handle
22211 C++ anonymous unions and structs. */
22212 if (DECL_NAME (member) != NULL_TREE
22213 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22214 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22215 {
22216 struct vlr_context vlr_ctx = {
22217 DECL_CONTEXT (member), /* struct_type */
22218 NULL_TREE /* variant_part_offset */
22219 };
22220 gen_type_die (member_declared_type (member), type_die);
22221 gen_field_die (member, &vlr_ctx, type_die);
22222 }
22223 }
22224 else
22225 gen_variable_die (member, NULL_TREE, type_die);
22226
22227 pop_decl_scope ();
22228 }
22229 }
22230 \f
22231 /* Forward declare these functions, because they are mutually recursive
22232 with their set_block_* pairing functions. */
22233 static void set_decl_origin_self (tree);
22234
22235 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22236 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22237 that it points to the node itself, thus indicating that the node is its
22238 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22239 the given node is NULL, recursively descend the decl/block tree which
22240 it is the root of, and for each other ..._DECL or BLOCK node contained
22241 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22242 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22243 values to point to themselves. */
22244
22245 static void
22246 set_block_origin_self (tree stmt)
22247 {
22248 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22249 {
22250 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22251
22252 {
22253 tree local_decl;
22254
22255 for (local_decl = BLOCK_VARS (stmt);
22256 local_decl != NULL_TREE;
22257 local_decl = DECL_CHAIN (local_decl))
22258 /* Do not recurse on nested functions since the inlining status
22259 of parent and child can be different as per the DWARF spec. */
22260 if (TREE_CODE (local_decl) != FUNCTION_DECL
22261 && !DECL_EXTERNAL (local_decl))
22262 set_decl_origin_self (local_decl);
22263 }
22264
22265 {
22266 tree subblock;
22267
22268 for (subblock = BLOCK_SUBBLOCKS (stmt);
22269 subblock != NULL_TREE;
22270 subblock = BLOCK_CHAIN (subblock))
22271 set_block_origin_self (subblock); /* Recurse. */
22272 }
22273 }
22274 }
22275
22276 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22277 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22278 node so that it points to the node itself, thus indicating that the
22279 node represents its own (abstract) origin. Additionally, if the
22280 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22281 the decl/block tree of which the given node is the root of, and for
22282 the decl/block tree of which the given node is the root, and for
22283 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22284 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22285 point to themselves. */
22286
22287 static void
22288 set_decl_origin_self (tree decl)
22289 {
22290 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22291 {
22292 DECL_ABSTRACT_ORIGIN (decl) = decl;
22293 if (TREE_CODE (decl) == FUNCTION_DECL)
22294 {
22295 tree arg;
22296
22297 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22298 DECL_ABSTRACT_ORIGIN (arg) = arg;
22299 if (DECL_INITIAL (decl) != NULL_TREE
22300 && DECL_INITIAL (decl) != error_mark_node)
22301 set_block_origin_self (DECL_INITIAL (decl));
22302 }
22303 }
22304 }
22305 \f
22306 /* Mark the early DIE for DECL as the abstract instance. */
22307
22308 static void
22309 dwarf2out_abstract_function (tree decl)
22310 {
22311 dw_die_ref old_die;
22312
22313 /* Make sure we have the actual abstract inline, not a clone. */
22314 decl = DECL_ORIGIN (decl);
22315
22316 if (DECL_IGNORED_P (decl))
22317 return;
22318
22319 old_die = lookup_decl_die (decl);
22320 /* With early debug we always have an old DIE unless we are in LTO
22321 and the user did not compile but only link with debug. */
22322 if (in_lto_p && ! old_die)
22323 return;
22324 gcc_assert (old_die != NULL);
22325 if (get_AT (old_die, DW_AT_inline)
22326 || get_AT (old_die, DW_AT_abstract_origin))
22327 /* We've already generated the abstract instance. */
22328 return;
22329
22330 /* Go ahead and put DW_AT_inline on the DIE. */
22331 if (DECL_DECLARED_INLINE_P (decl))
22332 {
22333 if (cgraph_function_possibly_inlined_p (decl))
22334 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22335 else
22336 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22337 }
22338 else
22339 {
22340 if (cgraph_function_possibly_inlined_p (decl))
22341 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22342 else
22343 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22344 }
22345
22346 if (DECL_DECLARED_INLINE_P (decl)
22347 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22348 add_AT_flag (old_die, DW_AT_artificial, 1);
22349
22350 set_decl_origin_self (decl);
22351 }
22352
22353 /* Helper function of premark_used_types() which gets called through
22354 htab_traverse.
22355
22356 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22357 marked as unused by prune_unused_types. */
22358
22359 bool
22360 premark_used_types_helper (tree const &type, void *)
22361 {
22362 dw_die_ref die;
22363
22364 die = lookup_type_die (type);
22365 if (die != NULL)
22366 die->die_perennial_p = 1;
22367 return true;
22368 }
22369
22370 /* Helper function of premark_types_used_by_global_vars which gets called
22371 through htab_traverse.
22372
22373 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22374 marked as unused by prune_unused_types. The DIE of the type is marked
22375 only if the global variable using the type will actually be emitted. */
22376
22377 int
22378 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22379 void *)
22380 {
22381 struct types_used_by_vars_entry *entry;
22382 dw_die_ref die;
22383
22384 entry = (struct types_used_by_vars_entry *) *slot;
22385 gcc_assert (entry->type != NULL
22386 && entry->var_decl != NULL);
22387 die = lookup_type_die (entry->type);
22388 if (die)
22389 {
22390 /* Ask cgraph if the global variable really is to be emitted.
22391 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22392 varpool_node *node = varpool_node::get (entry->var_decl);
22393 if (node && node->definition)
22394 {
22395 die->die_perennial_p = 1;
22396 /* Keep the parent DIEs as well. */
22397 while ((die = die->die_parent) && die->die_perennial_p == 0)
22398 die->die_perennial_p = 1;
22399 }
22400 }
22401 return 1;
22402 }
22403
22404 /* Mark all members of used_types_hash as perennial. */
22405
22406 static void
22407 premark_used_types (struct function *fun)
22408 {
22409 if (fun && fun->used_types_hash)
22410 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22411 }
22412
22413 /* Mark all members of types_used_by_vars_entry as perennial. */
22414
22415 static void
22416 premark_types_used_by_global_vars (void)
22417 {
22418 if (types_used_by_vars_hash)
22419 types_used_by_vars_hash
22420 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22421 }
22422
22423 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22424 for CA_LOC call arg loc node. */
22425
22426 static dw_die_ref
22427 gen_call_site_die (tree decl, dw_die_ref subr_die,
22428 struct call_arg_loc_node *ca_loc)
22429 {
22430 dw_die_ref stmt_die = NULL, die;
22431 tree block = ca_loc->block;
22432
22433 while (block
22434 && block != DECL_INITIAL (decl)
22435 && TREE_CODE (block) == BLOCK)
22436 {
22437 stmt_die = BLOCK_DIE (block);
22438 if (stmt_die)
22439 break;
22440 block = BLOCK_SUPERCONTEXT (block);
22441 }
22442 if (stmt_die == NULL)
22443 stmt_die = subr_die;
22444 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22445 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22446 if (ca_loc->tail_call_p)
22447 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22448 if (ca_loc->symbol_ref)
22449 {
22450 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22451 if (tdie)
22452 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22453 else
22454 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22455 false);
22456 }
22457 return die;
22458 }
22459
22460 /* Generate a DIE to represent a declared function (either file-scope or
22461 block-local). */
22462
22463 static void
22464 gen_subprogram_die (tree decl, dw_die_ref context_die)
22465 {
22466 tree origin = decl_ultimate_origin (decl);
22467 dw_die_ref subr_die;
22468 dw_die_ref old_die = lookup_decl_die (decl);
22469
22470 /* This function gets called multiple times for different stages of
22471 the debug process. For example, for func() in this code:
22472
22473 namespace S
22474 {
22475 void func() { ... }
22476 }
22477
22478 ...we get called 4 times. Twice in early debug and twice in
22479 late debug:
22480
22481 Early debug
22482 -----------
22483
22484 1. Once while generating func() within the namespace. This is
22485 the declaration. The declaration bit below is set, as the
22486 context is the namespace.
22487
22488 A new DIE will be generated with DW_AT_declaration set.
22489
22490 2. Once for func() itself. This is the specification. The
22491 declaration bit below is clear as the context is the CU.
22492
22493 We will use the cached DIE from (1) to create a new DIE with
22494 DW_AT_specification pointing to the declaration in (1).
22495
22496 Late debug via rest_of_handle_final()
22497 -------------------------------------
22498
22499 3. Once generating func() within the namespace. This is also the
22500 declaration, as in (1), but this time we will early exit below
22501 as we have a cached DIE and a declaration needs no additional
22502 annotations (no locations), as the source declaration line
22503 info is enough.
22504
22505 4. Once for func() itself. As in (2), this is the specification,
22506 but this time we will re-use the cached DIE, and just annotate
22507 it with the location information that should now be available.
22508
22509 For something without namespaces, but with abstract instances, we
22510 are also called multiple times:
22511
22512 class Base
22513 {
22514 public:
22515 Base (); // constructor declaration (1)
22516 };
22517
22518 Base::Base () { } // constructor specification (2)
22519
22520 Early debug
22521 -----------
22522
22523 1. Once for the Base() constructor by virtue of it being a
22524 member of the Base class. This is done via
22525 rest_of_type_compilation.
22526
22527 This is a declaration, so a new DIE will be created with
22528 DW_AT_declaration.
22529
22530 2. Once for the Base() constructor definition, but this time
22531 while generating the abstract instance of the base
22532 constructor (__base_ctor) which is being generated via early
22533 debug of reachable functions.
22534
22535 Even though we have a cached version of the declaration (1),
22536 we will create a DW_AT_specification of the declaration DIE
22537 in (1).
22538
22539 3. Once for the __base_ctor itself, but this time, we generate
22540 a DW_AT_abstract_origin version of the DW_AT_specification in
22541 (2).
22542
22543 Late debug via rest_of_handle_final
22544 -----------------------------------
22545
22546 4. One final time for the __base_ctor (which will have a cached
22547 DIE with DW_AT_abstract_origin created in (3)). This time,
22548 we will just annotate the location information now
22549 available.
22550 */
22551 int declaration = (current_function_decl != decl
22552 || class_or_namespace_scope_p (context_die));
22553
22554 /* A declaration that has been previously dumped needs no
22555 additional information. */
22556 if (old_die && declaration)
22557 return;
22558
22559 /* Now that the C++ front end lazily declares artificial member fns, we
22560 might need to retrofit the declaration into its class. */
22561 if (!declaration && !origin && !old_die
22562 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22563 && !class_or_namespace_scope_p (context_die)
22564 && debug_info_level > DINFO_LEVEL_TERSE)
22565 old_die = force_decl_die (decl);
22566
22567 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22568 if (origin != NULL)
22569 {
22570 gcc_assert (!declaration || local_scope_p (context_die));
22571
22572 /* Fixup die_parent for the abstract instance of a nested
22573 inline function. */
22574 if (old_die && old_die->die_parent == NULL)
22575 add_child_die (context_die, old_die);
22576
22577 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22578 {
22579 /* If we have a DW_AT_abstract_origin we have a working
22580 cached version. */
22581 subr_die = old_die;
22582 }
22583 else
22584 {
22585 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22586 add_abstract_origin_attribute (subr_die, origin);
22587 /* This is where the actual code for a cloned function is.
22588 Let's emit the linkage name attribute for it. This helps
22589 debuggers to e.g. set breakpoints in
22590 constructors/destructors when the user asks "break
22591 K::K". */
22592 add_linkage_name (subr_die, decl);
22593 }
22594 }
22595 /* A cached copy, possibly from early dwarf generation. Reuse as
22596 much as possible. */
22597 else if (old_die)
22598 {
22599 if (!get_AT_flag (old_die, DW_AT_declaration)
22600 /* We can have a normal definition following an inline one in the
22601 case of redefinition of GNU C extern inlines.
22602 It seems reasonable to use DW_AT_specification in this case. */
22603 && !get_AT (old_die, DW_AT_inline))
22604 {
22605 /* Detect and ignore this case, where we are trying to output
22606 something we have already output. */
22607 if (get_AT (old_die, DW_AT_low_pc)
22608 || get_AT (old_die, DW_AT_ranges))
22609 return;
22610
22611 /* If we have no location information, this must be a
22612 partially generated DIE from early dwarf generation.
22613 Fall through and generate it. */
22614 }
22615
22616 /* If the definition comes from the same place as the declaration,
22617 maybe use the old DIE. We always want the DIE for this function
22618 that has the *_pc attributes to be under comp_unit_die so the
22619 debugger can find it. We also need to do this for abstract
22620 instances of inlines, since the spec requires the out-of-line copy
22621 to have the same parent. For local class methods, this doesn't
22622 apply; we just use the old DIE. */
22623 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22624 struct dwarf_file_data * file_index = lookup_filename (s.file);
22625 if ((is_cu_die (old_die->die_parent)
22626 /* This condition fixes the inconsistency/ICE with the
22627 following Fortran test (or some derivative thereof) while
22628 building libgfortran:
22629
22630 module some_m
22631 contains
22632 logical function funky (FLAG)
22633 funky = .true.
22634 end function
22635 end module
22636 */
22637 || (old_die->die_parent
22638 && old_die->die_parent->die_tag == DW_TAG_module)
22639 || context_die == NULL)
22640 && (DECL_ARTIFICIAL (decl)
22641 /* The location attributes may be in the abstract origin
22642 which in the case of LTO might be not available to
22643 look at. */
22644 || get_AT (old_die, DW_AT_abstract_origin)
22645 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22646 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22647 == (unsigned) s.line)
22648 && (!debug_column_info
22649 || s.column == 0
22650 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22651 == (unsigned) s.column)))))
22652 {
22653 subr_die = old_die;
22654
22655 /* Clear out the declaration attribute, but leave the
22656 parameters so they can be augmented with location
22657 information later. Unless this was a declaration, in
22658 which case, wipe out the nameless parameters and recreate
22659 them further down. */
22660 if (remove_AT (subr_die, DW_AT_declaration))
22661 {
22663 remove_AT (subr_die, DW_AT_object_pointer);
22664 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22665 }
22666 }
22667 /* Make a specification pointing to the previously built
22668 declaration. */
22669 else
22670 {
22671 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22672 add_AT_specification (subr_die, old_die);
22673 add_pubname (decl, subr_die);
22674 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22675 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22676 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22677 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22678 if (debug_column_info
22679 && s.column
22680 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22681 != (unsigned) s.column))
22682 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22683
22684 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22685 emit the real type on the definition die. */
22686 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22687 {
22688 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22689 if (die == auto_die || die == decltype_auto_die)
22690 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22691 TYPE_UNQUALIFIED, false, context_die);
22692 }
22693
22694 /* When we process the method declaration, we haven't seen
22695 the out-of-class defaulted definition yet, so we have to
22696 recheck now. */
22697 if ((dwarf_version >= 5 || ! dwarf_strict)
22698 && !get_AT (subr_die, DW_AT_defaulted))
22699 {
22700 int defaulted
22701 = lang_hooks.decls.decl_dwarf_attribute (decl,
22702 DW_AT_defaulted);
22703 if (defaulted != -1)
22704 {
22705 /* Other values must have been handled before. */
22706 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22707 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22708 }
22709 }
22710 }
22711 }
22712 /* Create a fresh DIE for anything else. */
22713 else
22714 {
22715 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22716
22717 if (TREE_PUBLIC (decl))
22718 add_AT_flag (subr_die, DW_AT_external, 1);
22719
22720 add_name_and_src_coords_attributes (subr_die, decl);
22721 add_pubname (decl, subr_die);
22722 if (debug_info_level > DINFO_LEVEL_TERSE)
22723 {
22724 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22725 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22726 TYPE_UNQUALIFIED, false, context_die);
22727 }
22728
22729 add_pure_or_virtual_attribute (subr_die, decl);
22730 if (DECL_ARTIFICIAL (decl))
22731 add_AT_flag (subr_die, DW_AT_artificial, 1);
22732
22733 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22734 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22735
22736 add_alignment_attribute (subr_die, decl);
22737
22738 add_accessibility_attribute (subr_die, decl);
22739 }
22740
22741 /* Unless we have an existing non-declaration DIE, equate the new
22742 DIE. */
22743 if (!old_die || is_declaration_die (old_die))
22744 equate_decl_number_to_die (decl, subr_die);
22745
22746 if (declaration)
22747 {
22748 if (!old_die || !get_AT (old_die, DW_AT_inline))
22749 {
22750 add_AT_flag (subr_die, DW_AT_declaration, 1);
22751
22752 /* If this is an explicit function declaration then generate
22753 a DW_AT_explicit attribute. */
22754 if ((dwarf_version >= 3 || !dwarf_strict)
22755 && lang_hooks.decls.decl_dwarf_attribute (decl,
22756 DW_AT_explicit) == 1)
22757 add_AT_flag (subr_die, DW_AT_explicit, 1);
22758
22759 /* If this is a C++11 deleted special function member then generate
22760 a DW_AT_deleted attribute. */
22761 if ((dwarf_version >= 5 || !dwarf_strict)
22762 && lang_hooks.decls.decl_dwarf_attribute (decl,
22763 DW_AT_deleted) == 1)
22764 add_AT_flag (subr_die, DW_AT_deleted, 1);
22765
22766 /* If this is a C++11 defaulted special function member then
22767 generate a DW_AT_defaulted attribute. */
22768 if (dwarf_version >= 5 || !dwarf_strict)
22769 {
22770 int defaulted
22771 = lang_hooks.decls.decl_dwarf_attribute (decl,
22772 DW_AT_defaulted);
22773 if (defaulted != -1)
22774 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22775 }
22776
22777 /* If this is a C++11 non-static member function with & ref-qualifier
22778 then generate a DW_AT_reference attribute. */
22779 if ((dwarf_version >= 5 || !dwarf_strict)
22780 && lang_hooks.decls.decl_dwarf_attribute (decl,
22781 DW_AT_reference) == 1)
22782 add_AT_flag (subr_die, DW_AT_reference, 1);
22783
22784 /* If this is a C++11 non-static member function with &&
22785 ref-qualifier then generate a DW_AT_reference attribute. */
22786 if ((dwarf_version >= 5 || !dwarf_strict)
22787 && lang_hooks.decls.decl_dwarf_attribute (decl,
22788 DW_AT_rvalue_reference)
22789 == 1)
22790 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22791 }
22792 }
22793 /* For non DECL_EXTERNALs, if range information is available, fill
22794 the DIE with it. */
22795 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22796 {
22797 HOST_WIDE_INT cfa_fb_offset;
22798
22799 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22800
22801 if (!crtl->has_bb_partition)
22802 {
22803 dw_fde_ref fde = fun->fde;
22804 if (fde->dw_fde_begin)
22805 {
22806 /* We have already generated the labels. */
22807 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22808 fde->dw_fde_end, false);
22809 }
22810 else
22811 {
22812 /* Create start/end labels and add the range. */
22813 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22814 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22815 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22816 current_function_funcdef_no);
22817 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22818 current_function_funcdef_no);
22819 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22820 false);
22821 }
22822
22823 #if VMS_DEBUGGING_INFO
22824 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22825 Section 2.3 Prologue and Epilogue Attributes:
22826 When a breakpoint is set on entry to a function, it is generally
22827 desirable for execution to be suspended, not on the very first
22828 instruction of the function, but rather at a point after the
22829 function's frame has been set up, after any language defined local
22830 declaration processing has been completed, and before execution of
22831 the first statement of the function begins. Debuggers generally
22832 cannot properly determine where this point is. Similarly for a
22833 breakpoint set on exit from a function. The prologue and epilogue
22834 attributes allow a compiler to communicate the location(s) to use. */
22835
22836 {
22837 if (fde->dw_fde_vms_end_prologue)
22838 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22839 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22840
22841 if (fde->dw_fde_vms_begin_epilogue)
22842 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22843 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22844 }
22845 #endif
22846
22847 }
22848 else
22849 {
22850 /* Generate pubnames entries for the split function code ranges. */
22851 dw_fde_ref fde = fun->fde;
22852
22853 if (fde->dw_fde_second_begin)
22854 {
22855 if (dwarf_version >= 3 || !dwarf_strict)
22856 {
22857 /* We should use ranges for non-contiguous code section
22858 addresses. Use the actual code range for the initial
22859 section, since the HOT/COLD labels might precede an
22860 alignment offset. */
22861 bool range_list_added = false;
22862 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22863 fde->dw_fde_end, &range_list_added,
22864 false);
22865 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22866 fde->dw_fde_second_end,
22867 &range_list_added, false);
22868 if (range_list_added)
22869 add_ranges (NULL);
22870 }
22871 else
22872 {
22873 /* There is no real support in DWARF 2 for this, so we make
22874 a work-around. First, emit the pubname for the segment
22875 containing the function label. Then make and emit a
22876 simplified subprogram DIE for the second segment with the
22877 name prefixed by __second_sect_of_. We use the same
22878 linkage name for the second DIE so that gdb will find both
22879 sections when given "b foo". */
22880 const char *name = NULL;
22881 tree decl_name = DECL_NAME (decl);
22882 dw_die_ref seg_die;
22883
22884 /* Do the 'primary' section. */
22885 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22886 fde->dw_fde_end, false);
22887
22888 /* Build a minimal DIE for the secondary section. */
22889 seg_die = new_die (DW_TAG_subprogram,
22890 subr_die->die_parent, decl);
22891
22892 if (TREE_PUBLIC (decl))
22893 add_AT_flag (seg_die, DW_AT_external, 1);
22894
22895 if (decl_name != NULL
22896 && IDENTIFIER_POINTER (decl_name) != NULL)
22897 {
22898 name = dwarf2_name (decl, 1);
22899 if (! DECL_ARTIFICIAL (decl))
22900 add_src_coords_attributes (seg_die, decl);
22901
22902 add_linkage_name (seg_die, decl);
22903 }
22904 gcc_assert (name != NULL);
22905 add_pure_or_virtual_attribute (seg_die, decl);
22906 if (DECL_ARTIFICIAL (decl))
22907 add_AT_flag (seg_die, DW_AT_artificial, 1);
22908
22909 name = concat ("__second_sect_of_", name, NULL);
22910 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
22911 fde->dw_fde_second_end, false);
22912 add_name_attribute (seg_die, name);
22913 if (want_pubnames ())
22914 add_pubname_string (name, seg_die);
22915 }
22916 }
22917 else
22918 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
22919 false);
22920 }
22921
22922 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
22923
22924 /* We define the "frame base" as the function's CFA. This is more
22925 convenient for several reasons: (1) It's stable across the prologue
22926 and epilogue, which makes it better than just a frame pointer,
22927 (2) With dwarf3, there exists a one-byte encoding that allows us
22928 to reference the .debug_frame data by proxy, but failing that,
22929 (3) We can at least reuse the code inspection and interpretation
22930 code that determines the CFA position at various points in the
22931 function. */
22932 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
22933 {
22934 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
22935 add_AT_loc (subr_die, DW_AT_frame_base, op);
22936 }
22937 else
22938 {
22939 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
22940 if (list->dw_loc_next)
22941 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
22942 else
22943 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
22944 }
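
/* As a rough illustration (not verbatim tool output): with DWARF 3+
   and DWARF 2 unwind info, the branch above emits the one-byte
   expression

     DW_AT_frame_base: DW_OP_call_frame_cfa

   whereas the fallback branch emits either a single location
   expression or a location list tracking the CFA through the
   function.  */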
22945
22946 /* Compute a displacement from the "steady-state frame pointer" to
22947 the CFA. The former is what all stack slots and argument slots
22948 will reference in the rtl; the latter is what we've told the
22949 debugger about. We'll need to adjust all frame_base references
22950 by this displacement. */
22951 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
22952
22953 if (fun->static_chain_decl)
22954 {
22955 /* DWARF requires here a location expression that computes the
22956 address of the enclosing subprogram's frame base. The machinery
22957 in tree-nested.c is supposed to store this specific address in the
22958 last field of the FRAME record. */
22959 const tree frame_type
22960 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
22961 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
22962
22963 tree fb_expr
22964 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
22965 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
22966 fb_expr, fb_decl, NULL_TREE);
22967
22968 add_AT_location_description (subr_die, DW_AT_static_link,
22969 loc_list_from_tree (fb_expr, 0, NULL));
22970 }
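
/* A minimal sketch of the case handled above (GNU C nested function,
   illustrative names only):

     int outer (int x)
     {
       int inner (void) { return x; }
       return inner ();
     }

   INNER's subprogram DIE gets a DW_AT_static_link whose expression
   computes the enclosing frame base that tree-nested.c stores in the
   last field of OUTER's FRAME record.  */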
22971
22972 resolve_variable_values ();
22973 }
22974
22975 /* Generate child DIEs for template parameters. */
22976 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
22977 gen_generic_params_dies (decl);
22978
22979 /* Now output descriptions of the arguments for this function. This gets
22980 (unnecessarily?) complex because the DECL_ARGUMENTS list
22981 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
22982 `...' at the end of the formal parameter list. In order to find out if
22983 there was a trailing ellipsis or not, we must instead look at the type
22984 associated with the FUNCTION_DECL. This will be a node of type
22985 FUNCTION_TYPE. If the chain of type nodes hanging off of this
22986 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
22987 an ellipsis at the end. */
22988
22989 /* In the case where we are describing a mere function declaration, all we
22990 need to do here (and all we *can* do here) is to describe the *types* of
22991 its formal parameters. */
22992 if (debug_info_level <= DINFO_LEVEL_TERSE)
22993 ;
22994 else if (declaration)
22995 gen_formal_types_die (decl, subr_die);
22996 else
22997 {
22998 /* Generate DIEs to represent all known formal parameters. */
22999 tree parm = DECL_ARGUMENTS (decl);
23000 tree generic_decl = early_dwarf
23001 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23002 tree generic_decl_parm = generic_decl
23003 ? DECL_ARGUMENTS (generic_decl)
23004 : NULL;
23005
23006 /* Now we want to walk the list of parameters of the function and
23007 emit their relevant DIEs.
23008
23009 We consider the case of DECL being an instance of a generic function
23010 as well as it being a normal function.
23011
23012 If DECL is an instance of a generic function we walk the
23013 parameters of the generic function declaration _and_ the parameters of
23014 DECL itself. This is useful because we want to emit specific DIEs for
23015 function parameter packs and those are declared as part of the
23016 generic function declaration. In that particular case,
23017 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23018 That DIE has children DIEs representing the set of arguments
23019 of the pack. Note that the set of pack arguments can be empty.
23020 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23021 child DIEs.
23022
23023 Otherwise, we just consider the parameters of DECL. */
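/* For example (illustrative C++ only):

     template <typename... Args> void spread (Args... args);
     template void spread<int, long> (int, long);

   walking the generic declaration lets us emit a
   DW_TAG_GNU_formal_parameter_pack DIE for ARGS whose children are
   the DW_TAG_formal_parameter DIEs of the instantiation; for an
   instantiation with an empty pack the pack DIE simply has no
   children.  */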
23024 while (generic_decl_parm || parm)
23025 {
23026 if (generic_decl_parm
23027 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23028 gen_formal_parameter_pack_die (generic_decl_parm,
23029 parm, subr_die,
23030 &parm);
23031 else if (parm && !POINTER_BOUNDS_P (parm))
23032 {
23033 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23034
23035 if (early_dwarf
23036 && parm == DECL_ARGUMENTS (decl)
23037 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23038 && parm_die
23039 && (dwarf_version >= 3 || !dwarf_strict))
23040 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23041
23042 parm = DECL_CHAIN (parm);
23043 }
23044 else if (parm)
23045 parm = DECL_CHAIN (parm);
23046
23047 if (generic_decl_parm)
23048 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23049 }
23050
23051 /* Decide whether we need an unspecified_parameters DIE at the end.
23052 There are two cases where we do: 1) the ANSI `...' declaration -
23053 this is detectable when the end of the arg list is not a
23054 void_type_node; 2) an unprototyped function declaration (not a
23055 definition), which just means that we have no info about the
23056 parameters at all. */
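/* Illustrative examples (hypothetical declarations): for

     int printf_like (const char *fmt, ...);

   the argument type list does not end in void_type_node, so a
   DW_TAG_unspecified_parameters child is added; for an unprototyped

     int old_style ();

   that is declared but not defined, we likewise have no parameter
   info and add the same child DIE.  */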
23057 if (early_dwarf)
23058 {
23059 if (prototype_p (TREE_TYPE (decl)))
23060 {
23061 /* This is the prototyped case; check for a trailing `...'. */
23062 if (stdarg_p (TREE_TYPE (decl)))
23063 gen_unspecified_parameters_die (decl, subr_die);
23064 }
23065 else if (DECL_INITIAL (decl) == NULL_TREE)
23066 gen_unspecified_parameters_die (decl, subr_die);
23067 }
23068 }
23069
23070 if (subr_die != old_die)
23071 /* Add the calling convention attribute if requested. */
23072 add_calling_convention_attribute (subr_die, decl);
23073
23074 /* Output Dwarf info for all of the stuff within the body of the function
23075 (if it has one - it may be just a declaration).
23076
23077 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23078 a function. This BLOCK actually represents the outermost binding contour
23079 for the function, i.e. the contour in which the function's formal
23080 parameters and labels get declared. Curiously, it appears that the front
23081 end doesn't actually put the PARM_DECL nodes for the current function onto
23082 the BLOCK_VARS list for this outer scope; they are strung off of the
23083 DECL_ARGUMENTS list for the function instead.
23084
23085 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23086 the LABEL_DECL nodes for the function however, and we output DWARF info
23087 for those in decls_for_scope. Just within the `outer_scope' there will be
23088 a BLOCK node representing the function's outermost pair of curly braces,
23089 and any blocks used for the base and member initializers of a C++
23090 constructor function. */
23091 tree outer_scope = DECL_INITIAL (decl);
23092 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23093 {
23094 int call_site_note_count = 0;
23095 int tail_call_site_note_count = 0;
23096
23097 /* Emit a DW_TAG_variable DIE for a named return value. */
23098 if (DECL_NAME (DECL_RESULT (decl)))
23099 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23100
23101 /* The first time through decls_for_scope we will generate the
23102 DIEs for the locals. The second time, we fill in the
23103 location info. */
23104 decls_for_scope (outer_scope, subr_die);
23105
23106 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23107 {
23108 struct call_arg_loc_node *ca_loc;
23109 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23110 {
23111 dw_die_ref die = NULL;
23112 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23113 rtx arg, next_arg;
23114
23115 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23116 ? XEXP (ca_loc->call_arg_loc_note, 0)
23117 : NULL_RTX);
23118 arg; arg = next_arg)
23119 {
23120 dw_loc_descr_ref reg, val;
23121 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23122 dw_die_ref cdie, tdie = NULL;
23123
23124 next_arg = XEXP (arg, 1);
23125 if (REG_P (XEXP (XEXP (arg, 0), 0))
23126 && next_arg
23127 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23128 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23129 && REGNO (XEXP (XEXP (arg, 0), 0))
23130 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23131 next_arg = XEXP (next_arg, 1);
23132 if (mode == VOIDmode)
23133 {
23134 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23135 if (mode == VOIDmode)
23136 mode = GET_MODE (XEXP (arg, 0));
23137 }
23138 if (mode == VOIDmode || mode == BLKmode)
23139 continue;
23140 /* Get dynamic information about call target only if we
23141 have no static information: we cannot generate both
23142 DW_AT_call_origin and DW_AT_call_target
23143 attributes. */
23144 if (ca_loc->symbol_ref == NULL_RTX)
23145 {
23146 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23147 {
23148 tloc = XEXP (XEXP (arg, 0), 1);
23149 continue;
23150 }
23151 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23152 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23153 {
23154 tlocc = XEXP (XEXP (arg, 0), 1);
23155 continue;
23156 }
23157 }
23158 reg = NULL;
23159 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23160 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23161 VAR_INIT_STATUS_INITIALIZED);
23162 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23163 {
23164 rtx mem = XEXP (XEXP (arg, 0), 0);
23165 reg = mem_loc_descriptor (XEXP (mem, 0),
23166 get_address_mode (mem),
23167 GET_MODE (mem),
23168 VAR_INIT_STATUS_INITIALIZED);
23169 }
23170 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23171 == DEBUG_PARAMETER_REF)
23172 {
23173 tree tdecl
23174 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23175 tdie = lookup_decl_die (tdecl);
23176 if (tdie == NULL)
23177 continue;
23178 }
23179 else
23180 continue;
23181 if (reg == NULL
23182 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23183 != DEBUG_PARAMETER_REF)
23184 continue;
23185 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23186 VOIDmode,
23187 VAR_INIT_STATUS_INITIALIZED);
23188 if (val == NULL)
23189 continue;
23190 if (die == NULL)
23191 die = gen_call_site_die (decl, subr_die, ca_loc);
23192 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23193 NULL_TREE);
23194 if (reg != NULL)
23195 add_AT_loc (cdie, DW_AT_location, reg);
23196 else if (tdie != NULL)
23197 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23198 tdie);
23199 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23200 if (next_arg != XEXP (arg, 1))
23201 {
23202 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23203 if (mode == VOIDmode)
23204 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23205 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23206 0), 1),
23207 mode, VOIDmode,
23208 VAR_INIT_STATUS_INITIALIZED);
23209 if (val != NULL)
23210 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23211 val);
23212 }
23213 }
23214 if (die == NULL
23215 && (ca_loc->symbol_ref || tloc))
23216 die = gen_call_site_die (decl, subr_die, ca_loc);
23217 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23218 {
23219 dw_loc_descr_ref tval = NULL;
23220
23221 if (tloc != NULL_RTX)
23222 tval = mem_loc_descriptor (tloc,
23223 GET_MODE (tloc) == VOIDmode
23224 ? Pmode : GET_MODE (tloc),
23225 VOIDmode,
23226 VAR_INIT_STATUS_INITIALIZED);
23227 if (tval)
23228 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23229 else if (tlocc != NULL_RTX)
23230 {
23231 tval = mem_loc_descriptor (tlocc,
23232 GET_MODE (tlocc) == VOIDmode
23233 ? Pmode : GET_MODE (tlocc),
23234 VOIDmode,
23235 VAR_INIT_STATUS_INITIALIZED);
23236 if (tval)
23237 add_AT_loc (die,
23238 dwarf_AT (DW_AT_call_target_clobbered),
23239 tval);
23240 }
23241 }
23242 if (die != NULL)
23243 {
23244 call_site_note_count++;
23245 if (ca_loc->tail_call_p)
23246 tail_call_site_note_count++;
23247 }
23248 }
23249 }
23250 call_arg_locations = NULL;
23251 call_arg_loc_last = NULL;
23252 if (tail_call_site_count >= 0
23253 && tail_call_site_count == tail_call_site_note_count
23254 && (!dwarf_strict || dwarf_version >= 5))
23255 {
23256 if (call_site_count >= 0
23257 && call_site_count == call_site_note_count)
23258 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23259 else
23260 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23261 }
23262 call_site_count = -1;
23263 tail_call_site_count = -1;
23264 }
23265
23266 /* Mark used types after we have created DIEs for the functions scopes. */
23267 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23268 }
23269
23270 /* Returns a hash value for X (which really is a die_struct). */
23271
23272 hashval_t
23273 block_die_hasher::hash (die_struct *d)
23274 {
23275 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23276 }
23277
23278 /* Return nonzero if decl_id and die_parent of die_struct X is the same
23279 as decl_id and die_parent of die_struct Y. */
23280
23281 bool
23282 block_die_hasher::equal (die_struct *x, die_struct *y)
23283 {
23284 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23285 }
23286
23287 /* Hold information about markers for inlined entry points. */
23288 struct GTY ((for_user)) inline_entry_data
23289 {
23290 /* The block that's the inlined_function_outer_scope for an inlined
23291 function. */
23292 tree block;
23293
23294 /* The label at the inlined entry point. */
23295 const char *label_pfx;
23296 unsigned int label_num;
23297
23298 /* The view number to be used as the inlined entry point. */
23299 var_loc_view view;
23300 };
23301
23302 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23303 {
23304 typedef tree compare_type;
23305 static inline hashval_t hash (const inline_entry_data *);
23306 static inline bool equal (const inline_entry_data *, const_tree);
23307 };
23308
23309 /* Hash table routines for inline_entry_data. */
23310
23311 inline hashval_t
23312 inline_entry_data_hasher::hash (const inline_entry_data *data)
23313 {
23314 return htab_hash_pointer (data->block);
23315 }
23316
23317 inline bool
23318 inline_entry_data_hasher::equal (const inline_entry_data *data,
23319 const_tree block)
23320 {
23321 return data->block == block;
23322 }
23323
23324 /* Inlined entry points pending DIE creation in this compilation unit. */
23325
23326 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23327
23328
23329 /* Return TRUE if DECL, which may have been previously generated as
23330 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23331 true if DECL (or its origin) is either an extern declaration or a
23332 class/namespace scoped declaration.
23333
23334 The declare_in_namespace support causes us to get two DIEs for one
23335 variable, both of which are declarations. We want to avoid
23336 considering one to be a specification, so we must test for
23337 DECLARATION and DW_AT_declaration. */
23338 static inline bool
23339 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23340 {
23341 return (old_die && TREE_STATIC (decl) && !declaration
23342 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23343 }
23344
23345 /* Return true if DECL is a local static. */
23346
23347 static inline bool
23348 local_function_static (tree decl)
23349 {
23350 gcc_assert (VAR_P (decl));
23351 return TREE_STATIC (decl)
23352 && DECL_CONTEXT (decl)
23353 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23354 }
23355
23356 /* Generate a DIE to represent a declared data object.
23357 Either DECL or ORIGIN must be non-null. */
23358
23359 static void
23360 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23361 {
23362 HOST_WIDE_INT off = 0;
23363 tree com_decl;
23364 tree decl_or_origin = decl ? decl : origin;
23365 tree ultimate_origin;
23366 dw_die_ref var_die;
23367 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23368 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23369 || class_or_namespace_scope_p (context_die));
23370 bool specialization_p = false;
23371 bool no_linkage_name = false;
23372
23373 /* Although C++ inline static data members have definitions inside the
23374 class, force the first DIE to be a declaration, then let gen_member_die
23375 reparent it to the class context and call gen_variable_die again
23376 to create the outside of the class DIE for the definition. */
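
/* A sketch of the case meant here (C++17, illustrative names):

     struct A { static inline int counter = 0; };

   The first DIE created for COUNTER is forced to be a declaration;
   gen_member_die then reparents it under A, and this function is
   called again to build the defining, out-of-class DIE.  */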
23377 if (!declaration
23378 && old_die == NULL
23379 && decl
23380 && DECL_CONTEXT (decl)
23381 && TYPE_P (DECL_CONTEXT (decl))
23382 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23383 {
23384 declaration = true;
23385 if (dwarf_version < 5)
23386 no_linkage_name = true;
23387 }
23388
23389 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23390 if (decl || ultimate_origin)
23391 origin = ultimate_origin;
23392 com_decl = fortran_common (decl_or_origin, &off);
23393
23394 /* Symbol in common gets emitted as a child of the common block, in the form
23395 of a data member. */
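
/* For instance (illustrative Fortran):

         common /work/ a, b

   A and B are emitted as DW_TAG_variable children of the
   DW_TAG_common_block DIE for WORK, each with a location at some
   offset from the block's address.  */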
23396 if (com_decl)
23397 {
23398 dw_die_ref com_die;
23399 dw_loc_list_ref loc = NULL;
23400 die_node com_die_arg;
23401
23402 var_die = lookup_decl_die (decl_or_origin);
23403 if (var_die)
23404 {
23405 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23406 {
23407 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23408 if (loc)
23409 {
23410 if (off)
23411 {
23412 /* Optimize the common case. */
23413 if (single_element_loc_list_p (loc)
23414 && loc->expr->dw_loc_opc == DW_OP_addr
23415 && loc->expr->dw_loc_next == NULL
23416 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23417 == SYMBOL_REF)
23418 {
23419 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23420 loc->expr->dw_loc_oprnd1.v.val_addr
23421 = plus_constant (GET_MODE (x), x , off);
23422 }
23423 else
23424 loc_list_plus_const (loc, off);
23425 }
23426 add_AT_location_description (var_die, DW_AT_location, loc);
23427 remove_AT (var_die, DW_AT_declaration);
23428 }
23429 }
23430 return;
23431 }
23432
23433 if (common_block_die_table == NULL)
23434 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23435
23436 com_die_arg.decl_id = DECL_UID (com_decl);
23437 com_die_arg.die_parent = context_die;
23438 com_die = common_block_die_table->find (&com_die_arg);
23439 if (! early_dwarf)
23440 loc = loc_list_from_tree (com_decl, 2, NULL);
23441 if (com_die == NULL)
23442 {
23443 const char *cnam
23444 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23445 die_node **slot;
23446
23447 com_die = new_die (DW_TAG_common_block, context_die, decl);
23448 add_name_and_src_coords_attributes (com_die, com_decl);
23449 if (loc)
23450 {
23451 add_AT_location_description (com_die, DW_AT_location, loc);
23452 /* Avoid sharing the same loc descriptor between
23453 DW_TAG_common_block and DW_TAG_variable. */
23454 loc = loc_list_from_tree (com_decl, 2, NULL);
23455 }
23456 else if (DECL_EXTERNAL (decl_or_origin))
23457 add_AT_flag (com_die, DW_AT_declaration, 1);
23458 if (want_pubnames ())
23459 add_pubname_string (cnam, com_die); /* ??? needed? */
23460 com_die->decl_id = DECL_UID (com_decl);
23461 slot = common_block_die_table->find_slot (com_die, INSERT);
23462 *slot = com_die;
23463 }
23464 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23465 {
23466 add_AT_location_description (com_die, DW_AT_location, loc);
23467 loc = loc_list_from_tree (com_decl, 2, NULL);
23468 remove_AT (com_die, DW_AT_declaration);
23469 }
23470 var_die = new_die (DW_TAG_variable, com_die, decl);
23471 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23472 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23473 decl_quals (decl_or_origin), false,
23474 context_die);
23475 add_alignment_attribute (var_die, decl);
23476 add_AT_flag (var_die, DW_AT_external, 1);
23477 if (loc)
23478 {
23479 if (off)
23480 {
23481 /* Optimize the common case. */
23482 if (single_element_loc_list_p (loc)
23483 && loc->expr->dw_loc_opc == DW_OP_addr
23484 && loc->expr->dw_loc_next == NULL
23485 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23486 {
23487 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23488 loc->expr->dw_loc_oprnd1.v.val_addr
23489 = plus_constant (GET_MODE (x), x, off);
23490 }
23491 else
23492 loc_list_plus_const (loc, off);
23493 }
23494 add_AT_location_description (var_die, DW_AT_location, loc);
23495 }
23496 else if (DECL_EXTERNAL (decl_or_origin))
23497 add_AT_flag (var_die, DW_AT_declaration, 1);
23498 if (decl)
23499 equate_decl_number_to_die (decl, var_die);
23500 return;
23501 }
23502
23503 if (old_die)
23504 {
23505 if (declaration)
23506 {
23507 /* A declaration that has been previously dumped needs no
23508 further annotations, since it doesn't need location on
23509 the second pass. */
23510 return;
23511 }
23512 else if (decl_will_get_specification_p (old_die, decl, declaration)
23513 && !get_AT (old_die, DW_AT_specification))
23514 {
23515 /* Fall-thru so we can make a new variable die along with a
23516 DW_AT_specification. */
23517 }
23518 else if (origin && old_die->die_parent != context_die)
23519 {
23520 /* If we will be creating an inlined instance, we need a
23521 new DIE that will get annotated with
23522 DW_AT_abstract_origin. */
23523 gcc_assert (!DECL_ABSTRACT_P (decl));
23524 }
23525 else
23526 {
23527 /* If a DIE was dumped early, it still needs location info.
23528 Skip to where we fill the location bits. */
23529 var_die = old_die;
23530
23531 /* ??? In LTRANS we cannot annotate early created variably
23532 modified type DIEs without copying them and adjusting all
23533 references to them, so we dump them again here. Also add a
23534 reference to them, but beware of a -g0 compile and -g link,
23535 in which case the reference will already be present. */
23536 tree type = TREE_TYPE (decl_or_origin);
23537 if (in_lto_p
23538 && ! get_AT (var_die, DW_AT_type)
23539 && variably_modified_type_p
23540 (type, decl_function_context (decl_or_origin)))
23541 {
23542 if (decl_by_reference_p (decl_or_origin))
23543 add_type_attribute (var_die, TREE_TYPE (type),
23544 TYPE_UNQUALIFIED, false, context_die);
23545 else
23546 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23547 false, context_die);
23548 }
23549
23550 goto gen_variable_die_location;
23551 }
23552 }
23553
23554 /* For static data members, the declaration in the class is supposed
23555 to have the DW_TAG_member tag in DWARF{3,4}, and we emit it for compatibility
23556 also in DWARF2; the specification should still be DW_TAG_variable
23557 referencing the DW_TAG_member DIE. */
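
/* For example (illustrative C++):

     struct S { static int m; };    // in-class declaration
     int S::m;                      // out-of-class definition

   Before DWARF 5 the in-class declaration gets a DW_TAG_member DIE,
   and the definition becomes a DW_TAG_variable whose
   DW_AT_specification points back at that member DIE.  */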
23558 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23559 var_die = new_die (DW_TAG_member, context_die, decl);
23560 else
23561 var_die = new_die (DW_TAG_variable, context_die, decl);
23562
23563 if (origin != NULL)
23564 add_abstract_origin_attribute (var_die, origin);
23565
23566 /* Loop unrolling can create multiple blocks that refer to the same
23567 static variable, so we must test for the DW_AT_declaration flag.
23568
23569 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23570 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23571 sharing them.
23572
23573 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23574 else if (decl_will_get_specification_p (old_die, decl, declaration))
23575 {
23576 /* This is a definition of a C++ class level static. */
23577 add_AT_specification (var_die, old_die);
23578 specialization_p = true;
23579 if (DECL_NAME (decl))
23580 {
23581 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23582 struct dwarf_file_data * file_index = lookup_filename (s.file);
23583
23584 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23585 add_AT_file (var_die, DW_AT_decl_file, file_index);
23586
23587 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23588 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23589
23590 if (debug_column_info
23591 && s.column
23592 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23593 != (unsigned) s.column))
23594 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23595
23596 if (old_die->die_tag == DW_TAG_member)
23597 add_linkage_name (var_die, decl);
23598 }
23599 }
23600 else
23601 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23602
23603 if ((origin == NULL && !specialization_p)
23604 || (origin != NULL
23605 && !DECL_ABSTRACT_P (decl_or_origin)
23606 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23607 decl_function_context
23608 (decl_or_origin))))
23609 {
23610 tree type = TREE_TYPE (decl_or_origin);
23611
23612 if (decl_by_reference_p (decl_or_origin))
23613 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23614 context_die);
23615 else
23616 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23617 context_die);
23618 }
23619
23620 if (origin == NULL && !specialization_p)
23621 {
23622 if (TREE_PUBLIC (decl))
23623 add_AT_flag (var_die, DW_AT_external, 1);
23624
23625 if (DECL_ARTIFICIAL (decl))
23626 add_AT_flag (var_die, DW_AT_artificial, 1);
23627
23628 add_alignment_attribute (var_die, decl);
23629
23630 add_accessibility_attribute (var_die, decl);
23631 }
23632
23633 if (declaration)
23634 add_AT_flag (var_die, DW_AT_declaration, 1);
23635
23636 if (decl && (DECL_ABSTRACT_P (decl)
23637 || !old_die || is_declaration_die (old_die)))
23638 equate_decl_number_to_die (decl, var_die);
23639
23640 gen_variable_die_location:
23641 if (! declaration
23642 && (! DECL_ABSTRACT_P (decl_or_origin)
23643 /* Local static vars are shared between all clones/inlines,
23644 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23645 already set. */
23646 || (VAR_P (decl_or_origin)
23647 && TREE_STATIC (decl_or_origin)
23648 && DECL_RTL_SET_P (decl_or_origin))))
23649 {
23650 if (early_dwarf)
23651 add_pubname (decl_or_origin, var_die);
23652 else
23653 add_location_or_const_value_attribute (var_die, decl_or_origin,
23654 decl == NULL);
23655 }
23656 else
23657 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23658
23659 if ((dwarf_version >= 4 || !dwarf_strict)
23660 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23661 DW_AT_const_expr) == 1
23662 && !get_AT (var_die, DW_AT_const_expr)
23663 && !specialization_p)
23664 add_AT_flag (var_die, DW_AT_const_expr, 1);
23665
23666 if (!dwarf_strict)
23667 {
23668 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23669 DW_AT_inline);
23670 if (inl != -1
23671 && !get_AT (var_die, DW_AT_inline)
23672 && !specialization_p)
23673 add_AT_unsigned (var_die, DW_AT_inline, inl);
23674 }
23675 }
23676
23677 /* Generate a DIE to represent a named constant. */
23678
23679 static void
23680 gen_const_die (tree decl, dw_die_ref context_die)
23681 {
23682 dw_die_ref const_die;
23683 tree type = TREE_TYPE (decl);
23684
23685 const_die = lookup_decl_die (decl);
23686 if (const_die)
23687 return;
23688
23689 const_die = new_die (DW_TAG_constant, context_die, decl);
23690 equate_decl_number_to_die (decl, const_die);
23691 add_name_and_src_coords_attributes (const_die, decl);
23692 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23693 if (TREE_PUBLIC (decl))
23694 add_AT_flag (const_die, DW_AT_external, 1);
23695 if (DECL_ARTIFICIAL (decl))
23696 add_AT_flag (const_die, DW_AT_artificial, 1);
23697 tree_add_const_value_attribute_for_decl (const_die, decl);
23698 }
23699
23700 /* Generate a DIE to represent a label identifier. */
23701
23702 static void
23703 gen_label_die (tree decl, dw_die_ref context_die)
23704 {
23705 tree origin = decl_ultimate_origin (decl);
23706 dw_die_ref lbl_die = lookup_decl_die (decl);
23707 rtx insn;
23708 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23709
23710 if (!lbl_die)
23711 {
23712 lbl_die = new_die (DW_TAG_label, context_die, decl);
23713 equate_decl_number_to_die (decl, lbl_die);
23714
23715 if (origin != NULL)
23716 add_abstract_origin_attribute (lbl_die, origin);
23717 else
23718 add_name_and_src_coords_attributes (lbl_die, decl);
23719 }
23720
23721 if (DECL_ABSTRACT_P (decl))
23722 equate_decl_number_to_die (decl, lbl_die);
23723 else if (! early_dwarf)
23724 {
23725 insn = DECL_RTL_IF_SET (decl);
23726
23727 /* Deleted labels are programmer-specified labels which have been
23728 eliminated because of various optimizations. We still emit them
23729 here so that it is possible to put breakpoints on them. */
23730 if (insn
23731 && (LABEL_P (insn)
23732 || ((NOTE_P (insn)
23733 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23734 {
23735 /* When optimization is enabled (via -O) some parts of the compiler
23736 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23737 represent source-level labels which were explicitly declared by
23738 the user. This really shouldn't be happening though, so catch
23739 it if it ever does happen. */
23740 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23741
23742 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23743 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23744 }
23745 else if (insn
23746 && NOTE_P (insn)
23747 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23748 && CODE_LABEL_NUMBER (insn) != -1)
23749 {
23750 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23751 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23752 }
23753 }
23754 }
23755
23756 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23757 attributes to the DIE for a block STMT, to describe where the inlined
23758 function was called from. This is similar to add_src_coords_attributes. */
23759
23760 static inline void
23761 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23762 {
23763 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23764
23765 if (dwarf_version >= 3 || !dwarf_strict)
23766 {
23767 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23768 add_AT_unsigned (die, DW_AT_call_line, s.line);
23769 if (debug_column_info && s.column)
23770 add_AT_unsigned (die, DW_AT_call_column, s.column);
23771 }
23772 }
23773
23774
23775 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23776 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23777
23778 static inline void
23779 add_high_low_attributes (tree stmt, dw_die_ref die)
23780 {
23781 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23782
23783 if (inline_entry_data **iedp
23784 = !inline_entry_data_table ? NULL
23785 : inline_entry_data_table->find_slot_with_hash (stmt,
23786 htab_hash_pointer (stmt),
23787 NO_INSERT))
23788 {
23789 inline_entry_data *ied = *iedp;
23790 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23791 gcc_assert (debug_inline_points);
23792 gcc_assert (inlined_function_outer_scope_p (stmt));
23793
23794 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23795 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23796
23797 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23798 && !dwarf_strict)
23799 {
23800 if (!output_asm_line_debug_info ())
23801 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23802 else
23803 {
23804 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23805 /* FIXME: this will resolve to a small number. Could we
23806 possibly emit smaller data? Ideally we'd emit a
23807 uleb128, but that would make the size of DIEs
23808 impossible for the compiler to compute, since it's
23809 the assembler that computes the value of the view
23810 label in this case. Ideally, we'd have a single form
23811 encompassing both the address and the view, and
23812 indirecting them through a table might make things
23813 easier, but even that would be more wasteful,
23814 space-wise, than what we have now. */
23815 add_AT_lbl_id (die, DW_AT_GNU_entry_view, label);
23816 }
23817 }
23818
23819 inline_entry_data_table->clear_slot (iedp);
23820 }
23821
23822 if (BLOCK_FRAGMENT_CHAIN (stmt)
23823 && (dwarf_version >= 3 || !dwarf_strict))
23824 {
23825 tree chain, superblock = NULL_TREE;
23826 dw_die_ref pdie;
23827 dw_attr_node *attr = NULL;
23828
23829 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23830 {
23831 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23832 BLOCK_NUMBER (stmt));
23833 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23834 }
23835
23836 /* Optimize duplicate .debug_ranges lists or even tails of
23837 lists. If this BLOCK has the same ranges as its supercontext,
23838 look up the DW_AT_ranges attribute in the supercontext (and
23839 recursively so), verify that the ranges_table contains the
23840 right values, and use it instead of adding a new .debug_ranges entry. */
23841 for (chain = stmt, pdie = die;
23842 BLOCK_SAME_RANGE (chain);
23843 chain = BLOCK_SUPERCONTEXT (chain))
23844 {
23845 dw_attr_node *new_attr;
23846
23847 pdie = pdie->die_parent;
23848 if (pdie == NULL)
23849 break;
23850 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23851 break;
23852 new_attr = get_AT (pdie, DW_AT_ranges);
23853 if (new_attr == NULL
23854 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23855 break;
23856 attr = new_attr;
23857 superblock = BLOCK_SUPERCONTEXT (chain);
23858 }
23859 if (attr != NULL
23860 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23861 == BLOCK_NUMBER (superblock))
23862 && BLOCK_FRAGMENT_CHAIN (superblock))
23863 {
23864 unsigned long off = attr->dw_attr_val.v.val_offset;
23865 unsigned long supercnt = 0, thiscnt = 0;
23866 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23867 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23868 {
23869 ++supercnt;
23870 gcc_checking_assert ((*ranges_table)[off + supercnt].num
23871 == BLOCK_NUMBER (chain));
23872 }
23873 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
23874 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
23875 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23876 ++thiscnt;
23877 gcc_assert (supercnt >= thiscnt);
23878 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
23879 false);
23880 note_rnglist_head (off + supercnt - thiscnt);
23881 return;
23882 }
23883
23884 unsigned int offset = add_ranges (stmt, true);
23885 add_AT_range_list (die, DW_AT_ranges, offset, false);
23886 note_rnglist_head (offset);
23887
23888 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
23889 chain = BLOCK_FRAGMENT_CHAIN (stmt);
23890 do
23891 {
23892 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
23893 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
23894 chain = BLOCK_FRAGMENT_CHAIN (chain);
23895 }
23896 while (chain);
23897 add_ranges (NULL);
23898 }
23899 else
23900 {
23901 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
23902 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23903 BLOCK_NUMBER (stmt));
23904 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
23905 BLOCK_NUMBER (stmt));
23906 add_AT_low_high_pc (die, label, label_high, false);
23907 }
23908 }
23909
23910 /* Generate a DIE for a lexical block. */
23911
23912 static void
23913 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
23914 {
23915 dw_die_ref old_die = BLOCK_DIE (stmt);
23916 dw_die_ref stmt_die = NULL;
23917 if (!old_die)
23918 {
23919 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23920 BLOCK_DIE (stmt) = stmt_die;
23921 }
23922
23923 if (BLOCK_ABSTRACT (stmt))
23924 {
23925 if (old_die)
23926 {
23927 /* This must have been generated early and it won't even
23928 need location information since it's a DW_AT_inline
23929 function. */
23930 if (flag_checking)
23931 for (dw_die_ref c = context_die; c; c = c->die_parent)
23932 if (c->die_tag == DW_TAG_inlined_subroutine
23933 || c->die_tag == DW_TAG_subprogram)
23934 {
23935 gcc_assert (get_AT (c, DW_AT_inline));
23936 break;
23937 }
23938 return;
23939 }
23940 }
23941 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
23942 {
23943 /* If this is an inlined instance, create a new lexical block DIE for
23944 anything below to attach DW_AT_abstract_origin to. */
23945 if (old_die)
23946 {
23947 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
23948 BLOCK_DIE (stmt) = stmt_die;
23949 old_die = NULL;
23950 }
23951
23952 tree origin = block_ultimate_origin (stmt);
23953 if (origin != NULL_TREE && origin != stmt)
23954 add_abstract_origin_attribute (stmt_die, origin);
23955 }
23956
23957 if (old_die)
23958 stmt_die = old_die;
23959
23960 /* A non-abstract block whose blocks have already been reordered
23961 should have the instruction range for this block. If so, set the
23962 high/low attributes. */
23963 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
23964 {
23965 gcc_assert (stmt_die);
23966 add_high_low_attributes (stmt, stmt_die);
23967 }
23968
23969 decls_for_scope (stmt, stmt_die);
23970 }
23971
23972 /* Generate a DIE for an inlined subprogram. */
23973
23974 static void
23975 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
23976 {
23977 tree decl;
23978
23979 /* The instance of the function that is effectively being inlined must not
23980 be abstract. */
23981 gcc_assert (! BLOCK_ABSTRACT (stmt));
23982
23983 decl = block_ultimate_origin (stmt);
23984
23985 /* Make sure any inlined functions are known to be inlineable. */
23986 gcc_checking_assert (DECL_ABSTRACT_P (decl)
23987 || cgraph_function_possibly_inlined_p (decl));
23988
23989 if (! BLOCK_ABSTRACT (stmt))
23990 {
23991 dw_die_ref subr_die
23992 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
23993
23994 if (call_arg_locations || debug_inline_points)
23995 BLOCK_DIE (stmt) = subr_die;
23996 add_abstract_origin_attribute (subr_die, decl);
23997 if (TREE_ASM_WRITTEN (stmt))
23998 add_high_low_attributes (stmt, subr_die);
23999 add_call_src_coords_attributes (stmt, subr_die);
24000
24001 decls_for_scope (stmt, subr_die);
24002 }
24003 }
24004
24005 /* Generate a DIE for a field in a record or structure. CTX is required: see
24006 the comment for VLR_CONTEXT. */
24007
24008 static void
24009 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24010 {
24011 dw_die_ref decl_die;
24012
24013 if (TREE_TYPE (decl) == error_mark_node)
24014 return;
24015
24016 decl_die = new_die (DW_TAG_member, context_die, decl);
24017 add_name_and_src_coords_attributes (decl_die, decl);
24018 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24019 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24020 context_die);
24021
24022 if (DECL_BIT_FIELD_TYPE (decl))
24023 {
24024 add_byte_size_attribute (decl_die, decl);
24025 add_bit_size_attribute (decl_die, decl);
24026 add_bit_offset_attribute (decl_die, decl, ctx);
24027 }
24028
24029 add_alignment_attribute (decl_die, decl);
24030
24031 /* If we have a variant part offset, then we are supposed to process a member
24032 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24033 trees. */
24034 gcc_assert (ctx->variant_part_offset == NULL_TREE
24035 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24036 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24037 add_data_member_location_attribute (decl_die, decl, ctx);
24038
24039 if (DECL_ARTIFICIAL (decl))
24040 add_AT_flag (decl_die, DW_AT_artificial, 1);
24041
24042 add_accessibility_attribute (decl_die, decl);
24043
24044 /* Equate decl number to die, so that we can look up this decl later on. */
24045 equate_decl_number_to_die (decl, decl_die);
24046 }
24047
24048 /* Generate a DIE for a pointer to a member type. TYPE can be an
24049 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24050 pointer to member function. */
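
/* For example (illustrative C++):

     struct S;
     int S::*pdm;           // OFFSET_TYPE: pointer to data member
     void (S::*pmf) (int);  // RECORD_TYPE: pointer to member function

   Both map to DW_TAG_ptr_to_member_type DIEs whose DW_AT_containing_type
   refers to S.  */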
24051
24052 static void
24053 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24054 {
24055 if (lookup_type_die (type))
24056 return;
24057
24058 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24059 scope_die_for (type, context_die), type);
24060
24061 equate_type_number_to_die (type, ptr_die);
24062 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24063 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24064 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24065 context_die);
24066 add_alignment_attribute (ptr_die, type);
24067
24068 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24069 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24070 {
24071 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24072 add_AT_loc (ptr_die, DW_AT_use_location, op);
24073 }
24074 }
24075
24076 static char *producer_string;
24077
24078 /* Return a heap-allocated producer string including command-line options
24079    if -grecord-gcc-switches is in effect. */
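/* An illustrative (not exact) result with -grecord-gcc-switches might look
   like:
     "GNU C11 8.0.1 -march=x86-64 -g -O2"
   i.e. lang_hooks.name, version_string and then the recorded options; without
   -grecord-gcc-switches only the language and version parts are emitted.  */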
24080
24081 static char *
24082 gen_producer_string (void)
24083 {
24084 size_t j;
24085 auto_vec<const char *> switches;
24086 const char *language_string = lang_hooks.name;
24087 char *producer, *tail;
24088 const char *p;
24089 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24090 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24091
24092 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24093 switch (save_decoded_options[j].opt_index)
24094 {
24095 case OPT_o:
24096 case OPT_d:
24097 case OPT_dumpbase:
24098 case OPT_dumpdir:
24099 case OPT_auxbase:
24100 case OPT_auxbase_strip:
24101 case OPT_quiet:
24102 case OPT_version:
24103 case OPT_v:
24104 case OPT_w:
24105 case OPT_L:
24106 case OPT_D:
24107 case OPT_I:
24108 case OPT_U:
24109 case OPT_SPECIAL_unknown:
24110 case OPT_SPECIAL_ignore:
24111 case OPT_SPECIAL_program_name:
24112 case OPT_SPECIAL_input_file:
24113 case OPT_grecord_gcc_switches:
24114 case OPT__output_pch_:
24115 case OPT_fdiagnostics_show_location_:
24116 case OPT_fdiagnostics_show_option:
24117 case OPT_fdiagnostics_show_caret:
24118 case OPT_fdiagnostics_color_:
24119 case OPT_fverbose_asm:
24120 case OPT____:
24121 case OPT__sysroot_:
24122 case OPT_nostdinc:
24123 case OPT_nostdinc__:
24124 case OPT_fpreprocessed:
24125 case OPT_fltrans_output_list_:
24126 case OPT_fresolution_:
24127 case OPT_fdebug_prefix_map_:
24128 case OPT_fmacro_prefix_map_:
24129 case OPT_ffile_prefix_map_:
24130 case OPT_fcompare_debug:
24131 /* Ignore these. */
24132 continue;
24133 default:
24134 if (cl_options[save_decoded_options[j].opt_index].flags
24135 & CL_NO_DWARF_RECORD)
24136 continue;
24137 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24138 == '-');
24139 switch (save_decoded_options[j].canonical_option[0][1])
24140 {
24141 case 'M':
24142 case 'i':
24143 case 'W':
24144 continue;
24145 case 'f':
24146 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24147 "dump", 4) == 0)
24148 continue;
24149 break;
24150 default:
24151 break;
24152 }
24153 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24154 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24155 break;
24156 }
24157
24158 producer = XNEWVEC (char, plen + 1 + len + 1);
24159 tail = producer;
24160 sprintf (tail, "%s %s", language_string, version_string);
24161 tail += plen;
24162
24163 FOR_EACH_VEC_ELT (switches, j, p)
24164 {
24165 len = strlen (p);
24166 *tail = ' ';
24167 memcpy (tail + 1, p, len);
24168 tail += len + 1;
24169 }
24170
24171 *tail = '\0';
24172 return producer;
24173 }
24174
24175 /* Given a C and/or C++ language/version string return the "highest".
24176 C++ is assumed to be "higher" than C in this case. Used for merging
24177 LTO translation unit languages. */
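/* E.g. highest_c_language ("GNU C11", "GNU C++14") returns "GNU C++14".  */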
24178 static const char *
24179 highest_c_language (const char *lang1, const char *lang2)
24180 {
24181 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24182 return "GNU C++17";
24183 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24184 return "GNU C++14";
24185 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24186 return "GNU C++11";
24187 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24188 return "GNU C++98";
24189
24190 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24191 return "GNU C17";
24192 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24193 return "GNU C11";
24194 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24195 return "GNU C99";
24196 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24197 return "GNU C89";
24198
24199 gcc_unreachable ();
24200 }
24201
24202
24203 /* Generate the DIE for the compilation unit. */
24204
24205 static dw_die_ref
24206 gen_compile_unit_die (const char *filename)
24207 {
24208 dw_die_ref die;
24209 const char *language_string = lang_hooks.name;
24210 int language;
24211
24212 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24213
24214 if (filename)
24215 {
24216 add_name_attribute (die, filename);
24217 /* Don't add cwd for <built-in>. */
24218 if (filename[0] != '<')
24219 add_comp_dir_attribute (die);
24220 }
24221
24222 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24223
24224   /* If our producer is LTO, try to figure out a common language to use
24225 from the global list of translation units. */
24226 if (strcmp (language_string, "GNU GIMPLE") == 0)
24227 {
24228 unsigned i;
24229 tree t;
24230 const char *common_lang = NULL;
24231
24232 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24233 {
24234 if (!TRANSLATION_UNIT_LANGUAGE (t))
24235 continue;
24236 if (!common_lang)
24237 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24238 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24239 ;
24240 else if (strncmp (common_lang, "GNU C", 5) == 0
24241 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24242 /* Mixing C and C++ is ok, use C++ in that case. */
24243 common_lang = highest_c_language (common_lang,
24244 TRANSLATION_UNIT_LANGUAGE (t));
24245 else
24246 {
24247 /* Fall back to C. */
24248 common_lang = NULL;
24249 break;
24250 }
24251 }
24252
24253 if (common_lang)
24254 language_string = common_lang;
24255 }
24256
24257 language = DW_LANG_C;
24258 if (strncmp (language_string, "GNU C", 5) == 0
24259 && ISDIGIT (language_string[5]))
24260 {
24261 language = DW_LANG_C89;
24262 if (dwarf_version >= 3 || !dwarf_strict)
24263 {
24264 if (strcmp (language_string, "GNU C89") != 0)
24265 language = DW_LANG_C99;
24266
24267 if (dwarf_version >= 5 /* || !dwarf_strict */)
24268 if (strcmp (language_string, "GNU C11") == 0
24269 || strcmp (language_string, "GNU C17") == 0)
24270 language = DW_LANG_C11;
24271 }
24272 }
24273 else if (strncmp (language_string, "GNU C++", 7) == 0)
24274 {
24275 language = DW_LANG_C_plus_plus;
24276 if (dwarf_version >= 5 /* || !dwarf_strict */)
24277 {
24278 if (strcmp (language_string, "GNU C++11") == 0)
24279 language = DW_LANG_C_plus_plus_11;
24280 else if (strcmp (language_string, "GNU C++14") == 0)
24281 language = DW_LANG_C_plus_plus_14;
24282 else if (strcmp (language_string, "GNU C++17") == 0)
24283 /* For now. */
24284 language = DW_LANG_C_plus_plus_14;
24285 }
24286 }
24287 else if (strcmp (language_string, "GNU F77") == 0)
24288 language = DW_LANG_Fortran77;
24289 else if (dwarf_version >= 3 || !dwarf_strict)
24290 {
24291 if (strcmp (language_string, "GNU Ada") == 0)
24292 language = DW_LANG_Ada95;
24293 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24294 {
24295 language = DW_LANG_Fortran95;
24296 if (dwarf_version >= 5 /* || !dwarf_strict */)
24297 {
24298 if (strcmp (language_string, "GNU Fortran2003") == 0)
24299 language = DW_LANG_Fortran03;
24300 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24301 language = DW_LANG_Fortran08;
24302 }
24303 }
24304 else if (strcmp (language_string, "GNU Objective-C") == 0)
24305 language = DW_LANG_ObjC;
24306 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24307 language = DW_LANG_ObjC_plus_plus;
24308 else if (dwarf_version >= 5 || !dwarf_strict)
24309 {
24310 if (strcmp (language_string, "GNU Go") == 0)
24311 language = DW_LANG_Go;
24312 }
24313 }
24314 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24315 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24316 language = DW_LANG_Fortran90;
24317
24318 add_AT_unsigned (die, DW_AT_language, language);
24319
24320 switch (language)
24321 {
24322 case DW_LANG_Fortran77:
24323 case DW_LANG_Fortran90:
24324 case DW_LANG_Fortran95:
24325 case DW_LANG_Fortran03:
24326 case DW_LANG_Fortran08:
24327       /* Fortran has case-insensitive identifiers and the front end
24328 lowercases everything. */
24329 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24330 break;
24331 default:
24332 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24333 break;
24334 }
24335 return die;
24336 }
24337
24338 /* Generate the DIE for a base class. */
24339
24340 static void
24341 gen_inheritance_die (tree binfo, tree access, tree type,
24342 dw_die_ref context_die)
24343 {
24344 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24345 struct vlr_context ctx = { type, NULL };
24346
24347 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24348 context_die);
24349 add_data_member_location_attribute (die, binfo, &ctx);
24350
24351 if (BINFO_VIRTUAL_P (binfo))
24352 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24353
24354 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24355 children, otherwise the default is DW_ACCESS_public. In DWARF2
24356 the default has always been DW_ACCESS_private. */
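  /* Consequently, e.g. a public base in a DWARF 3+ structure (non-class)
     context gets no DW_AT_accessibility at all, since public is already the
     default there.  */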
24357 if (access == access_public_node)
24358 {
24359 if (dwarf_version == 2
24360 || context_die->die_tag == DW_TAG_class_type)
24361 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24362 }
24363 else if (access == access_protected_node)
24364 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24365 else if (dwarf_version > 2
24366 && context_die->die_tag != DW_TAG_class_type)
24367 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24368 }
24369
24370 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24371 structure. */
24372 static bool
24373 is_variant_part (tree decl)
24374 {
24375 return (TREE_CODE (decl) == FIELD_DECL
24376 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24377 }
24378
24379 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24380 return the FIELD_DECL. Return NULL_TREE otherwise. */
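/* For instance, an operand of the form
     (COMPONENT_REF (PLACEHOLDER_EXPR <struct_type>) <field>)
   possibly wrapped in conversions, yields the FIELD_DECL <field>.  */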
24381
24382 static tree
24383 analyze_discr_in_predicate (tree operand, tree struct_type)
24384 {
24385 bool continue_stripping = true;
24386 while (continue_stripping)
24387 switch (TREE_CODE (operand))
24388 {
24389 CASE_CONVERT:
24390 operand = TREE_OPERAND (operand, 0);
24391 break;
24392 default:
24393 continue_stripping = false;
24394 break;
24395 }
24396
24397 /* Match field access to members of struct_type only. */
24398 if (TREE_CODE (operand) == COMPONENT_REF
24399 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24400 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24401 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24402 return TREE_OPERAND (operand, 1);
24403 else
24404 return NULL_TREE;
24405 }
24406
24407 /* Check that SRC is a constant integer that can be represented as a native
24408 integer constant (either signed or unsigned). If so, store it into DEST and
24409 return true. Return false otherwise. */
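/* For example, a signed INTEGER_CST of value 3 yields pos == 0 and
   v.sval == 3 in *DEST, while an unsigned one yields pos == 1 and
   v.uval == 3 (modulo the debug-type signedness adjustment below).  */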
24410
24411 static bool
24412 get_discr_value (tree src, dw_discr_value *dest)
24413 {
24414 tree discr_type = TREE_TYPE (src);
24415
24416 if (lang_hooks.types.get_debug_type)
24417 {
24418 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24419 if (debug_type != NULL)
24420 discr_type = debug_type;
24421 }
24422
24423 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24424 return false;
24425
24426 /* Signedness can vary between the original type and the debug type. This
24427 can happen for character types in Ada for instance: the character type
24428 used for code generation can be signed, to be compatible with the C one,
24429 but from a debugger point of view, it must be unsigned. */
24430 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24431 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24432
24433 if (is_orig_unsigned != is_debug_unsigned)
24434 src = fold_convert (discr_type, src);
24435
24436 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24437 return false;
24438
24439 dest->pos = is_debug_unsigned;
24440 if (is_debug_unsigned)
24441 dest->v.uval = tree_to_uhwi (src);
24442 else
24443 dest->v.sval = tree_to_shwi (src);
24444
24445 return true;
24446 }
24447
24448 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24449 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24450 store NULL_TREE in DISCR_DECL. Otherwise:
24451
24452 - store the discriminant field in STRUCT_TYPE that controls the variant
24453 part to *DISCR_DECL
24454
24455 - put in *DISCR_LISTS_P an array where for each variant, the item
24456 represents the corresponding matching list of discriminant values.
24457
24458 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24459 the above array.
24460
24461 Note that when the array is allocated (i.e. when the analysis is
24462 successful), it is up to the caller to free the array. */
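/* Illustrative sketch (hypothetical Ada-like variant record):

     case K is
       when 1 .. 4  => ...   --  DECL_QUALIFIER roughly K >= 1 && K <= 4
       when others  => ...   --  DECL_QUALIFIER typically boolean_true_node

   On success this would yield *DISCR_DECL = the FIELD_DECL for K, a first
   discriminant list holding the single range [1, 4], and a NULL list for
   the default variant.  */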
24463
24464 static void
24465 analyze_variants_discr (tree variant_part_decl,
24466 tree struct_type,
24467 tree *discr_decl,
24468 dw_discr_list_ref **discr_lists_p,
24469 unsigned *discr_lists_length)
24470 {
24471 tree variant_part_type = TREE_TYPE (variant_part_decl);
24472 tree variant;
24473 dw_discr_list_ref *discr_lists;
24474 unsigned i;
24475
24476 /* Compute how many variants there are in this variant part. */
24477 *discr_lists_length = 0;
24478 for (variant = TYPE_FIELDS (variant_part_type);
24479 variant != NULL_TREE;
24480 variant = DECL_CHAIN (variant))
24481 ++*discr_lists_length;
24482
24483 *discr_decl = NULL_TREE;
24484 *discr_lists_p
24485 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24486 sizeof (**discr_lists_p));
24487 discr_lists = *discr_lists_p;
24488
24489 /* And then analyze all variants to extract discriminant information for all
24490 of them. This analysis is conservative: as soon as we detect something we
24491 do not support, abort everything and pretend we found nothing. */
24492 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24493 variant != NULL_TREE;
24494 variant = DECL_CHAIN (variant), ++i)
24495 {
24496 tree match_expr = DECL_QUALIFIER (variant);
24497
24498 /* Now, try to analyze the predicate and deduce a discriminant for
24499 it. */
24500 if (match_expr == boolean_true_node)
24501 /* Typically happens for the default variant: it matches all cases that
24502 previous variants rejected. Don't output any matching value for
24503 this one. */
24504 continue;
24505
24506 /* The following loop tries to iterate over each discriminant
24507 possibility: single values or ranges. */
24508 while (match_expr != NULL_TREE)
24509 {
24510 tree next_round_match_expr;
24511 tree candidate_discr = NULL_TREE;
24512 dw_discr_list_ref new_node = NULL;
24513
24514 /* Possibilities are matched one after the other by nested
24515 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24516 continue with the rest at next iteration. */
24517 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24518 {
24519 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24520 match_expr = TREE_OPERAND (match_expr, 1);
24521 }
24522 else
24523 next_round_match_expr = NULL_TREE;
24524
24525 if (match_expr == boolean_false_node)
24526 /* This sub-expression matches nothing: just wait for the next
24527 one. */
24528 ;
24529
24530 else if (TREE_CODE (match_expr) == EQ_EXPR)
24531 {
24532 /* We are matching: <discr_field> == <integer_cst>
24533 This sub-expression matches a single value. */
24534 tree integer_cst = TREE_OPERAND (match_expr, 1);
24535
24536 candidate_discr
24537 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24538 struct_type);
24539
24540 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24541 if (!get_discr_value (integer_cst,
24542 &new_node->dw_discr_lower_bound))
24543 goto abort;
24544 new_node->dw_discr_range = false;
24545 }
24546
24547 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24548 {
24549 /* We are matching:
24550 <discr_field> > <integer_cst>
24551 && <discr_field> < <integer_cst>.
24552 This sub-expression matches the range of values between the
24553 two matched integer constants. Note that comparisons can be
24554 inclusive or exclusive. */
24555 tree candidate_discr_1, candidate_discr_2;
24556 tree lower_cst, upper_cst;
24557 bool lower_cst_included, upper_cst_included;
24558 tree lower_op = TREE_OPERAND (match_expr, 0);
24559 tree upper_op = TREE_OPERAND (match_expr, 1);
24560
24561 /* When the comparison is exclusive, the integer constant is not
24562 the discriminant range bound we are looking for: we will have
24563 to increment or decrement it. */
24564 if (TREE_CODE (lower_op) == GE_EXPR)
24565 lower_cst_included = true;
24566 else if (TREE_CODE (lower_op) == GT_EXPR)
24567 lower_cst_included = false;
24568 else
24569 goto abort;
24570
24571 if (TREE_CODE (upper_op) == LE_EXPR)
24572 upper_cst_included = true;
24573 else if (TREE_CODE (upper_op) == LT_EXPR)
24574 upper_cst_included = false;
24575 else
24576 goto abort;
24577
24578 /* Extract the discriminant from the first operand and check it
24579 is consistent with the same analysis in the second
24580 operand. */
24581 candidate_discr_1
24582 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24583 struct_type);
24584 candidate_discr_2
24585 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24586 struct_type);
24587 if (candidate_discr_1 == candidate_discr_2)
24588 candidate_discr = candidate_discr_1;
24589 else
24590 goto abort;
24591
24592 /* Extract bounds from both. */
24593 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24594 lower_cst = TREE_OPERAND (lower_op, 1);
24595 upper_cst = TREE_OPERAND (upper_op, 1);
24596
24597 if (!lower_cst_included)
24598 lower_cst
24599 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24600 build_int_cst (TREE_TYPE (lower_cst), 1));
24601 if (!upper_cst_included)
24602 upper_cst
24603 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24604 build_int_cst (TREE_TYPE (upper_cst), 1));
24605
24606 if (!get_discr_value (lower_cst,
24607 &new_node->dw_discr_lower_bound)
24608 || !get_discr_value (upper_cst,
24609 &new_node->dw_discr_upper_bound))
24610 goto abort;
24611
24612 new_node->dw_discr_range = true;
24613 }
24614
24615 else
24616 /* Unsupported sub-expression: we cannot determine the set of
24617 matching discriminant values. Abort everything. */
24618 goto abort;
24619
24620   /* If the discriminant info is not consistent with what we saw so
24621 far, consider the analysis failed and abort everything. */
24622 if (candidate_discr == NULL_TREE
24623 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24624 goto abort;
24625 else
24626 *discr_decl = candidate_discr;
24627
24628 if (new_node != NULL)
24629 {
24630 new_node->dw_discr_next = discr_lists[i];
24631 discr_lists[i] = new_node;
24632 }
24633 match_expr = next_round_match_expr;
24634 }
24635 }
24636
24637 /* If we reach this point, we could match everything we were interested
24638 in. */
24639 return;
24640
24641 abort:
24642   /* Clean up all data structures and return no result. */
24643 free (*discr_lists_p);
24644 *discr_lists_p = NULL;
24645 *discr_decl = NULL_TREE;
24646 }
24647
24648 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24649 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24650 under CONTEXT_DIE.
24651
24652 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24653 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24654 this type, which are record types, represent the available variants and each
24655 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24656 values are inferred from these attributes.
24657
24658 In trees, the offsets for the fields inside these sub-records are relative
24659 to the variant part itself, whereas the corresponding DIEs should have
24660 offset attributes that are relative to the embedding record base address.
24661 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24662 must be an expression that computes the offset of the variant part to
24663 describe in DWARF. */
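/* Roughly, the emitted DIE tree looks like this (sketch, most attributes
   elided):

     DW_TAG_variant_part
       DW_AT_discr -> DIE of the discriminant field, when known
       DW_TAG_variant
         DW_AT_discr_value or DW_AT_discr_list (none for the default variant)
         <DIEs for the members of this variant>
       DW_TAG_variant
         ...  */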
24664
24665 static void
24666 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24667 dw_die_ref context_die)
24668 {
24669 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24670 tree variant_part_offset = vlr_ctx->variant_part_offset;
24671 struct loc_descr_context ctx = {
24672 vlr_ctx->struct_type, /* context_type */
24673 NULL_TREE, /* base_decl */
24674 NULL, /* dpi */
24675 false, /* placeholder_arg */
24676 false /* placeholder_seen */
24677 };
24678
24679 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24680 NULL_TREE if there is no such field. */
24681 tree discr_decl = NULL_TREE;
24682 dw_discr_list_ref *discr_lists;
24683 unsigned discr_lists_length = 0;
24684 unsigned i;
24685
24686 dw_die_ref dwarf_proc_die = NULL;
24687 dw_die_ref variant_part_die
24688 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24689
24690 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24691
24692 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24693 &discr_decl, &discr_lists, &discr_lists_length);
24694
24695 if (discr_decl != NULL_TREE)
24696 {
24697 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24698
24699 if (discr_die)
24700 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24701 else
24702 /* We have no DIE for the discriminant, so just discard all
24703 discriminant information in the output. */
24704 discr_decl = NULL_TREE;
24705 }
24706
24707 /* If the offset for this variant part is more complex than a constant,
24708 create a DWARF procedure for it so that we will not have to generate DWARF
24709 expressions for it for each member. */
24710 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24711 && (dwarf_version >= 3 || !dwarf_strict))
24712 {
24713 const tree dwarf_proc_fndecl
24714 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24715 build_function_type (TREE_TYPE (variant_part_offset),
24716 NULL_TREE));
24717 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24718 const dw_loc_descr_ref dwarf_proc_body
24719 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24720
24721 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24722 dwarf_proc_fndecl, context_die);
24723 if (dwarf_proc_die != NULL)
24724 variant_part_offset = dwarf_proc_call;
24725 }
24726
24727 /* Output DIEs for all variants. */
24728 i = 0;
24729 for (tree variant = TYPE_FIELDS (variant_part_type);
24730 variant != NULL_TREE;
24731 variant = DECL_CHAIN (variant), ++i)
24732 {
24733 tree variant_type = TREE_TYPE (variant);
24734 dw_die_ref variant_die;
24735
24736 /* All variants (i.e. members of a variant part) are supposed to be
24737 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24738 under these records. */
24739 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24740
24741 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24742 equate_decl_number_to_die (variant, variant_die);
24743
24744 /* Output discriminant values this variant matches, if any. */
24745 if (discr_decl == NULL || discr_lists[i] == NULL)
24746 /* In the case we have no discriminant information at all, this is
24747 probably the default variant: as the standard says, don't
24748 output any discriminant value/list attribute. */
24749 ;
24750 else if (discr_lists[i]->dw_discr_next == NULL
24751 && !discr_lists[i]->dw_discr_range)
24752 /* If there is only one accepted value, don't bother outputting a
24753 list. */
24754 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24755 else
24756 add_discr_list (variant_die, discr_lists[i]);
24757
24758 for (tree member = TYPE_FIELDS (variant_type);
24759 member != NULL_TREE;
24760 member = DECL_CHAIN (member))
24761 {
24762 struct vlr_context vlr_sub_ctx = {
24763 vlr_ctx->struct_type, /* struct_type */
24764 NULL /* variant_part_offset */
24765 };
24766 if (is_variant_part (member))
24767 {
24768 /* All offsets for fields inside variant parts are relative to
24769 the top-level embedding RECORD_TYPE's base address. On the
24770 other hand, offsets in GCC's types are relative to the
24771 nested-most variant part. So we have to sum offsets each time
24772 we recurse. */
24773
24774 vlr_sub_ctx.variant_part_offset
24775 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24776 variant_part_offset, byte_position (member));
24777 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24778 }
24779 else
24780 {
24781 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24782 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24783 }
24784 }
24785 }
24786
24787 free (discr_lists);
24788 }
24789
24790 /* Generate a DIE for a class member. */
24791
24792 static void
24793 gen_member_die (tree type, dw_die_ref context_die)
24794 {
24795 tree member;
24796 tree binfo = TYPE_BINFO (type);
24797
24798 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24799
24800 /* If this is not an incomplete type, output descriptions of each of its
24801 members. Note that as we output the DIEs necessary to represent the
24802 members of this record or union type, we will also be trying to output
24803 DIEs to represent the *types* of those members. However the `type'
24804 function (above) will specifically avoid generating type DIEs for member
24805 types *within* the list of member DIEs for this (containing) type except
24806 for those types (of members) which are explicitly marked as also being
24807      members of this (containing) type themselves.  The g++ front end can
24808 force any given type to be treated as a member of some other (containing)
24809 type by setting the TYPE_CONTEXT of the given (member) type to point to
24810 the TREE node representing the appropriate (containing) type. */
24811
24812 /* First output info about the base classes. */
24813 if (binfo)
24814 {
24815 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24816 int i;
24817 tree base;
24818
24819 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24820 gen_inheritance_die (base,
24821 (accesses ? (*accesses)[i] : access_public_node),
24822 type,
24823 context_die);
24824 }
24825
24826 /* Now output info about the data members and type members. */
24827 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24828 {
24829 struct vlr_context vlr_ctx = { type, NULL_TREE };
24830 bool static_inline_p
24831 = (TREE_STATIC (member)
24832 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24833 != -1));
24834
24835 /* Ignore clones. */
24836 if (DECL_ABSTRACT_ORIGIN (member))
24837 continue;
24838
24839 /* If we thought we were generating minimal debug info for TYPE
24840 and then changed our minds, some of the member declarations
24841 may have already been defined. Don't define them again, but
24842 do put them in the right order. */
24843
24844 if (dw_die_ref child = lookup_decl_die (member))
24845 {
24846 /* Handle inline static data members, which only have in-class
24847 declarations. */
24848 dw_die_ref ref = NULL;
24849 if (child->die_tag == DW_TAG_variable
24850 && child->die_parent == comp_unit_die ())
24851 {
24852 ref = get_AT_ref (child, DW_AT_specification);
24853 /* For C++17 inline static data members followed by redundant
24854 out of class redeclaration, we might get here with
24855 child being the DIE created for the out of class
24856 redeclaration and with its DW_AT_specification being
24857 the DIE created for in-class definition. We want to
24858 reparent the latter, and don't want to create another
24859 DIE with DW_AT_specification in that case, because
24860 we already have one. */
24861 if (ref
24862 && static_inline_p
24863 && ref->die_tag == DW_TAG_variable
24864 && ref->die_parent == comp_unit_die ()
24865 && get_AT (ref, DW_AT_specification) == NULL)
24866 {
24867 child = ref;
24868 ref = NULL;
24869 static_inline_p = false;
24870 }
24871 }
24872
24873 if (child->die_tag == DW_TAG_variable
24874 && child->die_parent == comp_unit_die ()
24875 && ref == NULL)
24876 {
24877 reparent_child (child, context_die);
24878 if (dwarf_version < 5)
24879 child->die_tag = DW_TAG_member;
24880 }
24881 else
24882 splice_child_die (context_die, child);
24883 }
24884
24885 /* Do not generate standard DWARF for variant parts if we are generating
24886 the corresponding GNAT encodings: DIEs generated for both would
24887 conflict in our mappings. */
24888 else if (is_variant_part (member)
24889 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
24890 {
24891 vlr_ctx.variant_part_offset = byte_position (member);
24892 gen_variant_part (member, &vlr_ctx, context_die);
24893 }
24894 else
24895 {
24896 vlr_ctx.variant_part_offset = NULL_TREE;
24897 gen_decl_die (member, NULL, &vlr_ctx, context_die);
24898 }
24899
24900 /* For C++ inline static data members emit immediately a DW_TAG_variable
24901 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
24902 DW_AT_specification. */
24903 if (static_inline_p)
24904 {
24905 int old_extern = DECL_EXTERNAL (member);
24906 DECL_EXTERNAL (member) = 0;
24907 gen_decl_die (member, NULL, NULL, comp_unit_die ());
24908 DECL_EXTERNAL (member) = old_extern;
24909 }
24910 }
24911 }
24912
24913 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
24914 is set, we pretend that the type was never defined, so we only get the
24915 member DIEs needed by later specification DIEs. */
24916
24917 static void
24918 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
24919 enum debug_info_usage usage)
24920 {
24921 if (TREE_ASM_WRITTEN (type))
24922 {
24923       /* Fill in the bounds of variable-length fields in late dwarf if
24924 still incomplete. */
24925 if (!early_dwarf && variably_modified_type_p (type, NULL))
24926 for (tree member = TYPE_FIELDS (type);
24927 member;
24928 member = DECL_CHAIN (member))
24929 fill_variable_array_bounds (TREE_TYPE (member));
24930 return;
24931 }
24932
24933 dw_die_ref type_die = lookup_type_die (type);
24934 dw_die_ref scope_die = 0;
24935 int nested = 0;
24936 int complete = (TYPE_SIZE (type)
24937 && (! TYPE_STUB_DECL (type)
24938 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
24939 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
24940 complete = complete && should_emit_struct_debug (type, usage);
24941
24942 if (type_die && ! complete)
24943 return;
24944
24945 if (TYPE_CONTEXT (type) != NULL_TREE
24946 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
24947 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
24948 nested = 1;
24949
24950 scope_die = scope_die_for (type, context_die);
24951
24952   /* Generate child DIEs for template parameters. */
24953 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
24954 schedule_generic_params_dies_gen (type);
24955
24956 if (! type_die || (nested && is_cu_die (scope_die)))
24957 /* First occurrence of type or toplevel definition of nested class. */
24958 {
24959 dw_die_ref old_die = type_die;
24960
24961 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
24962 ? record_type_tag (type) : DW_TAG_union_type,
24963 scope_die, type);
24964 equate_type_number_to_die (type, type_die);
24965 if (old_die)
24966 add_AT_specification (type_die, old_die);
24967 else
24968 add_name_attribute (type_die, type_tag (type));
24969 }
24970 else
24971 remove_AT (type_die, DW_AT_declaration);
24972
24973 /* If this type has been completed, then give it a byte_size attribute and
24974 then give a list of members. */
24975 if (complete && !ns_decl)
24976 {
24977 /* Prevent infinite recursion in cases where the type of some member of
24978 this type is expressed in terms of this type itself. */
24979 TREE_ASM_WRITTEN (type) = 1;
24980 add_byte_size_attribute (type_die, type);
24981 add_alignment_attribute (type_die, type);
24982 if (TYPE_STUB_DECL (type) != NULL_TREE)
24983 {
24984 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
24985 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
24986 }
24987
24988 /* If the first reference to this type was as the return type of an
24989 inline function, then it may not have a parent. Fix this now. */
24990 if (type_die->die_parent == NULL)
24991 add_child_die (scope_die, type_die);
24992
24993 push_decl_scope (type);
24994 gen_member_die (type, type_die);
24995 pop_decl_scope ();
24996
24997 add_gnat_descriptive_type_attribute (type_die, type, context_die);
24998 if (TYPE_ARTIFICIAL (type))
24999 add_AT_flag (type_die, DW_AT_artificial, 1);
25000
25001 /* GNU extension: Record what type our vtable lives in. */
25002 if (TYPE_VFIELD (type))
25003 {
25004 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25005
25006 gen_type_die (vtype, context_die);
25007 add_AT_die_ref (type_die, DW_AT_containing_type,
25008 lookup_type_die (vtype));
25009 }
25010 }
25011 else
25012 {
25013 add_AT_flag (type_die, DW_AT_declaration, 1);
25014
25015 /* We don't need to do this for function-local types. */
25016 if (TYPE_STUB_DECL (type)
25017 && ! decl_function_context (TYPE_STUB_DECL (type)))
25018 vec_safe_push (incomplete_types, type);
25019 }
25020
25021 if (get_AT (type_die, DW_AT_name))
25022 add_pubtype (type, type_die);
25023 }
25024
25025 /* Generate a DIE for a subroutine _type_. */
25026
25027 static void
25028 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25029 {
25030 tree return_type = TREE_TYPE (type);
25031 dw_die_ref subr_die
25032 = new_die (DW_TAG_subroutine_type,
25033 scope_die_for (type, context_die), type);
25034
25035 equate_type_number_to_die (type, subr_die);
25036 add_prototyped_attribute (subr_die, type);
25037 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25038 context_die);
25039 add_alignment_attribute (subr_die, type);
25040 gen_formal_types_die (type, subr_die);
25041
25042 if (get_AT (subr_die, DW_AT_name))
25043 add_pubtype (type, subr_die);
25044 if ((dwarf_version >= 5 || !dwarf_strict)
25045 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25046 add_AT_flag (subr_die, DW_AT_reference, 1);
25047 if ((dwarf_version >= 5 || !dwarf_strict)
25048 && lang_hooks.types.type_dwarf_attribute (type,
25049 DW_AT_rvalue_reference) != -1)
25050 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25051 }
25052
25053 /* Generate a DIE for a type definition. */
25054
25055 static void
25056 gen_typedef_die (tree decl, dw_die_ref context_die)
25057 {
25058 dw_die_ref type_die;
25059 tree type;
25060
25061 if (TREE_ASM_WRITTEN (decl))
25062 {
25063 if (DECL_ORIGINAL_TYPE (decl))
25064 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25065 return;
25066 }
25067
25068 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25069 checks in process_scope_var and modified_type_die), this should be called
25070 only for original types. */
25071 gcc_assert (decl_ultimate_origin (decl) == NULL
25072 || decl_ultimate_origin (decl) == decl);
25073
25074 TREE_ASM_WRITTEN (decl) = 1;
25075 type_die = new_die (DW_TAG_typedef, context_die, decl);
25076
25077 add_name_and_src_coords_attributes (type_die, decl);
25078 if (DECL_ORIGINAL_TYPE (decl))
25079 {
25080 type = DECL_ORIGINAL_TYPE (decl);
25081 if (type == error_mark_node)
25082 return;
25083
25084 gcc_assert (type != TREE_TYPE (decl));
25085 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25086 }
25087 else
25088 {
25089 type = TREE_TYPE (decl);
25090 if (type == error_mark_node)
25091 return;
25092
25093 if (is_naming_typedef_decl (TYPE_NAME (type)))
25094 {
25095 /* Here, we are in the case of decl being a typedef naming
25096 an anonymous type, e.g:
25097 typedef struct {...} foo;
25098 In that case TREE_TYPE (decl) is not a typedef variant
25099 type and TYPE_NAME of the anonymous type is set to the
25100 TYPE_DECL of the typedef. This construct is emitted by
25101 the C++ FE.
25102
25103 TYPE is the anonymous struct named by the typedef
25104 DECL. As we need the DW_AT_type attribute of the
25105 DW_TAG_typedef to point to the DIE of TYPE, let's
25106 generate that DIE right away. add_type_attribute
25107 called below will then pick (via lookup_type_die) that
25108 anonymous struct DIE. */
25109 if (!TREE_ASM_WRITTEN (type))
25110 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25111
25112 /* This is a GNU Extension. We are adding a
25113 DW_AT_linkage_name attribute to the DIE of the
25114 anonymous struct TYPE. The value of that attribute
25115 is the name of the typedef decl naming the anonymous
25116 struct. This greatly eases the work of consumers of
25117 this debug info. */
25118 add_linkage_name_raw (lookup_type_die (type), decl);
25119 }
25120 }
25121
25122 add_type_attribute (type_die, type, decl_quals (decl), false,
25123 context_die);
25124
25125 if (is_naming_typedef_decl (decl))
25126 /* We want that all subsequent calls to lookup_type_die with
25127 TYPE in argument yield the DW_TAG_typedef we have just
25128 created. */
25129 equate_type_number_to_die (type, type_die);
25130
25131 add_alignment_attribute (type_die, TREE_TYPE (decl));
25132
25133 add_accessibility_attribute (type_die, decl);
25134
25135 if (DECL_ABSTRACT_P (decl))
25136 equate_decl_number_to_die (decl, type_die);
25137
25138 if (get_AT (type_die, DW_AT_name))
25139 add_pubtype (decl, type_die);
25140 }
25141
25142 /* Generate a DIE for a struct, class, enum or union type. */
25143
25144 static void
25145 gen_tagged_type_die (tree type,
25146 dw_die_ref context_die,
25147 enum debug_info_usage usage)
25148 {
25149 int need_pop;
25150
25151 if (type == NULL_TREE
25152 || !is_tagged_type (type))
25153 return;
25154
25155 if (TREE_ASM_WRITTEN (type))
25156 need_pop = 0;
25157 /* If this is a nested type whose containing class hasn't been written
25158 out yet, writing it out will cover this one, too. This does not apply
25159 to instantiations of member class templates; they need to be added to
25160 the containing class as they are generated. FIXME: This hurts the
25161 idea of combining type decls from multiple TUs, since we can't predict
25162 what set of template instantiations we'll get. */
25163 else if (TYPE_CONTEXT (type)
25164 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25165 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25166 {
25167 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25168
25169 if (TREE_ASM_WRITTEN (type))
25170 return;
25171
25172 /* If that failed, attach ourselves to the stub. */
25173 push_decl_scope (TYPE_CONTEXT (type));
25174 context_die = lookup_type_die (TYPE_CONTEXT (type));
25175 need_pop = 1;
25176 }
25177 else if (TYPE_CONTEXT (type) != NULL_TREE
25178 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25179 {
25180 /* If this type is local to a function that hasn't been written
25181 out yet, use a NULL context for now; it will be fixed up in
25182 decls_for_scope. */
25183 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25184 /* A declaration DIE doesn't count; nested types need to go in the
25185 specification. */
25186 if (context_die && is_declaration_die (context_die))
25187 context_die = NULL;
25188 need_pop = 0;
25189 }
25190 else
25191 {
25192 context_die = declare_in_namespace (type, context_die);
25193 need_pop = 0;
25194 }
25195
25196 if (TREE_CODE (type) == ENUMERAL_TYPE)
25197 {
25198 /* This might have been written out by the call to
25199 declare_in_namespace. */
25200 if (!TREE_ASM_WRITTEN (type))
25201 gen_enumeration_type_die (type, context_die);
25202 }
25203 else
25204 gen_struct_or_union_type_die (type, context_die, usage);
25205
25206 if (need_pop)
25207 pop_decl_scope ();
25208
25209 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25210 it up if it is ever completed. gen_*_type_die will set it for us
25211 when appropriate. */
25212 }
25213
25214 /* Generate a type description DIE. */
25215
25216 static void
25217 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25218 enum debug_info_usage usage)
25219 {
25220 struct array_descr_info info;
25221
25222 if (type == NULL_TREE || type == error_mark_node)
25223 return;
25224
25225 if (flag_checking && type)
25226 verify_type (type);
25227
25228 if (TYPE_NAME (type) != NULL_TREE
25229 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25230 && is_redundant_typedef (TYPE_NAME (type))
25231 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25232 /* The DECL of this type is a typedef we don't want to emit debug
25233      info for, but we want debug info for its underlying typedef.
25234      This can happen, e.g., for the injected-class-name of a C++
25235 type. */
25236 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25237
25238 /* If TYPE is a typedef type variant, let's generate debug info
25239 for the parent typedef which TYPE is a type of. */
25240 if (typedef_variant_p (type))
25241 {
25242 if (TREE_ASM_WRITTEN (type))
25243 return;
25244
25245 tree name = TYPE_NAME (type);
25246 tree origin = decl_ultimate_origin (name);
25247 if (origin != NULL && origin != name)
25248 {
25249 gen_decl_die (origin, NULL, NULL, context_die);
25250 return;
25251 }
25252
25253 /* Prevent broken recursion; we can't hand off to the same type. */
25254 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25255
25256 /* Give typedefs the right scope. */
25257 context_die = scope_die_for (type, context_die);
25258
25259 TREE_ASM_WRITTEN (type) = 1;
25260
25261 gen_decl_die (name, NULL, NULL, context_die);
25262 return;
25263 }
25264
25265 /* If type is an anonymous tagged type named by a typedef, let's
25266 generate debug info for the typedef. */
25267 if (is_naming_typedef_decl (TYPE_NAME (type)))
25268 {
25269 /* Use the DIE of the containing namespace as the parent DIE of
25270 the type description DIE we want to generate. */
25271 if (DECL_CONTEXT (TYPE_NAME (type))
25272 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25273 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25274
25275 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25276 return;
25277 }
25278
25279 if (lang_hooks.types.get_debug_type)
25280 {
25281 tree debug_type = lang_hooks.types.get_debug_type (type);
25282
25283 if (debug_type != NULL_TREE && debug_type != type)
25284 {
25285 gen_type_die_with_usage (debug_type, context_die, usage);
25286 return;
25287 }
25288 }
25289
25290 /* We are going to output a DIE to represent the unqualified version
25291 of this type (i.e. without any const or volatile qualifiers) so
25292 get the main variant (i.e. the unqualified version) of this type
25293 now. (Vectors and arrays are special because the debugging info is in the
25294 cloned type itself. Similarly function/method types can contain extra
25295 ref-qualification). */
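  /* E.g. in C++ an lvalue-ref-qualified method type such as "void () &" is a
     variant of the unqualified "void ()" main variant, so calling
     type_main_variant here would drop the ref-qualifier.  */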
25296 if (TREE_CODE (type) == FUNCTION_TYPE
25297 || TREE_CODE (type) == METHOD_TYPE)
25298 {
25299 /* For function/method types, can't use type_main_variant here,
25300 because that can have different ref-qualifiers for C++,
25301 but try to canonicalize. */
25302 tree main = TYPE_MAIN_VARIANT (type);
25303 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25304 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25305 && check_base_type (t, main)
25306 && check_lang_type (t, type))
25307 {
25308 type = t;
25309 break;
25310 }
25311 }
25312 else if (TREE_CODE (type) != VECTOR_TYPE
25313 && TREE_CODE (type) != ARRAY_TYPE)
25314 type = type_main_variant (type);
25315
25316 /* If this is an array type with hidden descriptor, handle it first. */
25317 if (!TREE_ASM_WRITTEN (type)
25318 && lang_hooks.types.get_array_descr_info)
25319 {
25320 memset (&info, 0, sizeof (info));
25321 if (lang_hooks.types.get_array_descr_info (type, &info))
25322 {
25323 /* Fortran sometimes emits array types with no dimension. */
25324 gcc_assert (info.ndimensions >= 0
25325 && (info.ndimensions
25326 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25327 gen_descr_array_type_die (type, &info, context_die);
25328 TREE_ASM_WRITTEN (type) = 1;
25329 return;
25330 }
25331 }
25332
25333 if (TREE_ASM_WRITTEN (type))
25334 {
25335 /* Variable-length types may be incomplete even if
25336 TREE_ASM_WRITTEN. For such types, fall through to
25337 gen_array_type_die() and possibly fill in
25338 DW_AT_{upper,lower}_bound attributes. */
25339 if ((TREE_CODE (type) != ARRAY_TYPE
25340 && TREE_CODE (type) != RECORD_TYPE
25341 && TREE_CODE (type) != UNION_TYPE
25342 && TREE_CODE (type) != QUAL_UNION_TYPE)
25343 || !variably_modified_type_p (type, NULL))
25344 return;
25345 }
25346
25347 switch (TREE_CODE (type))
25348 {
25349 case ERROR_MARK:
25350 break;
25351
25352 case POINTER_TYPE:
25353 case REFERENCE_TYPE:
25354 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25355 ensures that the gen_type_die recursion will terminate even if the
25356 type is recursive. Recursive types are possible in Ada. */
25357 /* ??? We could perhaps do this for all types before the switch
25358 statement. */
25359 TREE_ASM_WRITTEN (type) = 1;
25360
25361 /* For these types, all that is required is that we output a DIE (or a
25362 set of DIEs) to represent the "basis" type. */
25363 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25364 DINFO_USAGE_IND_USE);
25365 break;
25366
25367 case OFFSET_TYPE:
25368 /* This code is used for C++ pointer-to-data-member types.
25369 Output a description of the relevant class type. */
25370 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25371 DINFO_USAGE_IND_USE);
25372
25373 /* Output a description of the type of the object pointed to. */
25374 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25375 DINFO_USAGE_IND_USE);
25376
25377 /* Now output a DIE to represent this pointer-to-data-member type
25378 itself. */
25379 gen_ptr_to_mbr_type_die (type, context_die);
25380 break;
25381
25382 case FUNCTION_TYPE:
25383 /* Force out return type (in case it wasn't forced out already). */
25384 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25385 DINFO_USAGE_DIR_USE);
25386 gen_subroutine_type_die (type, context_die);
25387 break;
25388
25389 case METHOD_TYPE:
25390 /* Force out return type (in case it wasn't forced out already). */
25391 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25392 DINFO_USAGE_DIR_USE);
25393 gen_subroutine_type_die (type, context_die);
25394 break;
25395
25396 case ARRAY_TYPE:
25397 case VECTOR_TYPE:
25398 gen_array_type_die (type, context_die);
25399 break;
25400
25401 case ENUMERAL_TYPE:
25402 case RECORD_TYPE:
25403 case UNION_TYPE:
25404 case QUAL_UNION_TYPE:
25405 gen_tagged_type_die (type, context_die, usage);
25406 return;
25407
25408 case VOID_TYPE:
25409 case INTEGER_TYPE:
25410 case REAL_TYPE:
25411 case FIXED_POINT_TYPE:
25412 case COMPLEX_TYPE:
25413 case BOOLEAN_TYPE:
25414 case POINTER_BOUNDS_TYPE:
25415 /* No DIEs needed for fundamental types. */
25416 break;
25417
25418 case NULLPTR_TYPE:
25419 case LANG_TYPE:
25420 /* Just use DW_TAG_unspecified_type. */
25421 {
25422 dw_die_ref type_die = lookup_type_die (type);
25423 if (type_die == NULL)
25424 {
25425 tree name = TYPE_IDENTIFIER (type);
25426 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25427 type);
25428 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25429 equate_type_number_to_die (type, type_die);
25430 }
25431 }
25432 break;
25433
25434 default:
25435 if (is_cxx_auto (type))
25436 {
25437 tree name = TYPE_IDENTIFIER (type);
25438 dw_die_ref *die = (name == get_identifier ("auto")
25439 ? &auto_die : &decltype_auto_die);
25440 if (!*die)
25441 {
25442 *die = new_die (DW_TAG_unspecified_type,
25443 comp_unit_die (), NULL_TREE);
25444 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25445 }
25446 equate_type_number_to_die (type, *die);
25447 break;
25448 }
25449 gcc_unreachable ();
25450 }
25451
25452 TREE_ASM_WRITTEN (type) = 1;
25453 }
25454
25455 static void
25456 gen_type_die (tree type, dw_die_ref context_die)
25457 {
25458 if (type != error_mark_node)
25459 {
25460 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25461 if (flag_checking)
25462 {
25463 dw_die_ref die = lookup_type_die (type);
25464 if (die)
25465 check_die (die);
25466 }
25467 }
25468 }
25469
25470 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25471 things which are local to the given block. */
25472
25473 static void
25474 gen_block_die (tree stmt, dw_die_ref context_die)
25475 {
25476 int must_output_die = 0;
25477 bool inlined_func;
25478
25479 /* Ignore blocks that are NULL. */
25480 if (stmt == NULL_TREE)
25481 return;
25482
25483 inlined_func = inlined_function_outer_scope_p (stmt);
25484
25485 /* If the block is one fragment of a non-contiguous block, do not
25486 process the variables, since they will have been done by the
25487 origin block. Do process subblocks. */
25488 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25489 {
25490 tree sub;
25491
25492 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25493 gen_block_die (sub, context_die);
25494
25495 return;
25496 }
25497
25498 /* Determine if we need to output any Dwarf DIEs at all to represent this
25499 block. */
25500 if (inlined_func)
25501 /* The outer scopes for inlinings *must* always be represented. We
25502 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25503 must_output_die = 1;
25504 else
25505 {
25506 /* Determine if this block directly contains any "significant"
25507 local declarations which we will need to output DIEs for. */
25508 if (debug_info_level > DINFO_LEVEL_TERSE)
25509 /* We are not in terse mode so *any* local declaration counts
25510 as being a "significant" one. */
25511 must_output_die = ((BLOCK_VARS (stmt) != NULL
25512 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25513 && (TREE_USED (stmt)
25514 || TREE_ASM_WRITTEN (stmt)
25515 || BLOCK_ABSTRACT (stmt)));
25516 else if ((TREE_USED (stmt)
25517 || TREE_ASM_WRITTEN (stmt)
25518 || BLOCK_ABSTRACT (stmt))
25519 && !dwarf2out_ignore_block (stmt))
25520 must_output_die = 1;
25521 }
25522
25523 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25524 DIE for any block which contains no significant local declarations at
25525 all. Rather, in such cases we just call `decls_for_scope' so that any
25526 needed Dwarf info for any sub-blocks will get properly generated. Note
25527 that in terse mode, our definition of what constitutes a "significant"
25528 local declaration gets restricted to include only inlined function
25529 instances and local (nested) function definitions. */
25530 if (must_output_die)
25531 {
25532 if (inlined_func)
25533 {
25534           /* If the STMT block is abstract, that means we have been called
25535              indirectly from dwarf2out_abstract_function.
25536              That function rightfully marks the descendant blocks (of
25537              the abstract function it is dealing with) as being abstract,
25538              precisely to prevent us from emitting any
25539              DW_TAG_inlined_subroutine DIE as a descendant
25540 of an abstract function instance. So in that case, we should
25541 not call gen_inlined_subroutine_die.
25542
25543 Later though, when cgraph asks dwarf2out to emit info
25544 for the concrete instance of the function decl into which
25545              the concrete instance of STMT got inlined, the latter will lead
25546 to the generation of a DW_TAG_inlined_subroutine DIE. */
25547 if (! BLOCK_ABSTRACT (stmt))
25548 gen_inlined_subroutine_die (stmt, context_die);
25549 }
25550 else
25551 gen_lexical_block_die (stmt, context_die);
25552 }
25553 else
25554 decls_for_scope (stmt, context_die);
25555 }
25556
25557 /* Process variable DECL (or variable with origin ORIGIN) within
25558 block STMT and add it to CONTEXT_DIE. */
25559 static void
25560 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25561 {
25562 dw_die_ref die;
25563 tree decl_or_origin = decl ? decl : origin;
25564
25565 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25566 die = lookup_decl_die (decl_or_origin);
25567 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25568 {
25569 if (TYPE_DECL_IS_STUB (decl_or_origin))
25570 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25571 else
25572 die = lookup_decl_die (decl_or_origin);
25573 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25574 if (! die && ! early_dwarf)
25575 return;
25576 }
25577 else
25578 die = NULL;
25579
25580 /* Avoid creating DIEs for local typedefs and concrete static variables that
25581 will only be pruned later. */
25582 if ((origin || decl_ultimate_origin (decl))
25583 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25584 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25585 {
25586 origin = decl_ultimate_origin (decl_or_origin);
25587 if (decl && VAR_P (decl) && die != NULL)
25588 {
25589 die = lookup_decl_die (origin);
25590 if (die != NULL)
25591 equate_decl_number_to_die (decl, die);
25592 }
25593 return;
25594 }
25595
25596 if (die != NULL && die->die_parent == NULL)
25597 add_child_die (context_die, die);
25598 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25599 {
25600 if (early_dwarf)
25601 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25602 stmt, context_die);
25603 }
25604 else
25605 {
25606 if (decl && DECL_P (decl))
25607 {
25608 die = lookup_decl_die (decl);
25609
25610 /* Early created DIEs do not have a parent as the decls refer
25611 to the function as DECL_CONTEXT rather than the BLOCK. */
25612 if (die && die->die_parent == NULL)
25613 {
25614 gcc_assert (in_lto_p);
25615 add_child_die (context_die, die);
25616 }
25617 }
25618
25619 gen_decl_die (decl, origin, NULL, context_die);
25620 }
25621 }
25622
25623 /* Generate all of the decls declared within a given scope and (recursively)
25624 all of its sub-blocks. */
25625
25626 static void
25627 decls_for_scope (tree stmt, dw_die_ref context_die)
25628 {
25629 tree decl;
25630 unsigned int i;
25631 tree subblocks;
25632
25633 /* Ignore NULL blocks. */
25634 if (stmt == NULL_TREE)
25635 return;
25636
25637 /* Output the DIEs to represent all of the data objects and typedefs
25638 declared directly within this block but not within any nested
25639 sub-blocks. Also, nested function and tag DIEs have been
25640 generated with a parent of NULL; fix that up now. We don't
25641 have to do this if we're at -g1. */
25642 if (debug_info_level > DINFO_LEVEL_TERSE)
25643 {
25644 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25645 process_scope_var (stmt, decl, NULL_TREE, context_die);
25646 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25647 origin - avoid doing this twice as we have no good way to see
25648 if we've done it once already. */
25649 if (! early_dwarf)
25650 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25651 {
25652 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25653 if (decl == current_function_decl)
25654           /* Ignore declarations of the current function: while they
25655              are declarations, gen_subprogram_die would treat them
25656              as definitions again because they are equal to
25657              current_function_decl and would endlessly recurse. */;
25658 else if (TREE_CODE (decl) == FUNCTION_DECL)
25659 process_scope_var (stmt, decl, NULL_TREE, context_die);
25660 else
25661 process_scope_var (stmt, NULL_TREE, decl, context_die);
25662 }
25663 }
25664
25665 /* Even if we're at -g1, we need to process the subblocks in order to get
25666 inlined call information. */
25667
25668 /* Output the DIEs to represent all sub-blocks (and the items declared
25669 therein) of this block. */
25670 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25671 subblocks != NULL;
25672 subblocks = BLOCK_CHAIN (subblocks))
25673 gen_block_die (subblocks, context_die);
25674 }
25675
25676 /* Is this a typedef we can avoid emitting? */
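/* E.g. the TYPE_DECL stub of an anonymous aggregate, or the artificial
   class-name member typedef that the C++ front end adds to every class.  */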
25677
25678 bool
25679 is_redundant_typedef (const_tree decl)
25680 {
25681 if (TYPE_DECL_IS_STUB (decl))
25682 return true;
25683
25684 if (DECL_ARTIFICIAL (decl)
25685 && DECL_CONTEXT (decl)
25686 && is_tagged_type (DECL_CONTEXT (decl))
25687 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25688 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25689 /* Also ignore the artificial member typedef for the class name. */
25690 return true;
25691
25692 return false;
25693 }
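
/* An illustrative sketch, assuming a C++ input such as

     struct S { int i; };

   The C++ FE injects an artificial member typedef whose DECL_NAME matches
   the name of its containing class; is_redundant_typedef returns true for
   it, so gen_decl_die emits the underlying type instead of a separate
   DW_TAG_typedef.  */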
25694
25695 /* Return TRUE if TYPE is a typedef that names a type for linkage
25696 purposes. This kind of typedefs is produced by the C++ FE for
25697 constructs like:
25698
25699 typedef struct {...} foo;
25700
25701 In that case, there is no typedef variant type produced for foo.
25702 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25703 struct type. */
25704
25705 static bool
25706 is_naming_typedef_decl (const_tree decl)
25707 {
25708 if (decl == NULL_TREE
25709 || TREE_CODE (decl) != TYPE_DECL
25710 || DECL_NAMELESS (decl)
25711 || !is_tagged_type (TREE_TYPE (decl))
25712 || DECL_IS_BUILTIN (decl)
25713 || is_redundant_typedef (decl)
25714 /* It looks like Ada produces TYPE_DECLs that are very similar
25715 to C++ naming typedefs but that have different
25716 semantics. Let's be specific to C++ for now. */
25717 || !is_cxx (decl))
25718 return FALSE;
25719
25720 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25721 && TYPE_NAME (TREE_TYPE (decl)) == decl
25722 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25723 != TYPE_NAME (TREE_TYPE (decl))));
25724 }
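
/* An illustrative contrast, assuming C++ inputs: for

     typedef struct { int i; } foo;

   no typedef variant type is produced, DECL_ORIGINAL_TYPE is NULL_TREE and
   the TYPE_DECL is itself the TYPE_NAME of the anonymous struct, so the
   predicate above returns true.  For an ordinary typedef of an already
   named type, e.g. "typedef struct bar baz;", DECL_ORIGINAL_TYPE is set
   and it returns false.  */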
25725
25726 /* Looks up the DIE for a context. */
25727
25728 static inline dw_die_ref
25729 lookup_context_die (tree context)
25730 {
25731 if (context)
25732 {
25733 /* Find die that represents this context. */
25734 if (TYPE_P (context))
25735 {
25736 context = TYPE_MAIN_VARIANT (context);
25737 dw_die_ref ctx = lookup_type_die (context);
25738 if (!ctx)
25739 return NULL;
25740 return strip_naming_typedef (context, ctx);
25741 }
25742 else
25743 return lookup_decl_die (context);
25744 }
25745 return comp_unit_die ();
25746 }
25747
25748 /* Returns the DIE for a context. */
25749
25750 static inline dw_die_ref
25751 get_context_die (tree context)
25752 {
25753 if (context)
25754 {
25755 /* Find die that represents this context. */
25756 if (TYPE_P (context))
25757 {
25758 context = TYPE_MAIN_VARIANT (context);
25759 return strip_naming_typedef (context, force_type_die (context));
25760 }
25761 else
25762 return force_decl_die (context);
25763 }
25764 return comp_unit_die ();
25765 }
25766
25767 /* Returns the DIE for decl. A DIE will always be returned. */
25768
25769 static dw_die_ref
25770 force_decl_die (tree decl)
25771 {
25772 dw_die_ref decl_die;
25773 unsigned saved_external_flag;
25774 tree save_fn = NULL_TREE;
25775 decl_die = lookup_decl_die (decl);
25776 if (!decl_die)
25777 {
25778 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25779
25780 decl_die = lookup_decl_die (decl);
25781 if (decl_die)
25782 return decl_die;
25783
25784 switch (TREE_CODE (decl))
25785 {
25786 case FUNCTION_DECL:
25787 /* Clear current_function_decl, so that gen_subprogram_die thinks
25788 that this is a declaration. At this point, we just want to force
25789 a declaration DIE. */
25790 save_fn = current_function_decl;
25791 current_function_decl = NULL_TREE;
25792 gen_subprogram_die (decl, context_die);
25793 current_function_decl = save_fn;
25794 break;
25795
25796 case VAR_DECL:
25797 /* Set the external flag to force a declaration DIE. Restore it
25798 after the gen_decl_die() call. */
25799 saved_external_flag = DECL_EXTERNAL (decl);
25800 DECL_EXTERNAL (decl) = 1;
25801 gen_decl_die (decl, NULL, NULL, context_die);
25802 DECL_EXTERNAL (decl) = saved_external_flag;
25803 break;
25804
25805 case NAMESPACE_DECL:
25806 if (dwarf_version >= 3 || !dwarf_strict)
25807 dwarf2out_decl (decl);
25808 else
25809 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25810 decl_die = comp_unit_die ();
25811 break;
25812
25813 case TRANSLATION_UNIT_DECL:
25814 decl_die = comp_unit_die ();
25815 break;
25816
25817 default:
25818 gcc_unreachable ();
25819 }
25820
25821 /* We should be able to find the DIE now. */
25822 if (!decl_die)
25823 decl_die = lookup_decl_die (decl);
25824 gcc_assert (decl_die);
25825 }
25826
25827 return decl_die;
25828 }
25829
25830 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25831 always returned. */
25832
25833 static dw_die_ref
25834 force_type_die (tree type)
25835 {
25836 dw_die_ref type_die;
25837
25838 type_die = lookup_type_die (type);
25839 if (!type_die)
25840 {
25841 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25842
25843 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25844 false, context_die);
25845 gcc_assert (type_die);
25846 }
25847 return type_die;
25848 }
25849
25850 /* Force out any required namespaces to be able to output DECL,
25851 and return the new context_die for it, if it's changed. */
25852
25853 static dw_die_ref
25854 setup_namespace_context (tree thing, dw_die_ref context_die)
25855 {
25856 tree context = (DECL_P (thing)
25857 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25858 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25859 /* Force out the namespace. */
25860 context_die = force_decl_die (context);
25861
25862 return context_die;
25863 }
25864
25865 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25866 type) within its namespace, if appropriate.
25867
25868 For compatibility with older debuggers, namespace DIEs only contain
25869 declarations; all definitions are emitted at CU scope, with
25870 DW_AT_specification pointing to the declaration (like with class
25871 members). */
25872
25873 static dw_die_ref
25874 declare_in_namespace (tree thing, dw_die_ref context_die)
25875 {
25876 dw_die_ref ns_context;
25877
25878 if (debug_info_level <= DINFO_LEVEL_TERSE)
25879 return context_die;
25880
25881 /* External declarations in the local scope only need to be emitted
25882 once, not once in the namespace and once in the scope.
25883
25884 This avoids declaring the `extern' below in the
25885 namespace DIE as well as in the innermost scope:
25886
25887 namespace S
25888 {
25889 int i=5;
25890 int foo()
25891 {
25892 int i=8;
25893 extern int i;
25894 return i;
25895 }
25896 }
25897 */
25898 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
25899 return context_die;
25900
25901 /* If this decl is from an inlined function, then don't try to emit it in its
25902 namespace, as we will get confused. It would have already been emitted
25903 when the abstract instance of the inline function was emitted anyway. */
25904 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
25905 return context_die;
25906
25907 ns_context = setup_namespace_context (thing, context_die);
25908
25909 if (ns_context != context_die)
25910 {
25911 if (is_fortran ())
25912 return ns_context;
25913 if (DECL_P (thing))
25914 gen_decl_die (thing, NULL, NULL, ns_context);
25915 else
25916 gen_type_die (thing, ns_context);
25917 }
25918 return context_die;
25919 }
25920
25921 /* Generate a DIE for a namespace or namespace alias. */
25922
25923 static void
25924 gen_namespace_die (tree decl, dw_die_ref context_die)
25925 {
25926 dw_die_ref namespace_die;
25927
25928 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
25929 they are an alias of. */
25930 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
25931 {
25932 /* Output a real namespace or module. */
25933 context_die = setup_namespace_context (decl, comp_unit_die ());
25934 namespace_die = new_die (is_fortran ()
25935 ? DW_TAG_module : DW_TAG_namespace,
25936 context_die, decl);
25937 /* For Fortran modules defined in a different CU, don't add source coordinates. */
25938 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
25939 {
25940 const char *name = dwarf2_name (decl, 0);
25941 if (name)
25942 add_name_attribute (namespace_die, name);
25943 }
25944 else
25945 add_name_and_src_coords_attributes (namespace_die, decl);
25946 if (DECL_EXTERNAL (decl))
25947 add_AT_flag (namespace_die, DW_AT_declaration, 1);
25948 equate_decl_number_to_die (decl, namespace_die);
25949 }
25950 else
25951 {
25952 /* Output a namespace alias. */
25953
25954 /* Force out the namespace we are an alias of, if necessary. */
25955 dw_die_ref origin_die
25956 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
25957
25958 if (DECL_FILE_SCOPE_P (decl)
25959 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
25960 context_die = setup_namespace_context (decl, comp_unit_die ());
25961 /* Now create the namespace alias DIE. */
25962 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
25963 add_name_and_src_coords_attributes (namespace_die, decl);
25964 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
25965 equate_decl_number_to_die (decl, namespace_die);
25966 }
25967 if ((dwarf_version >= 5 || !dwarf_strict)
25968 && lang_hooks.decls.decl_dwarf_attribute (decl,
25969 DW_AT_export_symbols) == 1)
25970 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
25971
25972 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
25973 if (want_pubnames ())
25974 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
25975 }
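
/* An illustrative sketch, assuming a C++ input such as

     namespace A { int x; }
     namespace B = A;

   A gets a DW_TAG_namespace DIE, while the alias B gets a
   DW_TAG_imported_declaration whose DW_AT_import refers to A's DIE, as
   built by the alias branch above.  */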
25976
25977 /* Generate Dwarf debug information for a decl described by DECL.
25978 The return value is currently only meaningful for PARM_DECLs,
25979 for all other decls it returns NULL.
25980
25981 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
25982 It can be NULL otherwise. */
25983
25984 static dw_die_ref
25985 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
25986 dw_die_ref context_die)
25987 {
25988 tree decl_or_origin = decl ? decl : origin;
25989 tree class_origin = NULL, ultimate_origin;
25990
25991 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
25992 return NULL;
25993
25994 /* Ignore pointer bounds decls. */
25995 if (DECL_P (decl_or_origin)
25996 && TREE_TYPE (decl_or_origin)
25997 && POINTER_BOUNDS_P (decl_or_origin))
25998 return NULL;
25999
26000 switch (TREE_CODE (decl_or_origin))
26001 {
26002 case ERROR_MARK:
26003 break;
26004
26005 case CONST_DECL:
26006 if (!is_fortran () && !is_ada ())
26007 {
26008 /* The individual enumerators of an enum type get output when we output
26009 the Dwarf representation of the relevant enum type itself. */
26010 break;
26011 }
26012
26013 /* Emit its type. */
26014 gen_type_die (TREE_TYPE (decl), context_die);
26015
26016 /* And its containing namespace. */
26017 context_die = declare_in_namespace (decl, context_die);
26018
26019 gen_const_die (decl, context_die);
26020 break;
26021
26022 case FUNCTION_DECL:
26023 #if 0
26024 /* FIXME */
26025 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26026 on local redeclarations of global functions. That seems broken. */
26027 if (current_function_decl != decl)
26028 /* This is only a declaration. */;
26029 #endif
26030
26031 /* We should have abstract copies already and should not generate
26032 stray type DIEs in late LTO dumping. */
26033 if (! early_dwarf)
26034 ;
26035
26036 /* If we're emitting a clone, emit info for the abstract instance. */
26037 else if (origin || DECL_ORIGIN (decl) != decl)
26038 dwarf2out_abstract_function (origin
26039 ? DECL_ORIGIN (origin)
26040 : DECL_ABSTRACT_ORIGIN (decl));
26041
26042 /* If we're emitting a possibly inlined function, emit it as an
26043 abstract instance. */
26044 else if (cgraph_function_possibly_inlined_p (decl)
26045 && ! DECL_ABSTRACT_P (decl)
26046 && ! class_or_namespace_scope_p (context_die)
26047 /* dwarf2out_abstract_function won't emit a die if this is just
26048 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26049 that case, because that works only if we have a die. */
26050 && DECL_INITIAL (decl) != NULL_TREE)
26051 dwarf2out_abstract_function (decl);
26052
26053 /* Otherwise we're emitting the primary DIE for this decl. */
26054 else if (debug_info_level > DINFO_LEVEL_TERSE)
26055 {
26056 /* Before we describe the FUNCTION_DECL itself, make sure that we
26057 have its containing type. */
26058 if (!origin)
26059 origin = decl_class_context (decl);
26060 if (origin != NULL_TREE)
26061 gen_type_die (origin, context_die);
26062
26063 /* And its return type. */
26064 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26065
26066 /* And its virtual context. */
26067 if (DECL_VINDEX (decl) != NULL_TREE)
26068 gen_type_die (DECL_CONTEXT (decl), context_die);
26069
26070 /* Make sure we have a member DIE for decl. */
26071 if (origin != NULL_TREE)
26072 gen_type_die_for_member (origin, decl, context_die);
26073
26074 /* And its containing namespace. */
26075 context_die = declare_in_namespace (decl, context_die);
26076 }
26077
26078 /* Now output a DIE to represent the function itself. */
26079 if (decl)
26080 gen_subprogram_die (decl, context_die);
26081 break;
26082
26083 case TYPE_DECL:
26084 /* If we are in terse mode, don't generate any DIEs to represent any
26085 actual typedefs. */
26086 if (debug_info_level <= DINFO_LEVEL_TERSE)
26087 break;
26088
26089 /* In the special case of a TYPE_DECL node representing the declaration
26090 of some type tag, if the given TYPE_DECL is marked as having been
26091 instantiated from some other (original) TYPE_DECL node (e.g. one which
26092 was generated within the original definition of an inline function) we
26093 used to generate a special (abbreviated) DW_TAG_structure_type,
26094 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26095 should actually be referencing those DIEs, as variable DIEs with that
26096 type would already be emitted in the abstract origin, so they were
26097 always removed during unused type pruning. Don't add anything in this
26098 case. */
26099 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26100 break;
26101
26102 if (is_redundant_typedef (decl))
26103 gen_type_die (TREE_TYPE (decl), context_die);
26104 else
26105 /* Output a DIE to represent the typedef itself. */
26106 gen_typedef_die (decl, context_die);
26107 break;
26108
26109 case LABEL_DECL:
26110 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26111 gen_label_die (decl, context_die);
26112 break;
26113
26114 case VAR_DECL:
26115 case RESULT_DECL:
26116 /* If we are in terse mode, don't generate any DIEs to represent any
26117 variable declarations or definitions. */
26118 if (debug_info_level <= DINFO_LEVEL_TERSE)
26119 break;
26120
26121 /* Avoid generating stray type DIEs during late dwarf dumping.
26122 All types have been dumped early. */
26123 if (early_dwarf
26124 /* ??? But in LTRANS we cannot annotate early created variably
26125 modified type DIEs without copying them and adjusting all
26126 references to them. Dump them again as happens for inlining
26127 which copies both the decl and the types. */
26128 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26129 in VLA bound information for example. */
26130 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26131 current_function_decl)))
26132 {
26133 /* Output any DIEs that are needed to specify the type of this data
26134 object. */
26135 if (decl_by_reference_p (decl_or_origin))
26136 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26137 else
26138 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26139 }
26140
26141 if (early_dwarf)
26142 {
26143 /* And its containing type. */
26144 class_origin = decl_class_context (decl_or_origin);
26145 if (class_origin != NULL_TREE)
26146 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26147
26148 /* And its containing namespace. */
26149 context_die = declare_in_namespace (decl_or_origin, context_die);
26150 }
26151
26152 /* Now output the DIE to represent the data object itself. This gets
26153 complicated because of the possibility that the VAR_DECL really
26154 represents an inlined instance of a formal parameter for an inline
26155 function. */
26156 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26157 if (ultimate_origin != NULL_TREE
26158 && TREE_CODE (ultimate_origin) == PARM_DECL)
26159 gen_formal_parameter_die (decl, origin,
26160 true /* Emit name attribute. */,
26161 context_die);
26162 else
26163 gen_variable_die (decl, origin, context_die);
26164 break;
26165
26166 case FIELD_DECL:
26167 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26168 /* Ignore the nameless fields that are used to skip bits but handle C++
26169 anonymous unions and structs. */
26170 if (DECL_NAME (decl) != NULL_TREE
26171 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26172 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26173 {
26174 gen_type_die (member_declared_type (decl), context_die);
26175 gen_field_die (decl, ctx, context_die);
26176 }
26177 break;
26178
26179 case PARM_DECL:
26180 /* Avoid generating stray type DIEs during late dwarf dumping.
26181 All types have been dumped early. */
26182 if (early_dwarf
26183 /* ??? But in LTRANS we cannot annotate early created variably
26184 modified type DIEs without copying them and adjusting all
26185 references to them. Dump them again as happens for inlining
26186 which copies both the decl and the types. */
26187 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26188 in VLA bound information for example. */
26189 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26190 current_function_decl)))
26191 {
26192 if (DECL_BY_REFERENCE (decl_or_origin))
26193 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26194 else
26195 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26196 }
26197 return gen_formal_parameter_die (decl, origin,
26198 true /* Emit name attribute. */,
26199 context_die);
26200
26201 case NAMESPACE_DECL:
26202 if (dwarf_version >= 3 || !dwarf_strict)
26203 gen_namespace_die (decl, context_die);
26204 break;
26205
26206 case IMPORTED_DECL:
26207 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26208 DECL_CONTEXT (decl), context_die);
26209 break;
26210
26211 case NAMELIST_DECL:
26212 gen_namelist_decl (DECL_NAME (decl), context_die,
26213 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26214 break;
26215
26216 default:
26217 /* Probably some frontend-internal decl. Assume we don't care. */
26218 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26219 break;
26220 }
26221
26222 return NULL;
26223 }
26224 \f
26225 /* Output initial debug information for global DECL. Called at the
26226 end of the parsing process.
26227
26228 This is the initial debug generation process. As such, the DIEs
26229 generated may be incomplete. A later debug generation pass
26230 (dwarf2out_late_global_decl) will augment the information generated
26231 in this pass (e.g., with complete location info). */
26232
26233 static void
26234 dwarf2out_early_global_decl (tree decl)
26235 {
26236 set_early_dwarf s;
26237
26238 /* gen_decl_die() will set DECL_ABSTRACT because
26239 cgraph_function_possibly_inlined_p() returns true. This in
26240 turn will cause DW_AT_inline attributes to be set.
26241
26242 This happens because at early dwarf generation, there is no
26243 cgraph information, causing cgraph_function_possibly_inlined_p()
26244 to return true. Trick cgraph_function_possibly_inlined_p()
26245 while we generate dwarf early. */
26246 bool save = symtab->global_info_ready;
26247 symtab->global_info_ready = true;
26248
26249 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26250 other DECLs and they can point to template types or other things
26251 that dwarf2out can't handle when done via dwarf2out_decl. */
26252 if (TREE_CODE (decl) != TYPE_DECL
26253 && TREE_CODE (decl) != PARM_DECL)
26254 {
26255 if (TREE_CODE (decl) == FUNCTION_DECL)
26256 {
26257 tree save_fndecl = current_function_decl;
26258
26259 /* For nested functions, make sure we have DIEs for the parents first
26260 so that all nested DIEs are generated at the proper scope in the
26261 first shot. */
26262 tree context = decl_function_context (decl);
26263 if (context != NULL)
26264 {
26265 dw_die_ref context_die = lookup_decl_die (context);
26266 current_function_decl = context;
26267
26268 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26269 enough so that it lands in its own context. This avoids type
26270 pruning issues later on. */
26271 if (context_die == NULL || is_declaration_die (context_die))
26272 dwarf2out_decl (context);
26273 }
26274
26275 /* Emit an abstract origin of a function first. This happens
26276 with C++ constructor clones, for example, and keeps
26277 dwarf2out_abstract_function happy, since it requires the early
26278 DIE of the abstract instance to be present. */
26279 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26280 dw_die_ref origin_die;
26281 if (origin != NULL
26282 /* Do not emit the DIE multiple times but make sure to
26283 process it fully here in case we just saw a declaration. */
26284 && ((origin_die = lookup_decl_die (origin)) == NULL
26285 || is_declaration_die (origin_die)))
26286 {
26287 current_function_decl = origin;
26288 dwarf2out_decl (origin);
26289 }
26290
26291 /* Emit the DIE for decl but avoid doing that multiple times. */
26292 dw_die_ref old_die;
26293 if ((old_die = lookup_decl_die (decl)) == NULL
26294 || is_declaration_die (old_die))
26295 {
26296 current_function_decl = decl;
26297 dwarf2out_decl (decl);
26298 }
26299
26300 current_function_decl = save_fndecl;
26301 }
26302 else
26303 dwarf2out_decl (decl);
26304 }
26305 symtab->global_info_ready = save;
26306 }
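
/* An illustrative sketch, assuming a GNU C input with a nested function:

     int outer (void) { int inner (void) { return 0; } return inner (); }

   When dwarf2out_early_global_decl is invoked for "inner", the code above
   first emits the DIE for "outer" (its decl_function_context), so that
   "inner"'s DIE is created in the proper scope.  */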
26307
26308 /* Return whether EXPR is an expression with the following pattern:
26309 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26310
26311 static bool
26312 is_trivial_indirect_ref (tree expr)
26313 {
26314 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26315 return false;
26316
26317 tree nop = TREE_OPERAND (expr, 0);
26318 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26319 return false;
26320
26321 tree int_cst = TREE_OPERAND (nop, 0);
26322 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26323 }
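
/* An illustrative sketch, assuming a DECL_VALUE_EXPR such as

     *(int *) 0x1234

   which is represented as INDIRECT_REF (NOP_EXPR (INTEGER_CST)) and so is
   considered trivial here: it dereferences a constant address and cannot
   require relocations against text symbols.  */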
26324
26325 /* Output debug information for global decl DECL. Called from
26326 toplev.c after compilation proper has finished. */
26327
26328 static void
26329 dwarf2out_late_global_decl (tree decl)
26330 {
26331 /* Fill-in any location information we were unable to determine
26332 on the first pass. */
26333 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
26334 {
26335 dw_die_ref die = lookup_decl_die (decl);
26336
26337 /* We may have to generate early debug late for LTO in case debug
26338 was not enabled at compile-time or the target doesn't support
26339 the LTO early debug scheme. */
26340 if (! die && in_lto_p)
26341 {
26342 dwarf2out_decl (decl);
26343 die = lookup_decl_die (decl);
26344 }
26345
26346 if (die)
26347 {
26348 /* We get called via the symtab code invoking late_global_decl
26349 for symbols that are optimized out.
26350
26351 Do not add locations for those, except if they have a
26352 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26353 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26354 INDIRECT_REF expression, as this could generate relocations to
26355 text symbols in LTO object files, which is invalid. */
26356 varpool_node *node = varpool_node::get (decl);
26357 if ((! node || ! node->definition)
26358 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26359 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26360 tree_add_const_value_attribute_for_decl (die, decl);
26361 else
26362 add_location_or_const_value_attribute (die, decl, false);
26363 }
26364 }
26365 }
26366
26367 /* Output debug information for type decl DECL. Called from toplev.c
26368 and from language front ends (to record built-in types). */
26369 static void
26370 dwarf2out_type_decl (tree decl, int local)
26371 {
26372 if (!local)
26373 {
26374 set_early_dwarf s;
26375 dwarf2out_decl (decl);
26376 }
26377 }
26378
26379 /* Output debug information for imported module or decl DECL.
26380 NAME is non-NULL name in the lexical block if the decl has been renamed.
26381 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26382 that DECL belongs to.
26383 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26384 static void
26385 dwarf2out_imported_module_or_decl_1 (tree decl,
26386 tree name,
26387 tree lexical_block,
26388 dw_die_ref lexical_block_die)
26389 {
26390 expanded_location xloc;
26391 dw_die_ref imported_die = NULL;
26392 dw_die_ref at_import_die;
26393
26394 if (TREE_CODE (decl) == IMPORTED_DECL)
26395 {
26396 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26397 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26398 gcc_assert (decl);
26399 }
26400 else
26401 xloc = expand_location (input_location);
26402
26403 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26404 {
26405 at_import_die = force_type_die (TREE_TYPE (decl));
26406 /* For namespace N { typedef void T; } using N::T; base_type_die
26407 returns NULL, but DW_TAG_imported_declaration requires
26408 a DW_AT_import attribute. Force creation of a DW_TAG_typedef. */
26409 if (!at_import_die)
26410 {
26411 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26412 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26413 at_import_die = lookup_type_die (TREE_TYPE (decl));
26414 gcc_assert (at_import_die);
26415 }
26416 }
26417 else
26418 {
26419 at_import_die = lookup_decl_die (decl);
26420 if (!at_import_die)
26421 {
26422 /* If we're trying to avoid duplicate debug info, we may not have
26423 emitted the member decl for this field. Emit it now. */
26424 if (TREE_CODE (decl) == FIELD_DECL)
26425 {
26426 tree type = DECL_CONTEXT (decl);
26427
26428 if (TYPE_CONTEXT (type)
26429 && TYPE_P (TYPE_CONTEXT (type))
26430 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26431 DINFO_USAGE_DIR_USE))
26432 return;
26433 gen_type_die_for_member (type, decl,
26434 get_context_die (TYPE_CONTEXT (type)));
26435 }
26436 if (TREE_CODE (decl) == NAMELIST_DECL)
26437 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26438 get_context_die (DECL_CONTEXT (decl)),
26439 NULL_TREE);
26440 else
26441 at_import_die = force_decl_die (decl);
26442 }
26443 }
26444
26445 if (TREE_CODE (decl) == NAMESPACE_DECL)
26446 {
26447 if (dwarf_version >= 3 || !dwarf_strict)
26448 imported_die = new_die (DW_TAG_imported_module,
26449 lexical_block_die,
26450 lexical_block);
26451 else
26452 return;
26453 }
26454 else
26455 imported_die = new_die (DW_TAG_imported_declaration,
26456 lexical_block_die,
26457 lexical_block);
26458
26459 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26460 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26461 if (debug_column_info && xloc.column)
26462 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26463 if (name)
26464 add_AT_string (imported_die, DW_AT_name,
26465 IDENTIFIER_POINTER (name));
26466 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26467 }
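
/* An illustrative sketch, assuming C++ inputs:

     using namespace N;   // NAMESPACE_DECL -> DW_TAG_imported_module
     using N::x;          // other decls    -> DW_TAG_imported_declaration

   Both DIEs carry DW_AT_decl_file/DW_AT_decl_line and a DW_AT_import
   pointing at the DIE of the imported namespace or declaration, as emitted
   above.  */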
26468
26469 /* Output debug information for imported module or decl DECL.
26470 NAME is non-NULL name in context if the decl has been renamed.
26471 CHILD is true if decl is one of the renamed decls as part of
26472 importing whole module.
26473 IMPLICIT is set if this hook is called for an implicit import
26474 such as inline namespace. */
26475
26476 static void
26477 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26478 bool child, bool implicit)
26479 {
26480 /* dw_die_ref at_import_die; */
26481 dw_die_ref scope_die;
26482
26483 if (debug_info_level <= DINFO_LEVEL_TERSE)
26484 return;
26485
26486 gcc_assert (decl);
26487
26488 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26489 should be enough; for DWARF4 and older, even if we emit
26490 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26491 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26492 if (implicit
26493 && dwarf_version >= 5
26494 && lang_hooks.decls.decl_dwarf_attribute (decl,
26495 DW_AT_export_symbols) == 1)
26496 return;
26497
26498 set_early_dwarf s;
26499
26500 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need two
26501 DIEs: the DIE of the decl for the reference and the scope DIE. First, get
26502 the DIE for the decl itself. */
26503
26504 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26505 module or decl. If no DIE is found for non-globals, force a new one. */
26506 if (context
26507 && TYPE_P (context)
26508 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26509 return;
26510
26511 scope_die = get_context_die (context);
26512
26513 if (child)
26514 {
26515 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26516 there is nothing we can do here. */
26517 if (dwarf_version < 3 && dwarf_strict)
26518 return;
26519
26520 gcc_assert (scope_die->die_child);
26521 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26522 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26523 scope_die = scope_die->die_child;
26524 }
26525
26526 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26527 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26528 }
26529
26530 /* Output debug information for namelists. */
26531
26532 static dw_die_ref
26533 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26534 {
26535 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26536 tree value;
26537 unsigned i;
26538
26539 if (debug_info_level <= DINFO_LEVEL_TERSE)
26540 return NULL;
26541
26542 gcc_assert (scope_die != NULL);
26543 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26544 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26545
26546 /* If there are no item_decls, we have a nondefining namelist, e.g.
26547 with USE association; hence, set DW_AT_declaration. */
26548 if (item_decls == NULL_TREE)
26549 {
26550 add_AT_flag (nml_die, DW_AT_declaration, 1);
26551 return nml_die;
26552 }
26553
26554 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26555 {
26556 nml_item_ref_die = lookup_decl_die (value);
26557 if (!nml_item_ref_die)
26558 nml_item_ref_die = force_decl_die (value);
26559
26560 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26561 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26562 }
26563 return nml_die;
26564 }
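
/* An illustrative sketch, assuming a Fortran input such as

     NAMELIST /nml/ a, b

   which yields a DW_TAG_namelist DIE named "nml" with one
   DW_TAG_namelist_item child per member, each holding a
   DW_AT_namelist_items reference to the member's DIE; a USE-associated
   namelist with no item decls only gets DW_AT_declaration.  */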
26565
26566
26567 /* Write the debugging output for DECL. */
26568
26569 static void
26570 dwarf2out_decl (tree decl)
26571 {
26572 dw_die_ref context_die = comp_unit_die ();
26573
26574 switch (TREE_CODE (decl))
26575 {
26576 case ERROR_MARK:
26577 return;
26578
26579 case FUNCTION_DECL:
26580 /* If we're a nested function, initially use a parent of NULL; if we're
26581 a plain function, this will be fixed up in decls_for_scope. If
26582 we're a method, it will be ignored, since we already have a DIE. */
26583 if (decl_function_context (decl)
26584 /* But if we're in terse mode, we don't care about scope. */
26585 && debug_info_level > DINFO_LEVEL_TERSE)
26586 context_die = NULL;
26587 break;
26588
26589 case VAR_DECL:
26590 /* For local statics, look up the proper context DIE. */
26591 if (local_function_static (decl))
26592 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26593
26594 /* If we are in terse mode, don't generate any DIEs to represent any
26595 variable declarations or definitions. */
26596 if (debug_info_level <= DINFO_LEVEL_TERSE)
26597 return;
26598 break;
26599
26600 case CONST_DECL:
26601 if (debug_info_level <= DINFO_LEVEL_TERSE)
26602 return;
26603 if (!is_fortran () && !is_ada ())
26604 return;
26605 if (TREE_STATIC (decl) && decl_function_context (decl))
26606 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26607 break;
26608
26609 case NAMESPACE_DECL:
26610 case IMPORTED_DECL:
26611 if (debug_info_level <= DINFO_LEVEL_TERSE)
26612 return;
26613 if (lookup_decl_die (decl) != NULL)
26614 return;
26615 break;
26616
26617 case TYPE_DECL:
26618 /* Don't emit stubs for types unless they are needed by other DIEs. */
26619 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26620 return;
26621
26622 /* Don't bother trying to generate any DIEs to represent any of the
26623 normal built-in types for the language we are compiling. */
26624 if (DECL_IS_BUILTIN (decl))
26625 return;
26626
26627 /* If we are in terse mode, don't generate any DIEs for types. */
26628 if (debug_info_level <= DINFO_LEVEL_TERSE)
26629 return;
26630
26631 /* If we're a function-scope tag, initially use a parent of NULL;
26632 this will be fixed up in decls_for_scope. */
26633 if (decl_function_context (decl))
26634 context_die = NULL;
26635
26636 break;
26637
26638 case NAMELIST_DECL:
26639 break;
26640
26641 default:
26642 return;
26643 }
26644
26645 gen_decl_die (decl, NULL, NULL, context_die);
26646
26647 if (flag_checking)
26648 {
26649 dw_die_ref die = lookup_decl_die (decl);
26650 if (die)
26651 check_die (die);
26652 }
26653 }
26654
26655 /* Write the debugging output for DECL. */
26656
26657 static void
26658 dwarf2out_function_decl (tree decl)
26659 {
26660 dwarf2out_decl (decl);
26661 call_arg_locations = NULL;
26662 call_arg_loc_last = NULL;
26663 call_site_count = -1;
26664 tail_call_site_count = -1;
26665 decl_loc_table->empty ();
26666 cached_dw_loc_list_table->empty ();
26667 }
26668
26669 /* Output a marker (i.e. a label) for the beginning of the generated code for
26670 a lexical block. */
26671
26672 static void
26673 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26674 unsigned int blocknum)
26675 {
26676 switch_to_section (current_function_section ());
26677 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26678 }
26679
26680 /* Output a marker (i.e. a label) for the end of the generated code for a
26681 lexical block. */
26682
26683 static void
26684 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26685 {
26686 switch_to_section (current_function_section ());
26687 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26688 }
26689
26690 /* Returns nonzero if it is appropriate not to emit any debugging
26691 information for BLOCK, because it doesn't contain any instructions.
26692
26693 Don't allow this for blocks with nested functions or local classes
26694 as we would end up with orphans, and in the presence of scheduling
26695 we may end up calling them anyway. */
26696
26697 static bool
26698 dwarf2out_ignore_block (const_tree block)
26699 {
26700 tree decl;
26701 unsigned int i;
26702
26703 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26704 if (TREE_CODE (decl) == FUNCTION_DECL
26705 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26706 return 0;
26707 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26708 {
26709 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26710 if (TREE_CODE (decl) == FUNCTION_DECL
26711 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26712 return 0;
26713 }
26714
26715 return 1;
26716 }
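
/* An illustrative sketch, assuming these inputs: a block that only
   declares an unused local variable and contains no instructions is
   ignored, whereas a block declaring a nested function or a local class
   (a TYPE_DECL stub) is kept, since their DIEs would otherwise end up as
   orphans.  */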
26717
26718 /* Hash table routines for file_hash. */
26719
26720 bool
26721 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26722 {
26723 return filename_cmp (p1->filename, p2) == 0;
26724 }
26725
26726 hashval_t
26727 dwarf_file_hasher::hash (dwarf_file_data *p)
26728 {
26729 return htab_hash_string (p->filename);
26730 }
26731
26732 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26733 dwarf2out.c) and return its "index". The index of each (known) filename is
26734 just a unique number which is associated with only that one filename. We
26735 need such numbers for the sake of generating labels (in the .debug_sfnames
26736 section) and references to those files numbers (in the .debug_srcinfo
26737 and .debug_macinfo sections). If the filename given as an argument is not
26738 found in our current list, add it to the list and assign it the next
26739 available unique index number. */
26740
26741 static struct dwarf_file_data *
26742 lookup_filename (const char *file_name)
26743 {
26744 struct dwarf_file_data * created;
26745
26746 if (!file_name)
26747 return NULL;
26748
26749 dwarf_file_data **slot
26750 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26751 INSERT);
26752 if (*slot)
26753 return *slot;
26754
26755 created = ggc_alloc<dwarf_file_data> ();
26756 created->filename = file_name;
26757 created->emitted_number = 0;
26758 *slot = created;
26759 return created;
26760 }
26761
26762 /* If the assembler will construct the file table, then translate the compiler
26763 internal file table number into the assembler file table number, and emit
26764 a .file directive if we haven't already emitted one yet. The file table
26765 numbers are different because we prune debug info for unused variables and
26766 types, which may include filenames. */
26767
26768 static int
26769 maybe_emit_file (struct dwarf_file_data * fd)
26770 {
26771 if (! fd->emitted_number)
26772 {
26773 if (last_emitted_file)
26774 fd->emitted_number = last_emitted_file->emitted_number + 1;
26775 else
26776 fd->emitted_number = 1;
26777 last_emitted_file = fd;
26778
26779 if (output_asm_line_debug_info ())
26780 {
26781 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26782 output_quoted_string (asm_out_file,
26783 remap_debug_filename (fd->filename));
26784 fputc ('\n', asm_out_file);
26785 }
26786 }
26787
26788 return fd->emitted_number;
26789 }
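
/* An illustrative sketch, assuming a source file name "foo.c": with
   assembler-generated line tables, the first call for that file would
   print

     .file 1 "foo.c"

   matching the fprintf/output_quoted_string sequence above, and later
   references reuse the returned emitted_number.  */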
26790
26791 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26792 That generation should happen after function debug info has been
26793 generated. The value of the attribute is the constant value of ARG. */
26794
26795 static void
26796 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26797 {
26798 die_arg_entry entry;
26799
26800 if (!die || !arg)
26801 return;
26802
26803 gcc_assert (early_dwarf);
26804
26805 if (!tmpl_value_parm_die_table)
26806 vec_alloc (tmpl_value_parm_die_table, 32);
26807
26808 entry.die = die;
26809 entry.arg = arg;
26810 vec_safe_push (tmpl_value_parm_die_table, entry);
26811 }
26812
26813 /* Return TRUE if T is an instance of a generic type, FALSE
26814 otherwise. */
26815
26816 static bool
26817 generic_type_p (tree t)
26818 {
26819 if (t == NULL_TREE || !TYPE_P (t))
26820 return false;
26821 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26822 }
26823
26824 /* Schedule the generation of the generic parameter dies for the
26825 instance of generic type T. The proper generation itself is later
26826 done by gen_scheduled_generic_parms_dies. */
26827
26828 static void
26829 schedule_generic_params_dies_gen (tree t)
26830 {
26831 if (!generic_type_p (t))
26832 return;
26833
26834 gcc_assert (early_dwarf);
26835
26836 if (!generic_type_instances)
26837 vec_alloc (generic_type_instances, 256);
26838
26839 vec_safe_push (generic_type_instances, t);
26840 }
26841
26842 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26843 by append_entry_to_tmpl_value_parm_die_table. This function must
26844 be called after function DIEs have been generated. */
26845
26846 static void
26847 gen_remaining_tmpl_value_param_die_attribute (void)
26848 {
26849 if (tmpl_value_parm_die_table)
26850 {
26851 unsigned i, j;
26852 die_arg_entry *e;
26853
26854 /* We do this in two phases - first get the cases we can
26855 handle during early-finish, preserving those we cannot
26856 (containing symbolic constants where we don't yet know
26857 whether we are going to output the referenced symbols).
26858 For those we try again at late-finish. */
26859 j = 0;
26860 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26861 {
26862 if (!e->die->removed
26863 && !tree_add_const_value_attribute (e->die, e->arg))
26864 {
26865 dw_loc_descr_ref loc = NULL;
26866 if (! early_dwarf
26867 && (dwarf_version >= 5 || !dwarf_strict))
26868 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26869 if (loc)
26870 add_AT_loc (e->die, DW_AT_location, loc);
26871 else
26872 (*tmpl_value_parm_die_table)[j++] = *e;
26873 }
26874 }
26875 tmpl_value_parm_die_table->truncate (j);
26876 }
26877 }
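
/* An illustrative sketch, assuming C++ inputs: for an instantiation such
   as X<3> of "template <int N> struct X", tree_add_const_value_attribute
   can attach DW_AT_const_value 3 during early finish; for Y<&some_global>
   of "template <int *P> struct Y", the entry is kept and retried at late
   finish, where a DW_AT_location descriptor may be emitted instead.  */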
26878
26879 /* Generate generic parameters DIEs for instances of generic types
26880 that have been previously scheduled by
26881 schedule_generic_params_dies_gen. This function must be called
26882 after all the types of the CU have been laid out. */
26883
26884 static void
26885 gen_scheduled_generic_parms_dies (void)
26886 {
26887 unsigned i;
26888 tree t;
26889
26890 if (!generic_type_instances)
26891 return;
26892
26893 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
26894 if (COMPLETE_TYPE_P (t))
26895 gen_generic_params_dies (t);
26896
26897 generic_type_instances = NULL;
26898 }
26899
26900
26901 /* Replace DW_AT_name for the decl with name. */
26902
26903 static void
26904 dwarf2out_set_name (tree decl, tree name)
26905 {
26906 dw_die_ref die;
26907 dw_attr_node *attr;
26908 const char *dname;
26909
26910 die = TYPE_SYMTAB_DIE (decl);
26911 if (!die)
26912 return;
26913
26914 dname = dwarf2_name (name, 0);
26915 if (!dname)
26916 return;
26917
26918 attr = get_AT (die, DW_AT_name);
26919 if (attr)
26920 {
26921 struct indirect_string_node *node;
26922
26923 node = find_AT_string (dname);
26924 /* Replace the string. */
26925 attr->dw_attr_val.v.val_str = node;
26926 }
26927
26928 else
26929 add_name_attribute (die, dname);
26930 }
26931
26932 /* True if before or during processing of the first function being emitted. */
26933 static bool in_first_function_p = true;
26934 /* True if loc_note during dwarf2out_var_location call might still be
26935 before first real instruction at address equal to .Ltext0. */
26936 static bool maybe_at_text_label_p = true;
26937 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
26938 static unsigned int first_loclabel_num_not_at_text_label;
26939
26940 /* Look ahead for a real insn, or for a begin stmt marker. */
26941
26942 static rtx_insn *
26943 dwarf2out_next_real_insn (rtx_insn *loc_note)
26944 {
26945 rtx_insn *next_real = NEXT_INSN (loc_note);
26946
26947 while (next_real)
26948 if (INSN_P (next_real))
26949 break;
26950 else
26951 next_real = NEXT_INSN (next_real);
26952
26953 return next_real;
26954 }
26955
26956 /* Called by the final INSN scan whenever we see a var location. We
26957 use it to drop labels in the right places, and throw the location in
26958 our lookup table. */
26959
26960 static void
26961 dwarf2out_var_location (rtx_insn *loc_note)
26962 {
26963 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
26964 struct var_loc_node *newloc;
26965 rtx_insn *next_real, *next_note;
26966 rtx_insn *call_insn = NULL;
26967 static const char *last_label;
26968 static const char *last_postcall_label;
26969 static bool last_in_cold_section_p;
26970 static rtx_insn *expected_next_loc_note;
26971 tree decl;
26972 bool var_loc_p;
26973 var_loc_view view = 0;
26974
26975 if (!NOTE_P (loc_note))
26976 {
26977 if (CALL_P (loc_note))
26978 {
26979 maybe_reset_location_view (loc_note, cur_line_info_table);
26980 call_site_count++;
26981 if (SIBLING_CALL_P (loc_note))
26982 tail_call_site_count++;
26983 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
26984 {
26985 call_insn = loc_note;
26986 loc_note = NULL;
26987 var_loc_p = false;
26988
26989 next_real = dwarf2out_next_real_insn (call_insn);
26990 next_note = NULL;
26991 cached_next_real_insn = NULL;
26992 goto create_label;
26993 }
26994 if (optimize == 0 && !flag_var_tracking)
26995 {
26996 /* When the var-tracking pass is not running, there is no note
26997 for indirect calls whose target is compile-time known. In this
26998 case, process such calls specifically so that we generate call
26999 sites for them anyway. */
27000 rtx x = PATTERN (loc_note);
27001 if (GET_CODE (x) == PARALLEL)
27002 x = XVECEXP (x, 0, 0);
27003 if (GET_CODE (x) == SET)
27004 x = SET_SRC (x);
27005 if (GET_CODE (x) == CALL)
27006 x = XEXP (x, 0);
27007 if (!MEM_P (x)
27008 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27009 || !SYMBOL_REF_DECL (XEXP (x, 0))
27010 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27011 != FUNCTION_DECL))
27012 {
27013 call_insn = loc_note;
27014 loc_note = NULL;
27015 var_loc_p = false;
27016
27017 next_real = dwarf2out_next_real_insn (call_insn);
27018 next_note = NULL;
27019 cached_next_real_insn = NULL;
27020 goto create_label;
27021 }
27022 }
27023 }
27024 else if (!debug_variable_location_views)
27025 gcc_unreachable ();
27026 else
27027 maybe_reset_location_view (loc_note, cur_line_info_table);
27028
27029 return;
27030 }
27031
27032 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27033 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27034 return;
27035
27036 /* Optimize processing a large consecutive sequence of location
27037 notes so we don't spend too much time in next_real_insn. If the
27038 next insn is another location note, remember the next_real_insn
27039 calculation for next time. */
27040 next_real = cached_next_real_insn;
27041 if (next_real)
27042 {
27043 if (expected_next_loc_note != loc_note)
27044 next_real = NULL;
27045 }
27046
27047 next_note = NEXT_INSN (loc_note);
27048 if (! next_note
27049 || next_note->deleted ()
27050 || ! NOTE_P (next_note)
27051 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27052 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27053 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27054 next_note = NULL;
27055
27056 if (! next_real)
27057 next_real = dwarf2out_next_real_insn (loc_note);
27058
27059 if (next_note)
27060 {
27061 expected_next_loc_note = next_note;
27062 cached_next_real_insn = next_real;
27063 }
27064 else
27065 cached_next_real_insn = NULL;
27066
27067 /* If there are no instructions which would be affected by this note,
27068 don't do anything. */
27069 if (var_loc_p
27070 && next_real == NULL_RTX
27071 && !NOTE_DURING_CALL_P (loc_note))
27072 return;
27073
27074 create_label:
27075
27076 if (next_real == NULL_RTX)
27077 next_real = get_last_insn ();
27078
27079 /* If there were any real insns between the note we processed last time
27080 and this note (or if it is the first note), clear
27081 last_{,postcall_}label so that they are not reused this time. */
27082 if (last_var_location_insn == NULL_RTX
27083 || last_var_location_insn != next_real
27084 || last_in_cold_section_p != in_cold_section_p)
27085 {
27086 last_label = NULL;
27087 last_postcall_label = NULL;
27088 }
27089
27090 if (var_loc_p)
27091 {
27092 const char *label
27093 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27094 view = cur_line_info_table->view;
27095 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27096 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27097 if (newloc == NULL)
27098 return;
27099 }
27100 else
27101 {
27102 decl = NULL_TREE;
27103 newloc = NULL;
27104 }
27105
27106 /* If there were no real insns between the note we processed last time
27107 and this note, use the label we emitted last time. Otherwise
27108 create a new label and emit it. */
27109 if (last_label == NULL)
27110 {
27111 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27112 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27113 loclabel_num++;
27114 last_label = ggc_strdup (loclabel);
27115 /* See if loclabel might be equal to .Ltext0. If yes,
27116 bump first_loclabel_num_not_at_text_label. */
27117 if (!have_multiple_function_sections
27118 && in_first_function_p
27119 && maybe_at_text_label_p)
27120 {
27121 static rtx_insn *last_start;
27122 rtx_insn *insn;
27123 for (insn = loc_note; insn; insn = previous_insn (insn))
27124 if (insn == last_start)
27125 break;
27126 else if (!NONDEBUG_INSN_P (insn))
27127 continue;
27128 else
27129 {
27130 rtx body = PATTERN (insn);
27131 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27132 continue;
27133 /* Inline asm could occupy zero bytes. */
27134 else if (GET_CODE (body) == ASM_INPUT
27135 || asm_noperands (body) >= 0)
27136 continue;
27137 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27138 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27139 continue;
27140 #endif
27141 else
27142 {
27143 /* Assume insn has non-zero length. */
27144 maybe_at_text_label_p = false;
27145 break;
27146 }
27147 }
27148 if (maybe_at_text_label_p)
27149 {
27150 last_start = loc_note;
27151 first_loclabel_num_not_at_text_label = loclabel_num;
27152 }
27153 }
27154 }
27155
27156 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27157 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27158
27159 if (!var_loc_p)
27160 {
27161 struct call_arg_loc_node *ca_loc
27162 = ggc_cleared_alloc<call_arg_loc_node> ();
27163 rtx_insn *prev = call_insn;
27164
27165 ca_loc->call_arg_loc_note
27166 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27167 ca_loc->next = NULL;
27168 ca_loc->label = last_label;
27169 gcc_assert (prev
27170 && (CALL_P (prev)
27171 || (NONJUMP_INSN_P (prev)
27172 && GET_CODE (PATTERN (prev)) == SEQUENCE
27173 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27174 if (!CALL_P (prev))
27175 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27176 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27177
27178 /* Look for a SYMBOL_REF in the "prev" instruction. */
27179 rtx x = get_call_rtx_from (PATTERN (prev));
27180 if (x)
27181 {
27182 /* Try to get the call symbol, if any. */
27183 if (MEM_P (XEXP (x, 0)))
27184 x = XEXP (x, 0);
27185 /* First, look for a memory access to a symbol_ref. */
27186 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27187 && SYMBOL_REF_DECL (XEXP (x, 0))
27188 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27189 ca_loc->symbol_ref = XEXP (x, 0);
27190 /* Otherwise, look at a compile-time known user-level function
27191 declaration. */
27192 else if (MEM_P (x)
27193 && MEM_EXPR (x)
27194 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27195 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27196 }
27197
27198 ca_loc->block = insn_scope (prev);
27199 if (call_arg_locations)
27200 call_arg_loc_last->next = ca_loc;
27201 else
27202 call_arg_locations = ca_loc;
27203 call_arg_loc_last = ca_loc;
27204 }
27205 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27206 {
27207 newloc->label = last_label;
27208 newloc->view = view;
27209 }
27210 else
27211 {
27212 if (!last_postcall_label)
27213 {
27214 sprintf (loclabel, "%s-1", last_label);
27215 last_postcall_label = ggc_strdup (loclabel);
27216 }
27217 newloc->label = last_postcall_label;
27218 /* ??? This view is at last_label, not last_label-1, but we
27219 could only assume view at last_label-1 is zero if we could
27220 assume calls always have length greater than one. This is
27221 probably true in general, though there might be a rare
27222 exception to this rule, e.g. if a call insn is optimized out
27223 by target magic. Then, even the -1 in the label will be
27224 wrong, which might invalidate the range. Anyway, using view,
27225 though technically possibly incorrect, will work as far as
27226 ranges go: since L-1 is in the middle of the call insn,
27227 (L-1).0 and (L-1).V shouldn't make any difference, and having
27228 the loclist entry refer to the .loc entry might be useful, so
27229 leave it like this. */
27230 newloc->view = view;
27231 }
27232
27233 if (var_loc_p && flag_debug_asm)
27234 {
27235 const char *name, *sep, *patstr;
27236 if (decl && DECL_NAME (decl))
27237 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27238 else
27239 name = "";
27240 if (NOTE_VAR_LOCATION_LOC (loc_note))
27241 {
27242 sep = " => ";
27243 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27244 }
27245 else
27246 {
27247 sep = " ";
27248 patstr = "RESET";
27249 }
27250 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27251 name, sep, patstr);
27252 }
27253
27254 last_var_location_insn = next_real;
27255 last_in_cold_section_p = in_cold_section_p;
27256 }
27257
27258 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27259 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27260 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27261 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27262 BLOCK_FRAGMENT_ORIGIN links. */
27263 static bool
27264 block_within_block_p (tree block, tree outer, bool bothways)
27265 {
27266 if (block == outer)
27267 return true;
27268
27269 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27270 for (tree context = BLOCK_SUPERCONTEXT (block);
27271 context != outer;
27272 context = BLOCK_SUPERCONTEXT (context))
27273 if (!context || TREE_CODE (context) != BLOCK)
27274 return false;
27275
27276 if (!bothways)
27277 return true;
27278
27279 /* Now check that each block is actually referenced by its
27280 parent. */
27281 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27282 context = BLOCK_SUPERCONTEXT (context))
27283 {
27284 if (BLOCK_FRAGMENT_ORIGIN (context))
27285 {
27286 gcc_assert (!BLOCK_SUBBLOCKS (context));
27287 context = BLOCK_FRAGMENT_ORIGIN (context);
27288 }
27289 for (tree sub = BLOCK_SUBBLOCKS (context);
27290 sub != block;
27291 sub = BLOCK_CHAIN (sub))
27292 if (!sub)
27293 return false;
27294 if (context == outer)
27295 return true;
27296 else
27297 block = context;
27298 }
27299 }
27300
27301 /* Called during final while assembling the marker of the entry point
27302 for an inlined function. */
27303
27304 static void
27305 dwarf2out_inline_entry (tree block)
27306 {
27307 gcc_assert (debug_inline_points);
27308
27309 /* If we can't represent it, don't bother. */
27310 if (!(dwarf_version >= 3 || !dwarf_strict))
27311 return;
27312
27313 gcc_assert (DECL_P (block_ultimate_origin (block)));
27314
27315 /* Sanity check the block tree. This would catch a case in which
27316 BLOCK got removed from the tree reachable from the outermost
27317 lexical block, but got retained in markers. It would still link
27318 back to its parents, but some ancestor would be missing a link
27319 down the path to the sub BLOCK. If the block got removed, its
27320 BLOCK_NUMBER will not be a usable value. */
27321 if (flag_checking)
27322 gcc_assert (block_within_block_p (block,
27323 DECL_INITIAL (current_function_decl),
27324 true));
27325
27326 gcc_assert (inlined_function_outer_scope_p (block));
27327 gcc_assert (!BLOCK_DIE (block));
27328
27329 if (BLOCK_FRAGMENT_ORIGIN (block))
27330 block = BLOCK_FRAGMENT_ORIGIN (block);
27331 /* Can the entry point ever not be at the beginning of an
27332 unfragmented lexical block? */
27333 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27334 || (cur_line_info_table
27335 && !ZERO_VIEW_P (cur_line_info_table->view))))
27336 return;
27337
27338 if (!inline_entry_data_table)
27339 inline_entry_data_table
27340 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27341
27342
27343 inline_entry_data **iedp
27344 = inline_entry_data_table->find_slot_with_hash (block,
27345 htab_hash_pointer (block),
27346 INSERT);
27347 if (*iedp)
27348 /* ??? Ideally, we'd record all entry points for the same inlined
27349 function (some may have been duplicated by e.g. unrolling), but
27350 we have no way to represent that ATM. */
27351 return;
27352
27353 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27354 ied->block = block;
27355 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27356 ied->label_num = BLOCK_NUMBER (block);
27357 if (cur_line_info_table)
27358 ied->view = cur_line_info_table->view;
27359
27360 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27361
27362 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27363 BLOCK_NUMBER (block));
27364 ASM_OUTPUT_LABEL (asm_out_file, label);
27365 }
27366
27367 /* Called from finalize_size_functions for size functions so that their body
27368 can be encoded in the debug info to describe the layout of variable-length
27369 structures. */
27370
27371 static void
27372 dwarf2out_size_function (tree decl)
27373 {
27374 function_to_dwarf_procedure (decl);
27375 }
27376
27377 /* Note in one location list that the text section has changed. */
27378
27379 int
27380 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27381 {
27382 var_loc_list *list = *slot;
27383 if (list->first)
27384 list->last_before_switch
27385 = list->last->next ? list->last->next : list->last;
27386 return 1;
27387 }
27388
27389 /* Note in all location lists that the text section has changed. */
27390
27391 static void
27392 var_location_switch_text_section (void)
27393 {
27394 if (decl_loc_table == NULL)
27395 return;
27396
27397 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27398 }
27399
27400 /* Create a new line number table. */
27401
27402 static dw_line_info_table *
27403 new_line_info_table (void)
27404 {
27405 dw_line_info_table *table;
27406
27407 table = ggc_cleared_alloc<dw_line_info_table> ();
27408 table->file_num = 1;
27409 table->line_num = 1;
27410 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27411 FORCE_RESET_NEXT_VIEW (table->view);
27412
27413 return table;
27414 }
27415
27416 /* Look up the "current" table into which we emit line info, so
27417 that we don't have to do it for every source line. */
27418
27419 static void
27420 set_cur_line_info_table (section *sec)
27421 {
27422 dw_line_info_table *table;
27423
27424 if (sec == text_section)
27425 table = text_section_line_info;
27426 else if (sec == cold_text_section)
27427 {
27428 table = cold_text_section_line_info;
27429 if (!table)
27430 {
27431 cold_text_section_line_info = table = new_line_info_table ();
27432 table->end_label = cold_end_label;
27433 }
27434 }
27435 else
27436 {
27437 const char *end_label;
27438
27439 if (crtl->has_bb_partition)
27440 {
27441 if (in_cold_section_p)
27442 end_label = crtl->subsections.cold_section_end_label;
27443 else
27444 end_label = crtl->subsections.hot_section_end_label;
27445 }
27446 else
27447 {
27448 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27449 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27450 current_function_funcdef_no);
27451 end_label = ggc_strdup (label);
27452 }
27453
27454 table = new_line_info_table ();
27455 table->end_label = end_label;
27456
27457 vec_safe_push (separate_line_info, table);
27458 }
27459
27460 if (output_asm_line_debug_info ())
27461 table->is_stmt = (cur_line_info_table
27462 ? cur_line_info_table->is_stmt
27463 : DWARF_LINE_DEFAULT_IS_STMT_START);
27464 cur_line_info_table = table;
27465 }
27466
27467
27468 /* We need to reset the locations at the beginning of each
27469 function. We can't do this in the end_function hook, because the
27470 declarations that use the locations won't have been output when
27471 that hook is called. Also compute have_multiple_function_sections here. */
27472
27473 static void
27474 dwarf2out_begin_function (tree fun)
27475 {
27476 section *sec = function_section (fun);
27477
27478 if (sec != text_section)
27479 have_multiple_function_sections = true;
27480
27481 if (crtl->has_bb_partition && !cold_text_section)
27482 {
27483 gcc_assert (current_function_decl == fun);
27484 cold_text_section = unlikely_text_section ();
27485 switch_to_section (cold_text_section);
27486 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27487 switch_to_section (sec);
27488 }
27489
27490 dwarf2out_note_section_used ();
27491 call_site_count = 0;
27492 tail_call_site_count = 0;
27493
27494 set_cur_line_info_table (sec);
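  /* Force a view reset at the function's entry: code for a new function
     starts at a fresh PC, so the first view can safely restart from zero.  */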
27495 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27496 }
27497
27498 /* Helper function of dwarf2out_end_function, called only after emitting
27499 the very first function into assembly. Check if some .debug_loc range
27500 might end with a .LVL* label that could be equal to .Ltext0.
27501 In that case we must force using absolute addresses in .debug_loc ranges,
27502 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27503 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27504 list terminator.
27505 Set have_multiple_function_sections to true in that case and
27506 terminate htab traversal. */
27507
27508 int
27509 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27510 {
27511 var_loc_list *entry = *slot;
27512 struct var_loc_node *node;
27513
27514 node = entry->first;
27515 if (node && node->next && node->next->label)
27516 {
27517 unsigned int i;
27518 const char *label = node->next->label;
27519 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27520
27521 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27522 {
27523 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27524 if (strcmp (label, loclabel) == 0)
27525 {
27526 have_multiple_function_sections = true;
27527 return 0;
27528 }
27529 }
27530 }
27531 return 1;
27532 }
27533
27534 /* Hook called after emitting a function into assembly.
27535 This does something only for the very first function emitted. */
27536
27537 static void
27538 dwarf2out_end_function (unsigned int)
27539 {
27540 if (in_first_function_p
27541 && !have_multiple_function_sections
27542 && first_loclabel_num_not_at_text_label
27543 && decl_loc_table)
27544 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27545 in_first_function_p = false;
27546 maybe_at_text_label_p = false;
27547 }
27548
27549 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27550 front-ends register a translation unit even before dwarf2out_init is
27551 called. */
27552 static tree main_translation_unit = NULL_TREE;
27553
27554 /* Hook called by front-ends after they built their main translation unit.
27555 Associate comp_unit_die to UNIT. */
27556
27557 static void
27558 dwarf2out_register_main_translation_unit (tree unit)
27559 {
27560 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27561 && main_translation_unit == NULL_TREE);
27562 main_translation_unit = unit;
27563 /* If dwarf2out_init has not been called yet, it will perform the association
27564 itself looking at main_translation_unit. */
27565 if (decl_die_table != NULL)
27566 equate_decl_number_to_die (unit, comp_unit_die ());
27567 }
27568
27569 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27570
27571 static void
27572 push_dw_line_info_entry (dw_line_info_table *table,
27573 enum dw_line_info_opcode opcode, unsigned int val)
27574 {
27575 dw_line_info_entry e;
27576 e.opcode = opcode;
27577 e.val = val;
27578 vec_safe_push (table->entries, e);
27579 }
27580
27581 /* Output a label to mark the beginning of a source code line entry
27582 and record information relating to this source line, in
27583 'line_info_table' for later output of the .debug_line section. */
27584 /* ??? The discriminator parameter ought to be unsigned. */
27585
27586 static void
27587 dwarf2out_source_line (unsigned int line, unsigned int column,
27588 const char *filename,
27589 int discriminator, bool is_stmt)
27590 {
27591 unsigned int file_num;
27592 dw_line_info_table *table;
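  /* Monotonically increasing counter used to hand out distinct ids for the
     symbolic location view (LVU) labels emitted below.  */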
27593 static var_loc_view lvugid;
27594
27595 if (debug_info_level < DINFO_LEVEL_TERSE)
27596 return;
27597
27598 table = cur_line_info_table;
27599
27600 if (line == 0)
27601 {
27602 if (debug_variable_location_views
27603 && output_asm_line_debug_info ()
27604 && table && !RESETTING_VIEW_P (table->view))
27605 {
27606 /* If we're using the assembler to compute view numbers, we
27607 can't issue a .loc directive for line zero, so we can't
27608 get a view number at this point. We might attempt to
27609 compute it from the previous view, but since we're
27610 omitting the line number entry, we might as well omit the
27611 view number as well. That means pretending it's a view
27612 number zero, which might very well turn out to be
27613 correct. */
27614 if (!zero_view_p)
27615 zero_view_p = BITMAP_GGC_ALLOC ();
27616 bitmap_set_bit (zero_view_p, table->view);
27617 if (flag_debug_asm)
27618 {
27619 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27620 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27621 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27622 ASM_COMMENT_START);
27623 assemble_name (asm_out_file, label);
27624 putc ('\n', asm_out_file);
27625 }
27626 table->view = ++lvugid;
27627 }
27628 return;
27629 }
27630
27631 /* The discriminator column was added in DWARF 4. Simplify the logic
27632 below by clearing it here if we're not supposed to output it. */
27633 if (dwarf_version < 4 && dwarf_strict)
27634 discriminator = 0;
27635
27636 if (!debug_column_info)
27637 column = 0;
27638
27639 file_num = maybe_emit_file (lookup_filename (filename));
27640
27641 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27642 the debugger has used the second (possibly duplicate) line number
27643 at the beginning of the function to mark the end of the prologue.
27644 We could eliminate any other duplicates within the function. For
27645 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27646 that second line number entry. */
27647 /* Recall that this end-of-prologue indication is *not* the same thing
27648 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27649 to which the hook corresponds, follows the last insn that was
27650 emitted by gen_prologue. What we need is to precede the first insn
27651 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27652 insn that corresponds to something the user wrote. These may be
27653 very different locations once scheduling is enabled. */
27654
27655 if (0 && file_num == table->file_num
27656 && line == table->line_num
27657 && column == table->column_num
27658 && discriminator == table->discrim_num
27659 && is_stmt == table->is_stmt)
27660 return;
27661
27662 switch_to_section (current_function_section ());
27663
27664 /* If requested, emit something human-readable. */
27665 if (flag_debug_asm)
27666 {
27667 if (debug_column_info)
27668 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27669 filename, line, column);
27670 else
27671 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27672 filename, line);
27673 }
27674
27675 if (output_asm_line_debug_info ())
27676 {
27677 /* Emit the .loc directive understood by GNU as. */
27678 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27679 file_num, line, is_stmt, discriminator */
27680 fputs ("\t.loc ", asm_out_file);
27681 fprint_ul (asm_out_file, file_num);
27682 putc (' ', asm_out_file);
27683 fprint_ul (asm_out_file, line);
27684 putc (' ', asm_out_file);
27685 fprint_ul (asm_out_file, column);
27686
27687 if (is_stmt != table->is_stmt)
27688 {
27689 fputs (" is_stmt ", asm_out_file);
27690 putc (is_stmt ? '1' : '0', asm_out_file);
27691 }
27692 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27693 {
27694 gcc_assert (discriminator > 0);
27695 fputs (" discriminator ", asm_out_file);
27696 fprint_ul (asm_out_file, (unsigned long) discriminator);
27697 }
27698 if (debug_variable_location_views)
27699 {
27700 if (!RESETTING_VIEW_P (table->view))
27701 {
27702 /* When we're using the assembler to compute view
27703 numbers, we output symbolic labels after "view" in
27704 .loc directives, and the assembler will set them for
27705 us, so that we can refer to the view numbers in
27706 location lists. The only exceptions are when we know
27707 a view will be zero: "-0" is a forced reset, used
27708 e.g. in the beginning of functions, whereas "0" tells
27709 the assembler to check that there was a PC change
27710 since the previous view, in a way that implicitly
27711 resets the next view. */
27712 fputs (" view ", asm_out_file);
27713 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27714 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27715 assemble_name (asm_out_file, label);
27716 table->view = ++lvugid;
27717 }
27718 else
27719 {
27720 if (FORCE_RESETTING_VIEW_P (table->view))
27721 fputs (" view -0", asm_out_file);
27722 else
27723 fputs (" view 0", asm_out_file);
27724 /* Mark the present view as a zero view. Earlier debug
27725 binds may have already added its id to loclists to be
27726 emitted later, so we can't reuse the id for something
27727 else. However, it's good to know whether a view is
27728 known to be zero, because then we may be able to
27729 optimize out locviews that are all zeros, so take
27730 note of it in zero_view_p. */
27731 if (!zero_view_p)
27732 zero_view_p = BITMAP_GGC_ALLOC ();
27733 bitmap_set_bit (zero_view_p, lvugid);
27734 table->view = ++lvugid;
27735 }
27736 }
27737 putc ('\n', asm_out_file);
27738 }
27739 else
27740 {
27741 unsigned int label_num = ++line_info_label_num;
27742
27743 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27744
27745 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27746 push_dw_line_info_entry (table, LI_adv_address, label_num);
27747 else
27748 push_dw_line_info_entry (table, LI_set_address, label_num);
27749 if (debug_variable_location_views)
27750 {
27751 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27752 if (resetting)
27753 table->view = 0;
27754
27755 if (flag_debug_asm)
27756 fprintf (asm_out_file, "\t%s view %s%d\n",
27757 ASM_COMMENT_START,
27758 resetting ? "-" : "",
27759 table->view);
27760
27761 table->view++;
27762 }
27763 if (file_num != table->file_num)
27764 push_dw_line_info_entry (table, LI_set_file, file_num);
27765 if (discriminator != table->discrim_num)
27766 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27767 if (is_stmt != table->is_stmt)
27768 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27769 push_dw_line_info_entry (table, LI_set_line, line);
27770 if (debug_column_info)
27771 push_dw_line_info_entry (table, LI_set_column, column);
27772 }
27773
27774 table->file_num = file_num;
27775 table->line_num = line;
27776 table->column_num = column;
27777 table->discrim_num = discriminator;
27778 table->is_stmt = is_stmt;
27779 table->in_use = true;
27780 }
27781
27782 /* Record the beginning of a new source file. */
27783
27784 static void
27785 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27786 {
27787 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27788 {
27789 macinfo_entry e;
27790 e.code = DW_MACINFO_start_file;
27791 e.lineno = lineno;
27792 e.info = ggc_strdup (filename);
27793 vec_safe_push (macinfo_table, e);
27794 }
27795 }
27796
27797 /* Record the end of a source file. */
27798
27799 static void
27800 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27801 {
27802 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27803 {
27804 macinfo_entry e;
27805 e.code = DW_MACINFO_end_file;
27806 e.lineno = lineno;
27807 e.info = NULL;
27808 vec_safe_push (macinfo_table, e);
27809 }
27810 }
27811
27812 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27813 the tail part of the directive line, i.e. the part which is past the
27814 initial whitespace, #, whitespace, directive-name, whitespace part. */
27815
27816 static void
27817 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27818 const char *buffer ATTRIBUTE_UNUSED)
27819 {
27820 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27821 {
27822 macinfo_entry e;
27823 /* Insert a dummy first entry to be able to optimize the whole
27824 predefined macro block using DW_MACRO_import. */
27825 if (macinfo_table->is_empty () && lineno <= 1)
27826 {
27827 e.code = 0;
27828 e.lineno = 0;
27829 e.info = NULL;
27830 vec_safe_push (macinfo_table, e);
27831 }
27832 e.code = DW_MACINFO_define;
27833 e.lineno = lineno;
27834 e.info = ggc_strdup (buffer);
27835 vec_safe_push (macinfo_table, e);
27836 }
27837 }
27838
27839 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27840 the tail part of the directive line, i.e. the part which is past the
27841 initial whitespace, #, whitespace, directive-name, whitespace part. */
27842
27843 static void
27844 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27845 const char *buffer ATTRIBUTE_UNUSED)
27846 {
27847 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27848 {
27849 macinfo_entry e;
27850 /* Insert a dummy first entry to be able to optimize the whole
27851 predefined macro block using DW_MACRO_import. */
27852 if (macinfo_table->is_empty () && lineno <= 1)
27853 {
27854 e.code = 0;
27855 e.lineno = 0;
27856 e.info = NULL;
27857 vec_safe_push (macinfo_table, e);
27858 }
27859 e.code = DW_MACINFO_undef;
27860 e.lineno = lineno;
27861 e.info = ggc_strdup (buffer);
27862 vec_safe_push (macinfo_table, e);
27863 }
27864 }
27865
27866 /* Helpers to manipulate the hash table of macinfo entries. */
27867
27868 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
27869 {
27870 static inline hashval_t hash (const macinfo_entry *);
27871 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
27872 };
27873
27874 inline hashval_t
27875 macinfo_entry_hasher::hash (const macinfo_entry *entry)
27876 {
27877 return htab_hash_string (entry->info);
27878 }
27879
27880 inline bool
27881 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
27882 const macinfo_entry *entry2)
27883 {
27884 return !strcmp (entry1->info, entry2->info);
27885 }
27886
27887 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
27888
27889 /* Output a single .debug_macinfo entry. */
27890
27891 static void
27892 output_macinfo_op (macinfo_entry *ref)
27893 {
27894 int file_num;
27895 size_t len;
27896 struct indirect_string_node *node;
27897 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27898 struct dwarf_file_data *fd;
27899
27900 switch (ref->code)
27901 {
27902 case DW_MACINFO_start_file:
27903 fd = lookup_filename (ref->info);
27904 file_num = maybe_emit_file (fd);
27905 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
27906 dw2_asm_output_data_uleb128 (ref->lineno,
27907 "Included from line number %lu",
27908 (unsigned long) ref->lineno);
27909 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
27910 break;
27911 case DW_MACINFO_end_file:
27912 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
27913 break;
27914 case DW_MACINFO_define:
27915 case DW_MACINFO_undef:
27916 len = strlen (ref->info) + 1;
27917 if (!dwarf_strict
27918 && len > DWARF_OFFSET_SIZE
27919 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
27920 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
27921 {
27922 ref->code = ref->code == DW_MACINFO_define
27923 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
27924 output_macinfo_op (ref);
27925 return;
27926 }
27927 dw2_asm_output_data (1, ref->code,
27928 ref->code == DW_MACINFO_define
27929 ? "Define macro" : "Undefine macro");
27930 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
27931 (unsigned long) ref->lineno);
27932 dw2_asm_output_nstring (ref->info, -1, "The macro");
27933 break;
27934 case DW_MACRO_define_strp:
27935 case DW_MACRO_undef_strp:
27936 node = find_AT_string (ref->info);
27937 gcc_assert (node
27938 && (node->form == DW_FORM_strp
27939 || node->form == DW_FORM_GNU_str_index));
27940 dw2_asm_output_data (1, ref->code,
27941 ref->code == DW_MACRO_define_strp
27942 ? "Define macro strp"
27943 : "Undefine macro strp");
27944 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
27945 (unsigned long) ref->lineno);
27946 if (node->form == DW_FORM_strp)
27947 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
27948 debug_str_section, "The macro: \"%s\"",
27949 ref->info);
27950 else
27951 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
27952 ref->info);
27953 break;
27954 case DW_MACRO_import:
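      /* For import ops the lineno field does not hold a source line; it is
         reused (see optimize_macinfo_range) to hold the index of the comdat
         macro section label being referenced.  */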
27955 dw2_asm_output_data (1, ref->code, "Import");
27956 ASM_GENERATE_INTERNAL_LABEL (label,
27957 DEBUG_MACRO_SECTION_LABEL,
27958 ref->lineno + macinfo_label_base);
27959 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
27960 break;
27961 default:
27962 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
27963 ASM_COMMENT_START, (unsigned long) ref->code);
27964 break;
27965 }
27966 }
27967
27968 /* Attempt to make a sequence of define/undef macinfo ops shareable with
27969 other compilation unit .debug_macinfo sections. IDX is the first
27970 index of a define/undef, return the number of ops that should be
27971 emitted in a comdat .debug_macinfo section and emit
27972 a DW_MACRO_import entry referencing it.
27973 If the define/undef entry should be emitted normally, return 0. */
27974
27975 static unsigned
27976 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
27977 macinfo_hash_type **macinfo_htab)
27978 {
27979 macinfo_entry *first, *second, *cur, *inc;
27980 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
27981 unsigned char checksum[16];
27982 struct md5_ctx ctx;
27983 char *grp_name, *tail;
27984 const char *base;
27985 unsigned int i, count, encoded_filename_len, linebuf_len;
27986 macinfo_entry **slot;
27987
27988 first = &(*macinfo_table)[idx];
27989 second = &(*macinfo_table)[idx + 1];
27990
27991 /* Optimize only if there are at least two consecutive define/undef ops,
27992 and either all of them are before the first DW_MACINFO_start_file
27993 with lineno {0,1} (i.e. the predefined macro block), or all of them
27994 are in some included header file. */
27995 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
27996 return 0;
27997 if (vec_safe_is_empty (files))
27998 {
27999 if (first->lineno > 1 || second->lineno > 1)
28000 return 0;
28001 }
28002 else if (first->lineno == 0)
28003 return 0;
28004
28005 /* Find the last define/undef entry that can be grouped together
28006 with first and at the same time compute md5 checksum of their
28007 codes, linenumbers and strings. */
28008 md5_init_ctx (&ctx);
28009 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28010 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28011 break;
28012 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28013 break;
28014 else
28015 {
28016 unsigned char code = cur->code;
28017 md5_process_bytes (&code, 1, &ctx);
28018 checksum_uleb128 (cur->lineno, &ctx);
28019 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28020 }
28021 md5_finish_ctx (&ctx, checksum);
28022 count = i - idx;
28023
28024 /* From the containing include filename (if any) pick up just
28025 usable characters from its basename. */
28026 if (vec_safe_is_empty (files))
28027 base = "";
28028 else
28029 base = lbasename (files->last ().info);
28030 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28031 if (ISIDNUM (base[i]) || base[i] == '.')
28032 encoded_filename_len++;
28033 /* Count . at the end. */
28034 if (encoded_filename_len)
28035 encoded_filename_len++;
28036
28037 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28038 linebuf_len = strlen (linebuf);
28039
28040 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
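  /* For example, with 4-byte offsets, a range whose containing header is
     "stddef.h" and whose first op is at line 12 would be named
     "wm4.stddef.h.12." followed by 32 hex digits of the md5 checksum.  */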
28041 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28042 + 16 * 2 + 1);
28043 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28044 tail = grp_name + 4;
28045 if (encoded_filename_len)
28046 {
28047 for (i = 0; base[i]; i++)
28048 if (ISIDNUM (base[i]) || base[i] == '.')
28049 *tail++ = base[i];
28050 *tail++ = '.';
28051 }
28052 memcpy (tail, linebuf, linebuf_len);
28053 tail += linebuf_len;
28054 *tail++ = '.';
28055 for (i = 0; i < 16; i++)
28056 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28057
28058 /* Construct a macinfo_entry for DW_MACRO_import
28059 in the empty vector entry before the first define/undef. */
28060 inc = &(*macinfo_table)[idx - 1];
28061 inc->code = DW_MACRO_import;
28062 inc->lineno = 0;
28063 inc->info = ggc_strdup (grp_name);
28064 if (!*macinfo_htab)
28065 *macinfo_htab = new macinfo_hash_type (10);
28066 /* Avoid emitting duplicates. */
28067 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28068 if (*slot != NULL)
28069 {
28070 inc->code = 0;
28071 inc->info = NULL;
28072 /* If such an entry has been used before, just emit
28073 a DW_MACRO_import op. */
28074 inc = *slot;
28075 output_macinfo_op (inc);
28076 /* And clear all macinfo_entry in the range to avoid emitting them
28077 in the second pass. */
28078 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28079 {
28080 cur->code = 0;
28081 cur->info = NULL;
28082 }
28083 }
28084 else
28085 {
28086 *slot = inc;
28087 inc->lineno = (*macinfo_htab)->elements ();
28088 output_macinfo_op (inc);
28089 }
28090 return count;
28091 }
28092
28093 /* Save any strings needed by the macinfo table in the debug str
28094 table. All strings must be collected into the table by the time
28095 index_string is called. */
28096
28097 static void
28098 save_macinfo_strings (void)
28099 {
28100 unsigned len;
28101 unsigned i;
28102 macinfo_entry *ref;
28103
28104 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28105 {
28106 switch (ref->code)
28107 {
28108 /* Match the logic in output_macinfo_op to decide on
28109 indirect strings. */
28110 case DW_MACINFO_define:
28111 case DW_MACINFO_undef:
28112 len = strlen (ref->info) + 1;
28113 if (!dwarf_strict
28114 && len > DWARF_OFFSET_SIZE
28115 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28116 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28117 set_indirect_string (find_AT_string (ref->info));
28118 break;
28119 case DW_MACRO_define_strp:
28120 case DW_MACRO_undef_strp:
28121 set_indirect_string (find_AT_string (ref->info));
28122 break;
28123 default:
28124 break;
28125 }
28126 }
28127 }
28128
28129 /* Output macinfo section(s). */
28130
28131 static void
28132 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28133 {
28134 unsigned i;
28135 unsigned long length = vec_safe_length (macinfo_table);
28136 macinfo_entry *ref;
28137 vec<macinfo_entry, va_gc> *files = NULL;
28138 macinfo_hash_type *macinfo_htab = NULL;
28139 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28140
28141 if (! length)
28142 return;
28143
28144 /* output_macinfo* uses these interchangeably. */
28145 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28146 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28147 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28148 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28149
28150 /* AIX Assembler inserts the length, so adjust the reference to match the
28151 offset expected by debuggers. */
28152 strcpy (dl_section_ref, debug_line_label);
28153 if (XCOFF_DEBUGGING_INFO)
28154 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28155
28156 /* For .debug_macro emit the section header. */
28157 if (!dwarf_strict || dwarf_version >= 5)
28158 {
28159 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28160 "DWARF macro version number");
28161 if (DWARF_OFFSET_SIZE == 8)
28162 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28163 else
28164 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28165 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28166 debug_line_section, NULL);
28167 }
28168
28169 /* The first loop emits the primary .debug_macinfo section and clears
28170 each macinfo_entry once its op has been emitted.
28171 If a longer range of define/undef ops can be optimized using
28172 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in the
28173 vector slot before the first define/undef of the range, while the
28174 range of define/undef ops itself is kept unemitted for the second pass. */
28175 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28176 {
28177 switch (ref->code)
28178 {
28179 case DW_MACINFO_start_file:
28180 vec_safe_push (files, *ref);
28181 break;
28182 case DW_MACINFO_end_file:
28183 if (!vec_safe_is_empty (files))
28184 files->pop ();
28185 break;
28186 case DW_MACINFO_define:
28187 case DW_MACINFO_undef:
28188 if ((!dwarf_strict || dwarf_version >= 5)
28189 && HAVE_COMDAT_GROUP
28190 && vec_safe_length (files) != 1
28191 && i > 0
28192 && i + 1 < length
28193 && (*macinfo_table)[i - 1].code == 0)
28194 {
28195 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28196 if (count)
28197 {
28198 i += count - 1;
28199 continue;
28200 }
28201 }
28202 break;
28203 case 0:
28204 /* A dummy entry may be inserted at the beginning to be able
28205 to optimize the whole block of predefined macros. */
28206 if (i == 0)
28207 continue;
28208 default:
28209 break;
28210 }
28211 output_macinfo_op (ref);
28212 ref->info = NULL;
28213 ref->code = 0;
28214 }
28215
28216 if (!macinfo_htab)
28217 return;
28218
28219 /* Save the number of transparent includes so we can adjust the
28220 label number for the fat LTO object DWARF. */
28221 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28222
28223 delete macinfo_htab;
28224 macinfo_htab = NULL;
28225
28226 /* If any DW_MACRO_import ops were used, then at each such entry
28227 terminate the current chain, switch to a new comdat .debug_macinfo
28228 section, and emit the define/undef entries within it. */
28229 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28230 switch (ref->code)
28231 {
28232 case 0:
28233 continue;
28234 case DW_MACRO_import:
28235 {
28236 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28237 tree comdat_key = get_identifier (ref->info);
28238 /* Terminate the previous .debug_macinfo section. */
28239 dw2_asm_output_data (1, 0, "End compilation unit");
28240 targetm.asm_out.named_section (debug_macinfo_section_name,
28241 SECTION_DEBUG
28242 | SECTION_LINKONCE
28243 | (early_lto_debug
28244 ? SECTION_EXCLUDE : 0),
28245 comdat_key);
28246 ASM_GENERATE_INTERNAL_LABEL (label,
28247 DEBUG_MACRO_SECTION_LABEL,
28248 ref->lineno + macinfo_label_base);
28249 ASM_OUTPUT_LABEL (asm_out_file, label);
28250 ref->code = 0;
28251 ref->info = NULL;
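          /* These comdat sections contain no lineptr, so only bit 0 (the
             offset size) can be set in the flags byte emitted below.  */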
28252 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28253 "DWARF macro version number");
28254 if (DWARF_OFFSET_SIZE == 8)
28255 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28256 else
28257 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28258 }
28259 break;
28260 case DW_MACINFO_define:
28261 case DW_MACINFO_undef:
28262 output_macinfo_op (ref);
28263 ref->code = 0;
28264 ref->info = NULL;
28265 break;
28266 default:
28267 gcc_unreachable ();
28268 }
28269
28270 macinfo_label_base += macinfo_label_base_adj;
28271 }
28272
28273 /* Initialize the various sections and labels for dwarf output, using the
28274 early LTO debug variants when EARLY_LTO_DEBUG is set. Returns the
28275 generation (zero based number of times the function was called). */
28276
28277 static unsigned
28278 init_sections_and_labels (bool early_lto_debug)
28279 {
28280 /* As we may get called multiple times have a generation count for
28281 labels. */
28282 static unsigned generation = 0;
28283
28284 if (early_lto_debug)
28285 {
28286 if (!dwarf_split_debug_info)
28287 {
28288 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28289 SECTION_DEBUG | SECTION_EXCLUDE,
28290 NULL);
28291 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28292 SECTION_DEBUG | SECTION_EXCLUDE,
28293 NULL);
28294 debug_macinfo_section_name
28295 = ((dwarf_strict && dwarf_version < 5)
28296 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28297 debug_macinfo_section = get_section (debug_macinfo_section_name,
28298 SECTION_DEBUG
28299 | SECTION_EXCLUDE, NULL);
28300 /* For macro info we have to refer to a debug_line section, so
28301 similar to split-dwarf emit a skeleton one for early debug. */
28302 debug_skeleton_line_section
28303 = get_section (DEBUG_LTO_LINE_SECTION,
28304 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28305 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28306 DEBUG_SKELETON_LINE_SECTION_LABEL,
28307 generation);
28308 }
28309 else
28310 {
28311 /* ??? Which of the following do we need early? */
28312 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28313 SECTION_DEBUG | SECTION_EXCLUDE,
28314 NULL);
28315 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28316 SECTION_DEBUG | SECTION_EXCLUDE,
28317 NULL);
28318 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28319 SECTION_DEBUG
28320 | SECTION_EXCLUDE, NULL);
28321 debug_skeleton_abbrev_section
28322 = get_section (DEBUG_LTO_ABBREV_SECTION,
28323 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28324 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28325 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28326 generation);
28327
28328 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28329 stay in the main .o, but the skeleton_line goes into the split
28330 off dwo. */
28331 debug_skeleton_line_section
28332 = get_section (DEBUG_LTO_LINE_SECTION,
28333 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28334 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28335 DEBUG_SKELETON_LINE_SECTION_LABEL,
28336 generation);
28337 debug_str_offsets_section
28338 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28339 SECTION_DEBUG | SECTION_EXCLUDE,
28340 NULL);
28341 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28342 DEBUG_SKELETON_INFO_SECTION_LABEL,
28343 generation);
28344 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28345 DEBUG_STR_DWO_SECTION_FLAGS,
28346 NULL);
28347 debug_macinfo_section_name
28348 = ((dwarf_strict && dwarf_version < 5)
28349 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28350 debug_macinfo_section = get_section (debug_macinfo_section_name,
28351 SECTION_DEBUG | SECTION_EXCLUDE,
28352 NULL);
28353 }
28354 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28355 DEBUG_STR_SECTION_FLAGS
28356 | SECTION_EXCLUDE, NULL);
28357 if (!dwarf_split_debug_info && !dwarf2out_as_loc_support)
28358 debug_line_str_section
28359 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28360 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28361 }
28362 else
28363 {
28364 if (!dwarf_split_debug_info)
28365 {
28366 debug_info_section = get_section (DEBUG_INFO_SECTION,
28367 SECTION_DEBUG, NULL);
28368 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28369 SECTION_DEBUG, NULL);
28370 debug_loc_section = get_section (dwarf_version >= 5
28371 ? DEBUG_LOCLISTS_SECTION
28372 : DEBUG_LOC_SECTION,
28373 SECTION_DEBUG, NULL);
28374 debug_macinfo_section_name
28375 = ((dwarf_strict && dwarf_version < 5)
28376 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28377 debug_macinfo_section = get_section (debug_macinfo_section_name,
28378 SECTION_DEBUG, NULL);
28379 }
28380 else
28381 {
28382 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28383 SECTION_DEBUG | SECTION_EXCLUDE,
28384 NULL);
28385 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28386 SECTION_DEBUG | SECTION_EXCLUDE,
28387 NULL);
28388 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28389 SECTION_DEBUG, NULL);
28390 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28391 SECTION_DEBUG, NULL);
28392 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28393 SECTION_DEBUG, NULL);
28394 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28395 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28396 generation);
28397
28398 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28399 stay in the main .o, but the skeleton_line goes into the
28400 split off dwo. */
28401 debug_skeleton_line_section
28402 = get_section (DEBUG_DWO_LINE_SECTION,
28403 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28404 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28405 DEBUG_SKELETON_LINE_SECTION_LABEL,
28406 generation);
28407 debug_str_offsets_section
28408 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28409 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28410 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28411 DEBUG_SKELETON_INFO_SECTION_LABEL,
28412 generation);
28413 debug_loc_section = get_section (dwarf_version >= 5
28414 ? DEBUG_DWO_LOCLISTS_SECTION
28415 : DEBUG_DWO_LOC_SECTION,
28416 SECTION_DEBUG | SECTION_EXCLUDE,
28417 NULL);
28418 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28419 DEBUG_STR_DWO_SECTION_FLAGS,
28420 NULL);
28421 debug_macinfo_section_name
28422 = ((dwarf_strict && dwarf_version < 5)
28423 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28424 debug_macinfo_section = get_section (debug_macinfo_section_name,
28425 SECTION_DEBUG | SECTION_EXCLUDE,
28426 NULL);
28427 }
28428 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28429 SECTION_DEBUG, NULL);
28430 debug_line_section = get_section (DEBUG_LINE_SECTION,
28431 SECTION_DEBUG, NULL);
28432 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28433 SECTION_DEBUG, NULL);
28434 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28435 SECTION_DEBUG, NULL);
28436 debug_str_section = get_section (DEBUG_STR_SECTION,
28437 DEBUG_STR_SECTION_FLAGS, NULL);
28438 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28439 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28440 DEBUG_STR_SECTION_FLAGS, NULL);
28441
28442 debug_ranges_section = get_section (dwarf_version >= 5
28443 ? DEBUG_RNGLISTS_SECTION
28444 : DEBUG_RANGES_SECTION,
28445 SECTION_DEBUG, NULL);
28446 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28447 SECTION_DEBUG, NULL);
28448 }
28449
28450 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28451 DEBUG_ABBREV_SECTION_LABEL, generation);
28452 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28453 DEBUG_INFO_SECTION_LABEL, generation);
28454 info_section_emitted = false;
28455 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28456 DEBUG_LINE_SECTION_LABEL, generation);
28457 /* There are up to 4 unique ranges labels per generation.
28458 See also output_rnglists. */
28459 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28460 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28461 if (dwarf_version >= 5 && dwarf_split_debug_info)
28462 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28463 DEBUG_RANGES_SECTION_LABEL,
28464 1 + generation * 4);
28465 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28466 DEBUG_ADDR_SECTION_LABEL, generation);
28467 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28468 (dwarf_strict && dwarf_version < 5)
28469 ? DEBUG_MACINFO_SECTION_LABEL
28470 : DEBUG_MACRO_SECTION_LABEL, generation);
28471 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28472 generation);
28473
28474 ++generation;
28475 return generation - 1;
28476 }
28477
28478 /* Set up for Dwarf output at the start of compilation. */
28479
28480 static void
28481 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28482 {
28483 /* Allocate the file_table. */
28484 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28485
28486 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28487 /* Allocate the decl_die_table. */
28488 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28489
28490 /* Allocate the decl_loc_table. */
28491 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28492
28493 /* Allocate the cached_dw_loc_list_table. */
28494 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28495
28496 /* Allocate the initial hunk of the decl_scope_table. */
28497 vec_alloc (decl_scope_table, 256);
28498
28499 /* Allocate the initial hunk of the abbrev_die_table. */
28500 vec_alloc (abbrev_die_table, 256);
28501 /* Zero-th entry is allocated, but unused. */
28502 abbrev_die_table->quick_push (NULL);
28503
28504 /* Allocate the dwarf_proc_stack_usage_map. */
28505 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28506
28507 /* Allocate the pubtypes and pubnames vectors. */
28508 vec_alloc (pubname_table, 32);
28509 vec_alloc (pubtype_table, 32);
28510
28511 vec_alloc (incomplete_types, 64);
28512
28513 vec_alloc (used_rtx_array, 32);
28514
28515 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28516 vec_alloc (macinfo_table, 64);
28517 #endif
28518
28519 /* If front-ends already registered a main translation unit but we were not
28520 ready to perform the association, do this now. */
28521 if (main_translation_unit != NULL_TREE)
28522 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28523 }
28524
28525 /* Called before compile () starts outputting functions, variables
28526 and toplevel asms into assembly. */
28527
28528 static void
28529 dwarf2out_assembly_start (void)
28530 {
28531 if (text_section_line_info)
28532 return;
28533
28534 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28535 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28536 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28537 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28538 COLD_TEXT_SECTION_LABEL, 0);
28539 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28540
28541 switch_to_section (text_section);
28542 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28543 #endif
28544
28545 /* Make sure the line number table for .text always exists. */
28546 text_section_line_info = new_line_info_table ();
28547 text_section_line_info->end_label = text_end_label;
28548
28549 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28550 cur_line_info_table = text_section_line_info;
28551 #endif
28552
28553 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28554 && dwarf2out_do_cfi_asm ()
28555 && !dwarf2out_do_eh_frame ())
28556 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28557 }
28558
28559 /* A helper function for dwarf2out_finish called through
28560 htab_traverse. Assign a string its index. All strings must be
28561 collected into the table by the time index_string is called,
28562 because the indexing code relies on htab_traverse to traverse nodes
28563 in the same order for each run. */
28564
28565 int
28566 index_string (indirect_string_node **h, unsigned int *index)
28567 {
28568 indirect_string_node *node = *h;
28569
28570 find_string_form (node);
28571 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28572 {
28573 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28574 node->index = *index;
28575 *index += 1;
28576 }
28577 return 1;
28578 }
28579
28580 /* A helper function for output_indirect_strings called through
28581 htab_traverse. Output the offset to a string and update the
28582 current offset. */
28583
28584 int
28585 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28586 {
28587 indirect_string_node *node = *h;
28588
28589 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28590 {
28591 /* Assert that this node has been assigned an index. */
28592 gcc_assert (node->index != NO_INDEX_ASSIGNED
28593 && node->index != NOT_INDEXED);
28594 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28595 "indexed string 0x%x: %s", node->index, node->str);
28596 *offset += strlen (node->str) + 1;
28597 }
28598 return 1;
28599 }
28600
28601 /* A helper function for dwarf2out_finish called through
28602 htab_traverse. Output the indexed string. */
28603
28604 int
28605 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28606 {
28607 struct indirect_string_node *node = *h;
28608
28609 if (node->form == DW_FORM_GNU_str_index && node->refcount > 0)
28610 {
28611 /* Assert that the strings are output in the same order as their
28612 indexes were assigned. */
28613 gcc_assert (*cur_idx == node->index);
28614 assemble_string (node->str, strlen (node->str) + 1);
28615 *cur_idx += 1;
28616 }
28617 return 1;
28618 }
28619
28620 /* A helper function for dwarf2out_finish called through
28621 htab_traverse. Emit one queued .debug_str string. */
28622
28623 int
28624 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28625 {
28626 struct indirect_string_node *node = *h;
28627
28628 node->form = find_string_form (node);
28629 if (node->form == form && node->refcount > 0)
28630 {
28631 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28632 assemble_string (node->str, strlen (node->str) + 1);
28633 }
28634
28635 return 1;
28636 }
28637
28638 /* Output the indexed string table. */
28639
28640 static void
28641 output_indirect_strings (void)
28642 {
28643 switch_to_section (debug_str_section);
28644 if (!dwarf_split_debug_info)
28645 debug_str_hash->traverse<enum dwarf_form,
28646 output_indirect_string> (DW_FORM_strp);
28647 else
28648 {
28649 unsigned int offset = 0;
28650 unsigned int cur_idx = 0;
28651
28652 if (skeleton_debug_str_hash)
28653 skeleton_debug_str_hash->traverse<enum dwarf_form,
28654 output_indirect_string> (DW_FORM_strp);
28655
28656 switch_to_section (debug_str_offsets_section);
28657 debug_str_hash->traverse_noresize
28658 <unsigned int *, output_index_string_offset> (&offset);
28659 switch_to_section (debug_str_dwo_section);
28660 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28661 (&cur_idx);
28662 }
28663 }
28664
28665 /* Callback for htab_traverse to assign an index to an entry in the
28666 table, and to write that entry to the .debug_addr section. */
28667
28668 int
28669 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28670 {
28671 addr_table_entry *entry = *slot;
28672
28673 if (entry->refcount == 0)
28674 {
28675 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28676 || entry->index == NOT_INDEXED);
28677 return 1;
28678 }
28679
28680 gcc_assert (entry->index == *cur_index);
28681 (*cur_index)++;
28682
28683 switch (entry->kind)
28684 {
28685 case ate_kind_rtx:
28686 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28687 "0x%x", entry->index);
28688 break;
28689 case ate_kind_rtx_dtprel:
28690 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28691 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28692 DWARF2_ADDR_SIZE,
28693 entry->addr.rtl);
28694 fputc ('\n', asm_out_file);
28695 break;
28696 case ate_kind_label:
28697 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28698 "0x%x", entry->index);
28699 break;
28700 default:
28701 gcc_unreachable ();
28702 }
28703 return 1;
28704 }
28705
28706 /* Produce the .debug_addr section. */
28707
28708 static void
28709 output_addr_table (void)
28710 {
28711 unsigned int index = 0;
28712 if (addr_index_table == NULL || addr_index_table->size () == 0)
28713 return;
28714
28715 switch_to_section (debug_addr_section);
28716 addr_index_table
28717 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28718 }
28719
28720 #if ENABLE_ASSERT_CHECKING
28721 /* Verify that all marks are clear. */
28722
28723 static void
28724 verify_marks_clear (dw_die_ref die)
28725 {
28726 dw_die_ref c;
28727
28728 gcc_assert (! die->die_mark);
28729 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28730 }
28731 #endif /* ENABLE_ASSERT_CHECKING */
28732
28733 /* Clear the marks for a die and its children.
28734 Be cool if the mark isn't set. */
28735
28736 static void
28737 prune_unmark_dies (dw_die_ref die)
28738 {
28739 dw_die_ref c;
28740
28741 if (die->die_mark)
28742 die->die_mark = 0;
28743 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28744 }
28745
28746 /* Given LOC that is referenced by a DIE we're marking as used, find all
28747 the DWARF procedures it references and mark them as used as well. */
28748
28749 static void
28750 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28751 {
28752 for (; loc != NULL; loc = loc->dw_loc_next)
28753 switch (loc->dw_loc_opc)
28754 {
28755 case DW_OP_implicit_pointer:
28756 case DW_OP_convert:
28757 case DW_OP_reinterpret:
28758 case DW_OP_GNU_implicit_pointer:
28759 case DW_OP_GNU_convert:
28760 case DW_OP_GNU_reinterpret:
28761 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28762 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28763 break;
28764 case DW_OP_GNU_variable_value:
28765 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28766 {
28767 dw_die_ref ref
28768 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28769 if (ref == NULL)
28770 break;
28771 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28772 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28773 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28774 }
28775 /* FALLTHRU */
28776 case DW_OP_call2:
28777 case DW_OP_call4:
28778 case DW_OP_call_ref:
28779 case DW_OP_const_type:
28780 case DW_OP_GNU_const_type:
28781 case DW_OP_GNU_parameter_ref:
28782 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28783 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28784 break;
28785 case DW_OP_regval_type:
28786 case DW_OP_deref_type:
28787 case DW_OP_GNU_regval_type:
28788 case DW_OP_GNU_deref_type:
28789 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28790 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28791 break;
28792 case DW_OP_entry_value:
28793 case DW_OP_GNU_entry_value:
28794 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28795 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28796 break;
28797 default:
28798 break;
28799 }
28800 }
28801
28802 /* Given DIE that we're marking as used, find any other dies
28803 it references as attributes and mark them as used. */
28804
28805 static void
28806 prune_unused_types_walk_attribs (dw_die_ref die)
28807 {
28808 dw_attr_node *a;
28809 unsigned ix;
28810
28811 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28812 {
28813 switch (AT_class (a))
28814 {
28815 /* Make sure DWARF procedures referenced by location descriptions will
28816 get emitted. */
28817 case dw_val_class_loc:
28818 prune_unused_types_walk_loc_descr (AT_loc (a));
28819 break;
28820 case dw_val_class_loc_list:
28821 for (dw_loc_list_ref list = AT_loc_list (a);
28822 list != NULL;
28823 list = list->dw_loc_next)
28824 prune_unused_types_walk_loc_descr (list->expr);
28825 break;
28826
28827 case dw_val_class_view_list:
28828 /* This points to a loc_list in another attribute, so it's
28829 already covered. */
28830 break;
28831
28832 case dw_val_class_die_ref:
28833 /* A reference to another DIE.
28834 Make sure that it will get emitted.
28835 If it was broken out into a comdat group, don't follow it. */
28836 if (! AT_ref (a)->comdat_type_p
28837 || a->dw_attr == DW_AT_specification)
28838 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
28839 break;
28840
28841 case dw_val_class_str:
28842 /* Set the string's refcount to 0 so that prune_unused_types_mark
28843 accounts properly for it. */
28844 a->dw_attr_val.v.val_str->refcount = 0;
28845 break;
28846
28847 default:
28848 break;
28849 }
28850 }
28851 }
28852
28853 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
28854
28855 static void
28856 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
28857 {
28858 dw_die_ref c;
28859
28860 if (die == NULL || die->die_child == NULL)
28861 return;
28862 c = die->die_child;
28863 do
28864 {
28865 if (is_template_parameter (c))
28866 prune_unused_types_mark (c, 1);
28867 c = c->die_sib;
28868 } while (c && c != die->die_child);
28869 }
28870
28871 /* Mark DIE as being used. If DOKIDS is true, then walk down
28872 to DIE's children. */
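/* A DIE's die_mark is 0 while it is unvisited, 1 once the DIE itself has
   been marked as used, and 2 once its children have been walked as well.  */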
28873
28874 static void
28875 prune_unused_types_mark (dw_die_ref die, int dokids)
28876 {
28877 dw_die_ref c;
28878
28879 if (die->die_mark == 0)
28880 {
28881 /* We haven't done this node yet. Mark it as used. */
28882 die->die_mark = 1;
28883 /* If this is the DIE of a generic type instantiation,
28884 mark the children DIEs that describe its generic parms and
28885 args. */
28886 prune_unused_types_mark_generic_parms_dies (die);
28887
28888 /* We also have to mark its parents as used.
28889 (But we don't want to mark our parent's kids due to this,
28890 unless it is a class.) */
28891 if (die->die_parent)
28892 prune_unused_types_mark (die->die_parent,
28893 class_scope_p (die->die_parent));
28894
28895 /* Mark any referenced nodes. */
28896 prune_unused_types_walk_attribs (die);
28897
28898 /* If this node is a specification,
28899 also mark the definition, if it exists. */
28900 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
28901 prune_unused_types_mark (die->die_definition, 1);
28902 }
28903
28904 if (dokids && die->die_mark != 2)
28905 {
28906 /* We need to walk the children, but haven't done so yet.
28907 Remember that we've walked the kids. */
28908 die->die_mark = 2;
28909
28910 /* If this is an array type, we need to make sure our
28911 kids get marked, even if they're types. If we're
28912 breaking out types into comdat sections, do this
28913 for all type definitions. */
28914 if (die->die_tag == DW_TAG_array_type
28915 || (use_debug_types
28916 && is_type_die (die) && ! is_declaration_die (die)))
28917 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
28918 else
28919 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
28920 }
28921 }
28922
28923 /* For local classes, check whether any static member functions were
28924 emitted and, if so, mark them. */
28925
28926 static void
28927 prune_unused_types_walk_local_classes (dw_die_ref die)
28928 {
28929 dw_die_ref c;
28930
28931 if (die->die_mark == 2)
28932 return;
28933
28934 switch (die->die_tag)
28935 {
28936 case DW_TAG_structure_type:
28937 case DW_TAG_union_type:
28938 case DW_TAG_class_type:
28939 break;
28940
28941 case DW_TAG_subprogram:
28942 if (!get_AT_flag (die, DW_AT_declaration)
28943 || die->die_definition != NULL)
28944 prune_unused_types_mark (die, 1);
28945 return;
28946
28947 default:
28948 return;
28949 }
28950
28951 /* Mark children. */
28952 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
28953 }
28954
28955 /* Walk the tree DIE and mark types that we actually use. */
28956
28957 static void
28958 prune_unused_types_walk (dw_die_ref die)
28959 {
28960 dw_die_ref c;
28961
28962 /* Don't do anything if this node is already marked and
28963 children have been marked as well. */
28964 if (die->die_mark == 2)
28965 return;
28966
28967 switch (die->die_tag)
28968 {
28969 case DW_TAG_structure_type:
28970 case DW_TAG_union_type:
28971 case DW_TAG_class_type:
28972 if (die->die_perennial_p)
28973 break;
28974
28975 for (c = die->die_parent; c; c = c->die_parent)
28976 if (c->die_tag == DW_TAG_subprogram)
28977 break;
28978
28979 /* Finding used static member functions inside of classes
28980 is needed just for local classes, because for other classes
28981 static member function DIEs with DW_AT_specification
28982 are emitted outside of the DW_TAG_*_type. If we ever change
28983 it, we'd need to call this even for non-local classes. */
28984 if (c)
28985 prune_unused_types_walk_local_classes (die);
28986
28987 /* It's a type node --- don't mark it. */
28988 return;
28989
28990 case DW_TAG_const_type:
28991 case DW_TAG_packed_type:
28992 case DW_TAG_pointer_type:
28993 case DW_TAG_reference_type:
28994 case DW_TAG_rvalue_reference_type:
28995 case DW_TAG_volatile_type:
28996 case DW_TAG_typedef:
28997 case DW_TAG_array_type:
28998 case DW_TAG_interface_type:
28999 case DW_TAG_friend:
29000 case DW_TAG_enumeration_type:
29001 case DW_TAG_subroutine_type:
29002 case DW_TAG_string_type:
29003 case DW_TAG_set_type:
29004 case DW_TAG_subrange_type:
29005 case DW_TAG_ptr_to_member_type:
29006 case DW_TAG_file_type:
29007 /* Type nodes are useful only when other DIEs reference them --- don't
29008 mark them. */
29009 /* FALLTHROUGH */
29010
29011 case DW_TAG_dwarf_procedure:
29012 /* Likewise for DWARF procedures. */
29013
29014 if (die->die_perennial_p)
29015 break;
29016
29017 return;
29018
29019 default:
29020 /* Mark everything else. */
29021 break;
29022 }
29023
29024 if (die->die_mark == 0)
29025 {
29026 die->die_mark = 1;
29027
29028 /* Now, mark any dies referenced from here. */
29029 prune_unused_types_walk_attribs (die);
29030 }
29031
29032 die->die_mark = 2;
29033
29034 /* Mark children. */
29035 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29036 }
29037
29038 /* Increment the string counts on strings referred to from DIE's
29039 attributes. */
29040
29041 static void
29042 prune_unused_types_update_strings (dw_die_ref die)
29043 {
29044 dw_attr_node *a;
29045 unsigned ix;
29046
29047 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29048 if (AT_class (a) == dw_val_class_str)
29049 {
29050 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29051 s->refcount++;
29052 /* Avoid unnecessarily putting strings that are used less than
29053 twice in the hash table. */
29054 if (s->refcount
29055 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29056 {
29057 indirect_string_node **slot
29058 = debug_str_hash->find_slot_with_hash (s->str,
29059 htab_hash_string (s->str),
29060 INSERT);
29061 gcc_assert (*slot == NULL);
29062 *slot = s;
29063 }
29064 }
29065 }
29066
29067 /* Mark DIE and its children as removed. */
29068
29069 static void
29070 mark_removed (dw_die_ref die)
29071 {
29072 dw_die_ref c;
29073 die->removed = true;
29074 FOR_EACH_CHILD (die, c, mark_removed (c));
29075 }
29076
29077 /* Remove from the tree DIE any dies that aren't marked. */
29078
29079 static void
29080 prune_unused_types_prune (dw_die_ref die)
29081 {
29082 dw_die_ref c;
29083
29084 gcc_assert (die->die_mark);
29085 prune_unused_types_update_strings (die);
29086
29087 if (! die->die_child)
29088 return;
29089
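  /* DIE->die_child points at the last child and the children are chained
     circularly through die_sib, so removing unmarked children below means
     splicing them out of that ring.  */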
29090 c = die->die_child;
29091 do {
29092 dw_die_ref prev = c, next;
29093 for (c = c->die_sib; ! c->die_mark; c = next)
29094 if (c == die->die_child)
29095 {
29096 /* No marked children between 'prev' and the end of the list. */
29097 if (prev == c)
29098 /* No marked children at all. */
29099 die->die_child = NULL;
29100 else
29101 {
29102 prev->die_sib = c->die_sib;
29103 die->die_child = prev;
29104 }
29105 c->die_sib = NULL;
29106 mark_removed (c);
29107 return;
29108 }
29109 else
29110 {
29111 next = c->die_sib;
29112 c->die_sib = NULL;
29113 mark_removed (c);
29114 }
29115
29116 if (c != prev->die_sib)
29117 prev->die_sib = c;
29118 prune_unused_types_prune (c);
29119 } while (c != die->die_child);
29120 }
29121
29122 /* Remove dies representing declarations that we never use. */
29123
29124 static void
29125 prune_unused_types (void)
29126 {
29127 unsigned int i;
29128 limbo_die_node *node;
29129 comdat_type_node *ctnode;
29130 pubname_entry *pub;
29131 dw_die_ref base_type;
29132
29133 #if ENABLE_ASSERT_CHECKING
29134 /* All the marks should already be clear. */
29135 verify_marks_clear (comp_unit_die ());
29136 for (node = limbo_die_list; node; node = node->next)
29137 verify_marks_clear (node->die);
29138 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29139 verify_marks_clear (ctnode->root_die);
29140 #endif /* ENABLE_ASSERT_CHECKING */
29141
29142 /* Mark types that are used in global variables. */
29143 premark_types_used_by_global_vars ();
29144
29145 /* Set the mark on nodes that are actually used. */
29146 prune_unused_types_walk (comp_unit_die ());
29147 for (node = limbo_die_list; node; node = node->next)
29148 prune_unused_types_walk (node->die);
29149 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29150 {
29151 prune_unused_types_walk (ctnode->root_die);
29152 prune_unused_types_mark (ctnode->type_die, 1);
29153 }
29154
29155 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29156 are unusual in that they are pubnames that are the children of pubtypes.
29157 They should only be marked via their parent DW_TAG_enumeration_type die,
29158 not as roots in themselves. */
29159 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29160 if (pub->die->die_tag != DW_TAG_enumerator)
29161 prune_unused_types_mark (pub->die, 1);
29162 for (i = 0; base_types.iterate (i, &base_type); i++)
29163 prune_unused_types_mark (base_type, 1);
29164
29165 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29166 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29167 callees). */
29168 cgraph_node *cnode;
29169 FOR_EACH_FUNCTION (cnode)
29170 if (cnode->referred_to_p (false))
29171 {
29172 dw_die_ref die = lookup_decl_die (cnode->decl);
29173 if (die == NULL || die->die_mark)
29174 continue;
29175 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29176 if (e->caller != cnode
29177 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29178 {
29179 prune_unused_types_mark (die, 1);
29180 break;
29181 }
29182 }
29183
29184 if (debug_str_hash)
29185 debug_str_hash->empty ();
29186 if (skeleton_debug_str_hash)
29187 skeleton_debug_str_hash->empty ();
29188 prune_unused_types_prune (comp_unit_die ());
29189 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29190 {
29191 node = *pnode;
29192 if (!node->die->die_mark)
29193 *pnode = node->next;
29194 else
29195 {
29196 prune_unused_types_prune (node->die);
29197 pnode = &node->next;
29198 }
29199 }
29200 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29201 prune_unused_types_prune (ctnode->root_die);
29202
29203 /* Leave the marks clear. */
29204 prune_unmark_dies (comp_unit_die ());
29205 for (node = limbo_die_list; node; node = node->next)
29206 prune_unmark_dies (node->die);
29207 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29208 prune_unmark_dies (ctnode->root_die);
29209 }
29210
29211 /* Helpers to manipulate hash table of comdat type units. */
29212
29213 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29214 {
29215 static inline hashval_t hash (const comdat_type_node *);
29216 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29217 };
29218
29219 inline hashval_t
29220 comdat_type_hasher::hash (const comdat_type_node *type_node)
29221 {
29222 hashval_t h;
29223 memcpy (&h, type_node->signature, sizeof (h));
29224 return h;
29225 }
29226
29227 inline bool
29228 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29229 const comdat_type_node *type_node_2)
29230 {
29231 return (! memcmp (type_node_1->signature, type_node_2->signature,
29232 DWARF_TYPE_SIGNATURE_SIZE));
29233 }
29234
29235 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29236    to the location it would have been added to, had we known its
29237    DECL_ASSEMBLER_NAME when we added the other attributes.  This will
29238    probably improve the compactness of debug info by removing equivalent
29239    abbrevs, and hide any differences caused by deferring the
29240    computation of the assembler name, triggered by e.g. PCH.  */
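/* A hypothetical sketch of the reordering this performs: assume the
   attribute vector of a subprogram DIE looked like this just after the
   linkage name was appended:

     [DW_AT_external, DW_AT_name, DW_AT_decl_file, DW_AT_decl_line,
      DW_AT_prototyped, DW_AT_low_pc, ..., DW_AT_linkage_name]

   move_linkage_attr pops the trailing DW_AT_linkage_name and re-inserts
   it right after the nearest preceding DW_AT_name, DW_AT_decl_line or
   DW_AT_decl_column:

     [DW_AT_external, DW_AT_name, DW_AT_decl_file, DW_AT_decl_line,
      DW_AT_linkage_name, DW_AT_prototyped, DW_AT_low_pc, ...]

   so DIEs whose assembler names were known up front and DIEs whose names
   were deferred end up with the same attribute order and can share an
   abbrev.  The attribute set above is only an example.  */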
29241
29242 static inline void
29243 move_linkage_attr (dw_die_ref die)
29244 {
29245 unsigned ix = vec_safe_length (die->die_attr);
29246 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29247
29248 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29249 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29250
29251 while (--ix > 0)
29252 {
29253 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29254
29255 if (prev->dw_attr == DW_AT_decl_line
29256 || prev->dw_attr == DW_AT_decl_column
29257 || prev->dw_attr == DW_AT_name)
29258 break;
29259 }
29260
29261 if (ix != vec_safe_length (die->die_attr) - 1)
29262 {
29263 die->die_attr->pop ();
29264 die->die_attr->quick_insert (ix, linkage);
29265 }
29266 }
29267
29268 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29269    referenced from typed stack ops and count how often they are used.  */
29270
29271 static void
29272 mark_base_types (dw_loc_descr_ref loc)
29273 {
29274 dw_die_ref base_type = NULL;
29275
29276 for (; loc; loc = loc->dw_loc_next)
29277 {
29278 switch (loc->dw_loc_opc)
29279 {
29280 case DW_OP_regval_type:
29281 case DW_OP_deref_type:
29282 case DW_OP_GNU_regval_type:
29283 case DW_OP_GNU_deref_type:
29284 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29285 break;
29286 case DW_OP_convert:
29287 case DW_OP_reinterpret:
29288 case DW_OP_GNU_convert:
29289 case DW_OP_GNU_reinterpret:
29290 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29291 continue;
29292 /* FALLTHRU */
29293 case DW_OP_const_type:
29294 case DW_OP_GNU_const_type:
29295 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29296 break;
29297 case DW_OP_entry_value:
29298 case DW_OP_GNU_entry_value:
29299 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29300 continue;
29301 default:
29302 continue;
29303 }
29304 gcc_assert (base_type->die_parent == comp_unit_die ());
29305 if (base_type->die_mark)
29306 base_type->die_mark++;
29307 else
29308 {
29309 base_types.safe_push (base_type);
29310 base_type->die_mark = 1;
29311 }
29312 }
29313 }
29314
29315 /* Comparison function for sorting marked base types. */
29316
29317 static int
29318 base_type_cmp (const void *x, const void *y)
29319 {
29320 dw_die_ref dx = *(const dw_die_ref *) x;
29321 dw_die_ref dy = *(const dw_die_ref *) y;
29322 unsigned int byte_size1, byte_size2;
29323 unsigned int encoding1, encoding2;
29324 unsigned int align1, align2;
29325 if (dx->die_mark > dy->die_mark)
29326 return -1;
29327 if (dx->die_mark < dy->die_mark)
29328 return 1;
29329 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29330 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29331 if (byte_size1 < byte_size2)
29332 return 1;
29333 if (byte_size1 > byte_size2)
29334 return -1;
29335 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29336 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29337 if (encoding1 < encoding2)
29338 return 1;
29339 if (encoding1 > encoding2)
29340 return -1;
29341 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29342 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29343 if (align1 < align2)
29344 return 1;
29345 if (align1 > align2)
29346 return -1;
29347 return 0;
29348 }
29349
29350 /* Move base types marked by mark_base_types as early as possible
29351    in the CU, sorted by decreasing usage count, both to make the
29352    uleb128 references as small as possible and to make sure they
29353    will have die_offset already computed by calc_die_sizes when
29354    the sizes of typed stack loc ops are computed.  */
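/* For example (offsets are illustrative only): a typed operation such as
   DW_OP_convert encodes the referenced DW_TAG_base_type as a uleb128
   CU-relative DIE offset, so in

     DW_OP_breg5 0, DW_OP_convert <0x2e>, DW_OP_stack_value

   the reference costs one byte as long as the base type DIE sits close
   enough to the start of the CU; moving the most frequently used base
   types first keeps as many of these references as possible that small.  */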
29355
29356 static void
29357 move_marked_base_types (void)
29358 {
29359 unsigned int i;
29360 dw_die_ref base_type, die, c;
29361
29362 if (base_types.is_empty ())
29363 return;
29364
29365   /* Sort by decreasing usage count; they will be added again in that
29366      order later on.  */
29367 base_types.qsort (base_type_cmp);
29368 die = comp_unit_die ();
29369 c = die->die_child;
29370 do
29371 {
29372 dw_die_ref prev = c;
29373 c = c->die_sib;
29374 while (c->die_mark)
29375 {
29376 remove_child_with_prev (c, prev);
29377 /* As base types got marked, there must be at least
29378 one node other than DW_TAG_base_type. */
29379 gcc_assert (die->die_child != NULL);
29380 c = prev->die_sib;
29381 }
29382 }
29383 while (c != die->die_child);
29384 gcc_assert (die->die_child);
29385 c = die->die_child;
29386 for (i = 0; base_types.iterate (i, &base_type); i++)
29387 {
29388 base_type->die_mark = 0;
29389 base_type->die_sib = c->die_sib;
29390 c->die_sib = base_type;
29391 c = base_type;
29392 }
29393 }
29394
29395 /* Helper function for resolve_addr: attempt to resolve one CONST_STRING
29396    and return true if successful.  Similarly, verify that SYMBOL_REFs
29397    refer to variables emitted in the current CU.  */
29398
29399 static bool
29400 resolve_one_addr (rtx *addr)
29401 {
29402 rtx rtl = *addr;
29403
29404 if (GET_CODE (rtl) == CONST_STRING)
29405 {
29406 size_t len = strlen (XSTR (rtl, 0)) + 1;
29407 tree t = build_string (len, XSTR (rtl, 0));
29408 tree tlen = size_int (len - 1);
29409 TREE_TYPE (t)
29410 = build_array_type (char_type_node, build_index_type (tlen));
29411 rtl = lookup_constant_def (t);
29412 if (!rtl || !MEM_P (rtl))
29413 return false;
29414 rtl = XEXP (rtl, 0);
29415 if (GET_CODE (rtl) == SYMBOL_REF
29416 && SYMBOL_REF_DECL (rtl)
29417 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29418 return false;
29419 vec_safe_push (used_rtx_array, rtl);
29420 *addr = rtl;
29421 return true;
29422 }
29423
29424 if (GET_CODE (rtl) == SYMBOL_REF
29425 && SYMBOL_REF_DECL (rtl))
29426 {
29427 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29428 {
29429 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29430 return false;
29431 }
29432 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29433 return false;
29434 }
29435
29436 if (GET_CODE (rtl) == CONST)
29437 {
29438 subrtx_ptr_iterator::array_type array;
29439 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29440 if (!resolve_one_addr (*iter))
29441 return false;
29442 }
29443
29444 return true;
29445 }
29446
29447 /* For a STRING_CST, return the SYMBOL_REF of its constant pool entry,
29448    if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29449    from DW_OP_implicit_pointer if the string hasn't been seen yet.  */
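/* As a hypothetical illustration: for a STRING_CST "abc" whose
   constant-pool SYMBOL_REF has no DIE yet, this creates roughly

     DW_TAG_dwarf_procedure
       DW_AT_location: DW_OP_implicit_value 4, { 0x61, 0x62, 0x63, 0x00 }

   so that a later DW_OP_implicit_pointer can refer to that DIE even if
   the string itself is never emitted to .rodata.  */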
29450
29451 static rtx
29452 string_cst_pool_decl (tree t)
29453 {
29454 rtx rtl = output_constant_def (t, 1);
29455 unsigned char *array;
29456 dw_loc_descr_ref l;
29457 tree decl;
29458 size_t len;
29459 dw_die_ref ref;
29460
29461 if (!rtl || !MEM_P (rtl))
29462 return NULL_RTX;
29463 rtl = XEXP (rtl, 0);
29464 if (GET_CODE (rtl) != SYMBOL_REF
29465 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29466 return NULL_RTX;
29467
29468 decl = SYMBOL_REF_DECL (rtl);
29469 if (!lookup_decl_die (decl))
29470 {
29471 len = TREE_STRING_LENGTH (t);
29472 vec_safe_push (used_rtx_array, rtl);
29473 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29474 array = ggc_vec_alloc<unsigned char> (len);
29475 memcpy (array, TREE_STRING_POINTER (t), len);
29476 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29477 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29478 l->dw_loc_oprnd2.v.val_vec.length = len;
29479 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29480 l->dw_loc_oprnd2.v.val_vec.array = array;
29481 add_AT_loc (ref, DW_AT_location, l);
29482 equate_decl_number_to_die (decl, ref);
29483 }
29484 return rtl;
29485 }
29486
29487 /* Helper function of resolve_addr_in_expr.  LOC is
29488    a DW_OP_addr followed by DW_OP_stack_value, either at the start
29489    of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29490    resolved.  Replace it (both DW_OP_addr and DW_OP_stack_value)
29491    with DW_OP_implicit_pointer if possible and return true;
29492    if unsuccessful, return false.  */
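/* A purely illustrative example: when the address of VAR cannot be
   resolved, the fragment

     DW_OP_addr <VAR+8>, DW_OP_stack_value

   is rewritten, provided VAR's DIE has DW_AT_location or
   DW_AT_const_value, into

     DW_OP_implicit_pointer <DIE of VAR> 8

   i.e. both original operations are replaced by a single
   implicit-pointer operation carrying the byte offset.  */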
29493
29494 static bool
29495 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29496 {
29497 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29498 HOST_WIDE_INT offset = 0;
29499 dw_die_ref ref = NULL;
29500 tree decl;
29501
29502 if (GET_CODE (rtl) == CONST
29503 && GET_CODE (XEXP (rtl, 0)) == PLUS
29504 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29505 {
29506 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29507 rtl = XEXP (XEXP (rtl, 0), 0);
29508 }
29509 if (GET_CODE (rtl) == CONST_STRING)
29510 {
29511 size_t len = strlen (XSTR (rtl, 0)) + 1;
29512 tree t = build_string (len, XSTR (rtl, 0));
29513 tree tlen = size_int (len - 1);
29514
29515 TREE_TYPE (t)
29516 = build_array_type (char_type_node, build_index_type (tlen));
29517 rtl = string_cst_pool_decl (t);
29518 if (!rtl)
29519 return false;
29520 }
29521 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29522 {
29523 decl = SYMBOL_REF_DECL (rtl);
29524 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29525 {
29526 ref = lookup_decl_die (decl);
29527 if (ref && (get_AT (ref, DW_AT_location)
29528 || get_AT (ref, DW_AT_const_value)))
29529 {
29530 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29531 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29532 loc->dw_loc_oprnd1.val_entry = NULL;
29533 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29534 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29535 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29536 loc->dw_loc_oprnd2.v.val_int = offset;
29537 return true;
29538 }
29539 }
29540 }
29541 return false;
29542 }
29543
29544 /* Helper function for resolve_addr: handle one location expression.
29545    Return false if at least one CONST_STRING or SYMBOL_REF in
29546    the location list couldn't be resolved.  */
29547
29548 static bool
29549 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29550 {
29551 dw_loc_descr_ref keep = NULL;
29552 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29553 switch (loc->dw_loc_opc)
29554 {
29555 case DW_OP_addr:
29556 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29557 {
29558 if ((prev == NULL
29559 || prev->dw_loc_opc == DW_OP_piece
29560 || prev->dw_loc_opc == DW_OP_bit_piece)
29561 && loc->dw_loc_next
29562 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29563 && (!dwarf_strict || dwarf_version >= 5)
29564 && optimize_one_addr_into_implicit_ptr (loc))
29565 break;
29566 return false;
29567 }
29568 break;
29569 case DW_OP_GNU_addr_index:
29570 case DW_OP_GNU_const_index:
29571 if (loc->dw_loc_opc == DW_OP_GNU_addr_index
29572 || (loc->dw_loc_opc == DW_OP_GNU_const_index && loc->dtprel))
29573 {
29574 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29575 if (!resolve_one_addr (&rtl))
29576 return false;
29577 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29578 loc->dw_loc_oprnd1.val_entry
29579 = add_addr_table_entry (rtl, ate_kind_rtx);
29580 }
29581 break;
29582 case DW_OP_const4u:
29583 case DW_OP_const8u:
29584 if (loc->dtprel
29585 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29586 return false;
29587 break;
29588 case DW_OP_plus_uconst:
29589 if (size_of_loc_descr (loc)
29590 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29591 + 1
29592 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29593 {
29594 dw_loc_descr_ref repl
29595 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29596 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29597 add_loc_descr (&repl, loc->dw_loc_next);
29598 *loc = *repl;
29599 }
29600 break;
29601 case DW_OP_implicit_value:
29602 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29603 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29604 return false;
29605 break;
29606 case DW_OP_implicit_pointer:
29607 case DW_OP_GNU_implicit_pointer:
29608 case DW_OP_GNU_parameter_ref:
29609 case DW_OP_GNU_variable_value:
29610 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29611 {
29612 dw_die_ref ref
29613 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29614 if (ref == NULL)
29615 return false;
29616 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29617 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29618 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29619 }
29620 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29621 {
29622 if (prev == NULL
29623 && loc->dw_loc_next == NULL
29624 && AT_class (a) == dw_val_class_loc)
29625 switch (a->dw_attr)
29626 {
29627 /* Following attributes allow both exprloc and reference,
29628 so if the whole expression is DW_OP_GNU_variable_value
29629 alone we could transform it into reference. */
29630 case DW_AT_byte_size:
29631 case DW_AT_bit_size:
29632 case DW_AT_lower_bound:
29633 case DW_AT_upper_bound:
29634 case DW_AT_bit_stride:
29635 case DW_AT_count:
29636 case DW_AT_allocated:
29637 case DW_AT_associated:
29638 case DW_AT_byte_stride:
29639 a->dw_attr_val.val_class = dw_val_class_die_ref;
29640 a->dw_attr_val.val_entry = NULL;
29641 a->dw_attr_val.v.val_die_ref.die
29642 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29643 a->dw_attr_val.v.val_die_ref.external = 0;
29644 return true;
29645 default:
29646 break;
29647 }
29648 if (dwarf_strict)
29649 return false;
29650 }
29651 break;
29652 case DW_OP_const_type:
29653 case DW_OP_regval_type:
29654 case DW_OP_deref_type:
29655 case DW_OP_convert:
29656 case DW_OP_reinterpret:
29657 case DW_OP_GNU_const_type:
29658 case DW_OP_GNU_regval_type:
29659 case DW_OP_GNU_deref_type:
29660 case DW_OP_GNU_convert:
29661 case DW_OP_GNU_reinterpret:
29662 while (loc->dw_loc_next
29663 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29664 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29665 {
29666 dw_die_ref base1, base2;
29667 unsigned enc1, enc2, size1, size2;
29668 if (loc->dw_loc_opc == DW_OP_regval_type
29669 || loc->dw_loc_opc == DW_OP_deref_type
29670 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29671 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29672 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29673 else if (loc->dw_loc_oprnd1.val_class
29674 == dw_val_class_unsigned_const)
29675 break;
29676 else
29677 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29678 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29679 == dw_val_class_unsigned_const)
29680 break;
29681 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29682 gcc_assert (base1->die_tag == DW_TAG_base_type
29683 && base2->die_tag == DW_TAG_base_type);
29684 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29685 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29686 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29687 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29688 if (size1 == size2
29689 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29690 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29691 && loc != keep)
29692 || enc1 == enc2))
29693 {
29694 /* Optimize away next DW_OP_convert after
29695 adjusting LOC's base type die reference. */
29696 if (loc->dw_loc_opc == DW_OP_regval_type
29697 || loc->dw_loc_opc == DW_OP_deref_type
29698 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29699 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29700 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29701 else
29702 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29703 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29704 continue;
29705 }
29706 /* Don't change integer DW_OP_convert after e.g. floating
29707 point typed stack entry. */
29708 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29709 keep = loc->dw_loc_next;
29710 break;
29711 }
29712 break;
29713 default:
29714 break;
29715 }
29716 return true;
29717 }
29718
29719 /* Helper function of resolve_addr.  DIE had a DW_AT_location of
29720    DW_OP_addr alone, whose operand referred to DECL, and the
29721    DW_OP_addr couldn't be resolved.  resolve_addr has already
29722    removed the DW_AT_location attribute.  This function attempts to
29723    add a new DW_AT_location attribute with DW_OP_implicit_pointer,
29724    or a DW_AT_const_value attribute, if possible.  */
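/* A minimal (hypothetical) example of the situation handled here:

     static int x = 42;
     static int *p = &x;    (p itself is optimized away)

   'p' has no memory location, so its DW_OP_addr could not be resolved;
   but because its initializer is &x and 'x' has a DW_AT_location, 'p'
   can still be described as  DW_OP_implicit_pointer <DIE of x> 0
   instead of losing the location entirely.  */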
29725
29726 static void
29727 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29728 {
29729 if (!VAR_P (decl)
29730 || lookup_decl_die (decl) != die
29731 || DECL_EXTERNAL (decl)
29732 || !TREE_STATIC (decl)
29733 || DECL_INITIAL (decl) == NULL_TREE
29734 || DECL_P (DECL_INITIAL (decl))
29735 || get_AT (die, DW_AT_const_value))
29736 return;
29737
29738 tree init = DECL_INITIAL (decl);
29739 HOST_WIDE_INT offset = 0;
29740 /* For variables that have been optimized away and thus
29741 don't have a memory location, see if we can emit
29742 DW_AT_const_value instead. */
29743 if (tree_add_const_value_attribute (die, init))
29744 return;
29745 if (dwarf_strict && dwarf_version < 5)
29746 return;
29747 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29748 and ADDR_EXPR refers to a decl that has DW_AT_location or
29749 DW_AT_const_value (but isn't addressable, otherwise
29750 resolving the original DW_OP_addr wouldn't fail), see if
29751 we can add DW_OP_implicit_pointer. */
29752 STRIP_NOPS (init);
29753 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29754 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29755 {
29756 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29757 init = TREE_OPERAND (init, 0);
29758 STRIP_NOPS (init);
29759 }
29760 if (TREE_CODE (init) != ADDR_EXPR)
29761 return;
29762 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29763 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29764 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29765 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29766 && TREE_OPERAND (init, 0) != decl))
29767 {
29768 dw_die_ref ref;
29769 dw_loc_descr_ref l;
29770
29771 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29772 {
29773 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29774 if (!rtl)
29775 return;
29776 decl = SYMBOL_REF_DECL (rtl);
29777 }
29778 else
29779 decl = TREE_OPERAND (init, 0);
29780 ref = lookup_decl_die (decl);
29781 if (ref == NULL
29782 || (!get_AT (ref, DW_AT_location)
29783 && !get_AT (ref, DW_AT_const_value)))
29784 return;
29785 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29786 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29787 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29788 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29789 add_AT_loc (die, DW_AT_location, l);
29790 }
29791 }
29792
29793 /* Return NULL if L is a valid DWARF expression; otherwise return
29794    the first op that is not valid in a DWARF expression.  */
29795
29796 static dw_loc_descr_ref
29797 non_dwarf_expression (dw_loc_descr_ref l)
29798 {
29799 while (l)
29800 {
29801 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29802 return l;
29803 switch (l->dw_loc_opc)
29804 {
29805 case DW_OP_regx:
29806 case DW_OP_implicit_value:
29807 case DW_OP_stack_value:
29808 case DW_OP_implicit_pointer:
29809 case DW_OP_GNU_implicit_pointer:
29810 case DW_OP_GNU_parameter_ref:
29811 case DW_OP_piece:
29812 case DW_OP_bit_piece:
29813 return l;
29814 default:
29815 break;
29816 }
29817 l = l->dw_loc_next;
29818 }
29819 return NULL;
29820 }
29821
29822 /* Return an adjusted copy of EXPR:
29823    If it is an empty DWARF expression, return it.
29824    If it is a valid non-empty DWARF expression,
29825    return a copy of EXPR with DW_OP_deref appended to it.
29826    If it is a DWARF expression followed by DW_OP_reg{N,x}, return
29827    a copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
29828    If it is a DWARF expression followed by DW_OP_stack_value, return
29829    a copy of the DWARF expression with nothing appended.
29830    Otherwise, return NULL.  */
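/* Some illustrative cases (register numbers and offsets are arbitrary):

     EXPR                                 adjusted copy
     DW_OP_fbreg -16                  ->  DW_OP_fbreg -16, DW_OP_deref
     DW_OP_reg3                       ->  DW_OP_breg3 0
     DW_OP_regx 42                    ->  DW_OP_bregx 42, 0
     DW_OP_lit0, DW_OP_stack_value    ->  DW_OP_lit0
     DW_OP_fbreg -16, DW_OP_piece 4   ->  NULL (not handled)  */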
29831
29832 static dw_loc_descr_ref
29833 copy_deref_exprloc (dw_loc_descr_ref expr)
29834 {
29835 dw_loc_descr_ref tail = NULL;
29836
29837 if (expr == NULL)
29838 return NULL;
29839
29840 dw_loc_descr_ref l = non_dwarf_expression (expr);
29841 if (l && l->dw_loc_next)
29842 return NULL;
29843
29844 if (l)
29845 {
29846 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29847 tail = new_loc_descr ((enum dwarf_location_atom)
29848 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
29849 0, 0);
29850 else
29851 switch (l->dw_loc_opc)
29852 {
29853 case DW_OP_regx:
29854 tail = new_loc_descr (DW_OP_bregx,
29855 l->dw_loc_oprnd1.v.val_unsigned, 0);
29856 break;
29857 case DW_OP_stack_value:
29858 break;
29859 default:
29860 return NULL;
29861 }
29862 }
29863 else
29864 tail = new_loc_descr (DW_OP_deref, 0, 0);
29865
29866 dw_loc_descr_ref ret = NULL, *p = &ret;
29867 while (expr != l)
29868 {
29869 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
29870 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
29871 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
29872 p = &(*p)->dw_loc_next;
29873 expr = expr->dw_loc_next;
29874 }
29875 *p = tail;
29876 return ret;
29877 }
29878
29879 /* For a DW_AT_string_length attribute whose DW_OP_GNU_variable_value
29880    references a variable or argument, adjust it if needed and return:
29881    -1 if the DW_AT_string_length attribute and, if present, the
29882       DW_AT_{string_length_,}byte_size attribute should be removed;
29883     0 if the attribute should be kept, perhaps with minor modifications (no need to rescan);
29884     1 if the attribute has been successfully adjusted.  */
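/* Illustrative transformations, where <DIE> is the DIE of the length
   variable or argument and its DW_AT_location is assumed to be a plain
   DWARF expression:

     DW_OP_GNU_variable_value <DIE>, DW_OP_stack_value
       ->  DW_OP_call4 <DIE>
     DW_OP_GNU_variable_value <DIE>
       ->  DW_OP_call4 <DIE>, DW_OP_deref

   With DWARF 5 the first form can instead be turned into a direct
   reference-class attribute pointing at <DIE>.  */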
29885
29886 static int
29887 optimize_string_length (dw_attr_node *a)
29888 {
29889 dw_loc_descr_ref l = AT_loc (a), lv;
29890 dw_die_ref die;
29891 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29892 {
29893 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
29894 die = lookup_decl_die (decl);
29895 if (die)
29896 {
29897 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29898 l->dw_loc_oprnd1.v.val_die_ref.die = die;
29899 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29900 }
29901 else
29902 return -1;
29903 }
29904 else
29905 die = l->dw_loc_oprnd1.v.val_die_ref.die;
29906
29907 /* DWARF5 allows reference class, so we can then reference the DIE.
29908 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
29909 if (l->dw_loc_next != NULL && dwarf_version >= 5)
29910 {
29911 a->dw_attr_val.val_class = dw_val_class_die_ref;
29912 a->dw_attr_val.val_entry = NULL;
29913 a->dw_attr_val.v.val_die_ref.die = die;
29914 a->dw_attr_val.v.val_die_ref.external = 0;
29915 return 0;
29916 }
29917
29918 dw_attr_node *av = get_AT (die, DW_AT_location);
29919 dw_loc_list_ref d;
29920 bool non_dwarf_expr = false;
29921
29922 if (av == NULL)
29923 return dwarf_strict ? -1 : 0;
29924 switch (AT_class (av))
29925 {
29926 case dw_val_class_loc_list:
29927 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
29928 if (d->expr && non_dwarf_expression (d->expr))
29929 non_dwarf_expr = true;
29930 break;
29931 case dw_val_class_view_list:
29932 gcc_unreachable ();
29933 case dw_val_class_loc:
29934 lv = AT_loc (av);
29935 if (lv == NULL)
29936 return dwarf_strict ? -1 : 0;
29937 if (non_dwarf_expression (lv))
29938 non_dwarf_expr = true;
29939 break;
29940 default:
29941 return dwarf_strict ? -1 : 0;
29942 }
29943
29944 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
29945 into DW_OP_call4 or DW_OP_GNU_variable_value into
29946 DW_OP_call4 DW_OP_deref, do so. */
29947 if (!non_dwarf_expr
29948 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
29949 {
29950 l->dw_loc_opc = DW_OP_call4;
29951 if (l->dw_loc_next)
29952 l->dw_loc_next = NULL;
29953 else
29954 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
29955 return 0;
29956 }
29957
29958 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
29959 copy over the DW_AT_location attribute from die to a. */
29960 if (l->dw_loc_next != NULL)
29961 {
29962 a->dw_attr_val = av->dw_attr_val;
29963 return 1;
29964 }
29965
29966 dw_loc_list_ref list, *p;
29967 switch (AT_class (av))
29968 {
29969 case dw_val_class_loc_list:
29970 p = &list;
29971 list = NULL;
29972 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
29973 {
29974 lv = copy_deref_exprloc (d->expr);
29975 if (lv)
29976 {
29977 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
29978 p = &(*p)->dw_loc_next;
29979 }
29980 else if (!dwarf_strict && d->expr)
29981 return 0;
29982 }
29983 if (list == NULL)
29984 return dwarf_strict ? -1 : 0;
29985 a->dw_attr_val.val_class = dw_val_class_loc_list;
29986 gen_llsym (list);
29987 *AT_loc_list_ptr (a) = list;
29988 return 1;
29989 case dw_val_class_loc:
29990 lv = copy_deref_exprloc (AT_loc (av));
29991 if (lv == NULL)
29992 return dwarf_strict ? -1 : 0;
29993 a->dw_attr_val.v.val_loc = lv;
29994 return 1;
29995 default:
29996 gcc_unreachable ();
29997 }
29998 }
29999
30000 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30001    an address in the .rodata section if the string literal is emitted
30002    there; if it isn't found in .rodata, remove the containing location
30003    list, or replace DW_AT_const_value with DW_AT_location and an empty
30004    location expression.  Similarly for SYMBOL_REFs: keep only those
30005    that refer to something that has been emitted in the current CU.  */
30006
30007 static void
30008 resolve_addr (dw_die_ref die)
30009 {
30010 dw_die_ref c;
30011 dw_attr_node *a;
30012 dw_loc_list_ref *curr, *start, loc;
30013 unsigned ix;
30014 bool remove_AT_byte_size = false;
30015
30016 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30017 switch (AT_class (a))
30018 {
30019 case dw_val_class_loc_list:
30020 start = curr = AT_loc_list_ptr (a);
30021 loc = *curr;
30022 gcc_assert (loc);
30023 /* The same list can be referenced more than once. See if we have
30024 already recorded the result from a previous pass. */
30025 if (loc->replaced)
30026 *curr = loc->dw_loc_next;
30027 else if (!loc->resolved_addr)
30028 {
30029 /* As things stand, we do not expect or allow one die to
30030 reference a suffix of another die's location list chain.
30031 References must be identical or completely separate.
30032 There is therefore no need to cache the result of this
30033 pass on any list other than the first; doing so
30034 would lead to unnecessary writes. */
30035 while (*curr)
30036 {
30037 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30038 if (!resolve_addr_in_expr (a, (*curr)->expr))
30039 {
30040 dw_loc_list_ref next = (*curr)->dw_loc_next;
30041 dw_loc_descr_ref l = (*curr)->expr;
30042
30043 if (next && (*curr)->ll_symbol)
30044 {
30045 gcc_assert (!next->ll_symbol);
30046 next->ll_symbol = (*curr)->ll_symbol;
30047 next->vl_symbol = (*curr)->vl_symbol;
30048 }
30049 if (dwarf_split_debug_info)
30050 remove_loc_list_addr_table_entries (l);
30051 *curr = next;
30052 }
30053 else
30054 {
30055 mark_base_types ((*curr)->expr);
30056 curr = &(*curr)->dw_loc_next;
30057 }
30058 }
30059 if (loc == *start)
30060 loc->resolved_addr = 1;
30061 else
30062 {
30063 loc->replaced = 1;
30064 loc->dw_loc_next = *start;
30065 }
30066 }
30067 if (!*start)
30068 {
30069 remove_AT (die, a->dw_attr);
30070 ix--;
30071 }
30072 break;
30073 case dw_val_class_view_list:
30074 {
30075 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30076 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30077 dw_val_node *llnode
30078 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30079 /* If we no longer have a loclist, or it no longer needs
30080 views, drop this attribute. */
30081 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30082 {
30083 remove_AT (die, a->dw_attr);
30084 ix--;
30085 }
30086 break;
30087 }
30088 case dw_val_class_loc:
30089 {
30090 dw_loc_descr_ref l = AT_loc (a);
30091 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30092 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30093 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30094 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30095 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30096 with DW_FORM_ref referencing the same DIE as
30097 DW_OP_GNU_variable_value used to reference. */
30098 if (a->dw_attr == DW_AT_string_length
30099 && l
30100 && l->dw_loc_opc == DW_OP_GNU_variable_value
30101 && (l->dw_loc_next == NULL
30102 || (l->dw_loc_next->dw_loc_next == NULL
30103 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30104 {
30105 switch (optimize_string_length (a))
30106 {
30107 case -1:
30108 remove_AT (die, a->dw_attr);
30109 ix--;
30110 /* If we drop DW_AT_string_length, we need to drop also
30111 DW_AT_{string_length_,}byte_size. */
30112 remove_AT_byte_size = true;
30113 continue;
30114 default:
30115 break;
30116 case 1:
30117 /* Even if we keep the optimized DW_AT_string_length,
30118 it might have changed AT_class, so process it again. */
30119 ix--;
30120 continue;
30121 }
30122 }
30123 /* For -gdwarf-2 don't attempt to optimize
30124 DW_AT_data_member_location containing
30125 DW_OP_plus_uconst - older consumers might
30126 rely on it being that op instead of a more complex,
30127 but shorter, location description. */
30128 if ((dwarf_version > 2
30129 || a->dw_attr != DW_AT_data_member_location
30130 || l == NULL
30131 || l->dw_loc_opc != DW_OP_plus_uconst
30132 || l->dw_loc_next != NULL)
30133 && !resolve_addr_in_expr (a, l))
30134 {
30135 if (dwarf_split_debug_info)
30136 remove_loc_list_addr_table_entries (l);
30137 if (l != NULL
30138 && l->dw_loc_next == NULL
30139 && l->dw_loc_opc == DW_OP_addr
30140 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30141 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30142 && a->dw_attr == DW_AT_location)
30143 {
30144 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30145 remove_AT (die, a->dw_attr);
30146 ix--;
30147 optimize_location_into_implicit_ptr (die, decl);
30148 break;
30149 }
30150 if (a->dw_attr == DW_AT_string_length)
30151 /* If we drop DW_AT_string_length, we need to drop also
30152 DW_AT_{string_length_,}byte_size. */
30153 remove_AT_byte_size = true;
30154 remove_AT (die, a->dw_attr);
30155 ix--;
30156 }
30157 else
30158 mark_base_types (l);
30159 }
30160 break;
30161 case dw_val_class_addr:
30162 if (a->dw_attr == DW_AT_const_value
30163 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30164 {
30165 if (AT_index (a) != NOT_INDEXED)
30166 remove_addr_table_entry (a->dw_attr_val.val_entry);
30167 remove_AT (die, a->dw_attr);
30168 ix--;
30169 }
30170 if ((die->die_tag == DW_TAG_call_site
30171 && a->dw_attr == DW_AT_call_origin)
30172 || (die->die_tag == DW_TAG_GNU_call_site
30173 && a->dw_attr == DW_AT_abstract_origin))
30174 {
30175 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30176 dw_die_ref tdie = lookup_decl_die (tdecl);
30177 dw_die_ref cdie;
30178 if (tdie == NULL
30179 && DECL_EXTERNAL (tdecl)
30180 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30181 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30182 {
30183 dw_die_ref pdie = cdie;
30184 /* Make sure we don't add these DIEs into type units.
30185 We could emit skeleton DIEs for context (namespaces,
30186 outer structs/classes) and a skeleton DIE for the
30187 innermost context with DW_AT_signature pointing to the
30188 type unit. See PR78835. */
30189 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30190 pdie = pdie->die_parent;
30191 if (pdie == NULL)
30192 {
30193 	      /* Creating a full DIE for tdecl is overly expensive, and
30194 		 at this point even wrong in the LTO phase, as it can
30195 		 end up generating new type DIEs we didn't output, and
30196 		 thus optimize_external_refs will crash.  */
30197 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30198 add_AT_flag (tdie, DW_AT_external, 1);
30199 add_AT_flag (tdie, DW_AT_declaration, 1);
30200 add_linkage_attr (tdie, tdecl);
30201 add_name_and_src_coords_attributes (tdie, tdecl, true);
30202 equate_decl_number_to_die (tdecl, tdie);
30203 }
30204 }
30205 if (tdie)
30206 {
30207 a->dw_attr_val.val_class = dw_val_class_die_ref;
30208 a->dw_attr_val.v.val_die_ref.die = tdie;
30209 a->dw_attr_val.v.val_die_ref.external = 0;
30210 }
30211 else
30212 {
30213 if (AT_index (a) != NOT_INDEXED)
30214 remove_addr_table_entry (a->dw_attr_val.val_entry);
30215 remove_AT (die, a->dw_attr);
30216 ix--;
30217 }
30218 }
30219 break;
30220 default:
30221 break;
30222 }
30223
30224 if (remove_AT_byte_size)
30225 remove_AT (die, dwarf_version >= 5
30226 ? DW_AT_string_length_byte_size
30227 : DW_AT_byte_size);
30228
30229 FOR_EACH_CHILD (die, c, resolve_addr (c));
30230 }
30231 \f
30232 /* Helper routines for optimize_location_lists.
30233    This pass tries to share identical location lists in the
30234    .debug_loc section.  */
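/* For instance, two variables that live in the same registers over the
   same ranges end up with byte-for-byte identical location lists; after
   hashing, both DW_AT_location attributes point at one shared list and
   it is emitted into .debug_loc only once.  The exact attribute set and
   list contents are of course target- and case-dependent.  */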
30235
30236 /* Iteratively hash operands of LOC opcode into HSTATE. */
30237
30238 static void
30239 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30240 {
30241 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30242 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30243
30244 switch (loc->dw_loc_opc)
30245 {
30246 case DW_OP_const4u:
30247 case DW_OP_const8u:
30248 if (loc->dtprel)
30249 goto hash_addr;
30250 /* FALLTHRU */
30251 case DW_OP_const1u:
30252 case DW_OP_const1s:
30253 case DW_OP_const2u:
30254 case DW_OP_const2s:
30255 case DW_OP_const4s:
30256 case DW_OP_const8s:
30257 case DW_OP_constu:
30258 case DW_OP_consts:
30259 case DW_OP_pick:
30260 case DW_OP_plus_uconst:
30261 case DW_OP_breg0:
30262 case DW_OP_breg1:
30263 case DW_OP_breg2:
30264 case DW_OP_breg3:
30265 case DW_OP_breg4:
30266 case DW_OP_breg5:
30267 case DW_OP_breg6:
30268 case DW_OP_breg7:
30269 case DW_OP_breg8:
30270 case DW_OP_breg9:
30271 case DW_OP_breg10:
30272 case DW_OP_breg11:
30273 case DW_OP_breg12:
30274 case DW_OP_breg13:
30275 case DW_OP_breg14:
30276 case DW_OP_breg15:
30277 case DW_OP_breg16:
30278 case DW_OP_breg17:
30279 case DW_OP_breg18:
30280 case DW_OP_breg19:
30281 case DW_OP_breg20:
30282 case DW_OP_breg21:
30283 case DW_OP_breg22:
30284 case DW_OP_breg23:
30285 case DW_OP_breg24:
30286 case DW_OP_breg25:
30287 case DW_OP_breg26:
30288 case DW_OP_breg27:
30289 case DW_OP_breg28:
30290 case DW_OP_breg29:
30291 case DW_OP_breg30:
30292 case DW_OP_breg31:
30293 case DW_OP_regx:
30294 case DW_OP_fbreg:
30295 case DW_OP_piece:
30296 case DW_OP_deref_size:
30297 case DW_OP_xderef_size:
30298 hstate.add_object (val1->v.val_int);
30299 break;
30300 case DW_OP_skip:
30301 case DW_OP_bra:
30302 {
30303 int offset;
30304
30305 gcc_assert (val1->val_class == dw_val_class_loc);
30306 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30307 hstate.add_object (offset);
30308 }
30309 break;
30310 case DW_OP_implicit_value:
30311 hstate.add_object (val1->v.val_unsigned);
30312 switch (val2->val_class)
30313 {
30314 case dw_val_class_const:
30315 hstate.add_object (val2->v.val_int);
30316 break;
30317 case dw_val_class_vec:
30318 {
30319 unsigned int elt_size = val2->v.val_vec.elt_size;
30320 unsigned int len = val2->v.val_vec.length;
30321
30322 hstate.add_int (elt_size);
30323 hstate.add_int (len);
30324 hstate.add (val2->v.val_vec.array, len * elt_size);
30325 }
30326 break;
30327 case dw_val_class_const_double:
30328 hstate.add_object (val2->v.val_double.low);
30329 hstate.add_object (val2->v.val_double.high);
30330 break;
30331 case dw_val_class_wide_int:
30332 hstate.add (val2->v.val_wide->get_val (),
30333 get_full_len (*val2->v.val_wide)
30334 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30335 break;
30336 case dw_val_class_addr:
30337 inchash::add_rtx (val2->v.val_addr, hstate);
30338 break;
30339 default:
30340 gcc_unreachable ();
30341 }
30342 break;
30343 case DW_OP_bregx:
30344 case DW_OP_bit_piece:
30345 hstate.add_object (val1->v.val_int);
30346 hstate.add_object (val2->v.val_int);
30347 break;
30348 case DW_OP_addr:
30349 hash_addr:
30350 if (loc->dtprel)
30351 {
30352 unsigned char dtprel = 0xd1;
30353 hstate.add_object (dtprel);
30354 }
30355 inchash::add_rtx (val1->v.val_addr, hstate);
30356 break;
30357 case DW_OP_GNU_addr_index:
30358 case DW_OP_GNU_const_index:
30359 {
30360 if (loc->dtprel)
30361 {
30362 unsigned char dtprel = 0xd1;
30363 hstate.add_object (dtprel);
30364 }
30365 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30366 }
30367 break;
30368 case DW_OP_implicit_pointer:
30369 case DW_OP_GNU_implicit_pointer:
30370 hstate.add_int (val2->v.val_int);
30371 break;
30372 case DW_OP_entry_value:
30373 case DW_OP_GNU_entry_value:
30374 hstate.add_object (val1->v.val_loc);
30375 break;
30376 case DW_OP_regval_type:
30377 case DW_OP_deref_type:
30378 case DW_OP_GNU_regval_type:
30379 case DW_OP_GNU_deref_type:
30380 {
30381 unsigned int byte_size
30382 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30383 unsigned int encoding
30384 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30385 hstate.add_object (val1->v.val_int);
30386 hstate.add_object (byte_size);
30387 hstate.add_object (encoding);
30388 }
30389 break;
30390 case DW_OP_convert:
30391 case DW_OP_reinterpret:
30392 case DW_OP_GNU_convert:
30393 case DW_OP_GNU_reinterpret:
30394 if (val1->val_class == dw_val_class_unsigned_const)
30395 {
30396 hstate.add_object (val1->v.val_unsigned);
30397 break;
30398 }
30399 /* FALLTHRU */
30400 case DW_OP_const_type:
30401 case DW_OP_GNU_const_type:
30402 {
30403 unsigned int byte_size
30404 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30405 unsigned int encoding
30406 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30407 hstate.add_object (byte_size);
30408 hstate.add_object (encoding);
30409 if (loc->dw_loc_opc != DW_OP_const_type
30410 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30411 break;
30412 hstate.add_object (val2->val_class);
30413 switch (val2->val_class)
30414 {
30415 case dw_val_class_const:
30416 hstate.add_object (val2->v.val_int);
30417 break;
30418 case dw_val_class_vec:
30419 {
30420 unsigned int elt_size = val2->v.val_vec.elt_size;
30421 unsigned int len = val2->v.val_vec.length;
30422
30423 hstate.add_object (elt_size);
30424 hstate.add_object (len);
30425 hstate.add (val2->v.val_vec.array, len * elt_size);
30426 }
30427 break;
30428 case dw_val_class_const_double:
30429 hstate.add_object (val2->v.val_double.low);
30430 hstate.add_object (val2->v.val_double.high);
30431 break;
30432 case dw_val_class_wide_int:
30433 hstate.add (val2->v.val_wide->get_val (),
30434 get_full_len (*val2->v.val_wide)
30435 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30436 break;
30437 default:
30438 gcc_unreachable ();
30439 }
30440 }
30441 break;
30442
30443 default:
30444 /* Other codes have no operands. */
30445 break;
30446 }
30447 }
30448
30449 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30450
30451 static inline void
30452 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30453 {
30454 dw_loc_descr_ref l;
30455 bool sizes_computed = false;
30456 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30457 size_of_locs (loc);
30458
30459 for (l = loc; l != NULL; l = l->dw_loc_next)
30460 {
30461 enum dwarf_location_atom opc = l->dw_loc_opc;
30462 hstate.add_object (opc);
30463 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30464 {
30465 size_of_locs (loc);
30466 sizes_computed = true;
30467 }
30468 hash_loc_operands (l, hstate);
30469 }
30470 }
30471
30472 /* Compute hash of the whole location list LIST_HEAD. */
30473
30474 static inline void
30475 hash_loc_list (dw_loc_list_ref list_head)
30476 {
30477 dw_loc_list_ref curr = list_head;
30478 inchash::hash hstate;
30479
30480 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30481 {
30482 hstate.add (curr->begin, strlen (curr->begin) + 1);
30483 hstate.add (curr->end, strlen (curr->end) + 1);
30484 hstate.add_object (curr->vbegin);
30485 hstate.add_object (curr->vend);
30486 if (curr->section)
30487 hstate.add (curr->section, strlen (curr->section) + 1);
30488 hash_locs (curr->expr, hstate);
30489 }
30490 list_head->hash = hstate.end ();
30491 }
30492
30493 /* Return true if X and Y opcodes have the same operands. */
30494
30495 static inline bool
30496 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30497 {
30498 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30499 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30500 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30501 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30502
30503 switch (x->dw_loc_opc)
30504 {
30505 case DW_OP_const4u:
30506 case DW_OP_const8u:
30507 if (x->dtprel)
30508 goto hash_addr;
30509 /* FALLTHRU */
30510 case DW_OP_const1u:
30511 case DW_OP_const1s:
30512 case DW_OP_const2u:
30513 case DW_OP_const2s:
30514 case DW_OP_const4s:
30515 case DW_OP_const8s:
30516 case DW_OP_constu:
30517 case DW_OP_consts:
30518 case DW_OP_pick:
30519 case DW_OP_plus_uconst:
30520 case DW_OP_breg0:
30521 case DW_OP_breg1:
30522 case DW_OP_breg2:
30523 case DW_OP_breg3:
30524 case DW_OP_breg4:
30525 case DW_OP_breg5:
30526 case DW_OP_breg6:
30527 case DW_OP_breg7:
30528 case DW_OP_breg8:
30529 case DW_OP_breg9:
30530 case DW_OP_breg10:
30531 case DW_OP_breg11:
30532 case DW_OP_breg12:
30533 case DW_OP_breg13:
30534 case DW_OP_breg14:
30535 case DW_OP_breg15:
30536 case DW_OP_breg16:
30537 case DW_OP_breg17:
30538 case DW_OP_breg18:
30539 case DW_OP_breg19:
30540 case DW_OP_breg20:
30541 case DW_OP_breg21:
30542 case DW_OP_breg22:
30543 case DW_OP_breg23:
30544 case DW_OP_breg24:
30545 case DW_OP_breg25:
30546 case DW_OP_breg26:
30547 case DW_OP_breg27:
30548 case DW_OP_breg28:
30549 case DW_OP_breg29:
30550 case DW_OP_breg30:
30551 case DW_OP_breg31:
30552 case DW_OP_regx:
30553 case DW_OP_fbreg:
30554 case DW_OP_piece:
30555 case DW_OP_deref_size:
30556 case DW_OP_xderef_size:
30557 return valx1->v.val_int == valy1->v.val_int;
30558 case DW_OP_skip:
30559 case DW_OP_bra:
30560 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30561 can cause irrelevant differences in dw_loc_addr. */
30562 gcc_assert (valx1->val_class == dw_val_class_loc
30563 && valy1->val_class == dw_val_class_loc
30564 && (dwarf_split_debug_info
30565 || x->dw_loc_addr == y->dw_loc_addr));
30566 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30567 case DW_OP_implicit_value:
30568 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30569 || valx2->val_class != valy2->val_class)
30570 return false;
30571 switch (valx2->val_class)
30572 {
30573 case dw_val_class_const:
30574 return valx2->v.val_int == valy2->v.val_int;
30575 case dw_val_class_vec:
30576 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30577 && valx2->v.val_vec.length == valy2->v.val_vec.length
30578 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30579 valx2->v.val_vec.elt_size
30580 * valx2->v.val_vec.length) == 0;
30581 case dw_val_class_const_double:
30582 return valx2->v.val_double.low == valy2->v.val_double.low
30583 && valx2->v.val_double.high == valy2->v.val_double.high;
30584 case dw_val_class_wide_int:
30585 return *valx2->v.val_wide == *valy2->v.val_wide;
30586 case dw_val_class_addr:
30587 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30588 default:
30589 gcc_unreachable ();
30590 }
30591 case DW_OP_bregx:
30592 case DW_OP_bit_piece:
30593 return valx1->v.val_int == valy1->v.val_int
30594 && valx2->v.val_int == valy2->v.val_int;
30595 case DW_OP_addr:
30596 hash_addr:
30597 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30598 case DW_OP_GNU_addr_index:
30599 case DW_OP_GNU_const_index:
30600 {
30601 rtx ax1 = valx1->val_entry->addr.rtl;
30602 rtx ay1 = valy1->val_entry->addr.rtl;
30603 return rtx_equal_p (ax1, ay1);
30604 }
30605 case DW_OP_implicit_pointer:
30606 case DW_OP_GNU_implicit_pointer:
30607 return valx1->val_class == dw_val_class_die_ref
30608 && valx1->val_class == valy1->val_class
30609 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30610 && valx2->v.val_int == valy2->v.val_int;
30611 case DW_OP_entry_value:
30612 case DW_OP_GNU_entry_value:
30613 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30614 case DW_OP_const_type:
30615 case DW_OP_GNU_const_type:
30616 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30617 || valx2->val_class != valy2->val_class)
30618 return false;
30619 switch (valx2->val_class)
30620 {
30621 case dw_val_class_const:
30622 return valx2->v.val_int == valy2->v.val_int;
30623 case dw_val_class_vec:
30624 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30625 && valx2->v.val_vec.length == valy2->v.val_vec.length
30626 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30627 valx2->v.val_vec.elt_size
30628 * valx2->v.val_vec.length) == 0;
30629 case dw_val_class_const_double:
30630 return valx2->v.val_double.low == valy2->v.val_double.low
30631 && valx2->v.val_double.high == valy2->v.val_double.high;
30632 case dw_val_class_wide_int:
30633 return *valx2->v.val_wide == *valy2->v.val_wide;
30634 default:
30635 gcc_unreachable ();
30636 }
30637 case DW_OP_regval_type:
30638 case DW_OP_deref_type:
30639 case DW_OP_GNU_regval_type:
30640 case DW_OP_GNU_deref_type:
30641 return valx1->v.val_int == valy1->v.val_int
30642 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30643 case DW_OP_convert:
30644 case DW_OP_reinterpret:
30645 case DW_OP_GNU_convert:
30646 case DW_OP_GNU_reinterpret:
30647 if (valx1->val_class != valy1->val_class)
30648 return false;
30649 if (valx1->val_class == dw_val_class_unsigned_const)
30650 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30651 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30652 case DW_OP_GNU_parameter_ref:
30653 return valx1->val_class == dw_val_class_die_ref
30654 && valx1->val_class == valy1->val_class
30655 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30656 default:
30657 /* Other codes have no operands. */
30658 return true;
30659 }
30660 }
30661
30662 /* Return true if DWARF location expressions X and Y are the same. */
30663
30664 static inline bool
30665 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30666 {
30667 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30668 if (x->dw_loc_opc != y->dw_loc_opc
30669 || x->dtprel != y->dtprel
30670 || !compare_loc_operands (x, y))
30671 break;
30672 return x == NULL && y == NULL;
30673 }
30674
30675 /* Hashtable helpers. */
30676
30677 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30678 {
30679 static inline hashval_t hash (const dw_loc_list_struct *);
30680 static inline bool equal (const dw_loc_list_struct *,
30681 const dw_loc_list_struct *);
30682 };
30683
30684 /* Return precomputed hash of location list X. */
30685
30686 inline hashval_t
30687 loc_list_hasher::hash (const dw_loc_list_struct *x)
30688 {
30689 return x->hash;
30690 }
30691
30692 /* Return true if location lists A and B are the same. */
30693
30694 inline bool
30695 loc_list_hasher::equal (const dw_loc_list_struct *a,
30696 const dw_loc_list_struct *b)
30697 {
30698 if (a == b)
30699 return 1;
30700 if (a->hash != b->hash)
30701 return 0;
30702 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30703 if (strcmp (a->begin, b->begin) != 0
30704 || strcmp (a->end, b->end) != 0
30705 || (a->section == NULL) != (b->section == NULL)
30706 || (a->section && strcmp (a->section, b->section) != 0)
30707 || a->vbegin != b->vbegin || a->vend != b->vend
30708 || !compare_locs (a->expr, b->expr))
30709 break;
30710 return a == NULL && b == NULL;
30711 }
30712
30713 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30714
30715
30716 /* Recursively optimize location lists referenced from DIE
30717 children and share them whenever possible. */
30718
30719 static void
30720 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30721 {
30722 dw_die_ref c;
30723 dw_attr_node *a;
30724 unsigned ix;
30725 dw_loc_list_struct **slot;
30726 bool drop_locviews = false;
30727 bool has_locviews = false;
30728
30729 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30730 if (AT_class (a) == dw_val_class_loc_list)
30731 {
30732 dw_loc_list_ref list = AT_loc_list (a);
30733 /* TODO: perform some optimizations here, before hashing
30734 it and storing into the hash table. */
30735 hash_loc_list (list);
30736 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30737 if (*slot == NULL)
30738 {
30739 *slot = list;
30740 if (loc_list_has_views (list))
30741 gcc_assert (list->vl_symbol);
30742 else if (list->vl_symbol)
30743 {
30744 drop_locviews = true;
30745 list->vl_symbol = NULL;
30746 }
30747 }
30748 else
30749 {
30750 if (list->vl_symbol && !(*slot)->vl_symbol)
30751 drop_locviews = true;
30752 a->dw_attr_val.v.val_loc_list = *slot;
30753 }
30754 }
30755 else if (AT_class (a) == dw_val_class_view_list)
30756 {
30757 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30758 has_locviews = true;
30759 }
30760
30761
30762 if (drop_locviews && has_locviews)
30763 remove_AT (die, DW_AT_GNU_locviews);
30764
30765 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30766 }
30767
30768
30769 /* Recursively assign each location list a unique index into the debug_addr
30770 section. */
30771
30772 static void
30773 index_location_lists (dw_die_ref die)
30774 {
30775 dw_die_ref c;
30776 dw_attr_node *a;
30777 unsigned ix;
30778
30779 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30780 if (AT_class (a) == dw_val_class_loc_list)
30781 {
30782 dw_loc_list_ref list = AT_loc_list (a);
30783 dw_loc_list_ref curr;
30784 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30785 {
30786 /* Don't index an entry that has already been indexed
30787 or won't be output. */
30788 if (curr->begin_entry != NULL
30789 || skip_loc_list_entry (curr))
30790 continue;
30791
30792 curr->begin_entry
30793 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30794 }
30795 }
30796
30797 FOR_EACH_CHILD (die, c, index_location_lists (c));
30798 }
30799
30800 /* Optimize location lists referenced from DIE
30801 children and share them whenever possible. */
30802
30803 static void
30804 optimize_location_lists (dw_die_ref die)
30805 {
30806 loc_list_hash_type htab (500);
30807 optimize_location_lists_1 (die, &htab);
30808 }
30809 \f
30810 /* Traverse the limbo die list, and add parent/child links. The only
30811 dies without parents that should be here are concrete instances of
30812 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
30813 For concrete instances, we can get the parent die from the abstract
30814 instance. */
30815
30816 static void
30817 flush_limbo_die_list (void)
30818 {
30819 limbo_die_node *node;
30820
30821 /* get_context_die calls force_decl_die, which can put new DIEs on the
30822 limbo list in LTO mode when nested functions are put in a different
30823 partition than that of their parent function. */
30824 while ((node = limbo_die_list))
30825 {
30826 dw_die_ref die = node->die;
30827 limbo_die_list = node->next;
30828
30829 if (die->die_parent == NULL)
30830 {
30831 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
30832
30833 if (origin && origin->die_parent)
30834 add_child_die (origin->die_parent, die);
30835 else if (is_cu_die (die))
30836 ;
30837 else if (seen_error ())
30838 /* It's OK to be confused by errors in the input. */
30839 add_child_die (comp_unit_die (), die);
30840 else
30841 {
30842 /* In certain situations, the lexical block containing a
30843 nested function can be optimized away, which results
30844 in the nested function die being orphaned. Likewise
30845 with the return type of that nested function. Force
30846 this to be a child of the containing function.
30847
30848 It may happen that even the containing function got fully
30849 inlined and optimized out. In that case we are lost and
30850 	     assign the empty child.  This should not be a big issue, as
30851 	     the function is likely unreachable too.  */
30852 gcc_assert (node->created_for);
30853
30854 if (DECL_P (node->created_for))
30855 origin = get_context_die (DECL_CONTEXT (node->created_for));
30856 else if (TYPE_P (node->created_for))
30857 origin = scope_die_for (node->created_for, comp_unit_die ());
30858 else
30859 origin = comp_unit_die ();
30860
30861 add_child_die (origin, die);
30862 }
30863 }
30864 }
30865 }
30866
30867 /* Reset DIEs so we can output them again. */
30868
30869 static void
30870 reset_dies (dw_die_ref die)
30871 {
30872 dw_die_ref c;
30873
30874 /* Remove stuff we re-generate. */
30875 die->die_mark = 0;
30876 die->die_offset = 0;
30877 die->die_abbrev = 0;
30878 remove_AT (die, DW_AT_sibling);
30879
30880 FOR_EACH_CHILD (die, c, reset_dies (c));
30881 }
30882
30883 /* Output stuff that DWARF requires at the end of every file,
30884    and generate the DWARF-2 debugging info.  */
30885
30886 static void
30887 dwarf2out_finish (const char *)
30888 {
30889 comdat_type_node *ctnode;
30890 dw_die_ref main_comp_unit_die;
30891 unsigned char checksum[16];
30892 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
30893
30894 /* Flush out any latecomers to the limbo party. */
30895 flush_limbo_die_list ();
30896
30897 if (inline_entry_data_table)
30898 gcc_assert (inline_entry_data_table->elements () == 0);
30899
30900 if (flag_checking)
30901 {
30902 verify_die (comp_unit_die ());
30903 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30904 verify_die (node->die);
30905 }
30906
30907 /* We shouldn't have any symbols with delayed asm names for
30908 DIEs generated after early finish. */
30909 gcc_assert (deferred_asm_name == NULL);
30910
30911 gen_remaining_tmpl_value_param_die_attribute ();
30912
30913 if (flag_generate_lto || flag_generate_offload)
30914 {
30915 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
30916
30917 /* Prune stuff so that dwarf2out_finish runs successfully
30918 for the fat part of the object. */
30919 reset_dies (comp_unit_die ());
30920 for (limbo_die_node *node = cu_die_list; node; node = node->next)
30921 reset_dies (node->die);
30922
30923 hash_table<comdat_type_hasher> comdat_type_table (100);
30924 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30925 {
30926 comdat_type_node **slot
30927 = comdat_type_table.find_slot (ctnode, INSERT);
30928
30929 /* Don't reset types twice. */
30930 if (*slot != HTAB_EMPTY_ENTRY)
30931 continue;
30932
30933 /* Likewise reset the DIEs of each comdat type unit so that
30934 they can be output again for the fat part of the
30935 object. */
30936 if (debug_info_level >= DINFO_LEVEL_TERSE)
30937 reset_dies (ctnode->root_die);
30938
30939 *slot = ctnode;
30940 }
30941
30942 /* Reset die CU symbol so we don't output it twice. */
30943 comp_unit_die ()->die_id.die_symbol = NULL;
30944
30945 /* Remove DW_AT_macro from the early output. */
30946 if (have_macinfo)
30947 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
30948
30949 /* Remove indirect string decisions. */
30950 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
30951 }
30952
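/* With assert checking enabled, verify that no child of the compile
   unit DIE is still marked from an earlier traversal. */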
30953 #if ENABLE_ASSERT_CHECKING
30954 {
30955 dw_die_ref die = comp_unit_die (), c;
30956 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
30957 }
30958 #endif
30959 resolve_addr (comp_unit_die ());
30960 move_marked_base_types ();
30961
30962 /* Initialize sections and labels used for actual assembler output. */
30963 unsigned generation = init_sections_and_labels (false);
30964
30965 /* Traverse the DIE's and add sibling attributes to those DIE's that
30966 have children. */
30967 add_sibling_attributes (comp_unit_die ());
30968 limbo_die_node *node;
30969 for (node = cu_die_list; node; node = node->next)
30970 add_sibling_attributes (node->die);
30971 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
30972 add_sibling_attributes (ctnode->root_die);
30973
30974 /* When splitting DWARF info, we put some attributes in the
30975 skeleton compile_unit DIE that remains in the .o, while
30976 most attributes go in the DWO compile_unit_die. */
30977 if (dwarf_split_debug_info)
30978 {
30979 limbo_die_node *cu;
30980 main_comp_unit_die = gen_compile_unit_die (NULL);
30981 if (dwarf_version >= 5)
30982 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
30983 cu = limbo_die_list;
30984 gcc_assert (cu->die == main_comp_unit_die);
30985 limbo_die_list = limbo_die_list->next;
30986 cu->next = cu_die_list;
30987 cu_die_list = cu;
30988 }
30989 else
30990 main_comp_unit_die = comp_unit_die ();
30991
30992 /* Output a terminator label for the .text section. */
30993 switch_to_section (text_section);
30994 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
30995 if (cold_text_section)
30996 {
30997 switch_to_section (cold_text_section);
30998 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
30999 }
31000
31001 /* We can only use the low/high_pc attributes if all of the code was
31002 in .text. */
31003 if (!have_multiple_function_sections
31004 || (dwarf_version < 3 && dwarf_strict))
31005 {
31006 /* Don't add if the CU has no associated code. */
31007 if (text_section_used)
31008 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31009 text_end_label, true);
31010 }
31011 else
31012 {
31013 unsigned fde_idx;
31014 dw_fde_ref fde;
31015 bool range_list_added = false;
31016
31017 if (text_section_used)
31018 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31019 text_end_label, &range_list_added, true);
31020 if (cold_text_section_used)
31021 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31022 cold_end_label, &range_list_added, true);
31023
31024 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31025 {
31026 if (DECL_IGNORED_P (fde->decl))
31027 continue;
31028 if (!fde->in_std_section)
31029 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31030 fde->dw_fde_end, &range_list_added,
31031 true);
31032 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31033 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31034 fde->dw_fde_second_end, &range_list_added,
31035 true);
31036 }
31037
31038 if (range_list_added)
31039 {
31040 /* We need to give .debug_loc and .debug_ranges an appropriate
31041 "base address". Use zero so that these addresses become
31042 absolute. Historically, we've emitted the unexpected
31043 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31044 Emit both to give time for other tools to adapt. */
31045 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31046 if (! dwarf_strict && dwarf_version < 4)
31047 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31048
31049 add_ranges (NULL);
31050 }
31051 }
31052
31053 /* AIX Assembler inserts the length, so adjust the reference to match the
31054 offset expected by debuggers. */
31055 strcpy (dl_section_ref, debug_line_section_label);
31056 if (XCOFF_DEBUGGING_INFO)
31057 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31058
31059 if (debug_info_level >= DINFO_LEVEL_TERSE)
31060 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31061 dl_section_ref);
31062
31063 if (have_macinfo)
31064 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31065 macinfo_section_label);
31066
31067 if (dwarf_split_debug_info)
31068 {
31069 if (have_location_lists)
31070 {
31071 if (dwarf_version >= 5)
31072 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
31073 loc_section_label);
31074 /* optimize_location_lists calculates the size of the lists,
31075 so index them first, and assign indices to the entries.
31076 Although optimize_location_lists will remove entries from
31077 the table, it only does so for duplicates, and therefore
31078 only reduces ref_counts to 1. */
31079 index_location_lists (comp_unit_die ());
31080 }
31081
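/* Assign indexes to the entries collected in the address table;
   split DWARF refers to them by index rather than by address. */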
31082 if (addr_index_table != NULL)
31083 {
31084 unsigned int index = 0;
31085 addr_index_table
31086 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31087 (&index);
31088 }
31089 }
31090
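/* Reset the location list counter before the optimization and
   index assignment below. */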
31091 loc_list_idx = 0;
31092 if (have_location_lists)
31093 {
31094 optimize_location_lists (comp_unit_die ());
31095 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31096 if (dwarf_version >= 5 && dwarf_split_debug_info)
31097 assign_location_list_indexes (comp_unit_die ());
31098 }
31099
31100 save_macinfo_strings ();
31101
31102 if (dwarf_split_debug_info)
31103 {
31104 unsigned int index = 0;
31105
31106 /* Add attributes common to skeleton compile_units and
31107 type_units. Because these attributes include strings, it
31108 must be done before freezing the string table. Top-level
31109 skeleton die attrs are added when the skeleton type unit is
31110 created, so ensure it is created by this point. */
31111 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31112 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31113 }
31114
31115 /* Output all of the compilation units. We put the main one last so that
31116 the offsets are available to output_pubnames. */
31117 for (node = cu_die_list; node; node = node->next)
31118 output_comp_unit (node->die, 0, NULL);
31119
31120 hash_table<comdat_type_hasher> comdat_type_table (100);
31121 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31122 {
31123 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31124
31125 /* Don't output duplicate types. */
31126 if (*slot != HTAB_EMPTY_ENTRY)
31127 continue;
31128
31129 /* Add a pointer to the line table for the main compilation unit
31130 so that the debugger can make sense of DW_AT_decl_file
31131 attributes. */
31132 if (debug_info_level >= DINFO_LEVEL_TERSE)
31133 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31134 (!dwarf_split_debug_info
31135 ? dl_section_ref
31136 : debug_skeleton_line_section_label));
31137
31138 output_comdat_type_unit (ctnode);
31139 *slot = ctnode;
31140 }
31141
31142 if (dwarf_split_debug_info)
31143 {
31144 int mark;
31145 struct md5_ctx ctx;
31146
31147 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31148 index_rnglists ();
31149
31150 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31151 md5_init_ctx (&ctx);
31152 mark = 0;
31153 die_checksum (comp_unit_die (), &ctx, &mark);
31154 unmark_all_dies (comp_unit_die ());
31155 md5_finish_ctx (&ctx, checksum);
31156
31157 if (dwarf_version < 5)
31158 {
31159 /* Use the first 8 bytes of the checksum as the dwo_id,
31160 and add it to both comp-unit DIEs. */
31161 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31162 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31163 }
31164
31165 /* Add the base offset of the ranges table to the skeleton
31166 comp-unit DIE. */
31167 if (!vec_safe_is_empty (ranges_table))
31168 {
31169 if (dwarf_version >= 5)
31170 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31171 ranges_base_label);
31172 else
31173 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31174 ranges_section_label);
31175 }
31176
31177 switch_to_section (debug_addr_section);
31178 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31179 output_addr_table ();
31180 }
31181
31182 /* Output the main compilation unit if non-empty or if .debug_macinfo
31183 or .debug_macro will be emitted. */
31184 output_comp_unit (comp_unit_die (), have_macinfo,
31185 dwarf_split_debug_info ? checksum : NULL);
31186
31187 if (dwarf_split_debug_info && info_section_emitted)
31188 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31189
31190 /* Output the abbreviation table. */
31191 if (vec_safe_length (abbrev_die_table) != 1)
31192 {
31193 switch_to_section (debug_abbrev_section);
31194 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31195 output_abbrev_section ();
31196 }
31197
31198 /* Output location list section if necessary. */
31199 if (have_location_lists)
31200 {
31201 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31202 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31203 /* Output the location lists info. */
31204 switch_to_section (debug_loc_section);
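/* For DWARF 5, emit the location list table header: unit length,
   version, address size, segment selector size and offset entry
   count. */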
31205 if (dwarf_version >= 5)
31206 {
31207 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31208 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31209 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31210 dw2_asm_output_data (4, 0xffffffff,
31211 "Initial length escape value indicating "
31212 "64-bit DWARF extension");
31213 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31214 "Length of Location Lists");
31215 ASM_OUTPUT_LABEL (asm_out_file, l1);
31216 output_dwarf_version ();
31217 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31218 dw2_asm_output_data (1, 0, "Segment Size");
31219 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31220 "Offset Entry Count");
31221 }
31222 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31223 if (dwarf_version >= 5 && dwarf_split_debug_info)
31224 {
31225 unsigned int save_loc_list_idx = loc_list_idx;
31226 loc_list_idx = 0;
31227 output_loclists_offsets (comp_unit_die ());
31228 gcc_assert (save_loc_list_idx == loc_list_idx);
31229 }
31230 output_location_lists (comp_unit_die ());
31231 if (dwarf_version >= 5)
31232 ASM_OUTPUT_LABEL (asm_out_file, l2);
31233 }
31234
31235 output_pubtables ();
31236
31237 /* Output the address range information if a CU (.debug_info section)
31238 was emitted. We output an empty table even if we had no functions
31239 to put in it. This is because the consumer has no way to tell the
31240 difference between an empty table that we omitted and failure to
31241 generate a table that would have contained data. */
31242 if (info_section_emitted)
31243 {
31244 switch_to_section (debug_aranges_section);
31245 output_aranges ();
31246 }
31247
31248 /* Output ranges section if necessary. */
31249 if (!vec_safe_is_empty (ranges_table))
31250 {
31251 if (dwarf_version >= 5)
31252 output_rnglists (generation);
31253 else
31254 output_ranges ();
31255 }
31256
31257 /* Have to end the macro section. */
31258 if (have_macinfo)
31259 {
31260 switch_to_section (debug_macinfo_section);
31261 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31262 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31263 : debug_skeleton_line_section_label, false);
31264 dw2_asm_output_data (1, 0, "End compilation unit");
31265 }
31266
31267 /* Output the source line correspondence table. We must do this
31268 even if there is no line information. Otherwise, on an empty
31269 translation unit, we will generate a present, but empty,
31270 .debug_info section. IRIX 6.5 `nm' will then complain when
31271 examining the file. This is done late so that any filenames
31272 used by the debug_info section are marked as 'used'. */
31273 switch_to_section (debug_line_section);
31274 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31275 if (! output_asm_line_debug_info ())
31276 output_line_info (false);
31277
31278 if (dwarf_split_debug_info && info_section_emitted)
31279 {
31280 switch_to_section (debug_skeleton_line_section);
31281 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31282 output_line_info (true);
31283 }
31284
31285 /* If we emitted any indirect strings, output the string table too. */
31286 if (debug_str_hash || skeleton_debug_str_hash)
31287 output_indirect_strings ();
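/* Strings referenced via DW_FORM_line_strp live in .debug_line_str
   and are output separately here. */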
31288 if (debug_line_str_hash)
31289 {
31290 switch_to_section (debug_line_str_section);
31291 const enum dwarf_form form = DW_FORM_line_strp;
31292 debug_line_str_hash->traverse<enum dwarf_form,
31293 output_indirect_string> (form);
31294 }
31295 }
31296
31297 /* Returns a hash value for X (which really is a variable_value_struct). */
31298
31299 inline hashval_t
31300 variable_value_hasher::hash (variable_value_struct *x)
31301 {
31302 return (hashval_t) x->decl_id;
31303 }
31304
31305 /* Return nonzero if decl_id of variable_value_struct X is the same as
31306 UID of decl Y. */
31307
31308 inline bool
31309 variable_value_hasher::equal (variable_value_struct *x, tree y)
31310 {
31311 return x->decl_id == DECL_UID (y);
31312 }
31313
31314 /* Helper function for resolve_variable_value: handle
31315 DW_OP_GNU_variable_value in one location expression.
31316 Return true if the exprloc has been changed into a loclist. */
31317
31318 static bool
31319 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31320 {
31321 dw_loc_descr_ref next;
31322 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31323 {
31324 next = loc->dw_loc_next;
31325 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31326 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31327 continue;
31328
31329 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31330 if (DECL_CONTEXT (decl) != current_function_decl)
31331 continue;
31332
31333 dw_die_ref ref = lookup_decl_die (decl);
31334 if (ref)
31335 {
31336 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31337 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31338 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31339 continue;
31340 }
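/* No DIE for the decl yet; try to compute its location directly. */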
31341 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31342 if (l == NULL)
31343 continue;
31344 if (l->dw_loc_next)
31345 {
31346 if (AT_class (a) != dw_val_class_loc)
31347 continue;
31348 switch (a->dw_attr)
31349 {
31350 /* The following attributes allow both exprloc and loclist
31351 classes, so we can change them into a loclist. */
31352 case DW_AT_location:
31353 case DW_AT_string_length:
31354 case DW_AT_return_addr:
31355 case DW_AT_data_member_location:
31356 case DW_AT_frame_base:
31357 case DW_AT_segment:
31358 case DW_AT_static_link:
31359 case DW_AT_use_location:
31360 case DW_AT_vtable_elem_location:
31361 if (prev)
31362 {
31363 prev->dw_loc_next = NULL;
31364 prepend_loc_descr_to_each (l, AT_loc (a));
31365 }
31366 if (next)
31367 add_loc_descr_to_each (l, next);
31368 a->dw_attr_val.val_class = dw_val_class_loc_list;
31369 a->dw_attr_val.val_entry = NULL;
31370 a->dw_attr_val.v.val_loc_list = l;
31371 have_location_lists = true;
31372 return true;
31373 /* The following attributes allow both exprloc and reference
31374 classes, so if the whole expression is DW_OP_GNU_variable_value
31375 alone we can transform it into a reference. */
31376 case DW_AT_byte_size:
31377 case DW_AT_bit_size:
31378 case DW_AT_lower_bound:
31379 case DW_AT_upper_bound:
31380 case DW_AT_bit_stride:
31381 case DW_AT_count:
31382 case DW_AT_allocated:
31383 case DW_AT_associated:
31384 case DW_AT_byte_stride:
31385 if (prev == NULL && next == NULL)
31386 break;
31387 /* FALLTHRU */
31388 default:
31389 if (dwarf_strict)
31390 continue;
31391 break;
31392 }
31393 /* Create DW_TAG_variable that we can refer to. */
31394 gen_decl_die (decl, NULL_TREE, NULL,
31395 lookup_decl_die (current_function_decl));
31396 ref = lookup_decl_die (decl);
31397 if (ref)
31398 {
31399 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31400 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31401 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31402 }
31403 continue;
31404 }
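/* The location is a single expression; splice it into the containing
   expression in place of the DW_OP_GNU_variable_value operation. */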
31405 if (prev)
31406 {
31407 prev->dw_loc_next = l->expr;
31408 add_loc_descr (&prev->dw_loc_next, next);
31409 free_loc_descr (loc, NULL);
31410 next = prev->dw_loc_next;
31411 }
31412 else
31413 {
31414 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31415 add_loc_descr (&loc, next);
31416 next = loc;
31417 }
31418 loc = prev;
31419 }
31420 return false;
31421 }
31422
31423 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31424
31425 static void
31426 resolve_variable_value (dw_die_ref die)
31427 {
31428 dw_attr_node *a;
31429 dw_loc_list_ref loc;
31430 unsigned ix;
31431
31432 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31433 switch (AT_class (a))
31434 {
31435 case dw_val_class_loc:
31436 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31437 break;
31438 /* FALLTHRU */
31439 case dw_val_class_loc_list:
31440 loc = AT_loc_list (a);
31441 gcc_assert (loc);
31442 for (; loc; loc = loc->dw_loc_next)
31443 resolve_variable_value_in_expr (a, loc->expr);
31444 break;
31445 default:
31446 break;
31447 }
31448 }
31449
31450 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31451 temporaries in the current function. */
31452
31453 static void
31454 resolve_variable_values (void)
31455 {
31456 if (!variable_value_hash || !current_function_decl)
31457 return;
31458
31459 struct variable_value_struct *node
31460 = variable_value_hash->find_with_hash (current_function_decl,
31461 DECL_UID (current_function_decl));
31462
31463 if (node == NULL)
31464 return;
31465
31466 unsigned int i;
31467 dw_die_ref die;
31468 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31469 resolve_variable_value (die);
31470 }
31471
31472 /* Helper function for note_variable_value: handle one location
31473 expression. */
31474
31475 static void
31476 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31477 {
31478 for (; loc; loc = loc->dw_loc_next)
31479 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31480 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31481 {
31482 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31483 dw_die_ref ref = lookup_decl_die (decl);
31484 if (! ref && (flag_generate_lto || flag_generate_offload))
31485 {
31486 /* ??? This is somewhat of a hack: we do not create DIEs early
31487 for variables that are not in BLOCK trees, but when generating
31488 early LTO output we need the dw_val_class_decl_ref to be
31489 fully resolved. For fat LTO objects we'd also like to
31490 undo this after LTO dwarf output. */
31491 gcc_assert (DECL_CONTEXT (decl));
31492 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31493 gcc_assert (ctx != NULL);
31494 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31495 ref = lookup_decl_die (decl);
31496 gcc_assert (ref != NULL);
31497 }
31498 if (ref)
31499 {
31500 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31501 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31502 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31503 continue;
31504 }
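/* No DIE for the decl yet; remember the referencing DIE keyed by the
   decl's containing function so resolve_variable_values can retry
   once that function is being output. */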
31505 if (VAR_P (decl)
31506 && DECL_CONTEXT (decl)
31507 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31508 && lookup_decl_die (DECL_CONTEXT (decl)))
31509 {
31510 if (!variable_value_hash)
31511 variable_value_hash
31512 = hash_table<variable_value_hasher>::create_ggc (10);
31513
31514 tree fndecl = DECL_CONTEXT (decl);
31515 struct variable_value_struct *node;
31516 struct variable_value_struct **slot
31517 = variable_value_hash->find_slot_with_hash (fndecl,
31518 DECL_UID (fndecl),
31519 INSERT);
31520 if (*slot == NULL)
31521 {
31522 node = ggc_cleared_alloc<variable_value_struct> ();
31523 node->decl_id = DECL_UID (fndecl);
31524 *slot = node;
31525 }
31526 else
31527 node = *slot;
31528
31529 vec_safe_push (node->dies, die);
31530 }
31531 }
31532 }
31533
31534 /* Walk the tree rooted at DIE and note DIEs whose location expressions
31535 still have DW_OP_GNU_variable_value with a dw_val_class_decl_ref operand. */
31536
31537 static void
31538 note_variable_value (dw_die_ref die)
31539 {
31540 dw_die_ref c;
31541 dw_attr_node *a;
31542 dw_loc_list_ref loc;
31543 unsigned ix;
31544
31545 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31546 switch (AT_class (a))
31547 {
31548 case dw_val_class_loc_list:
31549 loc = AT_loc_list (a);
31550 gcc_assert (loc);
31551 if (!loc->noted_variable_value)
31552 {
31553 loc->noted_variable_value = 1;
31554 for (; loc; loc = loc->dw_loc_next)
31555 note_variable_value_in_expr (die, loc->expr);
31556 }
31557 break;
31558 case dw_val_class_loc:
31559 note_variable_value_in_expr (die, AT_loc (a));
31560 break;
31561 default:
31562 break;
31563 }
31564
31565 /* Mark children. */
31566 FOR_EACH_CHILD (die, c, note_variable_value (c));
31567 }
31568
31569 /* Perform any cleanups needed after the early debug generation pass
31570 has run. */
31571
31572 static void
31573 dwarf2out_early_finish (const char *filename)
31574 {
31575 set_early_dwarf s;
31576
31577 /* PCH might result in the DW_AT_producer string being restored from the
31578 header compilation, so always fill it with an empty string initially
31579 and only overwrite it here. */
31580 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31581 producer_string = gen_producer_string ();
31582 producer->dw_attr_val.v.val_str->refcount--;
31583 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31584
31585 /* Add the name for the main input file now. We delayed this from
31586 dwarf2out_init to avoid complications with PCH. */
31587 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31588 add_comp_dir_attribute (comp_unit_die ());
31589
31590 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31591 DW_AT_comp_dir into .debug_line_str section. */
31592 if (!dwarf2out_as_loc_support
31593 && dwarf_version >= 5
31594 && DWARF5_USE_DEBUG_LINE_STR)
31595 {
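/* i == 0 handles DW_AT_name, i == 1 handles DW_AT_comp_dir; strings
   no longer than an offset stay inline since indirection would not
   save space. */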
31596 for (int i = 0; i < 2; i++)
31597 {
31598 dw_attr_node *a = get_AT (comp_unit_die (),
31599 i ? DW_AT_comp_dir : DW_AT_name);
31600 if (a == NULL
31601 || AT_class (a) != dw_val_class_str
31602 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31603 continue;
31604
31605 if (! debug_line_str_hash)
31606 debug_line_str_hash
31607 = hash_table<indirect_string_hasher>::create_ggc (10);
31608
31609 struct indirect_string_node *node
31610 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31611 set_indirect_string (node);
31612 node->form = DW_FORM_line_strp;
31613 a->dw_attr_val.v.val_str->refcount--;
31614 a->dw_attr_val.v.val_str = node;
31615 }
31616 }
31617
31618 /* With LTO, early dwarf was already finished at compile time, so make
31619 sure to adjust the phase after annotating the LTRANS CU DIE. */
31620 if (in_lto_p)
31621 {
31622 early_dwarf_finished = true;
31623 return;
31624 }
31625
31626 /* Walk through the list of incomplete types again, trying once more to
31627 emit full debugging info for them. */
31628 retry_incomplete_types ();
31629
31630 /* The point here is to flush out the limbo list so that it is empty
31631 and we don't need to stream it for LTO. */
31632 flush_limbo_die_list ();
31633
31634 gen_scheduled_generic_parms_dies ();
31635 gen_remaining_tmpl_value_param_die_attribute ();
31636
31637 /* Add DW_AT_linkage_name for all deferred DIEs. */
31638 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31639 {
31640 tree decl = node->created_for;
31641 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31642 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31643 ended up in deferred_asm_name before we knew it was
31644 constant and never written to disk. */
31645 && DECL_ASSEMBLER_NAME (decl))
31646 {
31647 add_linkage_attr (node->die, decl);
31648 move_linkage_attr (node->die);
31649 }
31650 }
31651 deferred_asm_name = NULL;
31652
31653 if (flag_eliminate_unused_debug_types)
31654 prune_unused_types ();
31655
31656 /* Generate separate COMDAT sections for type DIEs. */
31657 if (use_debug_types)
31658 {
31659 break_out_comdat_types (comp_unit_die ());
31660
31661 /* Each new type_unit DIE was added to the limbo die list when created.
31662 Since these have all been added to comdat_type_list, clear the
31663 limbo die list. */
31664 limbo_die_list = NULL;
31665
31666 /* For each new comdat type unit, copy declarations for incomplete
31667 types to make the new unit self-contained (i.e., no direct
31668 references to the main compile unit). */
31669 for (comdat_type_node *ctnode = comdat_type_list;
31670 ctnode != NULL; ctnode = ctnode->next)
31671 copy_decls_for_unworthy_types (ctnode->root_die);
31672 copy_decls_for_unworthy_types (comp_unit_die ());
31673
31674 /* In the process of copying declarations from one unit to another,
31675 we may have left some declarations behind that are no longer
31676 referenced. Prune them. */
31677 prune_unused_types ();
31678 }
31679
31680 /* Traverse the DIE's and note those with DW_OP_GNU_variable_value still
31681 carrying a dw_val_class_decl_ref operand. */
31682 note_variable_value (comp_unit_die ());
31683 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31684 note_variable_value (node->die);
31685 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31686 ctnode = ctnode->next)
31687 note_variable_value (ctnode->root_die);
31688 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31689 note_variable_value (node->die);
31690
31691 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31692 both the main_cu and all skeleton TUs. Making this call unconditional
31693 would end up either adding a second copy of the AT_pubnames attribute, or
31694 requiring a special case in add_top_level_skeleton_die_attrs. */
31695 if (!dwarf_split_debug_info)
31696 add_AT_pubnames (comp_unit_die ());
31697
31698 /* The early debug phase is now finished. */
31699 early_dwarf_finished = true;
31700
31701 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31702 if (!flag_generate_lto && !flag_generate_offload)
31703 return;
31704
31705 /* Now that we are going to output for LTO, initialize sections and labels
31706 to the LTO variants. We don't need a random-seed postfix like other
31707 LTO sections, as linking the LTO debug sections into one in a partial
31708 link is fine. */
31709 init_sections_and_labels (true);
31710
31711 /* The output below is modeled after dwarf2out_finish with all
31712 location related output removed and some LTO specific changes.
31713 Some refactoring might make both smaller and easier to match up. */
31714
31715 /* Traverse the DIE's and add sibling attributes to those DIE's
31716 that have children. */
31717 add_sibling_attributes (comp_unit_die ());
31718 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31719 add_sibling_attributes (node->die);
31720 for (comdat_type_node *ctnode = comdat_type_list;
31721 ctnode != NULL; ctnode = ctnode->next)
31722 add_sibling_attributes (ctnode->root_die);
31723
31724 if (have_macinfo)
31725 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31726 macinfo_section_label);
31727
31728 save_macinfo_strings ();
31729
31730 if (dwarf_split_debug_info)
31731 {
31732 unsigned int index = 0;
31733 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31734 }
31735
31736 /* Output all of the compilation units. We put the main one last so that
31737 the offsets are available to output_pubnames. */
31738 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31739 output_comp_unit (node->die, 0, NULL);
31740
31741 hash_table<comdat_type_hasher> comdat_type_table (100);
31742 for (comdat_type_node *ctnode = comdat_type_list;
31743 ctnode != NULL; ctnode = ctnode->next)
31744 {
31745 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31746
31747 /* Don't output duplicate types. */
31748 if (*slot != HTAB_EMPTY_ENTRY)
31749 continue;
31750
31751 /* Add a pointer to the line table for the main compilation unit
31752 so that the debugger can make sense of DW_AT_decl_file
31753 attributes. */
31754 if (debug_info_level >= DINFO_LEVEL_TERSE)
31755 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31756 (!dwarf_split_debug_info
31757 ? debug_line_section_label
31758 : debug_skeleton_line_section_label));
31759
31760 output_comdat_type_unit (ctnode);
31761 *slot = ctnode;
31762 }
31763
31764 /* Stick a unique symbol to the main debuginfo section. */
31765 compute_comp_unit_symbol (comp_unit_die ());
31766
31767 /* Output the main compilation unit. We always need it if only for
31768 the CU symbol. */
31769 output_comp_unit (comp_unit_die (), true, NULL);
31770
31771 /* Output the abbreviation table. */
31772 if (vec_safe_length (abbrev_die_table) != 1)
31773 {
31774 switch_to_section (debug_abbrev_section);
31775 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31776 output_abbrev_section ();
31777 }
31778
31779 /* Have to end the macro section. */
31780 if (have_macinfo)
31781 {
31782 /* We have to save macinfo state if we need to output it again
31783 for the FAT part of the object. */
31784 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
31785 if (flag_fat_lto_objects)
31786 macinfo_table = macinfo_table->copy ();
31787
31788 switch_to_section (debug_macinfo_section);
31789 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31790 output_macinfo (debug_skeleton_line_section_label, true);
31791 dw2_asm_output_data (1, 0, "End compilation unit");
31792
31793 /* Emit a skeleton debug_line section. */
31794 switch_to_section (debug_skeleton_line_section);
31795 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31796 output_line_info (true);
31797
31798 if (flag_fat_lto_objects)
31799 {
31800 vec_free (macinfo_table);
31801 macinfo_table = saved_macinfo_table;
31802 }
31803 }
31804
31805
31806 /* If we emitted any indirect strings, output the string table too. */
31807 if (debug_str_hash || skeleton_debug_str_hash)
31808 output_indirect_strings ();
31809
31810 /* Switch back to the text section. */
31811 switch_to_section (text_section);
31812 }
31813
31814 /* Reset all state within dwarf2out.c so that we can rerun the compiler
31815 within the same process. For use by toplev::finalize. */
31816
31817 void
31818 dwarf2out_c_finalize (void)
31819 {
31820 last_var_location_insn = NULL;
31821 cached_next_real_insn = NULL;
31822 used_rtx_array = NULL;
31823 incomplete_types = NULL;
31824 decl_scope_table = NULL;
31825 debug_info_section = NULL;
31826 debug_skeleton_info_section = NULL;
31827 debug_abbrev_section = NULL;
31828 debug_skeleton_abbrev_section = NULL;
31829 debug_aranges_section = NULL;
31830 debug_addr_section = NULL;
31831 debug_macinfo_section = NULL;
31832 debug_line_section = NULL;
31833 debug_skeleton_line_section = NULL;
31834 debug_loc_section = NULL;
31835 debug_pubnames_section = NULL;
31836 debug_pubtypes_section = NULL;
31837 debug_str_section = NULL;
31838 debug_line_str_section = NULL;
31839 debug_str_dwo_section = NULL;
31840 debug_str_offsets_section = NULL;
31841 debug_ranges_section = NULL;
31842 debug_frame_section = NULL;
31843 fde_vec = NULL;
31844 debug_str_hash = NULL;
31845 debug_line_str_hash = NULL;
31846 skeleton_debug_str_hash = NULL;
31847 dw2_string_counter = 0;
31848 have_multiple_function_sections = false;
31849 text_section_used = false;
31850 cold_text_section_used = false;
31851 cold_text_section = NULL;
31852 current_unit_personality = NULL;
31853
31854 early_dwarf = false;
31855 early_dwarf_finished = false;
31856
31857 next_die_offset = 0;
31858 single_comp_unit_die = NULL;
31859 comdat_type_list = NULL;
31860 limbo_die_list = NULL;
31861 file_table = NULL;
31862 decl_die_table = NULL;
31863 common_block_die_table = NULL;
31864 decl_loc_table = NULL;
31865 call_arg_locations = NULL;
31866 call_arg_loc_last = NULL;
31867 call_site_count = -1;
31868 tail_call_site_count = -1;
31869 cached_dw_loc_list_table = NULL;
31870 abbrev_die_table = NULL;
31871 delete dwarf_proc_stack_usage_map;
31872 dwarf_proc_stack_usage_map = NULL;
31873 line_info_label_num = 0;
31874 cur_line_info_table = NULL;
31875 text_section_line_info = NULL;
31876 cold_text_section_line_info = NULL;
31877 separate_line_info = NULL;
31878 info_section_emitted = false;
31879 pubname_table = NULL;
31880 pubtype_table = NULL;
31881 macinfo_table = NULL;
31882 ranges_table = NULL;
31883 ranges_by_label = NULL;
31884 rnglist_idx = 0;
31885 have_location_lists = false;
31886 loclabel_num = 0;
31887 poc_label_num = 0;
31888 last_emitted_file = NULL;
31889 label_num = 0;
31890 tmpl_value_parm_die_table = NULL;
31891 generic_type_instances = NULL;
31892 frame_pointer_fb_offset = 0;
31893 frame_pointer_fb_offset_valid = false;
31894 base_types.release ();
31895 XDELETEVEC (producer_string);
31896 producer_string = NULL;
31897 }
31898
31899 #include "gt-dwarf2out.h"