1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 	 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
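
/* As a rough illustration of how these pieces fit together (a sketch only,
   not output produced verbatim by this file), an x86-64 prologue of the
   form "push %rbp; mov %rsp,%rbp" could be described in its FDE by the
   CFI program

       DW_CFA_advance_loc 1           after the push
       DW_CFA_def_cfa_offset 16       CFA is now SP + 16
       DW_CFA_offset %rbp, -16        caller's %rbp saved at CFA - 16
       DW_CFA_advance_loc 3           after the mov
       DW_CFA_def_cfa_register %rbp   CFA now computed from %rbp

   with the common prefix (code/data alignment factors, return column,
   initial CFA rule) factored into the CIE that the FDE points at.  */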
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148 completed at some later time. incomplete_types needs to be a
149 vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
157 define type declaration DIE's. */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
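
/* For example, DWARF_ROUND (5, 4) is 8 and DWARF_ROUND (8, 4) is 8;
   sizes already on the boundary are left alone.  */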
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
254 Rather than forcing all the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
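
/* For instance, when compiling foo.c, matches_main_base ("./foo.c") and
   matches_main_base ("../src/foo.c") are both true, while
   matches_main_base ("/usr/include/stdio.h") is not, since only the base
   file names are compared.  */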
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
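
/* For example, with a 64-bit HOST_WIDE_INT, a constant whose minimum
   unsigned precision is 1..64 bits needs one HOST_WIDE_INT here, and one
   needing 65..128 bits gets two, regardless of how wide the wide_int's
   nominal precision is.  */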
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
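
/* A rough illustration, assuming the documented semantics of the
   -femit-struct-debug-* options: with -femit-struct-debug-baseonly, a
   struct defined in bar.h and merely used from foo.c is skipped, because
   bar.h's base name does not match the main input's, while a struct
   defined in foo.c itself is described in full.  */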
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
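
/* Reading the two tables above together: DW_CFA_offset, for example, has
   a register number as operand 1 and a (factored) offset as operand 2,
   while DW_CFA_def_cfa_offset carries only an offset in operand 1 and
   leaves operand 2 unused.  */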
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697 do it now. This is typically performed to make sure that tables
698     associated with functions are dragged with them and not discarded by
699     garbage-collecting linkers. We need to do this on a per-function basis to
700 cope with -ffunction-sections. */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
739 /* Output the call frame information used to record information
740 that relates to calculating the frame pointer, and records the
741 location of saved registers. */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822   /* Use CIE version 3 for DWARF3; allow DWARF2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942   /* Loop through all of the FDEs.  */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113       /* We cannot keep a per-function current personality: without CFI
1114 	 asm, by the time we emit the CFI data there is no current
1115 	 function anymore.  */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132   /* Output a label to mark the end of the prologue code generated for
1133      this function.  */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155   /* Output a label to mark the point at which the epilogue code for this
1156      function begins.  */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
1294 typedef unsigned int var_loc_view;
1295
1296 /* Location lists are ranges + location descriptions for that range,
1297 so you can track variables that are in different places over
1298 their entire life. */
1299 typedef struct GTY(()) dw_loc_list_struct {
1300 dw_loc_list_ref dw_loc_next;
1301 const char *begin; /* Label and addr_entry for start of range */
1302 addr_table_entry *begin_entry;
1303 const char *end; /* Label for end of range */
1304 char *ll_symbol; /* Label for beginning of location list.
1305 Only on head of list. */
1306 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1307 const char *section; /* Section this loclist is relative to */
1308 dw_loc_descr_ref expr;
1309 var_loc_view vbegin, vend;
1310 hashval_t hash;
1311 /* True if all addresses in this and subsequent lists are known to be
1312 resolved. */
1313 bool resolved_addr;
1314 /* True if this list has been replaced by dw_loc_next. */
1315 bool replaced;
1316 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1317 section. */
1318 unsigned char emitted : 1;
1319 /* True if hash field is index rather than hash value. */
1320 unsigned char num_assigned : 1;
1321 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1322 unsigned char offset_emitted : 1;
1323 /* True if note_variable_value_in_expr has been called on it. */
1324 unsigned char noted_variable_value : 1;
1325 /* True if the range should be emitted even if begin and end
1326 are the same. */
1327 bool force;
1328 } dw_loc_list_node;
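
/* For instance, a variable that lives in a register for the first part of
   a function and in a stack slot afterwards gets a two-node list: each
   node's [begin, end) labels delimit one of the two ranges and its expr
   gives the location that is valid there.  */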
1329
1330 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1331 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1332
1333 /* Convert a DWARF stack opcode into its string name. */
1334
1335 static const char *
1336 dwarf_stack_op_name (unsigned int op)
1337 {
1338 const char *name = get_DW_OP_name (op);
1339
1340 if (name != NULL)
1341 return name;
1342
1343 return "OP_<unknown>";
1344 }
1345
1346 /* Return TRUE iff we're to output location view lists as a separate
1347 attribute next to the location lists, as an extension compatible
1348 with DWARF 2 and above. */
1349
1350 static inline bool
1351 dwarf2out_locviews_in_attribute ()
1352 {
1353 return debug_variable_location_views == 1;
1354 }
1355
1356 /* Return TRUE iff we're to output location view lists as part of the
1357 location lists, as proposed for standardization after DWARF 5. */
1358
1359 static inline bool
1360 dwarf2out_locviews_in_loclist ()
1361 {
1362 #ifndef DW_LLE_view_pair
1363 return false;
1364 #else
1365 return debug_variable_location_views == -1;
1366 #endif
1367 }
1368
1369 /* Return a pointer to a newly allocated location description. Location
1370 descriptions are simple expression terms that can be strung
1371 together to form more complicated location (address) descriptions. */
1372
1373 static inline dw_loc_descr_ref
1374 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1375 unsigned HOST_WIDE_INT oprnd2)
1376 {
1377 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1378
1379 descr->dw_loc_opc = op;
1380 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1381 descr->dw_loc_oprnd1.val_entry = NULL;
1382 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1383 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd2.val_entry = NULL;
1385 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1386
1387 return descr;
1388 }
1389
1390 /* Add a location description term to a location description expression. */
1391
1392 static inline void
1393 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1394 {
1395 dw_loc_descr_ref *d;
1396
1397 /* Find the end of the chain. */
1398 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1399 ;
1400
1401 *d = descr;
1402 }
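
/* A small usage sketch (hypothetical values, not taken from a caller in
   this file): to describe "the word that register 5 plus 8 points at",
   one could build

       dw_loc_descr_ref loc = new_loc_descr (DW_OP_breg5, 8, 0);
       add_loc_descr (&loc, new_loc_descr (DW_OP_deref, 0, 0));

   yielding the expression DW_OP_breg5 8; DW_OP_deref.  */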
1403
1404 /* Compare two location operands for exact equality. */
1405
1406 static bool
1407 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1408 {
1409 if (a->val_class != b->val_class)
1410 return false;
1411 switch (a->val_class)
1412 {
1413 case dw_val_class_none:
1414 return true;
1415 case dw_val_class_addr:
1416 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1417
1418 case dw_val_class_offset:
1419 case dw_val_class_unsigned_const:
1420 case dw_val_class_const:
1421 case dw_val_class_unsigned_const_implicit:
1422 case dw_val_class_const_implicit:
1423 case dw_val_class_range_list:
1424 /* These are all HOST_WIDE_INT, signed or unsigned. */
1425 return a->v.val_unsigned == b->v.val_unsigned;
1426
1427 case dw_val_class_loc:
1428 return a->v.val_loc == b->v.val_loc;
1429 case dw_val_class_loc_list:
1430 return a->v.val_loc_list == b->v.val_loc_list;
1431 case dw_val_class_view_list:
1432 return a->v.val_view_list == b->v.val_view_list;
1433 case dw_val_class_die_ref:
1434 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1435 case dw_val_class_fde_ref:
1436 return a->v.val_fde_index == b->v.val_fde_index;
1437 case dw_val_class_symview:
1438 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1439 case dw_val_class_lbl_id:
1440 case dw_val_class_lineptr:
1441 case dw_val_class_macptr:
1442 case dw_val_class_loclistsptr:
1443 case dw_val_class_high_pc:
1444 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1445 case dw_val_class_str:
1446 return a->v.val_str == b->v.val_str;
1447 case dw_val_class_flag:
1448 return a->v.val_flag == b->v.val_flag;
1449 case dw_val_class_file:
1450 case dw_val_class_file_implicit:
1451 return a->v.val_file == b->v.val_file;
1452 case dw_val_class_decl_ref:
1453 return a->v.val_decl_ref == b->v.val_decl_ref;
1454
1455 case dw_val_class_const_double:
1456 return (a->v.val_double.high == b->v.val_double.high
1457 && a->v.val_double.low == b->v.val_double.low);
1458
1459 case dw_val_class_wide_int:
1460 return *a->v.val_wide == *b->v.val_wide;
1461
1462 case dw_val_class_vec:
1463 {
1464 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1465 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1466
1467 return (a_len == b_len
1468 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1469 }
1470
1471 case dw_val_class_data8:
1472 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1473
1474 case dw_val_class_vms_delta:
1475 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1476 	      && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1477
1478 case dw_val_class_discr_value:
1479 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1480 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1481 case dw_val_class_discr_list:
1482 /* It makes no sense comparing two discriminant value lists. */
1483 return false;
1484 }
1485 gcc_unreachable ();
1486 }
1487
1488 /* Compare two location atoms for exact equality. */
1489
1490 static bool
1491 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1492 {
1493 if (a->dw_loc_opc != b->dw_loc_opc)
1494 return false;
1495
1496 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1497 address size, but since we always allocate cleared storage it
1498 should be zero for other types of locations. */
1499 if (a->dtprel != b->dtprel)
1500 return false;
1501
1502 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1503 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1504 }
1505
1506 /* Compare two complete location expressions for exact equality. */
1507
1508 bool
1509 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1510 {
1511 while (1)
1512 {
1513 if (a == b)
1514 return true;
1515 if (a == NULL || b == NULL)
1516 return false;
1517 if (!loc_descr_equal_p_1 (a, b))
1518 return false;
1519
1520 a = a->dw_loc_next;
1521 b = b->dw_loc_next;
1522 }
1523 }
1524
1525
1526 /* Add a constant POLY_OFFSET to a location expression. */
1527
1528 static void
1529 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1530 {
1531 dw_loc_descr_ref loc;
1532 HOST_WIDE_INT *p;
1533
1534 gcc_assert (*list_head != NULL);
1535
1536 if (known_eq (poly_offset, 0))
1537 return;
1538
1539 /* Find the end of the chain. */
1540 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1541 ;
1542
1543 HOST_WIDE_INT offset;
1544 if (!poly_offset.is_constant (&offset))
1545 {
1546 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1547 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1548 return;
1549 }
1550
1551 p = NULL;
1552 if (loc->dw_loc_opc == DW_OP_fbreg
1553 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1554 p = &loc->dw_loc_oprnd1.v.val_int;
1555 else if (loc->dw_loc_opc == DW_OP_bregx)
1556 p = &loc->dw_loc_oprnd2.v.val_int;
1557
1558 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1559      offset. Don't optimize if a signed integer overflow would happen. */
1560 if (p != NULL
1561 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1562 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1563 *p += offset;
1564
1565 else if (offset > 0)
1566 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1567
1568 else
1569 {
1570 loc->dw_loc_next
1571 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1572 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1573 }
1574 }
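
/* Worked example for the constant case: if the expression currently ends
   in DW_OP_fbreg -24, loc_descr_plus_const with offset 8 simply rewrites
   that terminator to DW_OP_fbreg -16; if it ends in DW_OP_deref instead,
   a DW_OP_plus_uconst 8 is appended (or, for a negative offset, the
   magnitude is pushed and DW_OP_minus appended).  */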
1575
1576 /* Return a pointer to a newly allocated location description for
1577 REG and OFFSET. */
1578
1579 static inline dw_loc_descr_ref
1580 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1581 {
1582 HOST_WIDE_INT const_offset;
1583 if (offset.is_constant (&const_offset))
1584 {
1585 if (reg <= 31)
1586 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1587 const_offset, 0);
1588 else
1589 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1590 }
1591 else
1592 {
1593 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1594 loc_descr_plus_const (&ret, offset);
1595 return ret;
1596 }
1597 }
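
/* E.g. new_reg_loc_descr (5, 8) produces DW_OP_breg5 8, while
   new_reg_loc_descr (40, 8) must fall back to DW_OP_bregx 40, 8 because
   only registers 0..31 have dedicated breg opcodes.  */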
1598
1599 /* Add a constant OFFSET to a location list. */
1600
1601 static void
1602 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1603 {
1604 dw_loc_list_ref d;
1605 for (d = list_head; d != NULL; d = d->dw_loc_next)
1606 loc_descr_plus_const (&d->expr, offset);
1607 }
1608
1609 #define DWARF_REF_SIZE \
1610 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
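
/* DW_FORM_ref_addr (and DW_OP_call_ref operands) were address-sized in
   DWARF 2 and became offset-sized in DWARF 3, hence the version check
   above.  */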
1611
1612 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1613 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1614 DW_FORM_data16 with 128 bits. */
1615 #define DWARF_LARGEST_DATA_FORM_BITS \
1616 (dwarf_version >= 5 ? 128 : 64)
1617
1618 /* Utility inline function for construction of ops that were GNU extensions
1619 before DWARF 5. */
1620 static inline enum dwarf_location_atom
1621 dwarf_OP (enum dwarf_location_atom op)
1622 {
1623 switch (op)
1624 {
1625 case DW_OP_implicit_pointer:
1626 if (dwarf_version < 5)
1627 return DW_OP_GNU_implicit_pointer;
1628 break;
1629
1630 case DW_OP_entry_value:
1631 if (dwarf_version < 5)
1632 return DW_OP_GNU_entry_value;
1633 break;
1634
1635 case DW_OP_const_type:
1636 if (dwarf_version < 5)
1637 return DW_OP_GNU_const_type;
1638 break;
1639
1640 case DW_OP_regval_type:
1641 if (dwarf_version < 5)
1642 return DW_OP_GNU_regval_type;
1643 break;
1644
1645 case DW_OP_deref_type:
1646 if (dwarf_version < 5)
1647 return DW_OP_GNU_deref_type;
1648 break;
1649
1650 case DW_OP_convert:
1651 if (dwarf_version < 5)
1652 return DW_OP_GNU_convert;
1653 break;
1654
1655 case DW_OP_reinterpret:
1656 if (dwarf_version < 5)
1657 return DW_OP_GNU_reinterpret;
1658 break;
1659
1660 case DW_OP_addrx:
1661 if (dwarf_version < 5)
1662 return DW_OP_GNU_addr_index;
1663 break;
1664
1665 case DW_OP_constx:
1666 if (dwarf_version < 5)
1667 return DW_OP_GNU_const_index;
1668 break;
1669
1670 default:
1671 break;
1672 }
1673 return op;
1674 }
1675
1676 /* Similarly for attributes. */
1677 static inline enum dwarf_attribute
1678 dwarf_AT (enum dwarf_attribute at)
1679 {
1680 switch (at)
1681 {
1682 case DW_AT_call_return_pc:
1683 if (dwarf_version < 5)
1684 return DW_AT_low_pc;
1685 break;
1686
1687 case DW_AT_call_tail_call:
1688 if (dwarf_version < 5)
1689 return DW_AT_GNU_tail_call;
1690 break;
1691
1692 case DW_AT_call_origin:
1693 if (dwarf_version < 5)
1694 return DW_AT_abstract_origin;
1695 break;
1696
1697 case DW_AT_call_target:
1698 if (dwarf_version < 5)
1699 return DW_AT_GNU_call_site_target;
1700 break;
1701
1702 case DW_AT_call_target_clobbered:
1703 if (dwarf_version < 5)
1704 return DW_AT_GNU_call_site_target_clobbered;
1705 break;
1706
1707 case DW_AT_call_parameter:
1708 if (dwarf_version < 5)
1709 return DW_AT_abstract_origin;
1710 break;
1711
1712 case DW_AT_call_value:
1713 if (dwarf_version < 5)
1714 return DW_AT_GNU_call_site_value;
1715 break;
1716
1717 case DW_AT_call_data_value:
1718 if (dwarf_version < 5)
1719 return DW_AT_GNU_call_site_data_value;
1720 break;
1721
1722 case DW_AT_call_all_calls:
1723 if (dwarf_version < 5)
1724 return DW_AT_GNU_all_call_sites;
1725 break;
1726
1727 case DW_AT_call_all_tail_calls:
1728 if (dwarf_version < 5)
1729 return DW_AT_GNU_all_tail_call_sites;
1730 break;
1731
1732 case DW_AT_dwo_name:
1733 if (dwarf_version < 5)
1734 return DW_AT_GNU_dwo_name;
1735 break;
1736
1737 case DW_AT_addr_base:
1738 if (dwarf_version < 5)
1739 return DW_AT_GNU_addr_base;
1740 break;
1741
1742 default:
1743 break;
1744 }
1745 return at;
1746 }
1747
1748 /* And similarly for tags. */
1749 static inline enum dwarf_tag
1750 dwarf_TAG (enum dwarf_tag tag)
1751 {
1752 switch (tag)
1753 {
1754 case DW_TAG_call_site:
1755 if (dwarf_version < 5)
1756 return DW_TAG_GNU_call_site;
1757 break;
1758
1759 case DW_TAG_call_site_parameter:
1760 if (dwarf_version < 5)
1761 return DW_TAG_GNU_call_site_parameter;
1762 break;
1763
1764 default:
1765 break;
1766 }
1767 return tag;
1768 }
1769
1770 /* And similarly for forms. */
1771 static inline enum dwarf_form
1772 dwarf_FORM (enum dwarf_form form)
1773 {
1774 switch (form)
1775 {
1776 case DW_FORM_addrx:
1777 if (dwarf_version < 5)
1778 return DW_FORM_GNU_addr_index;
1779 break;
1780
1781 case DW_FORM_strx:
1782 if (dwarf_version < 5)
1783 return DW_FORM_GNU_str_index;
1784 break;
1785
1786 default:
1787 break;
1788 }
1789 return form;
1790 }
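
/* A usage note on the four helpers above: callers name the DWARF 5
   constant and let the helper degrade it, e.g. dwarf_TAG
   (DW_TAG_call_site) yields DW_TAG_GNU_call_site when dwarf_version < 5
   and DW_TAG_call_site itself otherwise, so call-site emission code does
   not need per-version conditionals.  */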
1791
1792 static unsigned long int get_base_type_offset (dw_die_ref);
1793
1794 /* Return the size of a location descriptor. */
1795
1796 static unsigned long
1797 size_of_loc_descr (dw_loc_descr_ref loc)
1798 {
1799 unsigned long size = 1;
1800
1801 switch (loc->dw_loc_opc)
1802 {
1803 case DW_OP_addr:
1804 size += DWARF2_ADDR_SIZE;
1805 break;
1806 case DW_OP_GNU_addr_index:
1807 case DW_OP_addrx:
1808 case DW_OP_GNU_const_index:
1809 case DW_OP_constx:
1810 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1811 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1812 break;
1813 case DW_OP_const1u:
1814 case DW_OP_const1s:
1815 size += 1;
1816 break;
1817 case DW_OP_const2u:
1818 case DW_OP_const2s:
1819 size += 2;
1820 break;
1821 case DW_OP_const4u:
1822 case DW_OP_const4s:
1823 size += 4;
1824 break;
1825 case DW_OP_const8u:
1826 case DW_OP_const8s:
1827 size += 8;
1828 break;
1829 case DW_OP_constu:
1830 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1831 break;
1832 case DW_OP_consts:
1833 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1834 break;
1835 case DW_OP_pick:
1836 size += 1;
1837 break;
1838 case DW_OP_plus_uconst:
1839 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1840 break;
1841 case DW_OP_skip:
1842 case DW_OP_bra:
1843 size += 2;
1844 break;
1845 case DW_OP_breg0:
1846 case DW_OP_breg1:
1847 case DW_OP_breg2:
1848 case DW_OP_breg3:
1849 case DW_OP_breg4:
1850 case DW_OP_breg5:
1851 case DW_OP_breg6:
1852 case DW_OP_breg7:
1853 case DW_OP_breg8:
1854 case DW_OP_breg9:
1855 case DW_OP_breg10:
1856 case DW_OP_breg11:
1857 case DW_OP_breg12:
1858 case DW_OP_breg13:
1859 case DW_OP_breg14:
1860 case DW_OP_breg15:
1861 case DW_OP_breg16:
1862 case DW_OP_breg17:
1863 case DW_OP_breg18:
1864 case DW_OP_breg19:
1865 case DW_OP_breg20:
1866 case DW_OP_breg21:
1867 case DW_OP_breg22:
1868 case DW_OP_breg23:
1869 case DW_OP_breg24:
1870 case DW_OP_breg25:
1871 case DW_OP_breg26:
1872 case DW_OP_breg27:
1873 case DW_OP_breg28:
1874 case DW_OP_breg29:
1875 case DW_OP_breg30:
1876 case DW_OP_breg31:
1877 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1878 break;
1879 case DW_OP_regx:
1880 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1881 break;
1882 case DW_OP_fbreg:
1883 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1884 break;
1885 case DW_OP_bregx:
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1887 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1888 break;
1889 case DW_OP_piece:
1890 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1891 break;
1892 case DW_OP_bit_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1895 break;
1896 case DW_OP_deref_size:
1897 case DW_OP_xderef_size:
1898 size += 1;
1899 break;
1900 case DW_OP_call2:
1901 size += 2;
1902 break;
1903 case DW_OP_call4:
1904 size += 4;
1905 break;
1906 case DW_OP_call_ref:
1907 case DW_OP_GNU_variable_value:
1908 size += DWARF_REF_SIZE;
1909 break;
1910 case DW_OP_implicit_value:
1911 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1912 + loc->dw_loc_oprnd1.v.val_unsigned;
1913 break;
1914 case DW_OP_implicit_pointer:
1915 case DW_OP_GNU_implicit_pointer:
1916 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1917 break;
1918 case DW_OP_entry_value:
1919 case DW_OP_GNU_entry_value:
1920 {
1921 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1922 size += size_of_uleb128 (op_size) + op_size;
1923 break;
1924 }
1925 case DW_OP_const_type:
1926 case DW_OP_GNU_const_type:
1927 {
1928 unsigned long o
1929 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1930 size += size_of_uleb128 (o) + 1;
1931 switch (loc->dw_loc_oprnd2.val_class)
1932 {
1933 case dw_val_class_vec:
1934 size += loc->dw_loc_oprnd2.v.val_vec.length
1935 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1936 break;
1937 case dw_val_class_const:
1938 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1939 break;
1940 case dw_val_class_const_double:
1941 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_wide_int:
1944 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1945 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1946 break;
1947 default:
1948 gcc_unreachable ();
1949 }
1950 break;
1951 }
1952 case DW_OP_regval_type:
1953 case DW_OP_GNU_regval_type:
1954 {
1955 unsigned long o
1956 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1957 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1958 + size_of_uleb128 (o);
1959 }
1960 break;
1961 case DW_OP_deref_type:
1962 case DW_OP_GNU_deref_type:
1963 {
1964 unsigned long o
1965 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1966 size += 1 + size_of_uleb128 (o);
1967 }
1968 break;
1969 case DW_OP_convert:
1970 case DW_OP_reinterpret:
1971 case DW_OP_GNU_convert:
1972 case DW_OP_GNU_reinterpret:
1973 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1974 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1975 else
1976 {
1977 unsigned long o
1978 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1979 size += size_of_uleb128 (o);
1980 }
1981 break;
1982 case DW_OP_GNU_parameter_ref:
1983 size += 4;
1984 break;
1985 default:
1986 break;
1987 }
1988
1989 return size;
1990 }
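
/* Worked example (illustrative only): a DW_OP_plus_uconst with operand
   1000 takes 1 opcode byte plus size_of_uleb128 (1000) == 2 bytes, i.e.
   3 bytes in total, while a DW_OP_addr always takes 1 + DWARF2_ADDR_SIZE
   bytes.  */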
1991
1992 /* Return the size of a series of location descriptors. */
1993
1994 unsigned long
1995 size_of_locs (dw_loc_descr_ref loc)
1996 {
1997 dw_loc_descr_ref l;
1998 unsigned long size;
1999
2000 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2001 field, to avoid writing to a PCH file. */
2002 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2003 {
2004 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2005 break;
2006 size += size_of_loc_descr (l);
2007 }
2008 if (! l)
2009 return size;
2010
2011 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2012 {
2013 l->dw_loc_addr = size;
2014 size += size_of_loc_descr (l);
2015 }
2016
2017 return size;
2018 }
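
/* Illustrative note: for a (hypothetical) expression such as
     DW_OP_bra <L>; DW_OP_lit0; L: DW_OP_lit1
   the first loop above stops at the DW_OP_bra and the second loop runs
   instead, recording each operation's byte offset in dw_loc_addr so that
   output_loc_operands can later compute the 2-byte branch displacement.  */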
2019
2020 /* Return the size of the value in a DW_AT_discr_value attribute. */
2021
2022 static int
2023 size_of_discr_value (dw_discr_value *discr_value)
2024 {
2025 if (discr_value->pos)
2026 return size_of_uleb128 (discr_value->v.uval);
2027 else
2028 return size_of_sleb128 (discr_value->v.sval);
2029 }
2030
2031 /* Return the size of the value in a DW_AT_discr_list attribute. */
2032
2033 static int
2034 size_of_discr_list (dw_discr_list_ref discr_list)
2035 {
2036 int size = 0;
2037
2038 for (dw_discr_list_ref list = discr_list;
2039 list != NULL;
2040 list = list->dw_discr_next)
2041 {
2042 /* One byte for the discriminant value descriptor, and then one or two
2043 LEB128 numbers, depending on whether it's a single case label or a
2044 range label. */
2045 size += 1;
2046 size += size_of_discr_value (&list->dw_discr_lower_bound);
2047 if (list->dw_discr_range != 0)
2048 size += size_of_discr_value (&list->dw_discr_upper_bound);
2049 }
2050 return size;
2051 }
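
/* Worked example (illustrative only, assuming every value fits in one
   LEB128 byte): a discriminant list holding the single label 3 followed
   by the range 10..20 takes 1 + 1 == 2 bytes for the first entry and
   1 + 1 + 1 == 3 bytes for the second, 5 bytes in total.  */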
2052
2053 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2054 static void get_ref_die_offset_label (char *, dw_die_ref);
2055 static unsigned long int get_ref_die_offset (dw_die_ref);
2056
2057 /* Output location description stack opcode's operands (if any).
2058 The for_eh_or_skip parameter controls whether register numbers are
2059 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2060 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2061 info). This should be suppressed for the cases that have not been converted
2062 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2063
2064 static void
2065 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2066 {
2067 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2068 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2069
2070 switch (loc->dw_loc_opc)
2071 {
2072 #ifdef DWARF2_DEBUGGING_INFO
2073 case DW_OP_const2u:
2074 case DW_OP_const2s:
2075 dw2_asm_output_data (2, val1->v.val_int, NULL);
2076 break;
2077 case DW_OP_const4u:
2078 if (loc->dtprel)
2079 {
2080 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2081 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2082 val1->v.val_addr);
2083 fputc ('\n', asm_out_file);
2084 break;
2085 }
2086 /* FALLTHRU */
2087 case DW_OP_const4s:
2088 dw2_asm_output_data (4, val1->v.val_int, NULL);
2089 break;
2090 case DW_OP_const8u:
2091 if (loc->dtprel)
2092 {
2093 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2094 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2095 val1->v.val_addr);
2096 fputc ('\n', asm_out_file);
2097 break;
2098 }
2099 /* FALLTHRU */
2100 case DW_OP_const8s:
2101 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2102 dw2_asm_output_data (8, val1->v.val_int, NULL);
2103 break;
2104 case DW_OP_skip:
2105 case DW_OP_bra:
2106 {
2107 int offset;
2108
2109 gcc_assert (val1->val_class == dw_val_class_loc);
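	/* The branch operand is relative to the address just past this
	   operation, which is 1 opcode byte plus the 2-byte operand,
	   hence the "+ 3" below.  */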
2110 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2111
2112 dw2_asm_output_data (2, offset, NULL);
2113 }
2114 break;
2115 case DW_OP_implicit_value:
2116 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2117 switch (val2->val_class)
2118 {
2119 case dw_val_class_const:
2120 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2121 break;
2122 case dw_val_class_vec:
2123 {
2124 unsigned int elt_size = val2->v.val_vec.elt_size;
2125 unsigned int len = val2->v.val_vec.length;
2126 unsigned int i;
2127 unsigned char *p;
2128
2129 if (elt_size > sizeof (HOST_WIDE_INT))
2130 {
2131 elt_size /= 2;
2132 len *= 2;
2133 }
2134 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2135 i < len;
2136 i++, p += elt_size)
2137 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2138 "fp or vector constant word %u", i);
2139 }
2140 break;
2141 case dw_val_class_const_double:
2142 {
2143 unsigned HOST_WIDE_INT first, second;
2144
2145 if (WORDS_BIG_ENDIAN)
2146 {
2147 first = val2->v.val_double.high;
2148 second = val2->v.val_double.low;
2149 }
2150 else
2151 {
2152 first = val2->v.val_double.low;
2153 second = val2->v.val_double.high;
2154 }
2155 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2156 first, NULL);
2157 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2158 second, NULL);
2159 }
2160 break;
2161 case dw_val_class_wide_int:
2162 {
2163 int i;
2164 int len = get_full_len (*val2->v.val_wide);
2165 if (WORDS_BIG_ENDIAN)
2166 for (i = len - 1; i >= 0; --i)
2167 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2168 val2->v.val_wide->elt (i), NULL);
2169 else
2170 for (i = 0; i < len; ++i)
2171 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2172 val2->v.val_wide->elt (i), NULL);
2173 }
2174 break;
2175 case dw_val_class_addr:
2176 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2177 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2178 break;
2179 default:
2180 gcc_unreachable ();
2181 }
2182 break;
2183 #else
2184 case DW_OP_const2u:
2185 case DW_OP_const2s:
2186 case DW_OP_const4u:
2187 case DW_OP_const4s:
2188 case DW_OP_const8u:
2189 case DW_OP_const8s:
2190 case DW_OP_skip:
2191 case DW_OP_bra:
2192 case DW_OP_implicit_value:
2193 /* We currently don't make any attempt to make sure these are
2194 aligned properly like we do for the main unwind info, so we
2195 don't support emitting things larger than a byte if we're
2196 only doing unwinding. */
2197 gcc_unreachable ();
2198 #endif
2199 case DW_OP_const1u:
2200 case DW_OP_const1s:
2201 dw2_asm_output_data (1, val1->v.val_int, NULL);
2202 break;
2203 case DW_OP_constu:
2204 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2205 break;
2206 case DW_OP_consts:
2207 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2208 break;
2209 case DW_OP_pick:
2210 dw2_asm_output_data (1, val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_plus_uconst:
2213 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2214 break;
2215 case DW_OP_breg0:
2216 case DW_OP_breg1:
2217 case DW_OP_breg2:
2218 case DW_OP_breg3:
2219 case DW_OP_breg4:
2220 case DW_OP_breg5:
2221 case DW_OP_breg6:
2222 case DW_OP_breg7:
2223 case DW_OP_breg8:
2224 case DW_OP_breg9:
2225 case DW_OP_breg10:
2226 case DW_OP_breg11:
2227 case DW_OP_breg12:
2228 case DW_OP_breg13:
2229 case DW_OP_breg14:
2230 case DW_OP_breg15:
2231 case DW_OP_breg16:
2232 case DW_OP_breg17:
2233 case DW_OP_breg18:
2234 case DW_OP_breg19:
2235 case DW_OP_breg20:
2236 case DW_OP_breg21:
2237 case DW_OP_breg22:
2238 case DW_OP_breg23:
2239 case DW_OP_breg24:
2240 case DW_OP_breg25:
2241 case DW_OP_breg26:
2242 case DW_OP_breg27:
2243 case DW_OP_breg28:
2244 case DW_OP_breg29:
2245 case DW_OP_breg30:
2246 case DW_OP_breg31:
2247 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2248 break;
2249 case DW_OP_regx:
2250 {
2251 unsigned r = val1->v.val_unsigned;
2252 if (for_eh_or_skip >= 0)
2253 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2254 gcc_assert (size_of_uleb128 (r)
2255 == size_of_uleb128 (val1->v.val_unsigned));
2256 dw2_asm_output_data_uleb128 (r, NULL);
2257 }
2258 break;
2259 case DW_OP_fbreg:
2260 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2261 break;
2262 case DW_OP_bregx:
2263 {
2264 unsigned r = val1->v.val_unsigned;
2265 if (for_eh_or_skip >= 0)
2266 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2267 gcc_assert (size_of_uleb128 (r)
2268 == size_of_uleb128 (val1->v.val_unsigned));
2269 dw2_asm_output_data_uleb128 (r, NULL);
2270 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2271 }
2272 break;
2273 case DW_OP_piece:
2274 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2275 break;
2276 case DW_OP_bit_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2279 break;
2280 case DW_OP_deref_size:
2281 case DW_OP_xderef_size:
2282 dw2_asm_output_data (1, val1->v.val_int, NULL);
2283 break;
2284
2285 case DW_OP_addr:
2286 if (loc->dtprel)
2287 {
2288 if (targetm.asm_out.output_dwarf_dtprel)
2289 {
2290 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2291 DWARF2_ADDR_SIZE,
2292 val1->v.val_addr);
2293 fputc ('\n', asm_out_file);
2294 }
2295 else
2296 gcc_unreachable ();
2297 }
2298 else
2299 {
2300 #ifdef DWARF2_DEBUGGING_INFO
2301 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2302 #else
2303 gcc_unreachable ();
2304 #endif
2305 }
2306 break;
2307
2308 case DW_OP_GNU_addr_index:
2309 case DW_OP_addrx:
2310 case DW_OP_GNU_const_index:
2311 case DW_OP_constx:
2312 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2313 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2314 "(index into .debug_addr)");
2315 break;
2316
2317 case DW_OP_call2:
2318 case DW_OP_call4:
2319 {
2320 unsigned long die_offset
2321 = get_ref_die_offset (val1->v.val_die_ref.die);
2322 /* Make sure the offset has been computed and that we can encode it as
2323 an operand. */
2324 gcc_assert (die_offset > 0
2325 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2326 ? 0xffff
2327 : 0xffffffff));
2328 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2329 die_offset, NULL);
2330 }
2331 break;
2332
2333 case DW_OP_call_ref:
2334 case DW_OP_GNU_variable_value:
2335 {
2336 char label[MAX_ARTIFICIAL_LABEL_BYTES
2337 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2338 gcc_assert (val1->val_class == dw_val_class_die_ref);
2339 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2340 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2341 }
2342 break;
2343
2344 case DW_OP_implicit_pointer:
2345 case DW_OP_GNU_implicit_pointer:
2346 {
2347 char label[MAX_ARTIFICIAL_LABEL_BYTES
2348 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2349 gcc_assert (val1->val_class == dw_val_class_die_ref);
2350 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2351 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2352 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2353 }
2354 break;
2355
2356 case DW_OP_entry_value:
2357 case DW_OP_GNU_entry_value:
2358 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2359 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2360 break;
2361
2362 case DW_OP_const_type:
2363 case DW_OP_GNU_const_type:
2364 {
2365 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2366 gcc_assert (o);
2367 dw2_asm_output_data_uleb128 (o, NULL);
2368 switch (val2->val_class)
2369 {
2370 case dw_val_class_const:
2371 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2372 dw2_asm_output_data (1, l, NULL);
2373 dw2_asm_output_data (l, val2->v.val_int, NULL);
2374 break;
2375 case dw_val_class_vec:
2376 {
2377 unsigned int elt_size = val2->v.val_vec.elt_size;
2378 unsigned int len = val2->v.val_vec.length;
2379 unsigned int i;
2380 unsigned char *p;
2381
2382 l = len * elt_size;
2383 dw2_asm_output_data (1, l, NULL);
2384 if (elt_size > sizeof (HOST_WIDE_INT))
2385 {
2386 elt_size /= 2;
2387 len *= 2;
2388 }
2389 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2390 i < len;
2391 i++, p += elt_size)
2392 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2393 "fp or vector constant word %u", i);
2394 }
2395 break;
2396 case dw_val_class_const_double:
2397 {
2398 unsigned HOST_WIDE_INT first, second;
2399 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2400
2401 dw2_asm_output_data (1, 2 * l, NULL);
2402 if (WORDS_BIG_ENDIAN)
2403 {
2404 first = val2->v.val_double.high;
2405 second = val2->v.val_double.low;
2406 }
2407 else
2408 {
2409 first = val2->v.val_double.low;
2410 second = val2->v.val_double.high;
2411 }
2412 dw2_asm_output_data (l, first, NULL);
2413 dw2_asm_output_data (l, second, NULL);
2414 }
2415 break;
2416 case dw_val_class_wide_int:
2417 {
2418 int i;
2419 int len = get_full_len (*val2->v.val_wide);
2420 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2421
2422 dw2_asm_output_data (1, len * l, NULL);
2423 if (WORDS_BIG_ENDIAN)
2424 for (i = len - 1; i >= 0; --i)
2425 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2426 else
2427 for (i = 0; i < len; ++i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 }
2430 break;
2431 default:
2432 gcc_unreachable ();
2433 }
2434 }
2435 break;
2436 case DW_OP_regval_type:
2437 case DW_OP_GNU_regval_type:
2438 {
2439 unsigned r = val1->v.val_unsigned;
2440 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2441 gcc_assert (o);
2442 if (for_eh_or_skip >= 0)
2443 {
2444 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2445 gcc_assert (size_of_uleb128 (r)
2446 == size_of_uleb128 (val1->v.val_unsigned));
2447 }
2448 dw2_asm_output_data_uleb128 (r, NULL);
2449 dw2_asm_output_data_uleb128 (o, NULL);
2450 }
2451 break;
2452 case DW_OP_deref_type:
2453 case DW_OP_GNU_deref_type:
2454 {
2455 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2456 gcc_assert (o);
2457 dw2_asm_output_data (1, val1->v.val_int, NULL);
2458 dw2_asm_output_data_uleb128 (o, NULL);
2459 }
2460 break;
2461 case DW_OP_convert:
2462 case DW_OP_reinterpret:
2463 case DW_OP_GNU_convert:
2464 case DW_OP_GNU_reinterpret:
2465 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2466 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2467 else
2468 {
2469 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2470 gcc_assert (o);
2471 dw2_asm_output_data_uleb128 (o, NULL);
2472 }
2473 break;
2474
2475 case DW_OP_GNU_parameter_ref:
2476 {
2477 unsigned long o;
2478 gcc_assert (val1->val_class == dw_val_class_die_ref);
2479 o = get_ref_die_offset (val1->v.val_die_ref.die);
2480 dw2_asm_output_data (4, o, NULL);
2481 }
2482 break;
2483
2484 default:
2485 /* Other codes have no operands. */
2486 break;
2487 }
2488 }
2489
2490 /* Output a sequence of location operations.
2491 The for_eh_or_skip parameter controls whether register numbers are
2492 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2493 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2494 info). This should be suppressed for the cases that have not been converted
2495 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2496
2497 void
2498 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2499 {
2500 for (; loc != NULL; loc = loc->dw_loc_next)
2501 {
2502 enum dwarf_location_atom opc = loc->dw_loc_opc;
2503 /* Output the opcode. */
2504 if (for_eh_or_skip >= 0
2505 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2506 {
2507 unsigned r = (opc - DW_OP_breg0);
2508 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2509 gcc_assert (r <= 31);
2510 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2511 }
2512 else if (for_eh_or_skip >= 0
2513 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2514 {
2515 unsigned r = (opc - DW_OP_reg0);
2516 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2517 gcc_assert (r <= 31);
2518 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2519 }
2520
2521 dw2_asm_output_data (1, opc,
2522 "%s", dwarf_stack_op_name (opc));
2523
2524 /* Output the operand(s) (if any). */
2525 output_loc_operands (loc, for_eh_or_skip);
2526 }
2527 }
2528
2529 /* Output location description stack opcode's operands (if any).
2530 The output is single bytes on a line, suitable for .cfi_escape. */
2531
2532 static void
2533 output_loc_operands_raw (dw_loc_descr_ref loc)
2534 {
2535 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2536 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2537
2538 switch (loc->dw_loc_opc)
2539 {
2540 case DW_OP_addr:
2541 case DW_OP_GNU_addr_index:
2542 case DW_OP_addrx:
2543 case DW_OP_GNU_const_index:
2544 case DW_OP_constx:
2545 case DW_OP_implicit_value:
2546 /* We cannot output addresses in .cfi_escape, only bytes. */
2547 gcc_unreachable ();
2548
2549 case DW_OP_const1u:
2550 case DW_OP_const1s:
2551 case DW_OP_pick:
2552 case DW_OP_deref_size:
2553 case DW_OP_xderef_size:
2554 fputc (',', asm_out_file);
2555 dw2_asm_output_data_raw (1, val1->v.val_int);
2556 break;
2557
2558 case DW_OP_const2u:
2559 case DW_OP_const2s:
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_raw (2, val1->v.val_int);
2562 break;
2563
2564 case DW_OP_const4u:
2565 case DW_OP_const4s:
2566 fputc (',', asm_out_file);
2567 dw2_asm_output_data_raw (4, val1->v.val_int);
2568 break;
2569
2570 case DW_OP_const8u:
2571 case DW_OP_const8s:
2572 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2573 fputc (',', asm_out_file);
2574 dw2_asm_output_data_raw (8, val1->v.val_int);
2575 break;
2576
2577 case DW_OP_skip:
2578 case DW_OP_bra:
2579 {
2580 int offset;
2581
2582 gcc_assert (val1->val_class == dw_val_class_loc);
2583 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2584
2585 fputc (',', asm_out_file);
2586 dw2_asm_output_data_raw (2, offset);
2587 }
2588 break;
2589
2590 case DW_OP_regx:
2591 {
2592 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2593 gcc_assert (size_of_uleb128 (r)
2594 == size_of_uleb128 (val1->v.val_unsigned));
2595 fputc (',', asm_out_file);
2596 dw2_asm_output_data_uleb128_raw (r);
2597 }
2598 break;
2599
2600 case DW_OP_constu:
2601 case DW_OP_plus_uconst:
2602 case DW_OP_piece:
2603 fputc (',', asm_out_file);
2604 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2605 break;
2606
2607 case DW_OP_bit_piece:
2608 fputc (',', asm_out_file);
2609 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2610 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2611 break;
2612
2613 case DW_OP_consts:
2614 case DW_OP_breg0:
2615 case DW_OP_breg1:
2616 case DW_OP_breg2:
2617 case DW_OP_breg3:
2618 case DW_OP_breg4:
2619 case DW_OP_breg5:
2620 case DW_OP_breg6:
2621 case DW_OP_breg7:
2622 case DW_OP_breg8:
2623 case DW_OP_breg9:
2624 case DW_OP_breg10:
2625 case DW_OP_breg11:
2626 case DW_OP_breg12:
2627 case DW_OP_breg13:
2628 case DW_OP_breg14:
2629 case DW_OP_breg15:
2630 case DW_OP_breg16:
2631 case DW_OP_breg17:
2632 case DW_OP_breg18:
2633 case DW_OP_breg19:
2634 case DW_OP_breg20:
2635 case DW_OP_breg21:
2636 case DW_OP_breg22:
2637 case DW_OP_breg23:
2638 case DW_OP_breg24:
2639 case DW_OP_breg25:
2640 case DW_OP_breg26:
2641 case DW_OP_breg27:
2642 case DW_OP_breg28:
2643 case DW_OP_breg29:
2644 case DW_OP_breg30:
2645 case DW_OP_breg31:
2646 case DW_OP_fbreg:
2647 fputc (',', asm_out_file);
2648 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2649 break;
2650
2651 case DW_OP_bregx:
2652 {
2653 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2654 gcc_assert (size_of_uleb128 (r)
2655 == size_of_uleb128 (val1->v.val_unsigned));
2656 fputc (',', asm_out_file);
2657 dw2_asm_output_data_uleb128_raw (r);
2658 fputc (',', asm_out_file);
2659 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2660 }
2661 break;
2662
2663 case DW_OP_implicit_pointer:
2664 case DW_OP_entry_value:
2665 case DW_OP_const_type:
2666 case DW_OP_regval_type:
2667 case DW_OP_deref_type:
2668 case DW_OP_convert:
2669 case DW_OP_reinterpret:
2670 case DW_OP_GNU_implicit_pointer:
2671 case DW_OP_GNU_entry_value:
2672 case DW_OP_GNU_const_type:
2673 case DW_OP_GNU_regval_type:
2674 case DW_OP_GNU_deref_type:
2675 case DW_OP_GNU_convert:
2676 case DW_OP_GNU_reinterpret:
2677 case DW_OP_GNU_parameter_ref:
2678 gcc_unreachable ();
2679 break;
2680
2681 default:
2682 /* Other codes have no operands. */
2683 break;
2684 }
2685 }
2686
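/* Output a sequence of location operations as raw comma-separated bytes,
   in the form produced by output_loc_operands_raw, suitable for use in a
   .cfi_escape directive.  */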
2687 void
2688 output_loc_sequence_raw (dw_loc_descr_ref loc)
2689 {
2690 while (1)
2691 {
2692 enum dwarf_location_atom opc = loc->dw_loc_opc;
2693 /* Output the opcode. */
2694 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2695 {
2696 unsigned r = (opc - DW_OP_breg0);
2697 r = DWARF2_FRAME_REG_OUT (r, 1);
2698 gcc_assert (r <= 31);
2699 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2700 }
2701 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2702 {
2703 unsigned r = (opc - DW_OP_reg0);
2704 r = DWARF2_FRAME_REG_OUT (r, 1);
2705 gcc_assert (r <= 31);
2706 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2707 }
2708 /* Output the opcode. */
2709 fprintf (asm_out_file, "%#x", opc);
2710 output_loc_operands_raw (loc);
2711
2712 if (!loc->dw_loc_next)
2713 break;
2714 loc = loc->dw_loc_next;
2715
2716 fputc (',', asm_out_file);
2717 }
2718 }
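
/* Illustrative example (byte values assume the standard DWARF encoding,
   in which DW_OP_fbreg is 0x91 and -8 encodes as the single SLEB128 byte
   0x78): the raw form of "DW_OP_fbreg -8" produced by the routines above
   is the two comma-separated byte values 0x91 and 0x78, ready to be fed
   to a .cfi_escape directive.  */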
2719
2720 /* This function builds a dwarf location descriptor sequence from a
2721 dw_cfa_location, adding the given OFFSET to the result of the
2722 expression. */
2723
2724 struct dw_loc_descr_node *
2725 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2726 {
2727 struct dw_loc_descr_node *head, *tmp;
2728
2729 offset += cfa->offset;
2730
2731 if (cfa->indirect)
2732 {
2733 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2734 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2735 head->dw_loc_oprnd1.val_entry = NULL;
2736 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2737 add_loc_descr (&head, tmp);
2738 loc_descr_plus_const (&head, offset);
2739 }
2740 else
2741 head = new_reg_loc_descr (cfa->reg, offset);
2742
2743 return head;
2744 }
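
/* Illustrative sketch (not authoritative; register numbers are DWARF
   column numbers): if the CFA is currently register 7 plus 16 with
   cfa->indirect clear and OFFSET is 8, the result is the single
   operation DW_OP_breg7 24; with cfa->indirect set it becomes
   DW_OP_breg7 <base_offset>; DW_OP_deref; DW_OP_plus_uconst 24.  */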
2745
2746 /* This function builds a dwarf location descriptor sequence for
2747 the address at OFFSET from the CFA when the stack is aligned to
2748 ALIGNMENT bytes. */
2749
2750 struct dw_loc_descr_node *
2751 build_cfa_aligned_loc (dw_cfa_location *cfa,
2752 poly_int64 offset, HOST_WIDE_INT alignment)
2753 {
2754 struct dw_loc_descr_node *head;
2755 unsigned int dwarf_fp
2756 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2757
2758 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2759 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2760 {
2761 head = new_reg_loc_descr (dwarf_fp, 0);
2762 add_loc_descr (&head, int_loc_descriptor (alignment));
2763 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2764 loc_descr_plus_const (&head, offset);
2765 }
2766 else
2767 head = new_reg_loc_descr (dwarf_fp, offset);
2768 return head;
2769 }
2770 \f
2771 /* And now, the support for symbolic debugging information. */
2772
2773 /* .debug_str support. */
2774
2775 static void dwarf2out_init (const char *);
2776 static void dwarf2out_finish (const char *);
2777 static void dwarf2out_early_finish (const char *);
2778 static void dwarf2out_assembly_start (void);
2779 static void dwarf2out_define (unsigned int, const char *);
2780 static void dwarf2out_undef (unsigned int, const char *);
2781 static void dwarf2out_start_source_file (unsigned, const char *);
2782 static void dwarf2out_end_source_file (unsigned);
2783 static void dwarf2out_function_decl (tree);
2784 static void dwarf2out_begin_block (unsigned, unsigned);
2785 static void dwarf2out_end_block (unsigned, unsigned);
2786 static bool dwarf2out_ignore_block (const_tree);
2787 static void dwarf2out_early_global_decl (tree);
2788 static void dwarf2out_late_global_decl (tree);
2789 static void dwarf2out_type_decl (tree, int);
2790 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2791 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2792 dw_die_ref);
2793 static void dwarf2out_abstract_function (tree);
2794 static void dwarf2out_var_location (rtx_insn *);
2795 static void dwarf2out_inline_entry (tree);
2796 static void dwarf2out_size_function (tree);
2797 static void dwarf2out_begin_function (tree);
2798 static void dwarf2out_end_function (unsigned int);
2799 static void dwarf2out_register_main_translation_unit (tree unit);
2800 static void dwarf2out_set_name (tree, tree);
2801 static void dwarf2out_register_external_die (tree decl, const char *sym,
2802 unsigned HOST_WIDE_INT off);
2803 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2804 unsigned HOST_WIDE_INT *off);
2805
2806 /* The debug hooks structure. */
2807
2808 const struct gcc_debug_hooks dwarf2_debug_hooks =
2809 {
2810 dwarf2out_init,
2811 dwarf2out_finish,
2812 dwarf2out_early_finish,
2813 dwarf2out_assembly_start,
2814 dwarf2out_define,
2815 dwarf2out_undef,
2816 dwarf2out_start_source_file,
2817 dwarf2out_end_source_file,
2818 dwarf2out_begin_block,
2819 dwarf2out_end_block,
2820 dwarf2out_ignore_block,
2821 dwarf2out_source_line,
2822 dwarf2out_begin_prologue,
2823 #if VMS_DEBUGGING_INFO
2824 dwarf2out_vms_end_prologue,
2825 dwarf2out_vms_begin_epilogue,
2826 #else
2827 debug_nothing_int_charstar,
2828 debug_nothing_int_charstar,
2829 #endif
2830 dwarf2out_end_epilogue,
2831 dwarf2out_begin_function,
2832 dwarf2out_end_function, /* end_function */
2833 dwarf2out_register_main_translation_unit,
2834 dwarf2out_function_decl, /* function_decl */
2835 dwarf2out_early_global_decl,
2836 dwarf2out_late_global_decl,
2837 dwarf2out_type_decl, /* type_decl */
2838 dwarf2out_imported_module_or_decl,
2839 dwarf2out_die_ref_for_decl,
2840 dwarf2out_register_external_die,
2841 debug_nothing_tree, /* deferred_inline_function */
2842 /* The DWARF 2 backend tries to reduce debugging bloat by not
2843 emitting the abstract description of inline functions until
2844 something tries to reference them. */
2845 dwarf2out_abstract_function, /* outlining_inline_function */
2846 debug_nothing_rtx_code_label, /* label */
2847 debug_nothing_int, /* handle_pch */
2848 dwarf2out_var_location,
2849 dwarf2out_inline_entry, /* inline_entry */
2850 dwarf2out_size_function, /* size_function */
2851 dwarf2out_switch_text_section,
2852 dwarf2out_set_name,
2853 1, /* start_end_main_source_file */
2854 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2855 };
2856
2857 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2858 {
2859 dwarf2out_init,
2860 debug_nothing_charstar,
2861 debug_nothing_charstar,
2862 dwarf2out_assembly_start,
2863 debug_nothing_int_charstar,
2864 debug_nothing_int_charstar,
2865 debug_nothing_int_charstar,
2866 debug_nothing_int,
2867 debug_nothing_int_int, /* begin_block */
2868 debug_nothing_int_int, /* end_block */
2869 debug_true_const_tree, /* ignore_block */
2870 dwarf2out_source_line, /* source_line */
2871 debug_nothing_int_int_charstar, /* begin_prologue */
2872 debug_nothing_int_charstar, /* end_prologue */
2873 debug_nothing_int_charstar, /* begin_epilogue */
2874 debug_nothing_int_charstar, /* end_epilogue */
2875 debug_nothing_tree, /* begin_function */
2876 debug_nothing_int, /* end_function */
2877 debug_nothing_tree, /* register_main_translation_unit */
2878 debug_nothing_tree, /* function_decl */
2879 debug_nothing_tree, /* early_global_decl */
2880 debug_nothing_tree, /* late_global_decl */
2881 debug_nothing_tree_int, /* type_decl */
2882 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2883 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2884 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2885 debug_nothing_tree, /* deferred_inline_function */
2886 debug_nothing_tree, /* outlining_inline_function */
2887 debug_nothing_rtx_code_label, /* label */
2888 debug_nothing_int, /* handle_pch */
2889 debug_nothing_rtx_insn, /* var_location */
2890 debug_nothing_tree, /* inline_entry */
2891 debug_nothing_tree, /* size_function */
2892 debug_nothing_void, /* switch_text_section */
2893 debug_nothing_tree_tree, /* set_name */
2894 0, /* start_end_main_source_file */
2895 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2896 };
2897 \f
2898 /* NOTE: In the comments in this file, many references are made to
2899 "Debugging Information Entries". This term is abbreviated as `DIE'
2900 throughout the remainder of this file. */
2901
2902 /* An internal representation of the DWARF output is built, and then
2903 walked to generate the DWARF debugging info. The walk of the internal
2904 representation is done after the entire program has been compiled.
2905 The types below are used to describe the internal representation. */
2906
2907 /* Whether to put type DIEs into their own section .debug_types instead
2908 of making them part of the .debug_info section. This is only supported
2909 for Dwarf V4 or higher, and only when the user didn't disable it through
2910 -fno-debug-types-section. It is more efficient to put them in
2911 separate comdat sections since the linker will then be able to
2912 remove duplicates. But not all tools support .debug_types sections
2913 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2914 type units use the DW_UT_type unit type in the .debug_info section. */
2915
2916 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2917
2918 /* Various DIE's use offsets relative to the beginning of the
2919 .debug_info section to refer to each other. */
2920
2921 typedef long int dw_offset;
2922
2923 struct comdat_type_node;
2924
2925 /* The entries in the line_info table more-or-less mirror the opcodes
2926 that are used in the real dwarf line table. Arrays of these entries
2927 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2928 supported. */
2929
2930 enum dw_line_info_opcode {
2931 /* Emit DW_LNE_set_address; the operand is the label index. */
2932 LI_set_address,
2933
2934 /* Emit a row to the matrix with the given line. This may be done
2935 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2936 special opcodes. */
2937 LI_set_line,
2938
2939 /* Emit a DW_LNS_set_file. */
2940 LI_set_file,
2941
2942 /* Emit a DW_LNS_set_column. */
2943 LI_set_column,
2944
2945 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2946 LI_negate_stmt,
2947
2948 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2949 LI_set_prologue_end,
2950 LI_set_epilogue_begin,
2951
2952 /* Emit a DW_LNE_set_discriminator. */
2953 LI_set_discriminator,
2954
2955 /* Output a Fixed Advance PC; the target PC is the label index; the
2956 base PC is the previous LI_adv_address or LI_set_address entry.
2957 We only use this when emitting debug views without assembler
2958 support, at explicit user request. Ideally, we should only use
2959 it when the offset might be zero but we can't tell: it's the only
2960 way to maybe change the PC without resetting the view number. */
2961 LI_adv_address
2962 };
2963
2964 typedef struct GTY(()) dw_line_info_struct {
2965 enum dw_line_info_opcode opcode;
2966 unsigned int val;
2967 } dw_line_info_entry;
2968
2969
2970 struct GTY(()) dw_line_info_table {
2971 /* The label that marks the end of this section. */
2972 const char *end_label;
2973
2974 /* The values for the last row of the matrix, as collected in the table.
2975 These are used to minimize the changes to the next row. */
2976 unsigned int file_num;
2977 unsigned int line_num;
2978 unsigned int column_num;
2979 int discrim_num;
2980 bool is_stmt;
2981 bool in_use;
2982
2983 /* This denotes the NEXT view number.
2984
2985 If it is 0, it is known that the NEXT view will be the first view
2986 at the given PC.
2987
2988 If it is -1, we're forcing the view number to be reset, e.g. at a
2989 function entry.
2990
2991 The meaning of other nonzero values depends on whether we're
2992 computing views internally or leaving it for the assembler to do
2993 so. If we're emitting them internally, view denotes the view
2994 number since the last known advance of PC. If we're leaving it
2995 for the assembler, it denotes the LVU label number that we're
2996 going to ask the assembler to assign. */
2997 var_loc_view view;
2998
2999 /* This counts the number of symbolic views emitted in this table
3000 since the latest view reset. Its max value, over all tables,
3001 sets symview_upper_bound. */
3002 var_loc_view symviews_since_reset;
3003
3004 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3005 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3006 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3007 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3008
3009 vec<dw_line_info_entry, va_gc> *entries;
3010 };
3011
3012 /* This is an upper bound for view numbers that the assembler may
3013 assign to symbolic views output in this translation unit. It is used to
3014 decide how big a field to use to represent view numbers in
3015 symview-classed attributes. */
3016
3017 static var_loc_view symview_upper_bound;
3018
3019 /* If we're keeping track of location views and their reset points, and
3020 INSN is a reset point (i.e., it necessarily advances the PC), mark
3021 the next view in TABLE as reset. */
3022
3023 static void
3024 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3025 {
3026 if (!debug_internal_reset_location_views)
3027 return;
3028
3029 /* Maybe turn (part of?) this test into a default target hook. */
3030 int reset = 0;
3031
3032 if (targetm.reset_location_view)
3033 reset = targetm.reset_location_view (insn);
3034
3035 if (reset)
3036 ;
3037 else if (JUMP_TABLE_DATA_P (insn))
3038 reset = 1;
3039 else if (GET_CODE (insn) == USE
3040 || GET_CODE (insn) == CLOBBER
3041 || GET_CODE (insn) == ASM_INPUT
3042 || asm_noperands (insn) >= 0)
3043 ;
3044 else if (get_attr_min_length (insn) > 0)
3045 reset = 1;
3046
3047 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3048 RESET_NEXT_VIEW (table->view);
3049 }
3050
3051 /* Each DIE attribute has a field specifying the attribute kind,
3052 a link to the next attribute in the chain, and an attribute value.
3053 Attributes are typically linked below the DIE they modify. */
3054
3055 typedef struct GTY(()) dw_attr_struct {
3056 enum dwarf_attribute dw_attr;
3057 dw_val_node dw_attr_val;
3058 }
3059 dw_attr_node;
3060
3061
3062 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3063 The children of each node form a circular list linked by
3064 die_sib. die_child points to the node *before* the "first" child node. */
3065
3066 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3067 union die_symbol_or_type_node
3068 {
3069 const char * GTY ((tag ("0"))) die_symbol;
3070 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3071 }
3072 GTY ((desc ("%0.comdat_type_p"))) die_id;
3073 vec<dw_attr_node, va_gc> *die_attr;
3074 dw_die_ref die_parent;
3075 dw_die_ref die_child;
3076 dw_die_ref die_sib;
3077 dw_die_ref die_definition; /* ref from a specification to its definition */
3078 dw_offset die_offset;
3079 unsigned long die_abbrev;
3080 int die_mark;
3081 unsigned int decl_id;
3082 enum dwarf_tag die_tag;
3083 /* Die is used and must not be pruned as unused. */
3084 BOOL_BITFIELD die_perennial_p : 1;
3085 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3086 /* For an external ref to die_symbol if die_offset contains an extra
3087 offset to that symbol. */
3088 BOOL_BITFIELD with_offset : 1;
3089 /* Whether this DIE was removed from the DIE tree, for example via
3090 prune_unused_types. We don't consider those present from the
3091 DIE lookup routines. */
3092 BOOL_BITFIELD removed : 1;
3093 /* Lots of spare bits. */
3094 }
3095 die_node;
3096
3097 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3098 static bool early_dwarf;
3099 static bool early_dwarf_finished;
3100 struct set_early_dwarf {
3101 bool saved;
3102 set_early_dwarf () : saved(early_dwarf)
3103 {
3104 gcc_assert (! early_dwarf_finished);
3105 early_dwarf = true;
3106 }
3107 ~set_early_dwarf () { early_dwarf = saved; }
3108 };
3109
3110 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3111 #define FOR_EACH_CHILD(die, c, expr) do { \
3112 c = die->die_child; \
3113 if (c) do { \
3114 c = c->die_sib; \
3115 expr; \
3116 } while (c != die->die_child); \
3117 } while (0)
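
/* Example usage (illustrative only): count the immediate children of DIE.

     unsigned int count = 0;
     dw_die_ref c;
     FOR_EACH_CHILD (die, c, count++);  */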
3118
3119 /* The pubname structure */
3120
3121 typedef struct GTY(()) pubname_struct {
3122 dw_die_ref die;
3123 const char *name;
3124 }
3125 pubname_entry;
3126
3127
3128 struct GTY(()) dw_ranges {
3129 const char *label;
3130 /* If this is positive, it's a block number, otherwise it's a
3131 bitwise-negated index into dw_ranges_by_label. */
3132 int num;
3133 /* Index for the range list for DW_FORM_rnglistx. */
3134 unsigned int idx : 31;
3135 /* True if this range might possibly be in a different section
3136 from the previous entry. */
3137 unsigned int maybe_new_sec : 1;
3138 };
3139
3140 /* A structure to hold a macinfo entry. */
3141
3142 typedef struct GTY(()) macinfo_struct {
3143 unsigned char code;
3144 unsigned HOST_WIDE_INT lineno;
3145 const char *info;
3146 }
3147 macinfo_entry;
3148
3149
3150 struct GTY(()) dw_ranges_by_label {
3151 const char *begin;
3152 const char *end;
3153 };
3154
3155 /* The comdat type node structure. */
3156 struct GTY(()) comdat_type_node
3157 {
3158 dw_die_ref root_die;
3159 dw_die_ref type_die;
3160 dw_die_ref skeleton_die;
3161 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3162 comdat_type_node *next;
3163 };
3164
3165 /* A list of DIEs for which we can't determine ancestry (parent_die
3166 field) just yet. Later in dwarf2out_finish we will fill in the
3167 missing bits. */
3168 typedef struct GTY(()) limbo_die_struct {
3169 dw_die_ref die;
3170 /* The tree for which this DIE was created. We use this to
3171 determine ancestry later. */
3172 tree created_for;
3173 struct limbo_die_struct *next;
3174 }
3175 limbo_die_node;
3176
3177 typedef struct skeleton_chain_struct
3178 {
3179 dw_die_ref old_die;
3180 dw_die_ref new_die;
3181 struct skeleton_chain_struct *parent;
3182 }
3183 skeleton_chain_node;
3184
3185 /* Define a macro which returns nonzero for a TYPE_DECL which was
3186 implicitly generated for a type.
3187
3188 Note that, unlike the C front-end (which generates a NULL named
3189 TYPE_DECL node for each complete tagged type, each array type,
3190 and each function type node created) the C++ front-end generates
3191 a _named_ TYPE_DECL node for each tagged type node created.
3192 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3193 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3194 front-end, but for each type, tagged or not. */
3195
3196 #define TYPE_DECL_IS_STUB(decl) \
3197 (DECL_NAME (decl) == NULL_TREE \
3198 || (DECL_ARTIFICIAL (decl) \
3199 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3200 /* This is necessary for stub decls that \
3201 appear in nested inline functions. */ \
3202 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3203 && (decl_ultimate_origin (decl) \
3204 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3205
3206 /* Information concerning the compilation unit's programming
3207 language, and compiler version. */
3208
3209 /* Fixed size portion of the DWARF compilation unit header. */
3210 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3211 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3212 + (dwarf_version >= 5 ? 4 : 3))
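
/* Worked example (assuming 32-bit DWARF, i.e. DWARF_INITIAL_LENGTH_SIZE
   is 4 and DWARF_OFFSET_SIZE is 4): the unit_length, debug_abbrev offset,
   version and address_size fields add up to 4 + 4 + 2 + 1 == 11 bytes for
   DWARF 2-4; DWARF 5 adds a one-byte unit_type field, for 12 bytes.  */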
3213
3214 /* Fixed size portion of the DWARF comdat type unit header. */
3215 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3216 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3217 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3218
3219 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3220 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3221 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3222
3223 /* Fixed size portion of public names info. */
3224 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3225
3226 /* Fixed size portion of the address range info. */
3227 #define DWARF_ARANGES_HEADER_SIZE \
3228 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3229 DWARF2_ADDR_SIZE * 2) \
3230 - DWARF_INITIAL_LENGTH_SIZE)
3231
3232 /* Size of padding portion in the address range info. It must be
3233 aligned to twice the pointer size. */
3234 #define DWARF_ARANGES_PAD_SIZE \
3235 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3236 DWARF2_ADDR_SIZE * 2) \
3237 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3238
3239 /* Use assembler line directives if available. */
3240 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3241 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3242 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3243 #else
3244 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3245 #endif
3246 #endif
3247
3248 /* Use assembler views in line directives if available. */
3249 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3250 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3251 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3252 #else
3253 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3254 #endif
3255 #endif
3256
3257 /* Return true if GCC configure detected assembler support for .loc. */
3258
3259 bool
3260 dwarf2out_default_as_loc_support (void)
3261 {
3262 return DWARF2_ASM_LINE_DEBUG_INFO;
3263 #if (GCC_VERSION >= 3000)
3264 # undef DWARF2_ASM_LINE_DEBUG_INFO
3265 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3266 #endif
3267 }
3268
3269 /* Return true if GCC configure detected assembler support for views
3270 in .loc directives. */
3271
3272 bool
3273 dwarf2out_default_as_locview_support (void)
3274 {
3275 return DWARF2_ASM_VIEW_DEBUG_INFO;
3276 #if (GCC_VERSION >= 3000)
3277 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3278 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3279 #endif
3280 }
3281
3282 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3283 view computation, and it refers to a view identifier for which we
3284 will not emit a label because it is known to map to a view number
3285 zero. We won't allocate the bitmap if we're not using assembler
3286 support for location views, but we have to make the variable
3287 visible for GGC and for code that will be optimized out for lack of
3288 support but that's still parsed and compiled. We could abstract it
3289 out with macros, but it's not worth it. */
3290 static GTY(()) bitmap zero_view_p;
3291
3292 /* Evaluate to TRUE iff N is known to identify the first location view
3293 at its PC. When not using assembler location view computation,
3294 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3295 and the view label numbers recorded in it are the ones known to be
3296 zero. */
3297 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3298 || (N) == (var_loc_view)-1 \
3299 || (zero_view_p \
3300 && bitmap_bit_p (zero_view_p, (N))))
3301
3302 /* Return true iff we're to emit .loc directives for the assembler to
3303 generate line number sections.
3304
3305 When we're not emitting views, all we need from the assembler is
3306 support for .loc directives.
3307
3308 If we are emitting views, we can only use the assembler's .loc
3309 support if it also supports views.
3310
3311 When the compiler is emitting the line number programs and
3312 computing view numbers itself, it resets view numbers at known PC
3313 changes and counts from that, and then it emits view numbers as
3314 literal constants in locviewlists. There are cases in which the
3315 compiler is not sure about PC changes, e.g. when extra alignment is
3316 requested for a label. In these cases, the compiler may not reset
3317 the view counter, and the potential PC advance in the line number
3318 program will use an opcode that does not reset the view counter
3319 even if the PC actually changes, so that compiler and debug info
3320 consumer can keep view numbers in sync.
3321
3322 When the compiler defers view computation to the assembler, it
3323 emits symbolic view numbers in locviewlists, with the exception of
3324 views known to be zero (forced resets, or reset after
3325 compiler-visible PC changes): instead of emitting symbols for
3326 these, we emit literal zero and assert the assembler agrees with
3327 the compiler's assessment. We could use symbolic views everywhere,
3328 instead of special-casing zero views, but then we'd be unable to
3329 optimize out locviewlists that contain only zeros. */
3330
3331 static bool
3332 output_asm_line_debug_info (void)
3333 {
3334 return (dwarf2out_as_loc_support
3335 && (dwarf2out_as_locview_support
3336 || !debug_variable_location_views));
3337 }
3338
3339 /* Minimum line offset in a special line info. opcode.
3340 This value was chosen to give a reasonable range of values. */
3341 #define DWARF_LINE_BASE -10
3342
3343 /* First special line opcode - leave room for the standard opcodes. */
3344 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3345
3346 /* Range of line offsets in a special line info. opcode. */
3347 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
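
/* Worked example (assuming DW_LNS_set_isa is 0x0c, its value in the DWARF
   standard): DWARF_LINE_OPCODE_BASE evaluates to 13, leaving opcodes 1..12
   for the standard opcodes, and DWARF_LINE_RANGE is 254 - 13 + 1 == 242.  */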
3348
3349 /* Flag that indicates the initial value of the is_stmt_start flag.
3350 In the present implementation, we do not mark any lines as
3351 the beginning of a source statement, because that information
3352 is not made available by the GCC front-end. */
3353 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3354
3355 /* Maximum number of operations per instruction bundle. */
3356 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3357 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3358 #endif
3359
3360 /* This location is used by calc_die_sizes() to keep track of
3361 the offset of each DIE within the .debug_info section. */
3362 static unsigned long next_die_offset;
3363
3364 /* Record the root of the DIE's built for the current compilation unit. */
3365 static GTY(()) dw_die_ref single_comp_unit_die;
3366
3367 /* A list of type DIEs that have been separated into comdat sections. */
3368 static GTY(()) comdat_type_node *comdat_type_list;
3369
3370 /* A list of CU DIEs that have been separated. */
3371 static GTY(()) limbo_die_node *cu_die_list;
3372
3373 /* A list of DIEs with a NULL parent waiting to be relocated. */
3374 static GTY(()) limbo_die_node *limbo_die_list;
3375
3376 /* A list of DIEs for which we may have to generate
3377 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3378 static GTY(()) limbo_die_node *deferred_asm_name;
3379
3380 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3381 {
3382 typedef const char *compare_type;
3383
3384 static hashval_t hash (dwarf_file_data *);
3385 static bool equal (dwarf_file_data *, const char *);
3386 };
3387
3388 /* Filenames referenced by this compilation unit. */
3389 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3390
3391 struct decl_die_hasher : ggc_ptr_hash<die_node>
3392 {
3393 typedef tree compare_type;
3394
3395 static hashval_t hash (die_node *);
3396 static bool equal (die_node *, tree);
3397 };
3398 /* A hash table of references to DIE's that describe declarations.
3399 The key is a DECL_UID() which is a unique number identifying each decl. */
3400 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3401
3402 struct GTY ((for_user)) variable_value_struct {
3403 unsigned int decl_id;
3404 vec<dw_die_ref, va_gc> *dies;
3405 };
3406
3407 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3408 {
3409 typedef tree compare_type;
3410
3411 static hashval_t hash (variable_value_struct *);
3412 static bool equal (variable_value_struct *, tree);
3413 };
3414 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3415 dw_val_class_decl_ref class, indexed by the FUNCTION_DECLs which are
3416 the DECL_CONTEXT of the referenced VAR_DECLs. */
3417 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3418
3419 struct block_die_hasher : ggc_ptr_hash<die_struct>
3420 {
3421 static hashval_t hash (die_struct *);
3422 static bool equal (die_struct *, die_struct *);
3423 };
3424
3425 /* A hash table of references to DIE's that describe COMMON blocks.
3426 The key is DECL_UID() ^ die_parent. */
3427 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3428
3429 typedef struct GTY(()) die_arg_entry_struct {
3430 dw_die_ref die;
3431 tree arg;
3432 } die_arg_entry;
3433
3434
3435 /* Node of the variable location list. */
3436 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3437 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3438 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3439 in mode of the EXPR_LIST node and first EXPR_LIST operand
3440 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3441 location or NULL for padding. For larger bitsizes,
3442 mode is 0 and first operand is a CONCAT with bitsize
3443 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3444 NULL as second operand. */
3445 rtx GTY (()) loc;
3446 const char * GTY (()) label;
3447 struct var_loc_node * GTY (()) next;
3448 var_loc_view view;
3449 };
3450
3451 /* Variable location list. */
3452 struct GTY ((for_user)) var_loc_list_def {
3453 struct var_loc_node * GTY (()) first;
3454
3455 /* Pointer to the last but one or last element of the
3456 chained list. If the list is empty, both first and
3457 last are NULL. If the list contains just one node,
3458 or the last node is certainly not redundant, it points
3459 to the last node; otherwise it points to the last but one.
3460 Do not mark it for GC because it is marked through the chain. */
3461 struct var_loc_node * GTY ((skip ("%h"))) last;
3462
3463 /* Pointer to the last element before section switch,
3464 if NULL, either sections weren't switched or first
3465 is after section switch. */
3466 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3467
3468 /* DECL_UID of the variable decl. */
3469 unsigned int decl_id;
3470 };
3471 typedef struct var_loc_list_def var_loc_list;
3472
3473 /* Call argument location list. */
3474 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3475 rtx GTY (()) call_arg_loc_note;
3476 const char * GTY (()) label;
3477 tree GTY (()) block;
3478 bool tail_call_p;
3479 rtx GTY (()) symbol_ref;
3480 struct call_arg_loc_node * GTY (()) next;
3481 };
3482
3483
3484 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3485 {
3486 typedef const_tree compare_type;
3487
3488 static hashval_t hash (var_loc_list *);
3489 static bool equal (var_loc_list *, const_tree);
3490 };
3491
3492 /* Table of decl location linked lists. */
3493 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3494
3495 /* Head and tail of call_arg_loc chain. */
3496 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3497 static struct call_arg_loc_node *call_arg_loc_last;
3498
3499 /* Number of call sites in the current function. */
3500 static int call_site_count = -1;
3501 /* Number of tail call sites in the current function. */
3502 static int tail_call_site_count = -1;
3503
3504 /* A cached location list. */
3505 struct GTY ((for_user)) cached_dw_loc_list_def {
3506 /* The DECL_UID of the decl that this entry describes. */
3507 unsigned int decl_id;
3508
3509 /* The cached location list. */
3510 dw_loc_list_ref loc_list;
3511 };
3512 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3513
3514 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3515 {
3516
3517 typedef const_tree compare_type;
3518
3519 static hashval_t hash (cached_dw_loc_list *);
3520 static bool equal (cached_dw_loc_list *, const_tree);
3521 };
3522
3523 /* Table of cached location lists. */
3524 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3525
3526 /* A vector of references to DIE's that are uniquely identified by their tag,
3527 presence/absence of children DIE's, and list of attribute/value pairs. */
3528 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3529
3530 /* A hash map to remember the stack usage for DWARF procedures. The value
3531 stored is the stack size difference between before the DWARF procedure
3532 invocation and after it returned. In other words, for a DWARF procedure
3533 that consumes N stack slots and pushes M, this stores M - N. */
3534 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
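/* A worked example (illustrative only): a DWARF procedure that pops two
   slots of input and pushes a single result is recorded here as
   1 - 2 = -1, while a parameterless procedure that just pushes one
   value is recorded as +1.  */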
3535
3536 /* A global counter for generating labels for line number data. */
3537 static unsigned int line_info_label_num;
3538
3539 /* The current table to which we should emit line number information
3540 for the current function. This will be set up at the beginning of
3541 assembly for the function. */
3542 static GTY(()) dw_line_info_table *cur_line_info_table;
3543
3544 /* The two default tables of line number info. */
3545 static GTY(()) dw_line_info_table *text_section_line_info;
3546 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3547
3548 /* The set of all non-default tables of line number info. */
3549 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3550
3551 /* A flag to tell the pubnames/pubtypes export whether there is an info
3552 section to refer to. */
3553 static bool info_section_emitted;
3554
3555 /* A pointer to the base of a table that contains a list of publicly
3556 accessible names. */
3557 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3558
3559 /* A pointer to the base of a table that contains a list of publicly
3560 accessible types. */
3561 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3562
3563 /* A pointer to the base of a table that contains a list of macro
3564 defines/undefines (and file start/end markers). */
3565 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3566
3567 /* True if a .debug_macinfo or .debug_macro section is going to be
3568 emitted. */
3569 #define have_macinfo \
3570 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3571 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3572 && !macinfo_table->is_empty ())
3573
3574 /* Vector of dies for which we should generate .debug_ranges info. */
3575 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3576
3577 /* Vector of pairs of labels referenced in ranges_table. */
3578 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3579
3580 /* Whether we have location lists that need outputting. */
3581 static GTY(()) bool have_location_lists;
3582
3583 /* Unique label counter. */
3584 static GTY(()) unsigned int loclabel_num;
3585
3586 /* Unique label counter for point-of-call tables. */
3587 static GTY(()) unsigned int poc_label_num;
3588
3589 /* The last file entry emitted by maybe_emit_file(). */
3590 static GTY(()) struct dwarf_file_data * last_emitted_file;
3591
3592 /* Number of internal labels generated by gen_internal_sym(). */
3593 static GTY(()) int label_num;
3594
3595 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3596
3597 /* Instances of generic types for which we need to generate debug
3598 info that describes their generic parameters and arguments. That
3599 generation needs to happen once all types are properly laid out so
3600 we do it at the end of compilation. */
3601 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3602
3603 /* Offset from the "steady-state frame pointer" to the frame base,
3604 within the current function. */
3605 static poly_int64 frame_pointer_fb_offset;
3606 static bool frame_pointer_fb_offset_valid;
3607
3608 static vec<dw_die_ref> base_types;
3609
3610 /* Flags representing a set of attribute classes for attributes that hold
3611 a scalar value (bounds, pointers, ...). */
3612 enum dw_scalar_form
3613 {
3614 dw_scalar_form_constant = 0x01,
3615 dw_scalar_form_exprloc = 0x02,
3616 dw_scalar_form_reference = 0x04
3617 };
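/* These are combined as a bit mask; e.g. a caller passing
   dw_scalar_form_constant | dw_scalar_form_exprloc to add_scalar_info
   would accept either a constant form or a DWARF expression for the
   value, but not a reference to another DIE.  (Illustrative reading;
   the accepted combinations depend on the caller.)  */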
3618
3619 /* Forward declarations for functions defined in this file. */
3620
3621 static int is_pseudo_reg (const_rtx);
3622 static tree type_main_variant (tree);
3623 static int is_tagged_type (const_tree);
3624 static const char *dwarf_tag_name (unsigned);
3625 static const char *dwarf_attr_name (unsigned);
3626 static const char *dwarf_form_name (unsigned);
3627 static tree decl_ultimate_origin (const_tree);
3628 static tree decl_class_context (tree);
3629 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3630 static inline enum dw_val_class AT_class (dw_attr_node *);
3631 static inline unsigned int AT_index (dw_attr_node *);
3632 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3633 static inline unsigned AT_flag (dw_attr_node *);
3634 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3635 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3636 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3637 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3638 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3639 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3640 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3641 unsigned int, unsigned char *);
3642 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3643 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3644 static inline const char *AT_string (dw_attr_node *);
3645 static enum dwarf_form AT_string_form (dw_attr_node *);
3646 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3647 static void add_AT_specification (dw_die_ref, dw_die_ref);
3648 static inline dw_die_ref AT_ref (dw_attr_node *);
3649 static inline int AT_ref_external (dw_attr_node *);
3650 static inline void set_AT_ref_external (dw_attr_node *, int);
3651 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3652 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3653 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3654 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3655 dw_loc_list_ref);
3656 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3657 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3658 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3659 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3660 static void remove_addr_table_entry (addr_table_entry *);
3661 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3662 static inline rtx AT_addr (dw_attr_node *);
3663 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3664 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3665 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3668 const char *);
3669 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3670 unsigned HOST_WIDE_INT);
3671 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3672 unsigned long, bool);
3673 static inline const char *AT_lbl (dw_attr_node *);
3674 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3675 static const char *get_AT_low_pc (dw_die_ref);
3676 static const char *get_AT_hi_pc (dw_die_ref);
3677 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3678 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3679 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3680 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3681 static bool is_cxx (void);
3682 static bool is_cxx (const_tree);
3683 static bool is_fortran (void);
3684 static bool is_ada (void);
3685 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3686 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3687 static void add_child_die (dw_die_ref, dw_die_ref);
3688 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3689 static dw_die_ref lookup_type_die (tree);
3690 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3691 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3692 static void equate_type_number_to_die (tree, dw_die_ref);
3693 static dw_die_ref lookup_decl_die (tree);
3694 static var_loc_list *lookup_decl_loc (const_tree);
3695 static void equate_decl_number_to_die (tree, dw_die_ref);
3696 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3697 static void print_spaces (FILE *);
3698 static void print_die (dw_die_ref, FILE *);
3699 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3700 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3701 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3702 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3703 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3704 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3705 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3706 struct md5_ctx *, int *);
3707 struct checksum_attributes;
3708 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3709 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3710 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3711 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3712 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3713 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3714 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3715 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3716 static int is_type_die (dw_die_ref);
3717 static int is_comdat_die (dw_die_ref);
3718 static inline bool is_template_instantiation (dw_die_ref);
3719 static int is_declaration_die (dw_die_ref);
3720 static int should_move_die_to_comdat (dw_die_ref);
3721 static dw_die_ref clone_as_declaration (dw_die_ref);
3722 static dw_die_ref clone_die (dw_die_ref);
3723 static dw_die_ref clone_tree (dw_die_ref);
3724 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3725 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3726 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3727 static dw_die_ref generate_skeleton (dw_die_ref);
3728 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3729 dw_die_ref,
3730 dw_die_ref);
3731 static void break_out_comdat_types (dw_die_ref);
3732 static void copy_decls_for_unworthy_types (dw_die_ref);
3733
3734 static void add_sibling_attributes (dw_die_ref);
3735 static void output_location_lists (dw_die_ref);
3736 static int constant_size (unsigned HOST_WIDE_INT);
3737 static unsigned long size_of_die (dw_die_ref);
3738 static void calc_die_sizes (dw_die_ref);
3739 static void calc_base_type_die_sizes (void);
3740 static void mark_dies (dw_die_ref);
3741 static void unmark_dies (dw_die_ref);
3742 static void unmark_all_dies (dw_die_ref);
3743 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3744 static unsigned long size_of_aranges (void);
3745 static enum dwarf_form value_format (dw_attr_node *);
3746 static void output_value_format (dw_attr_node *);
3747 static void output_abbrev_section (void);
3748 static void output_die_abbrevs (unsigned long, dw_die_ref);
3749 static void output_die (dw_die_ref);
3750 static void output_compilation_unit_header (enum dwarf_unit_type);
3751 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3752 static void output_comdat_type_unit (comdat_type_node *);
3753 static const char *dwarf2_name (tree, int);
3754 static void add_pubname (tree, dw_die_ref);
3755 static void add_enumerator_pubname (const char *, dw_die_ref);
3756 static void add_pubname_string (const char *, dw_die_ref);
3757 static void add_pubtype (tree, dw_die_ref);
3758 static void output_pubnames (vec<pubname_entry, va_gc> *);
3759 static void output_aranges (void);
3760 static unsigned int add_ranges (const_tree, bool = false);
3761 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3762 bool *, bool);
3763 static void output_ranges (void);
3764 static dw_line_info_table *new_line_info_table (void);
3765 static void output_line_info (bool);
3766 static void output_file_names (void);
3767 static dw_die_ref base_type_die (tree, bool);
3768 static int is_base_type (tree);
3769 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3770 static int decl_quals (const_tree);
3771 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3772 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3773 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3774 static int type_is_enum (const_tree);
3775 static unsigned int dbx_reg_number (const_rtx);
3776 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3777 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3778 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3779 enum var_init_status);
3780 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3783 enum var_init_status);
3784 static int is_based_loc (const_rtx);
3785 static bool resolve_one_addr (rtx *);
3786 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3787 enum var_init_status);
3788 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3789 enum var_init_status);
3790 struct loc_descr_context;
3791 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3792 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3793 static dw_loc_list_ref loc_list_from_tree (tree, int,
3794 struct loc_descr_context *);
3795 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3796 struct loc_descr_context *);
3797 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3798 static tree field_type (const_tree);
3799 static unsigned int simple_type_align_in_bits (const_tree);
3800 static unsigned int simple_decl_align_in_bits (const_tree);
3801 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3802 struct vlr_context;
3803 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3804 HOST_WIDE_INT *);
3805 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3806 dw_loc_list_ref);
3807 static void add_data_member_location_attribute (dw_die_ref, tree,
3808 struct vlr_context *);
3809 static bool add_const_value_attribute (dw_die_ref, rtx);
3810 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3811 static void insert_wide_int (const wide_int &, unsigned char *, int);
3812 static void insert_float (const_rtx, unsigned char *);
3813 static rtx rtl_for_decl_location (tree);
3814 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3815 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3816 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3817 static void add_name_attribute (dw_die_ref, const char *);
3818 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3819 static void add_comp_dir_attribute (dw_die_ref);
3820 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3821 struct loc_descr_context *);
3822 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3823 struct loc_descr_context *);
3824 static void add_subscript_info (dw_die_ref, tree, bool);
3825 static void add_byte_size_attribute (dw_die_ref, tree);
3826 static void add_alignment_attribute (dw_die_ref, tree);
3827 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3828 struct vlr_context *);
3829 static void add_bit_size_attribute (dw_die_ref, tree);
3830 static void add_prototyped_attribute (dw_die_ref, tree);
3831 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3832 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3833 static void add_src_coords_attributes (dw_die_ref, tree);
3834 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3835 static void add_discr_value (dw_die_ref, dw_discr_value *);
3836 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3837 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3838 static void push_decl_scope (tree);
3839 static void pop_decl_scope (void);
3840 static dw_die_ref scope_die_for (tree, dw_die_ref);
3841 static inline int local_scope_p (dw_die_ref);
3842 static inline int class_scope_p (dw_die_ref);
3843 static inline int class_or_namespace_scope_p (dw_die_ref);
3844 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3845 static void add_calling_convention_attribute (dw_die_ref, tree);
3846 static const char *type_tag (const_tree);
3847 static tree member_declared_type (const_tree);
3848 #if 0
3849 static const char *decl_start_label (tree);
3850 #endif
3851 static void gen_array_type_die (tree, dw_die_ref);
3852 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3853 #if 0
3854 static void gen_entry_point_die (tree, dw_die_ref);
3855 #endif
3856 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3857 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3858 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3859 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3860 static void gen_formal_types_die (tree, dw_die_ref);
3861 static void gen_subprogram_die (tree, dw_die_ref);
3862 static void gen_variable_die (tree, tree, dw_die_ref);
3863 static void gen_const_die (tree, dw_die_ref);
3864 static void gen_label_die (tree, dw_die_ref);
3865 static void gen_lexical_block_die (tree, dw_die_ref);
3866 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3867 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3868 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3869 static dw_die_ref gen_compile_unit_die (const char *);
3870 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3871 static void gen_member_die (tree, dw_die_ref);
3872 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3873 enum debug_info_usage);
3874 static void gen_subroutine_type_die (tree, dw_die_ref);
3875 static void gen_typedef_die (tree, dw_die_ref);
3876 static void gen_type_die (tree, dw_die_ref);
3877 static void gen_block_die (tree, dw_die_ref);
3878 static void decls_for_scope (tree, dw_die_ref);
3879 static bool is_naming_typedef_decl (const_tree);
3880 static inline dw_die_ref get_context_die (tree);
3881 static void gen_namespace_die (tree, dw_die_ref);
3882 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3883 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3884 static dw_die_ref force_decl_die (tree);
3885 static dw_die_ref force_type_die (tree);
3886 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3887 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3888 static struct dwarf_file_data * lookup_filename (const char *);
3889 static void retry_incomplete_types (void);
3890 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3891 static void gen_generic_params_dies (tree);
3892 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3893 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3894 static void splice_child_die (dw_die_ref, dw_die_ref);
3895 static int file_info_cmp (const void *, const void *);
3896 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3897 const char *, var_loc_view, const char *);
3898 static void output_loc_list (dw_loc_list_ref);
3899 static char *gen_internal_sym (const char *);
3900 static bool want_pubnames (void);
3901
3902 static void prune_unmark_dies (dw_die_ref);
3903 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3904 static void prune_unused_types_mark (dw_die_ref, int);
3905 static void prune_unused_types_walk (dw_die_ref);
3906 static void prune_unused_types_walk_attribs (dw_die_ref);
3907 static void prune_unused_types_prune (dw_die_ref);
3908 static void prune_unused_types (void);
3909 static int maybe_emit_file (struct dwarf_file_data *fd);
3910 static inline const char *AT_vms_delta1 (dw_attr_node *);
3911 static inline const char *AT_vms_delta2 (dw_attr_node *);
3912 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3913 const char *, const char *);
3914 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3915 static void gen_remaining_tmpl_value_param_die_attribute (void);
3916 static bool generic_type_p (tree);
3917 static void schedule_generic_params_dies_gen (tree t);
3918 static void gen_scheduled_generic_parms_dies (void);
3919 static void resolve_variable_values (void);
3920
3921 static const char *comp_dir_string (void);
3922
3923 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3924
3925 /* enum for tracking thread-local variables whose address is really an offset
3926 relative to the TLS pointer, which will need link-time relocation, but will
3927 not need relocation by the DWARF consumer. */
3928
3929 enum dtprel_bool
3930 {
3931 dtprel_false = 0,
3932 dtprel_true = 1
3933 };
3934
3935 /* Return the operator to use for an address of a variable. For dtprel_true, we
3936 use DW_OP_const*. For regular variables, which need both link-time
3937 relocation and consumer-level relocation (e.g., to account for shared objects
3938 loaded at a random address), we use DW_OP_addr*. */
3939
3940 static inline enum dwarf_location_atom
3941 dw_addr_op (enum dtprel_bool dtprel)
3942 {
3943 if (dtprel == dtprel_true)
3944 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3945 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3946 else
3947 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3948 }
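/* As an illustration of the above: on a 64-bit target without
   -gsplit-dwarf, dw_addr_op (dtprel_true) yields DW_OP_const8u and
   dw_addr_op (dtprel_false) yields DW_OP_addr; with split DWARF both
   become the indexed DW_OP_constx and DW_OP_addrx forms (or whatever
   dwarf_OP maps them to for older DWARF versions).  */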
3949
3950 /* Return a pointer to a newly allocated address location description. If
3951 dwarf_split_debug_info is true, then record the address with the appropriate
3952 relocation. */
3953 static inline dw_loc_descr_ref
3954 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3955 {
3956 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3957
3958 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3959 ref->dw_loc_oprnd1.v.val_addr = addr;
3960 ref->dtprel = dtprel;
3961 if (dwarf_split_debug_info)
3962 ref->dw_loc_oprnd1.val_entry
3963 = add_addr_table_entry (addr,
3964 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3965 else
3966 ref->dw_loc_oprnd1.val_entry = NULL;
3967
3968 return ref;
3969 }
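/* A minimal usage sketch (hypothetical caller; SYM and DIE stand for a
   valid symbol_ref rtx and its owning DIE):

     dw_loc_descr_ref d = new_addr_loc_descr (sym, dtprel_false);
     add_AT_loc (die, DW_AT_location, d);

   With split DWARF the .debug_addr entry is recorded by the call itself,
   so the caller does not need to touch the address table.  */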
3970
3971 /* Section names used to hold DWARF debugging information. */
3972
3973 #ifndef DEBUG_INFO_SECTION
3974 #define DEBUG_INFO_SECTION ".debug_info"
3975 #endif
3976 #ifndef DEBUG_DWO_INFO_SECTION
3977 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3978 #endif
3979 #ifndef DEBUG_LTO_INFO_SECTION
3980 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3981 #endif
3982 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3983 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3984 #endif
3985 #ifndef DEBUG_ABBREV_SECTION
3986 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3987 #endif
3988 #ifndef DEBUG_LTO_ABBREV_SECTION
3989 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3990 #endif
3991 #ifndef DEBUG_DWO_ABBREV_SECTION
3992 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3993 #endif
3994 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3995 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3996 #endif
3997 #ifndef DEBUG_ARANGES_SECTION
3998 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3999 #endif
4000 #ifndef DEBUG_ADDR_SECTION
4001 #define DEBUG_ADDR_SECTION ".debug_addr"
4002 #endif
4003 #ifndef DEBUG_MACINFO_SECTION
4004 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4005 #endif
4006 #ifndef DEBUG_LTO_MACINFO_SECTION
4007 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4008 #endif
4009 #ifndef DEBUG_DWO_MACINFO_SECTION
4010 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4011 #endif
4012 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4013 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4014 #endif
4015 #ifndef DEBUG_MACRO_SECTION
4016 #define DEBUG_MACRO_SECTION ".debug_macro"
4017 #endif
4018 #ifndef DEBUG_LTO_MACRO_SECTION
4019 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4020 #endif
4021 #ifndef DEBUG_DWO_MACRO_SECTION
4022 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4023 #endif
4024 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4025 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4026 #endif
4027 #ifndef DEBUG_LINE_SECTION
4028 #define DEBUG_LINE_SECTION ".debug_line"
4029 #endif
4030 #ifndef DEBUG_LTO_LINE_SECTION
4031 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4032 #endif
4033 #ifndef DEBUG_DWO_LINE_SECTION
4034 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4035 #endif
4036 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4037 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4038 #endif
4039 #ifndef DEBUG_LOC_SECTION
4040 #define DEBUG_LOC_SECTION ".debug_loc"
4041 #endif
4042 #ifndef DEBUG_DWO_LOC_SECTION
4043 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4044 #endif
4045 #ifndef DEBUG_LOCLISTS_SECTION
4046 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4047 #endif
4048 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4049 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4050 #endif
4051 #ifndef DEBUG_PUBNAMES_SECTION
4052 #define DEBUG_PUBNAMES_SECTION \
4053 ((debug_generate_pub_sections == 2) \
4054 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4055 #endif
4056 #ifndef DEBUG_PUBTYPES_SECTION
4057 #define DEBUG_PUBTYPES_SECTION \
4058 ((debug_generate_pub_sections == 2) \
4059 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4060 #endif
4061 #ifndef DEBUG_STR_OFFSETS_SECTION
4062 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4063 #endif
4064 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4065 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4066 #endif
4067 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4068 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4069 #endif
4070 #ifndef DEBUG_STR_SECTION
4071 #define DEBUG_STR_SECTION ".debug_str"
4072 #endif
4073 #ifndef DEBUG_LTO_STR_SECTION
4074 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4075 #endif
4076 #ifndef DEBUG_STR_DWO_SECTION
4077 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4078 #endif
4079 #ifndef DEBUG_LTO_STR_DWO_SECTION
4080 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4081 #endif
4082 #ifndef DEBUG_RANGES_SECTION
4083 #define DEBUG_RANGES_SECTION ".debug_ranges"
4084 #endif
4085 #ifndef DEBUG_RNGLISTS_SECTION
4086 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4087 #endif
4088 #ifndef DEBUG_LINE_STR_SECTION
4089 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4090 #endif
4091 #ifndef DEBUG_LTO_LINE_STR_SECTION
4092 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4093 #endif
4094
4095 /* Standard ELF section names for compiled code and data. */
4096 #ifndef TEXT_SECTION_NAME
4097 #define TEXT_SECTION_NAME ".text"
4098 #endif
4099
4100 /* Section flags for .debug_str section. */
4101 #define DEBUG_STR_SECTION_FLAGS \
4102 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4103 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4104 : SECTION_DEBUG)
4105
4106 /* Section flags for .debug_str.dwo section. */
4107 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4108
4109 /* Attribute used to refer to the macro section. */
4110 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4111 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
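/* In other words, DWARF 5 and later use the standard DW_AT_macros,
   strict pre-5 DWARF falls back to DW_AT_macro_info, and non-strict
   pre-5 DWARF uses the GNU extension DW_AT_GNU_macros.  */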
4112
4113 /* Labels we insert at the beginning of sections so that we can refer to
4114 them instead of the section names themselves. */
4115
4116 #ifndef TEXT_SECTION_LABEL
4117 #define TEXT_SECTION_LABEL "Ltext"
4118 #endif
4119 #ifndef COLD_TEXT_SECTION_LABEL
4120 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4121 #endif
4122 #ifndef DEBUG_LINE_SECTION_LABEL
4123 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4124 #endif
4125 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4126 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4127 #endif
4128 #ifndef DEBUG_INFO_SECTION_LABEL
4129 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4130 #endif
4131 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4132 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4133 #endif
4134 #ifndef DEBUG_ABBREV_SECTION_LABEL
4135 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4136 #endif
4137 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4138 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4139 #endif
4140 #ifndef DEBUG_ADDR_SECTION_LABEL
4141 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4142 #endif
4143 #ifndef DEBUG_LOC_SECTION_LABEL
4144 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4145 #endif
4146 #ifndef DEBUG_RANGES_SECTION_LABEL
4147 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4148 #endif
4149 #ifndef DEBUG_MACINFO_SECTION_LABEL
4150 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4151 #endif
4152 #ifndef DEBUG_MACRO_SECTION_LABEL
4153 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4154 #endif
4155 #define SKELETON_COMP_DIE_ABBREV 1
4156 #define SKELETON_TYPE_DIE_ABBREV 2
4157
4158 /* Definitions of defaults for formats and names of various special
4159 (artificial) labels which may be generated within this file (when the -g
4160 option is used and DWARF2_DEBUGGING_INFO is in effect).
4161 If necessary, these may be overridden from within the tm.h file, but
4162 typically, overriding these defaults is unnecessary. */
4163
4164 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4177 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4178 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4179
4180 #ifndef TEXT_END_LABEL
4181 #define TEXT_END_LABEL "Letext"
4182 #endif
4183 #ifndef COLD_END_LABEL
4184 #define COLD_END_LABEL "Letext_cold"
4185 #endif
4186 #ifndef BLOCK_BEGIN_LABEL
4187 #define BLOCK_BEGIN_LABEL "LBB"
4188 #endif
4189 #ifndef BLOCK_INLINE_ENTRY_LABEL
4190 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4191 #endif
4192 #ifndef BLOCK_END_LABEL
4193 #define BLOCK_END_LABEL "LBE"
4194 #endif
4195 #ifndef LINE_CODE_LABEL
4196 #define LINE_CODE_LABEL "LM"
4197 #endif
4198
4199 \f
4200 /* Return the root of the DIEs built for the current compilation unit. */
4201 static dw_die_ref
4202 comp_unit_die (void)
4203 {
4204 if (!single_comp_unit_die)
4205 single_comp_unit_die = gen_compile_unit_die (NULL);
4206 return single_comp_unit_die;
4207 }
4208
4209 /* We allow a language front-end to designate a function that is to be
4210 called to "demangle" any name before it is put into a DIE. */
4211
4212 static const char *(*demangle_name_func) (const char *);
4213
4214 void
4215 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4216 {
4217 demangle_name_func = func;
4218 }
4219
4220 /* Test if rtl node points to a pseudo register. */
4221
4222 static inline int
4223 is_pseudo_reg (const_rtx rtl)
4224 {
4225 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4226 || (GET_CODE (rtl) == SUBREG
4227 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4228 }
4229
4230 /* Return a reference to a type, with its const and volatile qualifiers
4231 removed. */
4232
4233 static inline tree
4234 type_main_variant (tree type)
4235 {
4236 type = TYPE_MAIN_VARIANT (type);
4237
4238 /* ??? There really should be only one main variant among any group of
4239 variants of a given type (and all of the MAIN_VARIANT values for all
4240 members of the group should point to that one type) but sometimes the C
4241 front-end messes this up for array types, so we work around that bug
4242 here. */
4243 if (TREE_CODE (type) == ARRAY_TYPE)
4244 while (type != TYPE_MAIN_VARIANT (type))
4245 type = TYPE_MAIN_VARIANT (type);
4246
4247 return type;
4248 }
4249
4250 /* Return nonzero if the given type node represents a tagged type. */
4251
4252 static inline int
4253 is_tagged_type (const_tree type)
4254 {
4255 enum tree_code code = TREE_CODE (type);
4256
4257 return (code == RECORD_TYPE || code == UNION_TYPE
4258 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4259 }
4260
4261 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4262
4263 static void
4264 get_ref_die_offset_label (char *label, dw_die_ref ref)
4265 {
4266 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4267 }
4268
4269 /* Return die_offset of a DIE reference to a base type. */
4270
4271 static unsigned long int
4272 get_base_type_offset (dw_die_ref ref)
4273 {
4274 if (ref->die_offset)
4275 return ref->die_offset;
4276 if (comp_unit_die ()->die_abbrev)
4277 {
4278 calc_base_type_die_sizes ();
4279 gcc_assert (ref->die_offset);
4280 }
4281 return ref->die_offset;
4282 }
4283
4284 /* Return die_offset of a DIE reference other than base type. */
4285
4286 static unsigned long int
4287 get_ref_die_offset (dw_die_ref ref)
4288 {
4289 gcc_assert (ref->die_offset);
4290 return ref->die_offset;
4291 }
4292
4293 /* Convert a DIE tag into its string name. */
4294
4295 static const char *
4296 dwarf_tag_name (unsigned int tag)
4297 {
4298 const char *name = get_DW_TAG_name (tag);
4299
4300 if (name != NULL)
4301 return name;
4302
4303 return "DW_TAG_<unknown>";
4304 }
4305
4306 /* Convert a DWARF attribute code into its string name. */
4307
4308 static const char *
4309 dwarf_attr_name (unsigned int attr)
4310 {
4311 const char *name;
4312
4313 switch (attr)
4314 {
4315 #if VMS_DEBUGGING_INFO
4316 case DW_AT_HP_prologue:
4317 return "DW_AT_HP_prologue";
4318 #else
4319 case DW_AT_MIPS_loop_unroll_factor:
4320 return "DW_AT_MIPS_loop_unroll_factor";
4321 #endif
4322
4323 #if VMS_DEBUGGING_INFO
4324 case DW_AT_HP_epilogue:
4325 return "DW_AT_HP_epilogue";
4326 #else
4327 case DW_AT_MIPS_stride:
4328 return "DW_AT_MIPS_stride";
4329 #endif
4330 }
4331
4332 name = get_DW_AT_name (attr);
4333
4334 if (name != NULL)
4335 return name;
4336
4337 return "DW_AT_<unknown>";
4338 }
4339
4340 /* Convert a DWARF value form code into its string name. */
4341
4342 static const char *
4343 dwarf_form_name (unsigned int form)
4344 {
4345 const char *name = get_DW_FORM_name (form);
4346
4347 if (name != NULL)
4348 return name;
4349
4350 return "DW_FORM_<unknown>";
4351 }
4352 \f
4353 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4354 instance of an inlined instance of a decl which is local to an inline
4355 function, so we have to trace all of the way back through the origin chain
4356 to find out what sort of node actually served as the original seed for the
4357 given block. */
4358
4359 static tree
4360 decl_ultimate_origin (const_tree decl)
4361 {
4362 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4363 return NULL_TREE;
4364
4365 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4366 we're trying to output the abstract instance of this function. */
4367 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4368 return NULL_TREE;
4369
4370 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4371 most distant ancestor, this should never happen. */
4372 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4373
4374 return DECL_ABSTRACT_ORIGIN (decl);
4375 }
4376
4377 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4378 of a virtual function may refer to a base class, so we check the 'this'
4379 parameter. */
4380
4381 static tree
4382 decl_class_context (tree decl)
4383 {
4384 tree context = NULL_TREE;
4385
4386 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4387 context = DECL_CONTEXT (decl);
4388 else
4389 context = TYPE_MAIN_VARIANT
4390 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4391
4392 if (context && !TYPE_P (context))
4393 context = NULL_TREE;
4394
4395 return context;
4396 }
4397 \f
4398 /* Add an attribute/value pair to a DIE. */
4399
4400 static inline void
4401 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4402 {
4403 /* Maybe this should be an assert? */
4404 if (die == NULL)
4405 return;
4406
4407 if (flag_checking)
4408 {
4409 /* Check we do not add duplicate attrs. Can't use get_AT here
4410 because that recurses to the specification/abstract origin DIE. */
4411 dw_attr_node *a;
4412 unsigned ix;
4413 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4414 gcc_assert (a->dw_attr != attr->dw_attr);
4415 }
4416
4417 vec_safe_reserve (die->die_attr, 1);
4418 vec_safe_push (die->die_attr, *attr);
4419 }
4420
4421 static inline enum dw_val_class
4422 AT_class (dw_attr_node *a)
4423 {
4424 return a->dw_attr_val.val_class;
4425 }
4426
4427 /* Return the index for any attribute that will be referenced with a
4428 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4429 indices are stored in dw_attr_val.v.val_str so that the strings can be
4430 reference counted for pruning. */
4431
4432 static inline unsigned int
4433 AT_index (dw_attr_node *a)
4434 {
4435 if (AT_class (a) == dw_val_class_str)
4436 return a->dw_attr_val.v.val_str->index;
4437 else if (a->dw_attr_val.val_entry != NULL)
4438 return a->dw_attr_val.val_entry->index;
4439 return NOT_INDEXED;
4440 }
4441
4442 /* Add a flag value attribute to a DIE. */
4443
4444 static inline void
4445 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4446 {
4447 dw_attr_node attr;
4448
4449 attr.dw_attr = attr_kind;
4450 attr.dw_attr_val.val_class = dw_val_class_flag;
4451 attr.dw_attr_val.val_entry = NULL;
4452 attr.dw_attr_val.v.val_flag = flag;
4453 add_dwarf_attr (die, &attr);
4454 }
4455
4456 static inline unsigned
4457 AT_flag (dw_attr_node *a)
4458 {
4459 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4460 return a->dw_attr_val.v.val_flag;
4461 }
4462
4463 /* Add a signed integer attribute value to a DIE. */
4464
4465 static inline void
4466 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4467 {
4468 dw_attr_node attr;
4469
4470 attr.dw_attr = attr_kind;
4471 attr.dw_attr_val.val_class = dw_val_class_const;
4472 attr.dw_attr_val.val_entry = NULL;
4473 attr.dw_attr_val.v.val_int = int_val;
4474 add_dwarf_attr (die, &attr);
4475 }
4476
4477 static inline HOST_WIDE_INT
4478 AT_int (dw_attr_node *a)
4479 {
4480 gcc_assert (a && (AT_class (a) == dw_val_class_const
4481 || AT_class (a) == dw_val_class_const_implicit));
4482 return a->dw_attr_val.v.val_int;
4483 }
4484
4485 /* Add an unsigned integer attribute value to a DIE. */
4486
4487 static inline void
4488 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4489 unsigned HOST_WIDE_INT unsigned_val)
4490 {
4491 dw_attr_node attr;
4492
4493 attr.dw_attr = attr_kind;
4494 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4495 attr.dw_attr_val.val_entry = NULL;
4496 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4497 add_dwarf_attr (die, &attr);
4498 }
4499
4500 static inline unsigned HOST_WIDE_INT
4501 AT_unsigned (dw_attr_node *a)
4502 {
4503 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4504 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4505 return a->dw_attr_val.v.val_unsigned;
4506 }
4507
4508 /* Add an unsigned wide integer attribute value to a DIE. */
4509
4510 static inline void
4511 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4512 const wide_int& w)
4513 {
4514 dw_attr_node attr;
4515
4516 attr.dw_attr = attr_kind;
4517 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4518 attr.dw_attr_val.val_entry = NULL;
4519 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4520 *attr.dw_attr_val.v.val_wide = w;
4521 add_dwarf_attr (die, &attr);
4522 }
4523
4524 /* Add an unsigned double integer attribute value to a DIE. */
4525
4526 static inline void
4527 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4528 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4529 {
4530 dw_attr_node attr;
4531
4532 attr.dw_attr = attr_kind;
4533 attr.dw_attr_val.val_class = dw_val_class_const_double;
4534 attr.dw_attr_val.val_entry = NULL;
4535 attr.dw_attr_val.v.val_double.high = high;
4536 attr.dw_attr_val.v.val_double.low = low;
4537 add_dwarf_attr (die, &attr);
4538 }
4539
4540 /* Add a vector of byte values (such as a floating-point constant) to a DIE. */
4541
4542 static inline void
4543 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4544 unsigned int length, unsigned int elt_size, unsigned char *array)
4545 {
4546 dw_attr_node attr;
4547
4548 attr.dw_attr = attr_kind;
4549 attr.dw_attr_val.val_class = dw_val_class_vec;
4550 attr.dw_attr_val.val_entry = NULL;
4551 attr.dw_attr_val.v.val_vec.length = length;
4552 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4553 attr.dw_attr_val.v.val_vec.array = array;
4554 add_dwarf_attr (die, &attr);
4555 }
4556
4557 /* Add an 8-byte data attribute value to a DIE. */
4558
4559 static inline void
4560 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4561 unsigned char data8[8])
4562 {
4563 dw_attr_node attr;
4564
4565 attr.dw_attr = attr_kind;
4566 attr.dw_attr_val.val_class = dw_val_class_data8;
4567 attr.dw_attr_val.val_entry = NULL;
4568 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4569 add_dwarf_attr (die, &attr);
4570 }
4571
4572 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4573 dwarf_split_debug_info, address attributes in dies destined for the
4574 final executable have force_direct set to avoid using indexed
4575 references. */
4576
4577 static inline void
4578 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4579 bool force_direct)
4580 {
4581 dw_attr_node attr;
4582 char * lbl_id;
4583
4584 lbl_id = xstrdup (lbl_low);
4585 attr.dw_attr = DW_AT_low_pc;
4586 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4587 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4588 if (dwarf_split_debug_info && !force_direct)
4589 attr.dw_attr_val.val_entry
4590 = add_addr_table_entry (lbl_id, ate_kind_label);
4591 else
4592 attr.dw_attr_val.val_entry = NULL;
4593 add_dwarf_attr (die, &attr);
4594
4595 attr.dw_attr = DW_AT_high_pc;
4596 if (dwarf_version < 4)
4597 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4598 else
4599 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4600 lbl_id = xstrdup (lbl_high);
4601 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4602 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4603 && dwarf_split_debug_info && !force_direct)
4604 attr.dw_attr_val.val_entry
4605 = add_addr_table_entry (lbl_id, ate_kind_label);
4606 else
4607 attr.dw_attr_val.val_entry = NULL;
4608 add_dwarf_attr (die, &attr);
4609 }
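/* A usage sketch (hypothetical labels, for illustration):

     add_AT_low_high_pc (subr_die, begin_label, end_label, false);

   With DWARF 4 or later the high-pc attribute gets the
   dw_val_class_high_pc class, so it can later be emitted as an offset
   from the low pc (a label difference) instead of a second relocated
   address.  */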
4610
4611 /* Hash and equality functions for debug_str_hash. */
4612
4613 hashval_t
4614 indirect_string_hasher::hash (indirect_string_node *x)
4615 {
4616 return htab_hash_string (x->str);
4617 }
4618
4619 bool
4620 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4621 {
4622 return strcmp (x1->str, x2) == 0;
4623 }
4624
4625 /* Add STR to the given string hash table. */
4626
4627 static struct indirect_string_node *
4628 find_AT_string_in_table (const char *str,
4629 hash_table<indirect_string_hasher> *table)
4630 {
4631 struct indirect_string_node *node;
4632
4633 indirect_string_node **slot
4634 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4635 if (*slot == NULL)
4636 {
4637 node = ggc_cleared_alloc<indirect_string_node> ();
4638 node->str = ggc_strdup (str);
4639 *slot = node;
4640 }
4641 else
4642 node = *slot;
4643
4644 node->refcount++;
4645 return node;
4646 }
4647
4648 /* Add STR to the indirect string hash table. */
4649
4650 static struct indirect_string_node *
4651 find_AT_string (const char *str)
4652 {
4653 if (! debug_str_hash)
4654 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4655
4656 return find_AT_string_in_table (str, debug_str_hash);
4657 }
4658
4659 /* Add a string attribute value to a DIE. */
4660
4661 static inline void
4662 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4663 {
4664 dw_attr_node attr;
4665 struct indirect_string_node *node;
4666
4667 node = find_AT_string (str);
4668
4669 attr.dw_attr = attr_kind;
4670 attr.dw_attr_val.val_class = dw_val_class_str;
4671 attr.dw_attr_val.val_entry = NULL;
4672 attr.dw_attr_val.v.val_str = node;
4673 add_dwarf_attr (die, &attr);
4674 }
4675
4676 static inline const char *
4677 AT_string (dw_attr_node *a)
4678 {
4679 gcc_assert (a && AT_class (a) == dw_val_class_str);
4680 return a->dw_attr_val.v.val_str->str;
4681 }
4682
4683 /* Call this function directly to bypass AT_string_form's logic to put
4684 the string inline in the die. */
4685
4686 static void
4687 set_indirect_string (struct indirect_string_node *node)
4688 {
4689 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4690 /* If the string is already indirect, this is a no-op. */
4691 if (node->form == DW_FORM_strp
4692 || node->form == DW_FORM_line_strp
4693 || node->form == dwarf_FORM (DW_FORM_strx))
4694 {
4695 gcc_assert (node->label);
4696 return;
4697 }
4698 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4699 ++dw2_string_counter;
4700 node->label = xstrdup (label);
4701
4702 if (!dwarf_split_debug_info)
4703 {
4704 node->form = DW_FORM_strp;
4705 node->index = NOT_INDEXED;
4706 }
4707 else
4708 {
4709 node->form = dwarf_FORM (DW_FORM_strx);
4710 node->index = NO_INDEX_ASSIGNED;
4711 }
4712 }
4713
4714 /* A helper function for dwarf2out_finish, called to reset indirect
4715 string decisions done for early LTO dwarf output before fat object
4716 dwarf output. */
4717
4718 int
4719 reset_indirect_string (indirect_string_node **h, void *)
4720 {
4721 struct indirect_string_node *node = *h;
4722 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4723 {
4724 free (node->label);
4725 node->label = NULL;
4726 node->form = (dwarf_form) 0;
4727 node->index = 0;
4728 }
4729 return 1;
4730 }
4731
4732 /* Find out whether a string should be output inline in the DIE
4733 or out-of-line in the .debug_str section. */
4734
4735 static enum dwarf_form
4736 find_string_form (struct indirect_string_node *node)
4737 {
4738 unsigned int len;
4739
4740 if (node->form)
4741 return node->form;
4742
4743 len = strlen (node->str) + 1;
4744
4745 /* If the string is no longer than the size of the reference, it is
4746 always better to put it inline. */
4747 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4748 return node->form = DW_FORM_string;
4749
4750 /* If we cannot expect the linker to merge strings in the .debug_str
4751 section, only put it into .debug_str if it is worthwhile even within
4752 this single module. */
4753 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4754 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4755 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4756 return node->form = DW_FORM_string;
4757
4758 set_indirect_string (node);
4759
4760 return node->form;
4761 }
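/* Two worked examples, assuming a 4-byte DWARF_OFFSET_SIZE and no
   mergeable-string support: "abc" (length 4 including the terminating
   NUL) is no larger than a .debug_str reference and stays inline as
   DW_FORM_string; a 30-character string referenced once also stays
   inline, because (31 - 4) * 1 <= 31, but the same string referenced
   twice is moved to .debug_str, since (31 - 4) * 2 > 31.  */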
4762
4763 /* Find out whether the string referenced from the attribute should be
4764 output inline in the DIE or out-of-line in the .debug_str section. */
4765
4766 static enum dwarf_form
4767 AT_string_form (dw_attr_node *a)
4768 {
4769 gcc_assert (a && AT_class (a) == dw_val_class_str);
4770 return find_string_form (a->dw_attr_val.v.val_str);
4771 }
4772
4773 /* Add a DIE reference attribute value to a DIE. */
4774
4775 static inline void
4776 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4777 {
4778 dw_attr_node attr;
4779 gcc_checking_assert (targ_die != NULL);
4780
4781 /* With LTO we can end up trying to reference something we didn't create
4782 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4783 if (targ_die == NULL)
4784 return;
4785
4786 attr.dw_attr = attr_kind;
4787 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4788 attr.dw_attr_val.val_entry = NULL;
4789 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4790 attr.dw_attr_val.v.val_die_ref.external = 0;
4791 add_dwarf_attr (die, &attr);
4792 }
4793
4794 /* Change DIE reference REF to point to NEW_DIE instead. */
4795
4796 static inline void
4797 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4798 {
4799 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4800 ref->dw_attr_val.v.val_die_ref.die = new_die;
4801 ref->dw_attr_val.v.val_die_ref.external = 0;
4802 }
4803
4804 /* Add an AT_specification attribute to a DIE, and also make the back
4805 pointer from the specification to the definition. */
4806
4807 static inline void
4808 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4809 {
4810 add_AT_die_ref (die, DW_AT_specification, targ_die);
4811 gcc_assert (!targ_die->die_definition);
4812 targ_die->die_definition = die;
4813 }
4814
4815 static inline dw_die_ref
4816 AT_ref (dw_attr_node *a)
4817 {
4818 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4819 return a->dw_attr_val.v.val_die_ref.die;
4820 }
4821
4822 static inline int
4823 AT_ref_external (dw_attr_node *a)
4824 {
4825 if (a && AT_class (a) == dw_val_class_die_ref)
4826 return a->dw_attr_val.v.val_die_ref.external;
4827
4828 return 0;
4829 }
4830
4831 static inline void
4832 set_AT_ref_external (dw_attr_node *a, int i)
4833 {
4834 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4835 a->dw_attr_val.v.val_die_ref.external = i;
4836 }
4837
4838 /* Add an FDE reference attribute value to a DIE. */
4839
4840 static inline void
4841 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4842 {
4843 dw_attr_node attr;
4844
4845 attr.dw_attr = attr_kind;
4846 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4847 attr.dw_attr_val.val_entry = NULL;
4848 attr.dw_attr_val.v.val_fde_index = targ_fde;
4849 add_dwarf_attr (die, &attr);
4850 }
4851
4852 /* Add a location description attribute value to a DIE. */
4853
4854 static inline void
4855 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4856 {
4857 dw_attr_node attr;
4858
4859 attr.dw_attr = attr_kind;
4860 attr.dw_attr_val.val_class = dw_val_class_loc;
4861 attr.dw_attr_val.val_entry = NULL;
4862 attr.dw_attr_val.v.val_loc = loc;
4863 add_dwarf_attr (die, &attr);
4864 }
4865
4866 static inline dw_loc_descr_ref
4867 AT_loc (dw_attr_node *a)
4868 {
4869 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4870 return a->dw_attr_val.v.val_loc;
4871 }
4872
4873 static inline void
4874 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4875 {
4876 dw_attr_node attr;
4877
4878 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4879 return;
4880
4881 attr.dw_attr = attr_kind;
4882 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4883 attr.dw_attr_val.val_entry = NULL;
4884 attr.dw_attr_val.v.val_loc_list = loc_list;
4885 add_dwarf_attr (die, &attr);
4886 have_location_lists = true;
4887 }
4888
4889 static inline dw_loc_list_ref
4890 AT_loc_list (dw_attr_node *a)
4891 {
4892 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4893 return a->dw_attr_val.v.val_loc_list;
4894 }
4895
4896 /* Add a view list attribute to DIE. It must have a DW_AT_location
4897 attribute, because the view list complements the location list. */
4898
4899 static inline void
4900 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4901 {
4902 dw_attr_node attr;
4903
4904 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4905 return;
4906
4907 attr.dw_attr = attr_kind;
4908 attr.dw_attr_val.val_class = dw_val_class_view_list;
4909 attr.dw_attr_val.val_entry = NULL;
4910 attr.dw_attr_val.v.val_view_list = die;
4911 add_dwarf_attr (die, &attr);
4912 gcc_checking_assert (get_AT (die, DW_AT_location));
4913 gcc_assert (have_location_lists);
4914 }
4915
4916 /* Return a pointer to the location list referenced by the attribute.
4917 If the named attribute is a view list, look up the corresponding
4918 DW_AT_location attribute and return its location list. */
4919
4920 static inline dw_loc_list_ref *
4921 AT_loc_list_ptr (dw_attr_node *a)
4922 {
4923 gcc_assert (a);
4924 switch (AT_class (a))
4925 {
4926 case dw_val_class_loc_list:
4927 return &a->dw_attr_val.v.val_loc_list;
4928 case dw_val_class_view_list:
4929 {
4930 dw_attr_node *l;
4931 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4932 if (!l)
4933 return NULL;
4934 gcc_checking_assert (l + 1 == a);
4935 return AT_loc_list_ptr (l);
4936 }
4937 default:
4938 gcc_unreachable ();
4939 }
4940 }
4941
4942 /* Return the location attribute value associated with a view list
4943 attribute value. */
4944
4945 static inline dw_val_node *
4946 view_list_to_loc_list_val_node (dw_val_node *val)
4947 {
4948 gcc_assert (val->val_class == dw_val_class_view_list);
4949 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4950 if (!loc)
4951 return NULL;
4952 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4953 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4954 return &loc->dw_attr_val;
4955 }
4956
4957 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4958 {
4959 static hashval_t hash (addr_table_entry *);
4960 static bool equal (addr_table_entry *, addr_table_entry *);
4961 };
4962
4963 /* Table of entries into the .debug_addr section. */
4964
4965 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4966
4967 /* Hash an address_table_entry. */
4968
4969 hashval_t
4970 addr_hasher::hash (addr_table_entry *a)
4971 {
4972 inchash::hash hstate;
4973 switch (a->kind)
4974 {
4975 case ate_kind_rtx:
4976 hstate.add_int (0);
4977 break;
4978 case ate_kind_rtx_dtprel:
4979 hstate.add_int (1);
4980 break;
4981 case ate_kind_label:
4982 return htab_hash_string (a->addr.label);
4983 default:
4984 gcc_unreachable ();
4985 }
4986 inchash::add_rtx (a->addr.rtl, hstate);
4987 return hstate.end ();
4988 }
4989
4990 /* Determine equality for two address_table_entries. */
4991
4992 bool
4993 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4994 {
4995 if (a1->kind != a2->kind)
4996 return 0;
4997 switch (a1->kind)
4998 {
4999 case ate_kind_rtx:
5000 case ate_kind_rtx_dtprel:
5001 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
5002 case ate_kind_label:
5003 return strcmp (a1->addr.label, a2->addr.label) == 0;
5004 default:
5005 gcc_unreachable ();
5006 }
5007 }
5008
5009 /* Initialize an addr_table_entry. */
5010
5011 void
5012 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5013 {
5014 e->kind = kind;
5015 switch (kind)
5016 {
5017 case ate_kind_rtx:
5018 case ate_kind_rtx_dtprel:
5019 e->addr.rtl = (rtx) addr;
5020 break;
5021 case ate_kind_label:
5022 e->addr.label = (char *) addr;
5023 break;
5024 }
5025 e->refcount = 0;
5026 e->index = NO_INDEX_ASSIGNED;
5027 }
5028
5029 /* Look up or create the address table entry for ADDR and bump its
5030 reference count. Defer setting an index until output time. */
5031
5032 static addr_table_entry *
5033 add_addr_table_entry (void *addr, enum ate_kind kind)
5034 {
5035 addr_table_entry *node;
5036 addr_table_entry finder;
5037
5038 gcc_assert (dwarf_split_debug_info);
5039 if (! addr_index_table)
5040 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5041 init_addr_table_entry (&finder, kind, addr);
5042 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5043
5044 if (*slot == HTAB_EMPTY_ENTRY)
5045 {
5046 node = ggc_cleared_alloc<addr_table_entry> ();
5047 init_addr_table_entry (node, kind, addr);
5048 *slot = node;
5049 }
5050 else
5051 node = *slot;
5052
5053 node->refcount++;
5054 return node;
5055 }
5056
5057 /* Remove an entry from the addr table by decrementing its refcount.
5058 Strictly, decrementing the refcount would be enough, but the
5059 assertion that the entry is actually in the table has found
5060 bugs. */
5061
5062 static void
5063 remove_addr_table_entry (addr_table_entry *entry)
5064 {
5065 gcc_assert (dwarf_split_debug_info && addr_index_table);
5066 /* After an index is assigned, the table is frozen. */
5067 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5068 entry->refcount--;
5069 }
5070
5071 /* Given a location list, remove all addresses it refers to from the
5072 address_table. */
5073
5074 static void
5075 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5076 {
5077 for (; descr; descr = descr->dw_loc_next)
5078 if (descr->dw_loc_oprnd1.val_entry != NULL)
5079 {
5080 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5081 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5082 }
5083 }
5084
5085 /* A helper function for dwarf2out_finish called through
5086 htab_traverse. Assign an addr_table_entry its index. All entries
5087 must be collected into the table when this function is called,
5088 because the indexing code relies on htab_traverse to traverse nodes
5089 in the same order for each run. */
5090
5091 int
5092 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5093 {
5094 addr_table_entry *node = *h;
5095
5096 /* Don't index unreferenced nodes. */
5097 if (node->refcount == 0)
5098 return 1;
5099
5100 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5101 node->index = *index;
5102 *index += 1;
5103
5104 return 1;
5105 }
5106
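/* The final numbering pass elsewhere in this file can be sketched roughly as
   follows (assuming hash_table's traverse_noresize member template):

       unsigned int index = 0;
       addr_index_table->traverse_noresize
	 <unsigned int *, index_addr_table_entry> (&index);

   which visits the entries in a stable order and hands each one to
   index_addr_table_entry above.  */
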
5107 /* Add an address constant attribute value to a DIE. When using
5108 dwarf_split_debug_info, address attributes in dies destined for the
5109 final executable should be direct references--setting the parameter
5110 force_direct ensures this behavior. */
5111
5112 static inline void
5113 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5114 bool force_direct)
5115 {
5116 dw_attr_node attr;
5117
5118 attr.dw_attr = attr_kind;
5119 attr.dw_attr_val.val_class = dw_val_class_addr;
5120 attr.dw_attr_val.v.val_addr = addr;
5121 if (dwarf_split_debug_info && !force_direct)
5122 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5123 else
5124 attr.dw_attr_val.val_entry = NULL;
5125 add_dwarf_attr (die, &attr);
5126 }
5127
5128 /* Get the RTX from an address DIE attribute. */
5129
5130 static inline rtx
5131 AT_addr (dw_attr_node *a)
5132 {
5133 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5134 return a->dw_attr_val.v.val_addr;
5135 }
5136
5137 /* Add a file attribute value to a DIE. */
5138
5139 static inline void
5140 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5141 struct dwarf_file_data *fd)
5142 {
5143 dw_attr_node attr;
5144
5145 attr.dw_attr = attr_kind;
5146 attr.dw_attr_val.val_class = dw_val_class_file;
5147 attr.dw_attr_val.val_entry = NULL;
5148 attr.dw_attr_val.v.val_file = fd;
5149 add_dwarf_attr (die, &attr);
5150 }
5151
5152 /* Get the dwarf_file_data from a file DIE attribute. */
5153
5154 static inline struct dwarf_file_data *
5155 AT_file (dw_attr_node *a)
5156 {
5157 gcc_assert (a && (AT_class (a) == dw_val_class_file
5158 || AT_class (a) == dw_val_class_file_implicit));
5159 return a->dw_attr_val.v.val_file;
5160 }
5161
5162 /* Add a vms delta attribute value to a DIE. */
5163
5164 static inline void
5165 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5166 const char *lbl1, const char *lbl2)
5167 {
5168 dw_attr_node attr;
5169
5170 attr.dw_attr = attr_kind;
5171 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5172 attr.dw_attr_val.val_entry = NULL;
5173 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5174 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5175 add_dwarf_attr (die, &attr);
5176 }
5177
5178 /* Add a symbolic view identifier attribute value to a DIE. */
5179
5180 static inline void
5181 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5182 const char *view_label)
5183 {
5184 dw_attr_node attr;
5185
5186 attr.dw_attr = attr_kind;
5187 attr.dw_attr_val.val_class = dw_val_class_symview;
5188 attr.dw_attr_val.val_entry = NULL;
5189 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5190 add_dwarf_attr (die, &attr);
5191 }
5192
5193 /* Add a label identifier attribute value to a DIE. */
5194
5195 static inline void
5196 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5197 const char *lbl_id)
5198 {
5199 dw_attr_node attr;
5200
5201 attr.dw_attr = attr_kind;
5202 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5203 attr.dw_attr_val.val_entry = NULL;
5204 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5205 if (dwarf_split_debug_info)
5206 attr.dw_attr_val.val_entry
5207 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5208 ate_kind_label);
5209 add_dwarf_attr (die, &attr);
5210 }
5211
5212 /* Add a section offset attribute value to a DIE, an offset into the
5213 debug_line section. */
5214
5215 static inline void
5216 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5217 const char *label)
5218 {
5219 dw_attr_node attr;
5220
5221 attr.dw_attr = attr_kind;
5222 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5223 attr.dw_attr_val.val_entry = NULL;
5224 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5225 add_dwarf_attr (die, &attr);
5226 }
5227
5228 /* Add a section offset attribute value to a DIE, an offset into the
5229 debug_loclists section. */
5230
5231 static inline void
5232 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5233 const char *label)
5234 {
5235 dw_attr_node attr;
5236
5237 attr.dw_attr = attr_kind;
5238 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5239 attr.dw_attr_val.val_entry = NULL;
5240 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5241 add_dwarf_attr (die, &attr);
5242 }
5243
5244 /* Add a section offset attribute value to a DIE, an offset into the
5245 debug_macinfo section. */
5246
5247 static inline void
5248 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5249 const char *label)
5250 {
5251 dw_attr_node attr;
5252
5253 attr.dw_attr = attr_kind;
5254 attr.dw_attr_val.val_class = dw_val_class_macptr;
5255 attr.dw_attr_val.val_entry = NULL;
5256 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5257 add_dwarf_attr (die, &attr);
5258 }
5259
5260 /* Add an offset attribute value to a DIE. */
5261
5262 static inline void
5263 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5264 unsigned HOST_WIDE_INT offset)
5265 {
5266 dw_attr_node attr;
5267
5268 attr.dw_attr = attr_kind;
5269 attr.dw_attr_val.val_class = dw_val_class_offset;
5270 attr.dw_attr_val.val_entry = NULL;
5271 attr.dw_attr_val.v.val_offset = offset;
5272 add_dwarf_attr (die, &attr);
5273 }
5274
5275 /* Add a range_list attribute value to a DIE. When using
5276 dwarf_split_debug_info, address attributes in dies destined for the
5277 final executable should be direct references--setting the parameter
5278 force_direct ensures this behavior. */
5279
5280 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5281 #define RELOCATED_OFFSET (NULL)
5282
5283 static void
5284 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5285 long unsigned int offset, bool force_direct)
5286 {
5287 dw_attr_node attr;
5288
5289 attr.dw_attr = attr_kind;
5290 attr.dw_attr_val.val_class = dw_val_class_range_list;
5291 /* For the range_list attribute, use val_entry to store whether the
5292 offset should follow split-debug-info or normal semantics. This
5293 value is read in output_range_list_offset. */
5294 if (dwarf_split_debug_info && !force_direct)
5295 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5296 else
5297 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5298 attr.dw_attr_val.v.val_offset = offset;
5299 add_dwarf_attr (die, &attr);
5300 }
5301
5302 /* Return the start label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta1 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl1;
5309 }
5310
5311 /* Return the end label of a delta attribute. */
5312
5313 static inline const char *
5314 AT_vms_delta2 (dw_attr_node *a)
5315 {
5316 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5317 return a->dw_attr_val.v.val_vms_delta.lbl2;
5318 }
5319
5320 static inline const char *
5321 AT_lbl (dw_attr_node *a)
5322 {
5323 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5324 || AT_class (a) == dw_val_class_lineptr
5325 || AT_class (a) == dw_val_class_macptr
5326 || AT_class (a) == dw_val_class_loclistsptr
5327 || AT_class (a) == dw_val_class_high_pc));
5328 return a->dw_attr_val.v.val_lbl_id;
5329 }
5330
5331 /* Get the attribute of type attr_kind. */
5332
5333 static dw_attr_node *
5334 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5335 {
5336 dw_attr_node *a;
5337 unsigned ix;
5338 dw_die_ref spec = NULL;
5339
5340 if (! die)
5341 return NULL;
5342
5343 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5344 if (a->dw_attr == attr_kind)
5345 return a;
5346 else if (a->dw_attr == DW_AT_specification
5347 || a->dw_attr == DW_AT_abstract_origin)
5348 spec = AT_ref (a);
5349
5350 if (spec)
5351 return get_AT (spec, attr_kind);
5352
5353 return NULL;
5354 }
5355
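/* Note that get_AT follows DW_AT_specification and DW_AT_abstract_origin:
   for example, the definition DIE of a C++ member function, which typically
   carries only a DW_AT_specification link, yields the DW_AT_name of its
   in-class declaration DIE.  */
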
5356 /* Returns the parent of the declaration of DIE. */
5357
5358 static dw_die_ref
5359 get_die_parent (dw_die_ref die)
5360 {
5361 dw_die_ref t;
5362
5363 if (!die)
5364 return NULL;
5365
5366 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5367 || (t = get_AT_ref (die, DW_AT_specification)))
5368 die = t;
5369
5370 return die->die_parent;
5371 }
5372
5373 /* Return the "low pc" attribute value, typically associated with a subprogram
5374 DIE. Return null if the "low pc" attribute is not present, or if it
5375 cannot be represented as an assembler label identifier. */
5376
5377 static inline const char *
5378 get_AT_low_pc (dw_die_ref die)
5379 {
5380 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5381
5382 return a ? AT_lbl (a) : NULL;
5383 }
5384
5385 /* Return the "high pc" attribute value, typically associated with a subprogram
5386 DIE. Return null if the "high pc" attribute is not present, or if it
5387 cannot be represented as an assembler label identifier. */
5388
5389 static inline const char *
5390 get_AT_hi_pc (dw_die_ref die)
5391 {
5392 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5393
5394 return a ? AT_lbl (a) : NULL;
5395 }
5396
5397 /* Return the value of the string attribute designated by ATTR_KIND, or
5398 NULL if it is not present. */
5399
5400 static inline const char *
5401 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5402 {
5403 dw_attr_node *a = get_AT (die, attr_kind);
5404
5405 return a ? AT_string (a) : NULL;
5406 }
5407
5408 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5409 if it is not present. */
5410
5411 static inline int
5412 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5413 {
5414 dw_attr_node *a = get_AT (die, attr_kind);
5415
5416 return a ? AT_flag (a) : 0;
5417 }
5418
5419 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5420 if it is not present. */
5421
5422 static inline unsigned
5423 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5424 {
5425 dw_attr_node *a = get_AT (die, attr_kind);
5426
5427 return a ? AT_unsigned (a) : 0;
5428 }
5429
5430 static inline dw_die_ref
5431 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5432 {
5433 dw_attr_node *a = get_AT (die, attr_kind);
5434
5435 return a ? AT_ref (a) : NULL;
5436 }
5437
5438 static inline struct dwarf_file_data *
5439 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5440 {
5441 dw_attr_node *a = get_AT (die, attr_kind);
5442
5443 return a ? AT_file (a) : NULL;
5444 }
5445
5446 /* Return TRUE if the language is C++. */
5447
5448 static inline bool
5449 is_cxx (void)
5450 {
5451 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5452
5453 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5454 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5455 }
5456
5457 /* Return TRUE if DECL was created by the C++ frontend. */
5458
5459 static bool
5460 is_cxx (const_tree decl)
5461 {
5462 if (in_lto_p)
5463 {
5464 const_tree context = get_ultimate_context (decl);
5465 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5466 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5467 }
5468 return is_cxx ();
5469 }
5470
5471 /* Return TRUE if the language is Fortran. */
5472
5473 static inline bool
5474 is_fortran (void)
5475 {
5476 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5477
5478 return (lang == DW_LANG_Fortran77
5479 || lang == DW_LANG_Fortran90
5480 || lang == DW_LANG_Fortran95
5481 || lang == DW_LANG_Fortran03
5482 || lang == DW_LANG_Fortran08);
5483 }
5484
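/* Return TRUE if DECL was created by the Fortran frontend.  */
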
5485 static inline bool
5486 is_fortran (const_tree decl)
5487 {
5488 if (in_lto_p)
5489 {
5490 const_tree context = get_ultimate_context (decl);
5491 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5492 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5493 "GNU Fortran", 11) == 0
5494 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5495 "GNU F77") == 0);
5496 }
5497 return is_fortran ();
5498 }
5499
5500 /* Return TRUE if the language is Ada. */
5501
5502 static inline bool
5503 is_ada (void)
5504 {
5505 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5506
5507 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5508 }
5509
5510 /* Remove the specified attribute if present. Return TRUE if removal
5511 was successful. */
5512
5513 static bool
5514 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5515 {
5516 dw_attr_node *a;
5517 unsigned ix;
5518
5519 if (! die)
5520 return false;
5521
5522 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5523 if (a->dw_attr == attr_kind)
5524 {
5525 if (AT_class (a) == dw_val_class_str)
5526 if (a->dw_attr_val.v.val_str->refcount)
5527 a->dw_attr_val.v.val_str->refcount--;
5528
5529 /* vec::ordered_remove should help reduce the number of abbrevs
5530 that are needed. */
5531 die->die_attr->ordered_remove (ix);
5532 return true;
5533 }
5534 return false;
5535 }
5536
5537 /* Remove CHILD from its parent. PREV must have the property that
5538 PREV->DIE_SIB == CHILD. Clears CHILD's die_sib but leaves its
die_parent untouched. */
5539
5540 static void
5541 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5542 {
5543 gcc_assert (child->die_parent == prev->die_parent);
5544 gcc_assert (prev->die_sib == child);
5545 if (prev == child)
5546 {
5547 gcc_assert (child->die_parent->die_child == child);
5548 prev = NULL;
5549 }
5550 else
5551 prev->die_sib = child->die_sib;
5552 if (child->die_parent->die_child == child)
5553 child->die_parent->die_child = prev;
5554 child->die_sib = NULL;
5555 }
5556
5557 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5558 PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib but leaves its
die_parent untouched. */
5559
5560 static void
5561 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5562 {
5563 dw_die_ref parent = old_child->die_parent;
5564
5565 gcc_assert (parent == prev->die_parent);
5566 gcc_assert (prev->die_sib == old_child);
5567
5568 new_child->die_parent = parent;
5569 if (prev == old_child)
5570 {
5571 gcc_assert (parent->die_child == old_child);
5572 new_child->die_sib = new_child;
5573 }
5574 else
5575 {
5576 prev->die_sib = new_child;
5577 new_child->die_sib = old_child->die_sib;
5578 }
5579 if (old_child->die_parent->die_child == old_child)
5580 old_child->die_parent->die_child = new_child;
5581 old_child->die_sib = NULL;
5582 }
5583
5584 /* Move all children from OLD_PARENT to NEW_PARENT. */
5585
5586 static void
5587 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5588 {
5589 dw_die_ref c;
5590 new_parent->die_child = old_parent->die_child;
5591 old_parent->die_child = NULL;
5592 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5593 }
5594
5595 /* Remove all children of DIE whose die_tag is TAG. Do nothing if no
5596 child matches TAG. */
5597
5598 static void
5599 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5600 {
5601 dw_die_ref c;
5602
5603 c = die->die_child;
5604 if (c) do {
5605 dw_die_ref prev = c;
5606 c = c->die_sib;
5607 while (c->die_tag == tag)
5608 {
5609 remove_child_with_prev (c, prev);
5610 c->die_parent = NULL;
5611 /* Might have removed every child. */
5612 if (die->die_child == NULL)
5613 return;
5614 c = prev->die_sib;
5615 }
5616 } while (c != die->die_child);
5617 }
5618
5619 /* Add a CHILD_DIE as the last child of DIE. */
5620
5621 static void
5622 add_child_die (dw_die_ref die, dw_die_ref child_die)
5623 {
5624 /* FIXME this should probably be an assert. */
5625 if (! die || ! child_die)
5626 return;
5627 gcc_assert (die != child_die);
5628
5629 child_die->die_parent = die;
5630 if (die->die_child)
5631 {
5632 child_die->die_sib = die->die_child->die_sib;
5633 die->die_child->die_sib = child_die;
5634 }
5635 else
5636 child_die->die_sib = child_die;
5637 die->die_child = child_die;
5638 }
5639
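/* Note the representation established here: DIE's children form a circular
   singly-linked list through die_sib, and DIE->die_child points at the most
   recently added (last) child, so DIE->die_child->die_sib is the first
   child.  FOR_EACH_CHILD starts from that first child and therefore visits
   children in insertion order.  */
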
5640 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5641
5642 static void
5643 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5644 dw_die_ref after_die)
5645 {
5646 gcc_assert (die
5647 && child_die
5648 && after_die
5649 && die->die_child
5650 && die != child_die);
5651
5652 child_die->die_parent = die;
5653 child_die->die_sib = after_die->die_sib;
5654 after_die->die_sib = child_die;
5655 if (die->die_child == after_die)
5656 die->die_child = child_die;
5657 }
5658
5659 /* Unassociate CHILD from its parent, and make its parent be
5660 NEW_PARENT. */
5661
5662 static void
5663 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5664 {
5665 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5666 if (p->die_sib == child)
5667 {
5668 remove_child_with_prev (child, p);
5669 break;
5670 }
5671 add_child_die (new_parent, child);
5672 }
5673
5674 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5675 is the specification, to the end of PARENT's list of children.
5676 This is done by removing and re-adding it. */
5677
5678 static void
5679 splice_child_die (dw_die_ref parent, dw_die_ref child)
5680 {
5681 /* We want the declaration DIE from inside the class, not the
5682 specification DIE at toplevel. */
5683 if (child->die_parent != parent)
5684 {
5685 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5686
5687 if (tmp)
5688 child = tmp;
5689 }
5690
5691 gcc_assert (child->die_parent == parent
5692 || (child->die_parent
5693 == get_AT_ref (parent, DW_AT_specification)));
5694
5695 reparent_child (child, parent);
5696 }
5697
5698 /* Create and return a new die with TAG_VALUE as tag. */
5699
5700 static inline dw_die_ref
5701 new_die_raw (enum dwarf_tag tag_value)
5702 {
5703 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5704 die->die_tag = tag_value;
5705 return die;
5706 }
5707
5708 /* Create and return a new die with a parent of PARENT_DIE. If
5709 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5710 associated tree T must be supplied to determine parenthood
5711 later. */
5712
5713 static inline dw_die_ref
5714 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5715 {
5716 dw_die_ref die = new_die_raw (tag_value);
5717
5718 if (parent_die != NULL)
5719 add_child_die (parent_die, die);
5720 else
5721 {
5722 limbo_die_node *limbo_node;
5723
5724 /* No DIEs created after early dwarf should end up in limbo,
5725 because the limbo list should not persist past LTO
5726 streaming. */
5727 if (tag_value != DW_TAG_compile_unit
5728 /* These are allowed because they're generated while
5729 breaking out COMDAT units late. */
5730 && tag_value != DW_TAG_type_unit
5731 && tag_value != DW_TAG_skeleton_unit
5732 && !early_dwarf
5733 /* Allow nested functions to live in limbo because they will
5734 only temporarily live there, as decls_for_scope will fix
5735 them up. */
5736 && (TREE_CODE (t) != FUNCTION_DECL
5737 || !decl_function_context (t))
5738 /* Same as nested functions above but for types. Types that
5739 are local to a function will be fixed in
5740 decls_for_scope. */
5741 && (!RECORD_OR_UNION_TYPE_P (t)
5742 || !TYPE_CONTEXT (t)
5743 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5744 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5745 especially in the ltrans stage, but once we implement LTO
5746 dwarf streaming, we should remove this exception. */
5747 && !in_lto_p)
5748 {
5749 fprintf (stderr, "symbol ended up in limbo too late:");
5750 debug_generic_stmt (t);
5751 gcc_unreachable ();
5752 }
5753
5754 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5755 limbo_node->die = die;
5756 limbo_node->created_for = t;
5757 limbo_node->next = limbo_die_list;
5758 limbo_die_list = limbo_node;
5759 }
5760
5761 return die;
5762 }
5763
5764 /* Return the DIE associated with the given type specifier. */
5765
5766 static inline dw_die_ref
5767 lookup_type_die (tree type)
5768 {
5769 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5770 if (die && die->removed)
5771 {
5772 TYPE_SYMTAB_DIE (type) = NULL;
5773 return NULL;
5774 }
5775 return die;
5776 }
5777
5778 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5779 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5780 anonymous type instead of the one of the naming typedef. */
5781
5782 static inline dw_die_ref
5783 strip_naming_typedef (tree type, dw_die_ref type_die)
5784 {
5785 if (type
5786 && TREE_CODE (type) == RECORD_TYPE
5787 && type_die
5788 && type_die->die_tag == DW_TAG_typedef
5789 && is_naming_typedef_decl (TYPE_NAME (type)))
5790 type_die = get_AT_ref (type_die, DW_AT_type);
5791 return type_die;
5792 }
5793
5794 /* Like lookup_type_die, but if type is an anonymous type named by a
5795 typedef[1], return the DIE of the anonymous type instead of the one of
5796 the naming typedef. This is because in gen_typedef_die, we did
5797 equate the anonymous struct named by the typedef with the DIE of
5798 the naming typedef. So by default, lookup_type_die on an anonymous
5799 struct yields the DIE of the naming typedef.
5800
5801 [1]: Read the comment of is_naming_typedef_decl to learn about what
5802 a naming typedef is. */
5803
5804 static inline dw_die_ref
5805 lookup_type_die_strip_naming_typedef (tree type)
5806 {
5807 dw_die_ref die = lookup_type_die (type);
5808 return strip_naming_typedef (type, die);
5809 }
5810
5811 /* Equate a DIE to a given type specifier. */
5812
5813 static inline void
5814 equate_type_number_to_die (tree type, dw_die_ref type_die)
5815 {
5816 TYPE_SYMTAB_DIE (type) = type_die;
5817 }
5818
5819 /* Returns a hash value for X (which really is a die_struct). */
5820
5821 inline hashval_t
5822 decl_die_hasher::hash (die_node *x)
5823 {
5824 return (hashval_t) x->decl_id;
5825 }
5826
5827 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5828
5829 inline bool
5830 decl_die_hasher::equal (die_node *x, tree y)
5831 {
5832 return (x->decl_id == DECL_UID (y));
5833 }
5834
5835 /* Return the DIE associated with a given declaration. */
5836
5837 static inline dw_die_ref
5838 lookup_decl_die (tree decl)
5839 {
5840 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5841 NO_INSERT);
5842 if (!die)
5843 return NULL;
5844 if ((*die)->removed)
5845 {
5846 decl_die_table->clear_slot (die);
5847 return NULL;
5848 }
5849 return *die;
5850 }
5851
5852
5853 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5854 style reference. Return true if we found one referring to a DIE for
5855 DECL, otherwise return false. */
5856
5857 static bool
5858 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5859 unsigned HOST_WIDE_INT *off)
5860 {
5861 dw_die_ref die;
5862
5863 if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
5864 && !decl_die_table)
5865 return false;
5866
5867 if (TREE_CODE (decl) == BLOCK)
5868 die = BLOCK_DIE (decl);
5869 else
5870 die = lookup_decl_die (decl);
5871 if (!die)
5872 return false;
5873
5874 /* During WPA stage and incremental linking we currently use DIEs
5875 to store the decl <-> label + offset map. That's quite inefficient
5876 but it works for now. */
5877 if (flag_wpa
5878 || flag_incremental_link == INCREMENTAL_LINK_LTO)
5879 {
5880 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5881 if (!ref)
5882 {
5883 gcc_assert (die == comp_unit_die ());
5884 return false;
5885 }
5886 *off = ref->die_offset;
5887 *sym = ref->die_id.die_symbol;
5888 return true;
5889 }
5890
5891 /* Similar to get_ref_die_offset_label, but using the "correct"
5892 label. */
5893 *off = die->die_offset;
5894 while (die->die_parent)
5895 die = die->die_parent;
5896 /* For the containing CU DIE we compute a die_symbol in
5897 compute_comp_unit_symbol. */
5898 gcc_assert (die->die_tag == DW_TAG_compile_unit
5899 && die->die_id.die_symbol != NULL);
5900 *sym = die->die_id.die_symbol;
5901 return true;
5902 }
5903
5904 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5905
5906 static void
5907 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5908 const char *symbol, HOST_WIDE_INT offset)
5909 {
5910 /* Create a fake DIE that contains the reference. Don't use
5911 new_die because we don't want to end up in the limbo list. */
5912 dw_die_ref ref = new_die_raw (die->die_tag);
5913 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5914 ref->die_offset = offset;
5915 ref->with_offset = 1;
5916 add_AT_die_ref (die, attr_kind, ref);
5917 }
5918
5919 /* Create a DIE for DECL if required and add a reference to a DIE
5920 at SYMBOL + OFFSET which contains attributes dumped early. */
5921
5922 static void
5923 dwarf2out_register_external_die (tree decl, const char *sym,
5924 unsigned HOST_WIDE_INT off)
5925 {
5926 if (debug_info_level == DINFO_LEVEL_NONE)
5927 return;
5928
5929 if ((flag_wpa
5930 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5931 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5932
5933 dw_die_ref die
5934 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5935 gcc_assert (!die);
5936
5937 tree ctx;
5938 dw_die_ref parent = NULL;
5939 /* Need to look up a DIE for the decl's context - the containing
5940 function or translation unit. */
5941 if (TREE_CODE (decl) == BLOCK)
5942 {
5943 ctx = BLOCK_SUPERCONTEXT (decl);
5944 /* ??? We do not output DIEs for all scopes thus skip as
5945 many DIEs as needed. */
5946 while (TREE_CODE (ctx) == BLOCK
5947 && !BLOCK_DIE (ctx))
5948 ctx = BLOCK_SUPERCONTEXT (ctx);
5949 }
5950 else
5951 ctx = DECL_CONTEXT (decl);
5952 /* Peel types in the context stack. */
5953 while (ctx && TYPE_P (ctx))
5954 ctx = TYPE_CONTEXT (ctx);
5955 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5956 if (debug_info_level <= DINFO_LEVEL_TERSE)
5957 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5958 ctx = DECL_CONTEXT (ctx);
5959 if (ctx)
5960 {
5961 if (TREE_CODE (ctx) == BLOCK)
5962 parent = BLOCK_DIE (ctx);
5963 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5964 /* Keep the 1:1 association during WPA. */
5965 && !flag_wpa
5966 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5967 /* Otherwise all late annotations go to the main CU which
5968 imports the original CUs. */
5969 parent = comp_unit_die ();
5970 else if (TREE_CODE (ctx) == FUNCTION_DECL
5971 && TREE_CODE (decl) != PARM_DECL
5972 && TREE_CODE (decl) != BLOCK)
5973 /* Leave function local entities parent determination to when
5974 we process scope vars. */
5975 ;
5976 else
5977 parent = lookup_decl_die (ctx);
5978 }
5979 else
5980 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5981 Handle this case gracefully by globalizing stuff. */
5982 parent = comp_unit_die ();
5983 /* Create a DIE "stub". */
5984 switch (TREE_CODE (decl))
5985 {
5986 case TRANSLATION_UNIT_DECL:
5987 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5988 {
5989 die = comp_unit_die ();
5990 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5991 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5992 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5993 to create a DIE for the original CUs. */
5994 return;
5995 }
5996 /* Keep the 1:1 association during WPA. */
5997 die = new_die (DW_TAG_compile_unit, NULL, decl);
5998 break;
5999 case NAMESPACE_DECL:
6000 if (is_fortran (decl))
6001 die = new_die (DW_TAG_module, parent, decl);
6002 else
6003 die = new_die (DW_TAG_namespace, parent, decl);
6004 break;
6005 case FUNCTION_DECL:
6006 die = new_die (DW_TAG_subprogram, parent, decl);
6007 break;
6008 case VAR_DECL:
6009 die = new_die (DW_TAG_variable, parent, decl);
6010 break;
6011 case RESULT_DECL:
6012 die = new_die (DW_TAG_variable, parent, decl);
6013 break;
6014 case PARM_DECL:
6015 die = new_die (DW_TAG_formal_parameter, parent, decl);
6016 break;
6017 case CONST_DECL:
6018 die = new_die (DW_TAG_constant, parent, decl);
6019 break;
6020 case LABEL_DECL:
6021 die = new_die (DW_TAG_label, parent, decl);
6022 break;
6023 case BLOCK:
6024 die = new_die (DW_TAG_lexical_block, parent, decl);
6025 break;
6026 default:
6027 gcc_unreachable ();
6028 }
6029 if (TREE_CODE (decl) == BLOCK)
6030 BLOCK_DIE (decl) = die;
6031 else
6032 equate_decl_number_to_die (decl, die);
6033
6034 /* Add a reference to the DIE providing early debug at $sym + off. */
6035 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6036 }
6037
6038 /* Returns a hash value for X (which really is a var_loc_list). */
6039
6040 inline hashval_t
6041 decl_loc_hasher::hash (var_loc_list *x)
6042 {
6043 return (hashval_t) x->decl_id;
6044 }
6045
6046 /* Return nonzero if decl_id of var_loc_list X is the same as
6047 UID of decl *Y. */
6048
6049 inline bool
6050 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6051 {
6052 return (x->decl_id == DECL_UID (y));
6053 }
6054
6055 /* Return the var_loc list associated with a given declaration. */
6056
6057 static inline var_loc_list *
6058 lookup_decl_loc (const_tree decl)
6059 {
6060 if (!decl_loc_table)
6061 return NULL;
6062 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6063 }
6064
6065 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6066
6067 inline hashval_t
6068 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6069 {
6070 return (hashval_t) x->decl_id;
6071 }
6072
6073 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6074 UID of decl *Y. */
6075
6076 inline bool
6077 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6078 {
6079 return (x->decl_id == DECL_UID (y));
6080 }
6081
6082 /* Equate a DIE to a particular declaration. */
6083
6084 static void
6085 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6086 {
6087 unsigned int decl_id = DECL_UID (decl);
6088
6089 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6090 decl_die->decl_id = decl_id;
6091 }
6092
6093 /* Return how many bits PIECE EXPR_LIST covers. */
6094
6095 static HOST_WIDE_INT
6096 decl_piece_bitsize (rtx piece)
6097 {
6098 int ret = (int) GET_MODE (piece);
6099 if (ret)
6100 return ret;
6101 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6102 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6103 return INTVAL (XEXP (XEXP (piece, 0), 0));
6104 }
6105
6106 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6107
6108 static rtx *
6109 decl_piece_varloc_ptr (rtx piece)
6110 {
6111 if ((int) GET_MODE (piece))
6112 return &XEXP (piece, 0);
6113 else
6114 return &XEXP (XEXP (piece, 0), 1);
6115 }
6116
6117 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6118 NEXT is the chain of following piece nodes. */
6119
6120 static rtx_expr_list *
6121 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6122 {
6123 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6124 return alloc_EXPR_LIST (bitsize, loc_note, next);
6125 else
6126 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6127 GEN_INT (bitsize),
6128 loc_note), next);
6129 }
6130
6131 /* Return rtx that should be stored into loc field for
6132 LOC_NOTE and BITPOS/BITSIZE. */
6133
6134 static rtx
6135 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6136 HOST_WIDE_INT bitsize)
6137 {
6138 if (bitsize != -1)
6139 {
6140 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6141 if (bitpos != 0)
6142 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6143 }
6144 return loc_note;
6145 }
6146
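/* For example, construct_piece_list for a note L with BITPOS 8 and BITSIZE 16
   yields a chain of two EXPR_LISTs: an 8-bit padding piece whose location is
   NULL_RTX, followed by a 16-bit piece holding L.  Each piece stores its
   bitsize directly in the EXPR_LIST's mode field; only bitsizes outside the
   range 1 .. MAX_MACHINE_MODE fall back to the CONCAT (CONST_INT, note) form
   built in decl_piece_node.  */
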
6147 /* This function either modifies location piece list *DEST in
6148 place (if SRC and INNER are NULL), or copies location piece list
6149 *SRC to *DEST while modifying it. Location BITPOS is modified
6150 to contain LOC_NOTE; any pieces overlapping it are removed (or,
6151 when copying, not copied) and, if needed, some padding around it
6152 is added. When modifying in place, DEST should point to the
6153 EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
6154 copying, SRC points to the start of the whole list and INNER points
6155 to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
6156
6157 static void
6158 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6159 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6160 HOST_WIDE_INT bitsize, rtx loc_note)
6161 {
6162 HOST_WIDE_INT diff;
6163 bool copy = inner != NULL;
6164
6165 if (copy)
6166 {
6167 /* First copy all nodes preceding the current bitpos. */
6168 while (src != inner)
6169 {
6170 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6171 decl_piece_bitsize (*src), NULL_RTX);
6172 dest = &XEXP (*dest, 1);
6173 src = &XEXP (*src, 1);
6174 }
6175 }
6176 /* Add padding if needed. */
6177 if (bitpos != piece_bitpos)
6178 {
6179 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6180 copy ? NULL_RTX : *dest);
6181 dest = &XEXP (*dest, 1);
6182 }
6183 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6184 {
6185 gcc_assert (!copy);
6186 /* A piece with the correct bitpos and bitsize already exists;
6187 just update the location for it and return. */
6188 *decl_piece_varloc_ptr (*dest) = loc_note;
6189 return;
6190 }
6191 /* Add the piece that changed. */
6192 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6193 dest = &XEXP (*dest, 1);
6194 /* Skip over pieces that overlap it. */
6195 diff = bitpos - piece_bitpos + bitsize;
6196 if (!copy)
6197 src = dest;
6198 while (diff > 0 && *src)
6199 {
6200 rtx piece = *src;
6201 diff -= decl_piece_bitsize (piece);
6202 if (copy)
6203 src = &XEXP (piece, 1);
6204 else
6205 {
6206 *src = XEXP (piece, 1);
6207 free_EXPR_LIST_node (piece);
6208 }
6209 }
6210 /* Add padding if needed. */
6211 if (diff < 0 && *src)
6212 {
6213 if (!copy)
6214 dest = src;
6215 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6216 dest = &XEXP (*dest, 1);
6217 }
6218 if (!copy)
6219 return;
6220 /* Finally copy all nodes following it. */
6221 while (*src)
6222 {
6223 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6224 decl_piece_bitsize (*src), NULL_RTX);
6225 dest = &XEXP (*dest, 1);
6226 src = &XEXP (*src, 1);
6227 }
6228 }
6229
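/* For example, if the list already holds 16-bit pieces for bits [0,16) and
   [16,32) and a new note arrives for exactly bits [16,32), the in-place path
   above merely overwrites the location stored in the second piece; if the
   new note instead covers bits [20,28), a 4-bit padding piece is inserted
   for bits [16,20), the new piece follows it, and the overlapped old piece
   is spliced out.  */
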
6230 /* Add a variable location node to the linked list for DECL. */
6231
6232 static struct var_loc_node *
6233 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6234 {
6235 unsigned int decl_id;
6236 var_loc_list *temp;
6237 struct var_loc_node *loc = NULL;
6238 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6239
6240 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6241 {
6242 tree realdecl = DECL_DEBUG_EXPR (decl);
6243 if (handled_component_p (realdecl)
6244 || (TREE_CODE (realdecl) == MEM_REF
6245 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6246 {
6247 bool reverse;
6248 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6249 &bitsize, &reverse);
6250 if (!innerdecl
6251 || !DECL_P (innerdecl)
6252 || DECL_IGNORED_P (innerdecl)
6253 || TREE_STATIC (innerdecl)
6254 || bitsize == 0
6255 || bitpos + bitsize > 256)
6256 return NULL;
6257 decl = innerdecl;
6258 }
6259 }
6260
6261 decl_id = DECL_UID (decl);
6262 var_loc_list **slot
6263 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6264 if (*slot == NULL)
6265 {
6266 temp = ggc_cleared_alloc<var_loc_list> ();
6267 temp->decl_id = decl_id;
6268 *slot = temp;
6269 }
6270 else
6271 temp = *slot;
6272
6273 /* For PARM_DECLs try to keep around the original incoming value,
6274 even if that means we'll emit a zero-range .debug_loc entry. */
6275 if (temp->last
6276 && temp->first == temp->last
6277 && TREE_CODE (decl) == PARM_DECL
6278 && NOTE_P (temp->first->loc)
6279 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6280 && DECL_INCOMING_RTL (decl)
6281 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6282 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6283 == GET_CODE (DECL_INCOMING_RTL (decl))
6284 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6285 && (bitsize != -1
6286 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6287 NOTE_VAR_LOCATION_LOC (loc_note))
6288 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6289 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6290 {
6291 loc = ggc_cleared_alloc<var_loc_node> ();
6292 temp->first->next = loc;
6293 temp->last = loc;
6294 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6295 }
6296 else if (temp->last)
6297 {
6298 struct var_loc_node *last = temp->last, *unused = NULL;
6299 rtx *piece_loc = NULL, last_loc_note;
6300 HOST_WIDE_INT piece_bitpos = 0;
6301 if (last->next)
6302 {
6303 last = last->next;
6304 gcc_assert (last->next == NULL);
6305 }
6306 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6307 {
6308 piece_loc = &last->loc;
6309 do
6310 {
6311 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6312 if (piece_bitpos + cur_bitsize > bitpos)
6313 break;
6314 piece_bitpos += cur_bitsize;
6315 piece_loc = &XEXP (*piece_loc, 1);
6316 }
6317 while (*piece_loc);
6318 }
6319 /* TEMP->LAST here is a pointer to either the last-but-one or the
6320 last element in the chained list; LAST is a pointer to the
6321 last element. */
6322 if (label && strcmp (last->label, label) == 0 && last->view == view)
6323 {
6324 /* For SRA optimized variables if there weren't any real
6325 insns since last note, just modify the last node. */
6326 if (piece_loc != NULL)
6327 {
6328 adjust_piece_list (piece_loc, NULL, NULL,
6329 bitpos, piece_bitpos, bitsize, loc_note);
6330 return NULL;
6331 }
6332 /* If the last note doesn't cover any instructions, remove it. */
6333 if (temp->last != last)
6334 {
6335 temp->last->next = NULL;
6336 unused = last;
6337 last = temp->last;
6338 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6339 }
6340 else
6341 {
6342 gcc_assert (temp->first == temp->last
6343 || (temp->first->next == temp->last
6344 && TREE_CODE (decl) == PARM_DECL));
6345 memset (temp->last, '\0', sizeof (*temp->last));
6346 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6347 return temp->last;
6348 }
6349 }
6350 if (bitsize == -1 && NOTE_P (last->loc))
6351 last_loc_note = last->loc;
6352 else if (piece_loc != NULL
6353 && *piece_loc != NULL_RTX
6354 && piece_bitpos == bitpos
6355 && decl_piece_bitsize (*piece_loc) == bitsize)
6356 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6357 else
6358 last_loc_note = NULL_RTX;
6359 /* If the current location is the same as the end of the list,
6360 and either both or neither of the locations is uninitialized,
6361 we have nothing to do. */
6362 if (last_loc_note == NULL_RTX
6363 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6364 NOTE_VAR_LOCATION_LOC (loc_note)))
6365 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6366 != NOTE_VAR_LOCATION_STATUS (loc_note))
6367 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6368 == VAR_INIT_STATUS_UNINITIALIZED)
6369 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6370 == VAR_INIT_STATUS_UNINITIALIZED))))
6371 {
6372 /* Add LOC to the end of list and update LAST. If the last
6373 element of the list has been removed above, reuse its
6374 memory for the new node, otherwise allocate a new one. */
6375 if (unused)
6376 {
6377 loc = unused;
6378 memset (loc, '\0', sizeof (*loc));
6379 }
6380 else
6381 loc = ggc_cleared_alloc<var_loc_node> ();
6382 if (bitsize == -1 || piece_loc == NULL)
6383 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6384 else
6385 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6386 bitpos, piece_bitpos, bitsize, loc_note);
6387 last->next = loc;
6388 /* Ensure TEMP->LAST will point either to the new last but one
6389 element of the chain, or to the last element in it. */
6390 if (last != temp->last)
6391 temp->last = last;
6392 }
6393 else if (unused)
6394 ggc_free (unused);
6395 }
6396 else
6397 {
6398 loc = ggc_cleared_alloc<var_loc_node> ();
6399 temp->first = loc;
6400 temp->last = loc;
6401 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6402 }
6403 return loc;
6404 }
6405 \f
6406 /* Keep track of the number of spaces used to indent the
6407 output of the debugging routines that print the structure of
6408 the DIE internal representation. */
6409 static int print_indent;
6410
6411 /* Indent the line the number of spaces given by print_indent. */
6412
6413 static inline void
6414 print_spaces (FILE *outfile)
6415 {
6416 fprintf (outfile, "%*s", print_indent, "");
6417 }
6418
6419 /* Print a type signature in hex. */
6420
6421 static inline void
6422 print_signature (FILE *outfile, char *sig)
6423 {
6424 int i;
6425
6426 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6427 fprintf (outfile, "%02x", sig[i] & 0xff);
6428 }
6429
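/* Print the discriminant value DISCR_VALUE to OUTFILE, as an unsigned value
   if it is positive and as a signed value otherwise.  */
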
6430 static inline void
6431 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6432 {
6433 if (discr_value->pos)
6434 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6435 else
6436 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6437 }
6438
6439 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6440
6441 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6442 RECURSE, output location descriptor operations. */
6443
6444 static void
6445 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6446 {
6447 switch (val->val_class)
6448 {
6449 case dw_val_class_addr:
6450 fprintf (outfile, "address");
6451 break;
6452 case dw_val_class_offset:
6453 fprintf (outfile, "offset");
6454 break;
6455 case dw_val_class_loc:
6456 fprintf (outfile, "location descriptor");
6457 if (val->v.val_loc == NULL)
6458 fprintf (outfile, " -> <null>\n");
6459 else if (recurse)
6460 {
6461 fprintf (outfile, ":\n");
6462 print_indent += 4;
6463 print_loc_descr (val->v.val_loc, outfile);
6464 print_indent -= 4;
6465 }
6466 else
6467 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6468 break;
6469 case dw_val_class_loc_list:
6470 fprintf (outfile, "location list -> label:%s",
6471 val->v.val_loc_list->ll_symbol);
6472 break;
6473 case dw_val_class_view_list:
6474 val = view_list_to_loc_list_val_node (val);
6475 fprintf (outfile, "location list with views -> labels:%s and %s",
6476 val->v.val_loc_list->ll_symbol,
6477 val->v.val_loc_list->vl_symbol);
6478 break;
6479 case dw_val_class_range_list:
6480 fprintf (outfile, "range list");
6481 break;
6482 case dw_val_class_const:
6483 case dw_val_class_const_implicit:
6484 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6485 break;
6486 case dw_val_class_unsigned_const:
6487 case dw_val_class_unsigned_const_implicit:
6488 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6489 break;
6490 case dw_val_class_const_double:
6491 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6492 HOST_WIDE_INT_PRINT_UNSIGNED")",
6493 val->v.val_double.high,
6494 val->v.val_double.low);
6495 break;
6496 case dw_val_class_wide_int:
6497 {
6498 int i = val->v.val_wide->get_len ();
6499 fprintf (outfile, "constant (");
6500 gcc_assert (i > 0);
6501 if (val->v.val_wide->elt (i - 1) == 0)
6502 fprintf (outfile, "0x");
6503 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6504 val->v.val_wide->elt (--i));
6505 while (--i >= 0)
6506 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6507 val->v.val_wide->elt (i));
6508 fprintf (outfile, ")");
6509 break;
6510 }
6511 case dw_val_class_vec:
6512 fprintf (outfile, "floating-point or vector constant");
6513 break;
6514 case dw_val_class_flag:
6515 fprintf (outfile, "%u", val->v.val_flag);
6516 break;
6517 case dw_val_class_die_ref:
6518 if (val->v.val_die_ref.die != NULL)
6519 {
6520 dw_die_ref die = val->v.val_die_ref.die;
6521
6522 if (die->comdat_type_p)
6523 {
6524 fprintf (outfile, "die -> signature: ");
6525 print_signature (outfile,
6526 die->die_id.die_type_node->signature);
6527 }
6528 else if (die->die_id.die_symbol)
6529 {
6530 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6531 if (die->with_offset)
6532 fprintf (outfile, " + %ld", die->die_offset);
6533 }
6534 else
6535 fprintf (outfile, "die -> %ld", die->die_offset);
6536 fprintf (outfile, " (%p)", (void *) die);
6537 }
6538 else
6539 fprintf (outfile, "die -> <null>");
6540 break;
6541 case dw_val_class_vms_delta:
6542 fprintf (outfile, "delta: @slotcount(%s-%s)",
6543 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6544 break;
6545 case dw_val_class_symview:
6546 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6547 break;
6548 case dw_val_class_lbl_id:
6549 case dw_val_class_lineptr:
6550 case dw_val_class_macptr:
6551 case dw_val_class_loclistsptr:
6552 case dw_val_class_high_pc:
6553 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6554 break;
6555 case dw_val_class_str:
6556 if (val->v.val_str->str != NULL)
6557 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6558 else
6559 fprintf (outfile, "<null>");
6560 break;
6561 case dw_val_class_file:
6562 case dw_val_class_file_implicit:
6563 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6564 val->v.val_file->emitted_number);
6565 break;
6566 case dw_val_class_data8:
6567 {
6568 int i;
6569
6570 for (i = 0; i < 8; i++)
6571 fprintf (outfile, "%02x", val->v.val_data8[i]);
6572 break;
6573 }
6574 case dw_val_class_discr_value:
6575 print_discr_value (outfile, &val->v.val_discr_value);
6576 break;
6577 case dw_val_class_discr_list:
6578 for (dw_discr_list_ref node = val->v.val_discr_list;
6579 node != NULL;
6580 node = node->dw_discr_next)
6581 {
6582 if (node->dw_discr_range)
6583 {
6584 fprintf (outfile, " .. ");
6585 print_discr_value (outfile, &node->dw_discr_lower_bound);
6586 print_discr_value (outfile, &node->dw_discr_upper_bound);
6587 }
6588 else
6589 print_discr_value (outfile, &node->dw_discr_lower_bound);
6590
6591 if (node->dw_discr_next != NULL)
6592 fprintf (outfile, " | ");
6593 }
6594 default:
6595 break;
6596 }
6597 }
6598
6599 /* Likewise, for a DIE attribute. */
6600
6601 static void
6602 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6603 {
6604 print_dw_val (&a->dw_attr_val, recurse, outfile);
6605 }
6606
6607
6608 /* Print the list of operands in the LOC location description to OUTFILE. This
6609 routine is a debugging aid only. */
6610
6611 static void
6612 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6613 {
6614 dw_loc_descr_ref l = loc;
6615
6616 if (loc == NULL)
6617 {
6618 print_spaces (outfile);
6619 fprintf (outfile, "<null>\n");
6620 return;
6621 }
6622
6623 for (l = loc; l != NULL; l = l->dw_loc_next)
6624 {
6625 print_spaces (outfile);
6626 fprintf (outfile, "(%p) %s",
6627 (void *) l,
6628 dwarf_stack_op_name (l->dw_loc_opc));
6629 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6630 {
6631 fprintf (outfile, " ");
6632 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6633 }
6634 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6635 {
6636 fprintf (outfile, ", ");
6637 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6638 }
6639 fprintf (outfile, "\n");
6640 }
6641 }
6642
6643 /* Print the information associated with a given DIE, and its children.
6644 This routine is a debugging aid only. */
6645
6646 static void
6647 print_die (dw_die_ref die, FILE *outfile)
6648 {
6649 dw_attr_node *a;
6650 dw_die_ref c;
6651 unsigned ix;
6652
6653 print_spaces (outfile);
6654 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6655 die->die_offset, dwarf_tag_name (die->die_tag),
6656 (void*) die);
6657 print_spaces (outfile);
6658 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6659 fprintf (outfile, " offset: %ld", die->die_offset);
6660 fprintf (outfile, " mark: %d\n", die->die_mark);
6661
6662 if (die->comdat_type_p)
6663 {
6664 print_spaces (outfile);
6665 fprintf (outfile, " signature: ");
6666 print_signature (outfile, die->die_id.die_type_node->signature);
6667 fprintf (outfile, "\n");
6668 }
6669
6670 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6671 {
6672 print_spaces (outfile);
6673 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6674
6675 print_attribute (a, true, outfile);
6676 fprintf (outfile, "\n");
6677 }
6678
6679 if (die->die_child != NULL)
6680 {
6681 print_indent += 4;
6682 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6683 print_indent -= 4;
6684 }
6685 if (print_indent == 0)
6686 fprintf (outfile, "\n");
6687 }
6688
6689 /* Print the list of operations in the LOC location description. */
6690
6691 DEBUG_FUNCTION void
6692 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6693 {
6694 print_loc_descr (loc, stderr);
6695 }
6696
6697 /* Print the information collected for a given DIE. */
6698
6699 DEBUG_FUNCTION void
6700 debug_dwarf_die (dw_die_ref die)
6701 {
6702 print_die (die, stderr);
6703 }
6704
6705 DEBUG_FUNCTION void
6706 debug (die_struct &ref)
6707 {
6708 print_die (&ref, stderr);
6709 }
6710
6711 DEBUG_FUNCTION void
6712 debug (die_struct *ptr)
6713 {
6714 if (ptr)
6715 debug (*ptr);
6716 else
6717 fprintf (stderr, "<nil>\n");
6718 }
6719
6720
6721 /* Print all DWARF information collected for the compilation unit.
6722 This routine is a debugging aid only. */
6723
6724 DEBUG_FUNCTION void
6725 debug_dwarf (void)
6726 {
6727 print_indent = 0;
6728 print_die (comp_unit_die (), stderr);
6729 }
6730
6731 /* Verify the DIE tree structure. */
6732
6733 DEBUG_FUNCTION void
6734 verify_die (dw_die_ref die)
6735 {
6736 gcc_assert (!die->die_mark);
6737 if (die->die_parent == NULL
6738 && die->die_sib == NULL)
6739 return;
6740 /* Verify the die_sib list is cyclic. */
6741 dw_die_ref x = die;
6742 do
6743 {
6744 x->die_mark = 1;
6745 x = x->die_sib;
6746 }
6747 while (x && !x->die_mark);
6748 gcc_assert (x == die);
6749 x = die;
6750 do
6751 {
6752 /* Verify all dies have the same parent. */
6753 gcc_assert (x->die_parent == die->die_parent);
6754 if (x->die_child)
6755 {
6756 /* Verify the child has the proper parent and recurse. */
6757 gcc_assert (x->die_child->die_parent == x);
6758 verify_die (x->die_child);
6759 }
6760 x->die_mark = 0;
6761 x = x->die_sib;
6762 }
6763 while (x && x->die_mark);
6764 }
6765
6766 /* Sanity checks on DIEs. */
6767
6768 static void
6769 check_die (dw_die_ref die)
6770 {
6771 unsigned ix;
6772 dw_attr_node *a;
6773 bool inline_found = false;
6774 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6775 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6776 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6777 {
6778 switch (a->dw_attr)
6779 {
6780 case DW_AT_inline:
6781 if (a->dw_attr_val.v.val_unsigned)
6782 inline_found = true;
6783 break;
6784 case DW_AT_location:
6785 ++n_location;
6786 break;
6787 case DW_AT_low_pc:
6788 ++n_low_pc;
6789 break;
6790 case DW_AT_high_pc:
6791 ++n_high_pc;
6792 break;
6793 case DW_AT_artificial:
6794 ++n_artificial;
6795 break;
6796 case DW_AT_decl_column:
6797 ++n_decl_column;
6798 break;
6799 case DW_AT_decl_line:
6800 ++n_decl_line;
6801 break;
6802 case DW_AT_decl_file:
6803 ++n_decl_file;
6804 break;
6805 default:
6806 break;
6807 }
6808 }
6809 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6810 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6811 {
6812 fprintf (stderr, "Duplicate attributes in DIE:\n");
6813 debug_dwarf_die (die);
6814 gcc_unreachable ();
6815 }
6816 if (inline_found)
6817 {
6818 /* A debugging information entry that is a member of an abstract
6819 instance tree [that has DW_AT_inline] should not contain any
6820 attributes which describe aspects of the subroutine which vary
6821 between distinct inlined expansions or distinct out-of-line
6822 expansions. */
6823 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6824 gcc_assert (a->dw_attr != DW_AT_low_pc
6825 && a->dw_attr != DW_AT_high_pc
6826 && a->dw_attr != DW_AT_location
6827 && a->dw_attr != DW_AT_frame_base
6828 && a->dw_attr != DW_AT_call_all_calls
6829 && a->dw_attr != DW_AT_GNU_all_call_sites);
6830 }
6831 }
6832 \f
6833 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6834 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6835 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6836
6837 /* Calculate the checksum of a location expression. */
6838
6839 static inline void
6840 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6841 {
6842 int tem;
6843 inchash::hash hstate;
6844 hashval_t hash;
6845
6846 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6847 CHECKSUM (tem);
6848 hash_loc_operands (loc, hstate);
6849 hash = hstate.end();
6850 CHECKSUM (hash);
6851 }
6852
6853 /* Calculate the checksum of an attribute. */
6854
6855 static void
6856 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6857 {
6858 dw_loc_descr_ref loc;
6859 rtx r;
6860
6861 CHECKSUM (at->dw_attr);
6862
6863 /* We don't care that this was compiled with a different compiler
6864 snapshot; if the output is the same, that's what matters. */
6865 if (at->dw_attr == DW_AT_producer)
6866 return;
6867
6868 switch (AT_class (at))
6869 {
6870 case dw_val_class_const:
6871 case dw_val_class_const_implicit:
6872 CHECKSUM (at->dw_attr_val.v.val_int);
6873 break;
6874 case dw_val_class_unsigned_const:
6875 case dw_val_class_unsigned_const_implicit:
6876 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6877 break;
6878 case dw_val_class_const_double:
6879 CHECKSUM (at->dw_attr_val.v.val_double);
6880 break;
6881 case dw_val_class_wide_int:
6882 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6883 get_full_len (*at->dw_attr_val.v.val_wide)
6884 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6885 break;
6886 case dw_val_class_vec:
6887 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6888 (at->dw_attr_val.v.val_vec.length
6889 * at->dw_attr_val.v.val_vec.elt_size));
6890 break;
6891 case dw_val_class_flag:
6892 CHECKSUM (at->dw_attr_val.v.val_flag);
6893 break;
6894 case dw_val_class_str:
6895 CHECKSUM_STRING (AT_string (at));
6896 break;
6897
6898 case dw_val_class_addr:
6899 r = AT_addr (at);
6900 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6901 CHECKSUM_STRING (XSTR (r, 0));
6902 break;
6903
6904 case dw_val_class_offset:
6905 CHECKSUM (at->dw_attr_val.v.val_offset);
6906 break;
6907
6908 case dw_val_class_loc:
6909 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6910 loc_checksum (loc, ctx);
6911 break;
6912
6913 case dw_val_class_die_ref:
6914 die_checksum (AT_ref (at), ctx, mark);
6915 break;
6916
6917 case dw_val_class_fde_ref:
6918 case dw_val_class_vms_delta:
6919 case dw_val_class_symview:
6920 case dw_val_class_lbl_id:
6921 case dw_val_class_lineptr:
6922 case dw_val_class_macptr:
6923 case dw_val_class_loclistsptr:
6924 case dw_val_class_high_pc:
6925 break;
6926
6927 case dw_val_class_file:
6928 case dw_val_class_file_implicit:
6929 CHECKSUM_STRING (AT_file (at)->filename);
6930 break;
6931
6932 case dw_val_class_data8:
6933 CHECKSUM (at->dw_attr_val.v.val_data8);
6934 break;
6935
6936 default:
6937 break;
6938 }
6939 }
6940
6941 /* Calculate the checksum of a DIE. */
6942
6943 static void
6944 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6945 {
6946 dw_die_ref c;
6947 dw_attr_node *a;
6948 unsigned ix;
6949
6950 /* To avoid infinite recursion. */
6951 if (die->die_mark)
6952 {
6953 CHECKSUM (die->die_mark);
6954 return;
6955 }
6956 die->die_mark = ++(*mark);
6957
6958 CHECKSUM (die->die_tag);
6959
6960 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6961 attr_checksum (a, ctx, mark);
6962
6963 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6964 }
6965
6966 #undef CHECKSUM
6967 #undef CHECKSUM_BLOCK
6968 #undef CHECKSUM_STRING
6969
6970 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6971 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6972 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6973 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6974 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6975 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6976 #define CHECKSUM_ATTR(FOO) \
6977 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6978
6979 /* Calculate the checksum of a number in signed LEB128 format. */
6980
6981 static void
6982 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6983 {
6984 unsigned char byte;
6985 bool more;
6986
6987 while (1)
6988 {
6989 byte = (value & 0x7f);
6990 value >>= 7;
6991 more = !((value == 0 && (byte & 0x40) == 0)
6992 || (value == -1 && (byte & 0x40) != 0));
6993 if (more)
6994 byte |= 0x80;
6995 CHECKSUM (byte);
6996 if (!more)
6997 break;
6998 }
6999 }
7000
7001 /* Calculate the checksum of a number in unsigned LEB128 format. */
7002
7003 static void
7004 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7005 {
7006 while (1)
7007 {
7008 unsigned char byte = (value & 0x7f);
7009 value >>= 7;
7010 if (value != 0)
7011 /* More bytes to follow. */
7012 byte |= 0x80;
7013 CHECKSUM (byte);
7014 if (value == 0)
7015 break;
7016 }
7017 }
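
/* For illustration, using the standard LEB128 examples: a call like
   checksum_uleb128 (624485, ctx) feeds the bytes 0xe5 0x8e 0x26 into the
   MD5 context, since 624485 == (0x26 << 14) | (0x0e << 7) | 0x65 and every
   byte except the last has its high bit set. checksum_sleb128 (127, ctx)
   produces 0xff 0x00 (an extra byte keeps the sign bit of the final byte
   clear), while checksum_sleb128 (-2, ctx) is the single byte 0x7e. */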
7018
7019 /* Checksum the context of the DIE. This adds the names of any
7020 surrounding namespaces or structures to the checksum. */
7021
7022 static void
7023 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7024 {
7025 const char *name;
7026 dw_die_ref spec;
7027 int tag = die->die_tag;
7028
7029 if (tag != DW_TAG_namespace
7030 && tag != DW_TAG_structure_type
7031 && tag != DW_TAG_class_type)
7032 return;
7033
7034 name = get_AT_string (die, DW_AT_name);
7035
7036 spec = get_AT_ref (die, DW_AT_specification);
7037 if (spec != NULL)
7038 die = spec;
7039
7040 if (die->die_parent != NULL)
7041 checksum_die_context (die->die_parent, ctx);
7042
7043 CHECKSUM_ULEB128 ('C');
7044 CHECKSUM_ULEB128 (tag);
7045 if (name != NULL)
7046 CHECKSUM_STRING (name);
7047 }
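
/* For illustration: when the DIE being checksummed is declared as
   namespace N { struct S { struct T; }; }, the context contribution for T
   is, outermost scope first,
     'C' DW_TAG_namespace "N"   then   'C' DW_TAG_structure_type "S"
   where each marker letter and tag goes through CHECKSUM_ULEB128 and each
   name through CHECKSUM_STRING (which includes the trailing NUL). */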
7048
7049 /* Calculate the checksum of a location expression. */
7050
7051 static inline void
7052 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7053 {
7054 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7055 were emitted as a DW_FORM_sdata instead of a location expression. */
7056 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7057 {
7058 CHECKSUM_ULEB128 (DW_FORM_sdata);
7059 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7060 return;
7061 }
7062
7063 /* Otherwise, just checksum the raw location expression. */
7064 while (loc != NULL)
7065 {
7066 inchash::hash hstate;
7067 hashval_t hash;
7068
7069 CHECKSUM_ULEB128 (loc->dtprel);
7070 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7071 hash_loc_operands (loc, hstate);
7072 hash = hstate.end ();
7073 CHECKSUM (hash);
7074 loc = loc->dw_loc_next;
7075 }
7076 }
7077
7078 /* Calculate the checksum of an attribute. */
7079
7080 static void
7081 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7082 struct md5_ctx *ctx, int *mark)
7083 {
7084 dw_loc_descr_ref loc;
7085 rtx r;
7086
7087 if (AT_class (at) == dw_val_class_die_ref)
7088 {
7089 dw_die_ref target_die = AT_ref (at);
7090
7091 /* For pointer and reference types, we checksum only the (qualified)
7092 name of the target type (if there is a name). For friend entries,
7093 we checksum only the (qualified) name of the target type or function.
7094 This allows the checksum to remain the same whether the target type
7095 is complete or not. */
7096 if ((at->dw_attr == DW_AT_type
7097 && (tag == DW_TAG_pointer_type
7098 || tag == DW_TAG_reference_type
7099 || tag == DW_TAG_rvalue_reference_type
7100 || tag == DW_TAG_ptr_to_member_type))
7101 || (at->dw_attr == DW_AT_friend
7102 && tag == DW_TAG_friend))
7103 {
7104 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7105
7106 if (name_attr != NULL)
7107 {
7108 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7109
7110 if (decl == NULL)
7111 decl = target_die;
7112 CHECKSUM_ULEB128 ('N');
7113 CHECKSUM_ULEB128 (at->dw_attr);
7114 if (decl->die_parent != NULL)
7115 checksum_die_context (decl->die_parent, ctx);
7116 CHECKSUM_ULEB128 ('E');
7117 CHECKSUM_STRING (AT_string (name_attr));
7118 return;
7119 }
7120 }
7121
7122 /* For all other references to another DIE, we check to see if the
7123 target DIE has already been visited. If it has, we emit a
7124 backward reference; if not, we descend recursively. */
7125 if (target_die->die_mark > 0)
7126 {
7127 CHECKSUM_ULEB128 ('R');
7128 CHECKSUM_ULEB128 (at->dw_attr);
7129 CHECKSUM_ULEB128 (target_die->die_mark);
7130 }
7131 else
7132 {
7133 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7134
7135 if (decl == NULL)
7136 decl = target_die;
7137 target_die->die_mark = ++(*mark);
7138 CHECKSUM_ULEB128 ('T');
7139 CHECKSUM_ULEB128 (at->dw_attr);
7140 if (decl->die_parent != NULL)
7141 checksum_die_context (decl->die_parent, ctx);
7142 die_checksum_ordered (target_die, ctx, mark);
7143 }
7144 return;
7145 }
7146
7147 CHECKSUM_ULEB128 ('A');
7148 CHECKSUM_ULEB128 (at->dw_attr);
7149
7150 switch (AT_class (at))
7151 {
7152 case dw_val_class_const:
7153 case dw_val_class_const_implicit:
7154 CHECKSUM_ULEB128 (DW_FORM_sdata);
7155 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7156 break;
7157
7158 case dw_val_class_unsigned_const:
7159 case dw_val_class_unsigned_const_implicit:
7160 CHECKSUM_ULEB128 (DW_FORM_sdata);
7161 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7162 break;
7163
7164 case dw_val_class_const_double:
7165 CHECKSUM_ULEB128 (DW_FORM_block);
7166 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7167 CHECKSUM (at->dw_attr_val.v.val_double);
7168 break;
7169
7170 case dw_val_class_wide_int:
7171 CHECKSUM_ULEB128 (DW_FORM_block);
7172 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7173 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7174 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7175 get_full_len (*at->dw_attr_val.v.val_wide)
7176 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7177 break;
7178
7179 case dw_val_class_vec:
7180 CHECKSUM_ULEB128 (DW_FORM_block);
7181 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7182 * at->dw_attr_val.v.val_vec.elt_size);
7183 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7184 (at->dw_attr_val.v.val_vec.length
7185 * at->dw_attr_val.v.val_vec.elt_size));
7186 break;
7187
7188 case dw_val_class_flag:
7189 CHECKSUM_ULEB128 (DW_FORM_flag);
7190 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7191 break;
7192
7193 case dw_val_class_str:
7194 CHECKSUM_ULEB128 (DW_FORM_string);
7195 CHECKSUM_STRING (AT_string (at));
7196 break;
7197
7198 case dw_val_class_addr:
7199 r = AT_addr (at);
7200 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7201 CHECKSUM_ULEB128 (DW_FORM_string);
7202 CHECKSUM_STRING (XSTR (r, 0));
7203 break;
7204
7205 case dw_val_class_offset:
7206 CHECKSUM_ULEB128 (DW_FORM_sdata);
7207 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7208 break;
7209
7210 case dw_val_class_loc:
7211 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7212 loc_checksum_ordered (loc, ctx);
7213 break;
7214
7215 case dw_val_class_fde_ref:
7216 case dw_val_class_symview:
7217 case dw_val_class_lbl_id:
7218 case dw_val_class_lineptr:
7219 case dw_val_class_macptr:
7220 case dw_val_class_loclistsptr:
7221 case dw_val_class_high_pc:
7222 break;
7223
7224 case dw_val_class_file:
7225 case dw_val_class_file_implicit:
7226 CHECKSUM_ULEB128 (DW_FORM_string);
7227 CHECKSUM_STRING (AT_file (at)->filename);
7228 break;
7229
7230 case dw_val_class_data8:
7231 CHECKSUM (at->dw_attr_val.v.val_data8);
7232 break;
7233
7234 default:
7235 break;
7236 }
7237 }
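
/* Summary of the single-letter markers used by this DWARF-4 ordered
   checksum, as implemented here and in die_checksum_ordered below: each
   DIE contributes 'D' <tag>; an ordinary attribute is 'A' <attr> followed
   (for most value classes) by a form code and the value; a reference
   checksummed by name only is 'N' <attr> <context> 'E' <name>; a back
   reference to an already-visited DIE is 'R' <attr> <mark>; a reference
   that is descended into is 'T' <attr> <context> plus the target's own
   checksum; enclosing scopes contribute 'C' <tag> <name>; named nested
   types and member functions are summarized as 'S' <tag> <name>; and a
   ULEB128 0 ends each child list. */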
7238
7239 struct checksum_attributes
7240 {
7241 dw_attr_node *at_name;
7242 dw_attr_node *at_type;
7243 dw_attr_node *at_friend;
7244 dw_attr_node *at_accessibility;
7245 dw_attr_node *at_address_class;
7246 dw_attr_node *at_alignment;
7247 dw_attr_node *at_allocated;
7248 dw_attr_node *at_artificial;
7249 dw_attr_node *at_associated;
7250 dw_attr_node *at_binary_scale;
7251 dw_attr_node *at_bit_offset;
7252 dw_attr_node *at_bit_size;
7253 dw_attr_node *at_bit_stride;
7254 dw_attr_node *at_byte_size;
7255 dw_attr_node *at_byte_stride;
7256 dw_attr_node *at_const_value;
7257 dw_attr_node *at_containing_type;
7258 dw_attr_node *at_count;
7259 dw_attr_node *at_data_location;
7260 dw_attr_node *at_data_member_location;
7261 dw_attr_node *at_decimal_scale;
7262 dw_attr_node *at_decimal_sign;
7263 dw_attr_node *at_default_value;
7264 dw_attr_node *at_digit_count;
7265 dw_attr_node *at_discr;
7266 dw_attr_node *at_discr_list;
7267 dw_attr_node *at_discr_value;
7268 dw_attr_node *at_encoding;
7269 dw_attr_node *at_endianity;
7270 dw_attr_node *at_explicit;
7271 dw_attr_node *at_is_optional;
7272 dw_attr_node *at_location;
7273 dw_attr_node *at_lower_bound;
7274 dw_attr_node *at_mutable;
7275 dw_attr_node *at_ordering;
7276 dw_attr_node *at_picture_string;
7277 dw_attr_node *at_prototyped;
7278 dw_attr_node *at_small;
7279 dw_attr_node *at_segment;
7280 dw_attr_node *at_string_length;
7281 dw_attr_node *at_string_length_bit_size;
7282 dw_attr_node *at_string_length_byte_size;
7283 dw_attr_node *at_threads_scaled;
7284 dw_attr_node *at_upper_bound;
7285 dw_attr_node *at_use_location;
7286 dw_attr_node *at_use_UTF8;
7287 dw_attr_node *at_variable_parameter;
7288 dw_attr_node *at_virtuality;
7289 dw_attr_node *at_visibility;
7290 dw_attr_node *at_vtable_elem_location;
7291 };
7292
7293 /* Collect the attributes that we will want to use for the checksum. */
7294
7295 static void
7296 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7297 {
7298 dw_attr_node *a;
7299 unsigned ix;
7300
7301 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7302 {
7303 switch (a->dw_attr)
7304 {
7305 case DW_AT_name:
7306 attrs->at_name = a;
7307 break;
7308 case DW_AT_type:
7309 attrs->at_type = a;
7310 break;
7311 case DW_AT_friend:
7312 attrs->at_friend = a;
7313 break;
7314 case DW_AT_accessibility:
7315 attrs->at_accessibility = a;
7316 break;
7317 case DW_AT_address_class:
7318 attrs->at_address_class = a;
7319 break;
7320 case DW_AT_alignment:
7321 attrs->at_alignment = a;
7322 break;
7323 case DW_AT_allocated:
7324 attrs->at_allocated = a;
7325 break;
7326 case DW_AT_artificial:
7327 attrs->at_artificial = a;
7328 break;
7329 case DW_AT_associated:
7330 attrs->at_associated = a;
7331 break;
7332 case DW_AT_binary_scale:
7333 attrs->at_binary_scale = a;
7334 break;
7335 case DW_AT_bit_offset:
7336 attrs->at_bit_offset = a;
7337 break;
7338 case DW_AT_bit_size:
7339 attrs->at_bit_size = a;
7340 break;
7341 case DW_AT_bit_stride:
7342 attrs->at_bit_stride = a;
7343 break;
7344 case DW_AT_byte_size:
7345 attrs->at_byte_size = a;
7346 break;
7347 case DW_AT_byte_stride:
7348 attrs->at_byte_stride = a;
7349 break;
7350 case DW_AT_const_value:
7351 attrs->at_const_value = a;
7352 break;
7353 case DW_AT_containing_type:
7354 attrs->at_containing_type = a;
7355 break;
7356 case DW_AT_count:
7357 attrs->at_count = a;
7358 break;
7359 case DW_AT_data_location:
7360 attrs->at_data_location = a;
7361 break;
7362 case DW_AT_data_member_location:
7363 attrs->at_data_member_location = a;
7364 break;
7365 case DW_AT_decimal_scale:
7366 attrs->at_decimal_scale = a;
7367 break;
7368 case DW_AT_decimal_sign:
7369 attrs->at_decimal_sign = a;
7370 break;
7371 case DW_AT_default_value:
7372 attrs->at_default_value = a;
7373 break;
7374 case DW_AT_digit_count:
7375 attrs->at_digit_count = a;
7376 break;
7377 case DW_AT_discr:
7378 attrs->at_discr = a;
7379 break;
7380 case DW_AT_discr_list:
7381 attrs->at_discr_list = a;
7382 break;
7383 case DW_AT_discr_value:
7384 attrs->at_discr_value = a;
7385 break;
7386 case DW_AT_encoding:
7387 attrs->at_encoding = a;
7388 break;
7389 case DW_AT_endianity:
7390 attrs->at_endianity = a;
7391 break;
7392 case DW_AT_explicit:
7393 attrs->at_explicit = a;
7394 break;
7395 case DW_AT_is_optional:
7396 attrs->at_is_optional = a;
7397 break;
7398 case DW_AT_location:
7399 attrs->at_location = a;
7400 break;
7401 case DW_AT_lower_bound:
7402 attrs->at_lower_bound = a;
7403 break;
7404 case DW_AT_mutable:
7405 attrs->at_mutable = a;
7406 break;
7407 case DW_AT_ordering:
7408 attrs->at_ordering = a;
7409 break;
7410 case DW_AT_picture_string:
7411 attrs->at_picture_string = a;
7412 break;
7413 case DW_AT_prototyped:
7414 attrs->at_prototyped = a;
7415 break;
7416 case DW_AT_small:
7417 attrs->at_small = a;
7418 break;
7419 case DW_AT_segment:
7420 attrs->at_segment = a;
7421 break;
7422 case DW_AT_string_length:
7423 attrs->at_string_length = a;
7424 break;
7425 case DW_AT_string_length_bit_size:
7426 attrs->at_string_length_bit_size = a;
7427 break;
7428 case DW_AT_string_length_byte_size:
7429 attrs->at_string_length_byte_size = a;
7430 break;
7431 case DW_AT_threads_scaled:
7432 attrs->at_threads_scaled = a;
7433 break;
7434 case DW_AT_upper_bound:
7435 attrs->at_upper_bound = a;
7436 break;
7437 case DW_AT_use_location:
7438 attrs->at_use_location = a;
7439 break;
7440 case DW_AT_use_UTF8:
7441 attrs->at_use_UTF8 = a;
7442 break;
7443 case DW_AT_variable_parameter:
7444 attrs->at_variable_parameter = a;
7445 break;
7446 case DW_AT_virtuality:
7447 attrs->at_virtuality = a;
7448 break;
7449 case DW_AT_visibility:
7450 attrs->at_visibility = a;
7451 break;
7452 case DW_AT_vtable_elem_location:
7453 attrs->at_vtable_elem_location = a;
7454 break;
7455 default:
7456 break;
7457 }
7458 }
7459 }
7460
7461 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7462
7463 static void
7464 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7465 {
7466 dw_die_ref c;
7467 dw_die_ref decl;
7468 struct checksum_attributes attrs;
7469
7470 CHECKSUM_ULEB128 ('D');
7471 CHECKSUM_ULEB128 (die->die_tag);
7472
7473 memset (&attrs, 0, sizeof (attrs));
7474
7475 decl = get_AT_ref (die, DW_AT_specification);
7476 if (decl != NULL)
7477 collect_checksum_attributes (&attrs, decl);
7478 collect_checksum_attributes (&attrs, die);
7479
7480 CHECKSUM_ATTR (attrs.at_name);
7481 CHECKSUM_ATTR (attrs.at_accessibility);
7482 CHECKSUM_ATTR (attrs.at_address_class);
7483 CHECKSUM_ATTR (attrs.at_allocated);
7484 CHECKSUM_ATTR (attrs.at_artificial);
7485 CHECKSUM_ATTR (attrs.at_associated);
7486 CHECKSUM_ATTR (attrs.at_binary_scale);
7487 CHECKSUM_ATTR (attrs.at_bit_offset);
7488 CHECKSUM_ATTR (attrs.at_bit_size);
7489 CHECKSUM_ATTR (attrs.at_bit_stride);
7490 CHECKSUM_ATTR (attrs.at_byte_size);
7491 CHECKSUM_ATTR (attrs.at_byte_stride);
7492 CHECKSUM_ATTR (attrs.at_const_value);
7493 CHECKSUM_ATTR (attrs.at_containing_type);
7494 CHECKSUM_ATTR (attrs.at_count);
7495 CHECKSUM_ATTR (attrs.at_data_location);
7496 CHECKSUM_ATTR (attrs.at_data_member_location);
7497 CHECKSUM_ATTR (attrs.at_decimal_scale);
7498 CHECKSUM_ATTR (attrs.at_decimal_sign);
7499 CHECKSUM_ATTR (attrs.at_default_value);
7500 CHECKSUM_ATTR (attrs.at_digit_count);
7501 CHECKSUM_ATTR (attrs.at_discr);
7502 CHECKSUM_ATTR (attrs.at_discr_list);
7503 CHECKSUM_ATTR (attrs.at_discr_value);
7504 CHECKSUM_ATTR (attrs.at_encoding);
7505 CHECKSUM_ATTR (attrs.at_endianity);
7506 CHECKSUM_ATTR (attrs.at_explicit);
7507 CHECKSUM_ATTR (attrs.at_is_optional);
7508 CHECKSUM_ATTR (attrs.at_location);
7509 CHECKSUM_ATTR (attrs.at_lower_bound);
7510 CHECKSUM_ATTR (attrs.at_mutable);
7511 CHECKSUM_ATTR (attrs.at_ordering);
7512 CHECKSUM_ATTR (attrs.at_picture_string);
7513 CHECKSUM_ATTR (attrs.at_prototyped);
7514 CHECKSUM_ATTR (attrs.at_small);
7515 CHECKSUM_ATTR (attrs.at_segment);
7516 CHECKSUM_ATTR (attrs.at_string_length);
7517 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7518 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7519 CHECKSUM_ATTR (attrs.at_threads_scaled);
7520 CHECKSUM_ATTR (attrs.at_upper_bound);
7521 CHECKSUM_ATTR (attrs.at_use_location);
7522 CHECKSUM_ATTR (attrs.at_use_UTF8);
7523 CHECKSUM_ATTR (attrs.at_variable_parameter);
7524 CHECKSUM_ATTR (attrs.at_virtuality);
7525 CHECKSUM_ATTR (attrs.at_visibility);
7526 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7527 CHECKSUM_ATTR (attrs.at_type);
7528 CHECKSUM_ATTR (attrs.at_friend);
7529 CHECKSUM_ATTR (attrs.at_alignment);
7530
7531 /* Checksum the child DIEs. */
7532 c = die->die_child;
7533 if (c) do {
7534 dw_attr_node *name_attr;
7535
7536 c = c->die_sib;
7537 name_attr = get_AT (c, DW_AT_name);
7538 if (is_template_instantiation (c))
7539 {
7540 /* Ignore instantiations of member type and function templates. */
7541 }
7542 else if (name_attr != NULL
7543 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7544 {
7545 /* Use a shallow checksum for named nested types and member
7546 functions. */
7547 CHECKSUM_ULEB128 ('S');
7548 CHECKSUM_ULEB128 (c->die_tag);
7549 CHECKSUM_STRING (AT_string (name_attr));
7550 }
7551 else
7552 {
7553 /* Use a deep checksum for other children. */
7554 /* Mark this DIE so it gets processed when unmarking. */
7555 if (c->die_mark == 0)
7556 c->die_mark = -1;
7557 die_checksum_ordered (c, ctx, mark);
7558 }
7559 } while (c != die->die_child);
7560
7561 CHECKSUM_ULEB128 (0);
7562 }
7563
7564 /* Add a type name and tag to a hash. */
7565 static void
7566 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7567 {
7568 CHECKSUM_ULEB128 (tag);
7569 CHECKSUM_STRING (name);
7570 }
7571
7572 #undef CHECKSUM
7573 #undef CHECKSUM_STRING
7574 #undef CHECKSUM_ATTR
7575 #undef CHECKSUM_SLEB128
7576 #undef CHECKSUM_ULEB128
7577
7578 /* Generate the type signature for DIE. This is computed by generating an
7579 MD5 checksum over the DIE's tag, its relevant attributes, and its
7580 children. Attributes that are references to other DIEs are processed
7581 by recursion, using the MARK field to prevent infinite recursion.
7582 If the DIE is nested inside a namespace or another type, we also
7583 need to include that context in the signature. The lower 64 bits
7584 of the resulting MD5 checksum comprise the signature. */
7585
7586 static void
7587 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7588 {
7589 int mark;
7590 const char *name;
7591 unsigned char checksum[16];
7592 struct md5_ctx ctx;
7593 dw_die_ref decl;
7594 dw_die_ref parent;
7595
7596 name = get_AT_string (die, DW_AT_name);
7597 decl = get_AT_ref (die, DW_AT_specification);
7598 parent = get_die_parent (die);
7599
7600 /* First, compute a signature for just the type name (and its surrounding
7601 context, if any). This is stored in the type unit DIE for link-time
7602 ODR (one-definition rule) checking. */
7603
7604 if (is_cxx () && name != NULL)
7605 {
7606 md5_init_ctx (&ctx);
7607
7608 /* Checksum the names of surrounding namespaces and structures. */
7609 if (parent != NULL)
7610 checksum_die_context (parent, &ctx);
7611
7612 /* Checksum the current DIE. */
7613 die_odr_checksum (die->die_tag, name, &ctx);
7614 md5_finish_ctx (&ctx, checksum);
7615
7616 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7617 }
7618
7619 /* Next, compute the complete type signature. */
7620
7621 md5_init_ctx (&ctx);
7622 mark = 1;
7623 die->die_mark = mark;
7624
7625 /* Checksum the names of surrounding namespaces and structures. */
7626 if (parent != NULL)
7627 checksum_die_context (parent, &ctx);
7628
7629 /* Checksum the DIE and its children. */
7630 die_checksum_ordered (die, &ctx, &mark);
7631 unmark_all_dies (die);
7632 md5_finish_ctx (&ctx, checksum);
7633
7634 /* Store the signature in the type node and link the type DIE and the
7635 type node together. */
7636 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7637 DWARF_TYPE_SIGNATURE_SIZE);
7638 die->comdat_type_p = true;
7639 die->die_id.die_type_node = type_node;
7640 type_node->type_die = die;
7641
7642 /* If the DIE is a specification, link its declaration to the type node
7643 as well. */
7644 if (decl != NULL)
7645 {
7646 decl->comdat_type_p = true;
7647 decl->die_id.die_type_node = type_node;
7648 }
7649 }
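
/* For illustration: if the MD5 digest of the full ordered checksum is the
   sixteen bytes c0 c1 ... c15, the signature stored in TYPE_NODE is the
   trailing DWARF_TYPE_SIGNATURE_SIZE bytes (c8 ... c15 for the usual
   eight-byte size), which is what other units later use when they refer
   to this type by signature (typically via DW_FORM_ref_sig8). For C++,
   the trailing eight bytes of the name-only digest become the
   DW_AT_GNU_odr_signature data8 value on the type unit root. */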
7650
7651 /* Do the location expressions look the same? */
7652 static inline int
7653 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7654 {
7655 return loc1->dw_loc_opc == loc2->dw_loc_opc
7656 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7657 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7658 }
7659
7660 /* Do the values look the same? */
7661 static int
7662 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7663 {
7664 dw_loc_descr_ref loc1, loc2;
7665 rtx r1, r2;
7666
7667 if (v1->val_class != v2->val_class)
7668 return 0;
7669
7670 switch (v1->val_class)
7671 {
7672 case dw_val_class_const:
7673 case dw_val_class_const_implicit:
7674 return v1->v.val_int == v2->v.val_int;
7675 case dw_val_class_unsigned_const:
7676 case dw_val_class_unsigned_const_implicit:
7677 return v1->v.val_unsigned == v2->v.val_unsigned;
7678 case dw_val_class_const_double:
7679 return v1->v.val_double.high == v2->v.val_double.high
7680 && v1->v.val_double.low == v2->v.val_double.low;
7681 case dw_val_class_wide_int:
7682 return *v1->v.val_wide == *v2->v.val_wide;
7683 case dw_val_class_vec:
7684 if (v1->v.val_vec.length != v2->v.val_vec.length
7685 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7686 return 0;
7687 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7688 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7689 return 0;
7690 return 1;
7691 case dw_val_class_flag:
7692 return v1->v.val_flag == v2->v.val_flag;
7693 case dw_val_class_str:
7694 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7695
7696 case dw_val_class_addr:
7697 r1 = v1->v.val_addr;
7698 r2 = v2->v.val_addr;
7699 if (GET_CODE (r1) != GET_CODE (r2))
7700 return 0;
7701 return rtx_equal_p (r1, r2);
7702
7703 case dw_val_class_offset:
7704 return v1->v.val_offset == v2->v.val_offset;
7705
7706 case dw_val_class_loc:
7707 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7708 loc1 && loc2;
7709 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7710 if (!same_loc_p (loc1, loc2, mark))
7711 return 0;
7712 return !loc1 && !loc2;
7713
7714 case dw_val_class_die_ref:
7715 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7716
7717 case dw_val_class_symview:
7718 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7719
7720 case dw_val_class_fde_ref:
7721 case dw_val_class_vms_delta:
7722 case dw_val_class_lbl_id:
7723 case dw_val_class_lineptr:
7724 case dw_val_class_macptr:
7725 case dw_val_class_loclistsptr:
7726 case dw_val_class_high_pc:
7727 return 1;
7728
7729 case dw_val_class_file:
7730 case dw_val_class_file_implicit:
7731 return v1->v.val_file == v2->v.val_file;
7732
7733 case dw_val_class_data8:
7734 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7735
7736 default:
7737 return 1;
7738 }
7739 }
7740
7741 /* Do the attributes look the same? */
7742
7743 static int
7744 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7745 {
7746 if (at1->dw_attr != at2->dw_attr)
7747 return 0;
7748
7749 /* We don't care that this was compiled with a different compiler
7750 snapshot; if the output is the same, that's what matters. */
7751 if (at1->dw_attr == DW_AT_producer)
7752 return 1;
7753
7754 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7755 }
7756
7757 /* Do the DIEs look the same? */
7758
7759 static int
7760 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7761 {
7762 dw_die_ref c1, c2;
7763 dw_attr_node *a1;
7764 unsigned ix;
7765
7766 /* To avoid infinite recursion. */
7767 if (die1->die_mark)
7768 return die1->die_mark == die2->die_mark;
7769 die1->die_mark = die2->die_mark = ++(*mark);
7770
7771 if (die1->die_tag != die2->die_tag)
7772 return 0;
7773
7774 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7775 return 0;
7776
7777 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7778 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7779 return 0;
7780
7781 c1 = die1->die_child;
7782 c2 = die2->die_child;
7783 if (! c1)
7784 {
7785 if (c2)
7786 return 0;
7787 }
7788 else
7789 for (;;)
7790 {
7791 if (!same_die_p (c1, c2, mark))
7792 return 0;
7793 c1 = c1->die_sib;
7794 c2 = c2->die_sib;
7795 if (c1 == die1->die_child)
7796 {
7797 if (c2 == die2->die_child)
7798 break;
7799 else
7800 return 0;
7801 }
7802 }
7803
7804 return 1;
7805 }
7806
7807 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7808 children, and set die_symbol. */
7809
7810 static void
7811 compute_comp_unit_symbol (dw_die_ref unit_die)
7812 {
7813 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7814 const char *base = die_name ? lbasename (die_name) : "anonymous";
7815 char *name = XALLOCAVEC (char, strlen (base) + 64);
7816 char *p;
7817 int i, mark;
7818 unsigned char checksum[16];
7819 struct md5_ctx ctx;
7820
7821 /* Compute the checksum of the DIE, then append part of it as hex digits to
7822 the base filename of the unit. */
7823
7824 md5_init_ctx (&ctx);
7825 mark = 0;
7826 die_checksum (unit_die, &ctx, &mark);
7827 unmark_all_dies (unit_die);
7828 md5_finish_ctx (&ctx, checksum);
7829
7830 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7831 not start with a letter but with anything valid for filenames, and
7832 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7833 character is not a letter. */
7834 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7835 clean_symbol_name (name);
7836
7837 p = name + strlen (name);
7838 for (i = 0; i < 4; i++)
7839 {
7840 sprintf (p, "%.2x", checksum[i]);
7841 p += 2;
7842 }
7843
7844 unit_die->die_id.die_symbol = xstrdup (name);
7845 }
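
/* For illustration (with made-up digest bytes): a unit whose DW_AT_name is
   "/tmp/foo.c" has base "foo.c"; 'f' is a letter so no 'g' prefix is
   added, clean_symbol_name canonicalizes the punctuation, and the first
   four digest bytes -- say 0x1a 0x2b 0x3c 0x4d -- are appended as hex,
   giving a die_symbol along the lines of "foo_c_1a2b3c4d" (assuming
   clean_symbol_name maps the dots to underscores). Only the trailing
   eight hex digits are fixed by the code above; the rest depends on what
   clean_symbol_name does to the base name. */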
7846
7847 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7848
7849 static int
7850 is_type_die (dw_die_ref die)
7851 {
7852 switch (die->die_tag)
7853 {
7854 case DW_TAG_array_type:
7855 case DW_TAG_class_type:
7856 case DW_TAG_interface_type:
7857 case DW_TAG_enumeration_type:
7858 case DW_TAG_pointer_type:
7859 case DW_TAG_reference_type:
7860 case DW_TAG_rvalue_reference_type:
7861 case DW_TAG_string_type:
7862 case DW_TAG_structure_type:
7863 case DW_TAG_subroutine_type:
7864 case DW_TAG_union_type:
7865 case DW_TAG_ptr_to_member_type:
7866 case DW_TAG_set_type:
7867 case DW_TAG_subrange_type:
7868 case DW_TAG_base_type:
7869 case DW_TAG_const_type:
7870 case DW_TAG_file_type:
7871 case DW_TAG_packed_type:
7872 case DW_TAG_volatile_type:
7873 case DW_TAG_typedef:
7874 return 1;
7875 default:
7876 return 0;
7877 }
7878 }
7879
7880 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7881 Basically, we want to choose the bits that are likely to be shared between
7882 compilations (types) and leave out the bits that are specific to individual
7883 compilations (functions). */
7884
7885 static int
7886 is_comdat_die (dw_die_ref c)
7887 {
7888 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7889 we do for stabs. The advantage is a greater likelihood of sharing between
7890 objects that don't include headers in the same order (and therefore would
7891 put the base types in a different comdat). jason 8/28/00 */
7892
7893 if (c->die_tag == DW_TAG_base_type)
7894 return 0;
7895
7896 if (c->die_tag == DW_TAG_pointer_type
7897 || c->die_tag == DW_TAG_reference_type
7898 || c->die_tag == DW_TAG_rvalue_reference_type
7899 || c->die_tag == DW_TAG_const_type
7900 || c->die_tag == DW_TAG_volatile_type)
7901 {
7902 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7903
7904 return t ? is_comdat_die (t) : 0;
7905 }
7906
7907 return is_type_die (c);
7908 }
7909
7910 /* Returns true iff C is a compile-unit DIE. */
7911
7912 static inline bool
7913 is_cu_die (dw_die_ref c)
7914 {
7915 return c && (c->die_tag == DW_TAG_compile_unit
7916 || c->die_tag == DW_TAG_skeleton_unit);
7917 }
7918
7919 /* Returns true iff C is a unit DIE of some sort. */
7920
7921 static inline bool
7922 is_unit_die (dw_die_ref c)
7923 {
7924 return c && (c->die_tag == DW_TAG_compile_unit
7925 || c->die_tag == DW_TAG_partial_unit
7926 || c->die_tag == DW_TAG_type_unit
7927 || c->die_tag == DW_TAG_skeleton_unit);
7928 }
7929
7930 /* Returns true iff C is a namespace DIE. */
7931
7932 static inline bool
7933 is_namespace_die (dw_die_ref c)
7934 {
7935 return c && c->die_tag == DW_TAG_namespace;
7936 }
7937
7938 /* Returns true iff C is a class or structure DIE. */
7939
7940 static inline bool
7941 is_class_die (dw_die_ref c)
7942 {
7943 return c && (c->die_tag == DW_TAG_class_type
7944 || c->die_tag == DW_TAG_structure_type);
7945 }
7946
7947 /* Return non-zero if this DIE is a template parameter. */
7948
7949 static inline bool
7950 is_template_parameter (dw_die_ref die)
7951 {
7952 switch (die->die_tag)
7953 {
7954 case DW_TAG_template_type_param:
7955 case DW_TAG_template_value_param:
7956 case DW_TAG_GNU_template_template_param:
7957 case DW_TAG_GNU_template_parameter_pack:
7958 return true;
7959 default:
7960 return false;
7961 }
7962 }
7963
7964 /* Return non-zero if this DIE represents a template instantiation. */
7965
7966 static inline bool
7967 is_template_instantiation (dw_die_ref die)
7968 {
7969 dw_die_ref c;
7970
7971 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7972 return false;
7973 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7974 return false;
7975 }
7976
7977 static char *
7978 gen_internal_sym (const char *prefix)
7979 {
7980 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7981
7982 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7983 return xstrdup (buf);
7984 }
7985
7986 /* Return non-zero if this DIE is a declaration. */
7987
7988 static int
7989 is_declaration_die (dw_die_ref die)
7990 {
7991 dw_attr_node *a;
7992 unsigned ix;
7993
7994 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7995 if (a->dw_attr == DW_AT_declaration)
7996 return 1;
7997
7998 return 0;
7999 }
8000
8001 /* Return non-zero if this DIE is nested inside a subprogram. */
8002
8003 static int
8004 is_nested_in_subprogram (dw_die_ref die)
8005 {
8006 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8007
8008 if (decl == NULL)
8009 decl = die;
8010 return local_scope_p (decl);
8011 }
8012
8013 /* Return non-zero if this DIE contains a defining declaration of a
8014 subprogram. */
8015
8016 static int
8017 contains_subprogram_definition (dw_die_ref die)
8018 {
8019 dw_die_ref c;
8020
8021 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8022 return 1;
8023 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8024 return 0;
8025 }
8026
8027 /* Return non-zero if this is a type DIE that should be moved to a
8028 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8029 unit type. */
8030
8031 static int
8032 should_move_die_to_comdat (dw_die_ref die)
8033 {
8034 switch (die->die_tag)
8035 {
8036 case DW_TAG_class_type:
8037 case DW_TAG_structure_type:
8038 case DW_TAG_enumeration_type:
8039 case DW_TAG_union_type:
8040 /* Don't move declarations, inlined instances, types nested in a
8041 subprogram, or types that contain subprogram definitions. */
8042 if (is_declaration_die (die)
8043 || get_AT (die, DW_AT_abstract_origin)
8044 || is_nested_in_subprogram (die)
8045 || contains_subprogram_definition (die))
8046 return 0;
8047 return 1;
8048 case DW_TAG_array_type:
8049 case DW_TAG_interface_type:
8050 case DW_TAG_pointer_type:
8051 case DW_TAG_reference_type:
8052 case DW_TAG_rvalue_reference_type:
8053 case DW_TAG_string_type:
8054 case DW_TAG_subroutine_type:
8055 case DW_TAG_ptr_to_member_type:
8056 case DW_TAG_set_type:
8057 case DW_TAG_subrange_type:
8058 case DW_TAG_base_type:
8059 case DW_TAG_const_type:
8060 case DW_TAG_file_type:
8061 case DW_TAG_packed_type:
8062 case DW_TAG_volatile_type:
8063 case DW_TAG_typedef:
8064 default:
8065 return 0;
8066 }
8067 }
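
/* For illustration: a namespace-scope "struct point { int x, y; };" DIE
   satisfies the tests above and moves to its own type unit, while the
   same struct declared but not defined, defined inside a function, or
   containing a nested subprogram definition stays in the compile unit.
   Pointer, typedef, base and cv-qualified type DIEs never get a unit of
   their own here; when a moved type refers to them, declarations are
   copied alongside it later by copy_decls_for_unworthy_types. */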
8068
8069 /* Make a clone of DIE. */
8070
8071 static dw_die_ref
8072 clone_die (dw_die_ref die)
8073 {
8074 dw_die_ref clone = new_die_raw (die->die_tag);
8075 dw_attr_node *a;
8076 unsigned ix;
8077
8078 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8079 add_dwarf_attr (clone, a);
8080
8081 return clone;
8082 }
8083
8084 /* Make a clone of the tree rooted at DIE. */
8085
8086 static dw_die_ref
8087 clone_tree (dw_die_ref die)
8088 {
8089 dw_die_ref c;
8090 dw_die_ref clone = clone_die (die);
8091
8092 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8093
8094 return clone;
8095 }
8096
8097 /* Make a clone of DIE as a declaration. */
8098
8099 static dw_die_ref
8100 clone_as_declaration (dw_die_ref die)
8101 {
8102 dw_die_ref clone;
8103 dw_die_ref decl;
8104 dw_attr_node *a;
8105 unsigned ix;
8106
8107 /* If the DIE is already a declaration, just clone it. */
8108 if (is_declaration_die (die))
8109 return clone_die (die);
8110
8111 /* If the DIE is a specification, just clone its declaration DIE. */
8112 decl = get_AT_ref (die, DW_AT_specification);
8113 if (decl != NULL)
8114 {
8115 clone = clone_die (decl);
8116 if (die->comdat_type_p)
8117 add_AT_die_ref (clone, DW_AT_signature, die);
8118 return clone;
8119 }
8120
8121 clone = new_die_raw (die->die_tag);
8122
8123 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8124 {
8125 /* We don't want to copy over all attributes.
8126 For example we don't want DW_AT_byte_size because otherwise we will no
8127 longer have a declaration and GDB will treat it as a definition. */
8128
8129 switch (a->dw_attr)
8130 {
8131 case DW_AT_abstract_origin:
8132 case DW_AT_artificial:
8133 case DW_AT_containing_type:
8134 case DW_AT_external:
8135 case DW_AT_name:
8136 case DW_AT_type:
8137 case DW_AT_virtuality:
8138 case DW_AT_linkage_name:
8139 case DW_AT_MIPS_linkage_name:
8140 add_dwarf_attr (clone, a);
8141 break;
8142 case DW_AT_byte_size:
8143 case DW_AT_alignment:
8144 default:
8145 break;
8146 }
8147 }
8148
8149 if (die->comdat_type_p)
8150 add_AT_die_ref (clone, DW_AT_signature, die);
8151
8152 add_AT_flag (clone, DW_AT_declaration, 1);
8153 return clone;
8154 }
8155
8156
8157 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8158
8159 struct decl_table_entry
8160 {
8161 dw_die_ref orig;
8162 dw_die_ref copy;
8163 };
8164
8165 /* Helpers to manipulate hash table of copied declarations. */
8166
8167 /* Hashtable helpers. */
8168
8169 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8170 {
8171 typedef die_struct *compare_type;
8172 static inline hashval_t hash (const decl_table_entry *);
8173 static inline bool equal (const decl_table_entry *, const die_struct *);
8174 };
8175
8176 inline hashval_t
8177 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8178 {
8179 return htab_hash_pointer (entry->orig);
8180 }
8181
8182 inline bool
8183 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8184 const die_struct *entry2)
8185 {
8186 return entry1->orig == entry2;
8187 }
8188
8189 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8190
8191 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8192 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8193 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8194 to check if the ancestor has already been copied into UNIT. */
8195
8196 static dw_die_ref
8197 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8198 decl_hash_type *decl_table)
8199 {
8200 dw_die_ref parent = die->die_parent;
8201 dw_die_ref new_parent = unit;
8202 dw_die_ref copy;
8203 decl_table_entry **slot = NULL;
8204 struct decl_table_entry *entry = NULL;
8205
8206 if (decl_table)
8207 {
8208 /* Check if the entry has already been copied to UNIT. */
8209 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8210 INSERT);
8211 if (*slot != HTAB_EMPTY_ENTRY)
8212 {
8213 entry = *slot;
8214 return entry->copy;
8215 }
8216
8217 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8218 entry = XCNEW (struct decl_table_entry);
8219 entry->orig = die;
8220 entry->copy = NULL;
8221 *slot = entry;
8222 }
8223
8224 if (parent != NULL)
8225 {
8226 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8227 if (spec != NULL)
8228 parent = spec;
8229 if (!is_unit_die (parent))
8230 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8231 }
8232
8233 copy = clone_as_declaration (die);
8234 add_child_die (new_parent, copy);
8235
8236 if (decl_table)
8237 {
8238 /* Record the pointer to the copy. */
8239 entry->copy = copy;
8240 }
8241
8242 return copy;
8243 }
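
/* For illustration: copying the ancestors of a DIE for N::S::m into a type
   unit creates a declaration-only clone of the DW_TAG_namespace "N" under
   UNIT, a declaration-only clone of the class "S" under that (reusing
   clones already recorded in DECL_TABLE when present), and returns a
   declaration-only clone of "m" parented under the cloned "S". */
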
8244 /* Copy the declaration context to the new type unit DIE. This includes
8245 any surrounding namespace or type declarations. If the DIE has an
8246 AT_specification attribute, it also includes attributes and children
8247 attached to the specification, and returns a pointer to the original
8248 parent of the declaration DIE. Returns NULL otherwise. */
8249
8250 static dw_die_ref
8251 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8252 {
8253 dw_die_ref decl;
8254 dw_die_ref new_decl;
8255 dw_die_ref orig_parent = NULL;
8256
8257 decl = get_AT_ref (die, DW_AT_specification);
8258 if (decl == NULL)
8259 decl = die;
8260 else
8261 {
8262 unsigned ix;
8263 dw_die_ref c;
8264 dw_attr_node *a;
8265
8266 /* The original DIE will be changed to a declaration, and must
8267 be moved to be a child of the original declaration DIE's parent. */
8268 orig_parent = decl->die_parent;
8269
8270 /* Copy the type node pointer from the new DIE to the original
8271 declaration DIE so we can forward references later. */
8272 decl->comdat_type_p = true;
8273 decl->die_id.die_type_node = die->die_id.die_type_node;
8274
8275 remove_AT (die, DW_AT_specification);
8276
8277 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8278 {
8279 if (a->dw_attr != DW_AT_name
8280 && a->dw_attr != DW_AT_declaration
8281 && a->dw_attr != DW_AT_external)
8282 add_dwarf_attr (die, a);
8283 }
8284
8285 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8286 }
8287
8288 if (decl->die_parent != NULL
8289 && !is_unit_die (decl->die_parent))
8290 {
8291 new_decl = copy_ancestor_tree (unit, decl, NULL);
8292 if (new_decl != NULL)
8293 {
8294 remove_AT (new_decl, DW_AT_signature);
8295 add_AT_specification (die, new_decl);
8296 }
8297 }
8298
8299 return orig_parent;
8300 }
8301
8302 /* Generate the skeleton ancestor tree for the given NODE, then clone
8303 the DIE and add the clone into the tree. */
8304
8305 static void
8306 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8307 {
8308 if (node->new_die != NULL)
8309 return;
8310
8311 node->new_die = clone_as_declaration (node->old_die);
8312
8313 if (node->parent != NULL)
8314 {
8315 generate_skeleton_ancestor_tree (node->parent);
8316 add_child_die (node->parent->new_die, node->new_die);
8317 }
8318 }
8319
8320 /* Generate a skeleton tree of DIEs containing any declarations that are
8321 found in the original tree. We traverse the tree looking for declaration
8322 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8323
8324 static void
8325 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8326 {
8327 skeleton_chain_node node;
8328 dw_die_ref c;
8329 dw_die_ref first;
8330 dw_die_ref prev = NULL;
8331 dw_die_ref next = NULL;
8332
8333 node.parent = parent;
8334
8335 first = c = parent->old_die->die_child;
8336 if (c)
8337 next = c->die_sib;
8338 if (c) do {
8339 if (prev == NULL || prev->die_sib == c)
8340 prev = c;
8341 c = next;
8342 next = (c == first ? NULL : c->die_sib);
8343 node.old_die = c;
8344 node.new_die = NULL;
8345 if (is_declaration_die (c))
8346 {
8347 if (is_template_instantiation (c))
8348 {
8349 /* Instantiated templates do not need to be cloned into the
8350 type unit. Just move the DIE and its children back to
8351 the skeleton tree (in the main CU). */
8352 remove_child_with_prev (c, prev);
8353 add_child_die (parent->new_die, c);
8354 c = prev;
8355 }
8356 else if (c->comdat_type_p)
8357 {
8358 /* This is the skeleton of a type broken out by an earlier
8359 break_out_comdat_types call. Clone the existing DIE, but keep the children
8360 under the original (which is in the main CU). */
8361 dw_die_ref clone = clone_die (c);
8362
8363 replace_child (c, clone, prev);
8364 generate_skeleton_ancestor_tree (parent);
8365 add_child_die (parent->new_die, c);
8366 c = clone;
8367 continue;
8368 }
8369 else
8370 {
8371 /* Clone the existing DIE, move the original to the skeleton
8372 tree (which is in the main CU), and put the clone, with
8373 all the original's children, where the original came from
8374 (which is about to be moved to the type unit). */
8375 dw_die_ref clone = clone_die (c);
8376 move_all_children (c, clone);
8377
8378 /* If the original has a DW_AT_object_pointer attribute,
8379 it would now point to a child DIE just moved to the
8380 cloned tree, so we need to remove that attribute from
8381 the original. */
8382 remove_AT (c, DW_AT_object_pointer);
8383
8384 replace_child (c, clone, prev);
8385 generate_skeleton_ancestor_tree (parent);
8386 add_child_die (parent->new_die, c);
8387 node.old_die = clone;
8388 node.new_die = c;
8389 c = clone;
8390 }
8391 }
8392 generate_skeleton_bottom_up (&node);
8393 } while (next != NULL);
8394 }
8395
8396 /* Wrapper function for generate_skeleton_bottom_up. */
8397
8398 static dw_die_ref
8399 generate_skeleton (dw_die_ref die)
8400 {
8401 skeleton_chain_node node;
8402
8403 node.old_die = die;
8404 node.new_die = NULL;
8405 node.parent = NULL;
8406
8407 /* If this type definition is nested inside another type,
8408 and is not an instantiation of a template, always leave
8409 at least a declaration in its place. */
8410 if (die->die_parent != NULL
8411 && is_type_die (die->die_parent)
8412 && !is_template_instantiation (die))
8413 node.new_die = clone_as_declaration (die);
8414
8415 generate_skeleton_bottom_up (&node);
8416 return node.new_die;
8417 }
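
/* For illustration: when a class DIE moves to a type unit but this CU also
   emits the out-of-line definition of one of its member functions, that
   definition's DW_AT_specification must keep pointing at a DIE in the main
   CU. The skeleton produced here is a pruned copy of the class left behind
   in the main CU holding exactly those original declaration DIEs, while
   clones of them (with their children) travel with the class into the
   type unit. */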
8418
8419 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8420 declaration. The original DIE is moved to a new compile unit so that
8421 existing references to it follow it to the new location. If any of the
8422 original DIE's descendants is a declaration, we need to replace the
8423 original DIE with a skeleton tree and move the declarations back into the
8424 skeleton tree. */
8425
8426 static dw_die_ref
8427 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8428 dw_die_ref prev)
8429 {
8430 dw_die_ref skeleton, orig_parent;
8431
8432 /* Copy the declaration context to the type unit DIE. If the returned
8433 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8434 that DIE. */
8435 orig_parent = copy_declaration_context (unit, child);
8436
8437 skeleton = generate_skeleton (child);
8438 if (skeleton == NULL)
8439 remove_child_with_prev (child, prev);
8440 else
8441 {
8442 skeleton->comdat_type_p = true;
8443 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8444
8445 /* If the original DIE was a specification, we need to put
8446 the skeleton under the parent DIE of the declaration.
8447 This leaves the original declaration in the tree, but
8448 it will be pruned later since there are no longer any
8449 references to it. */
8450 if (orig_parent != NULL)
8451 {
8452 remove_child_with_prev (child, prev);
8453 add_child_die (orig_parent, skeleton);
8454 }
8455 else
8456 replace_child (child, skeleton, prev);
8457 }
8458
8459 return skeleton;
8460 }
8461
8462 static void
8463 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8464 comdat_type_node *type_node,
8465 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8466
8467 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8468 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8469 DWARF procedure references in the DW_AT_location attribute. */
8470
8471 static dw_die_ref
8472 copy_dwarf_procedure (dw_die_ref die,
8473 comdat_type_node *type_node,
8474 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8475 {
8476 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8477
8478 /* DWARF procedures are not supposed to have children... */
8479 gcc_assert (die->die_child == NULL);
8480
8481 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8482 gcc_assert (vec_safe_length (die->die_attr) == 1
8483 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8484
8485 /* Do not copy DWARF procedures more than once. */
8486 bool existed;
8487 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8488 if (existed)
8489 return die_copy;
8490
8491 die_copy = clone_die (die);
8492 add_child_die (type_node->root_die, die_copy);
8493 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8494 return die_copy;
8495 }
8496
8497 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8498 procedures in DIE's attributes. */
8499
8500 static void
8501 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8502 comdat_type_node *type_node,
8503 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8504 {
8505 dw_attr_node *a;
8506 unsigned i;
8507
8508 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8509 {
8510 dw_loc_descr_ref loc;
8511
8512 if (a->dw_attr_val.val_class != dw_val_class_loc)
8513 continue;
8514
8515 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8516 {
8517 switch (loc->dw_loc_opc)
8518 {
8519 case DW_OP_call2:
8520 case DW_OP_call4:
8521 case DW_OP_call_ref:
8522 gcc_assert (loc->dw_loc_oprnd1.val_class
8523 == dw_val_class_die_ref);
8524 loc->dw_loc_oprnd1.v.val_die_ref.die
8525 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8526 type_node,
8527 copied_dwarf_procs);
8528
8529 default:
8530 break;
8531 }
8532 }
8533 }
8534 }
8535
8536 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8537 rewrite references to point to the copies.
8538
8539 References are looked for in DIE's attributes and recursively in all its
8540 children's attributes that are location descriptions. COPIED_DWARF_PROCS is a
8541 mapping from old DWARF procedures to their copies. It is used to avoid
8542 copying the same DWARF procedure twice under TYPE_NODE. */
8543
8544 static void
8545 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8546 comdat_type_node *type_node,
8547 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8548 {
8549 dw_die_ref c;
8550
8551 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8552 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8553 type_node,
8554 copied_dwarf_procs));
8555 }
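
/* For illustration: if a moved type's DW_AT_byte_size is a location
   expression containing DW_OP_call4 <DW_TAG_dwarf_procedure>, the
   referenced dwarf procedure DIE is cloned under TYPE_NODE's root (only
   once, thanks to COPIED_DWARF_PROCS) and the DW_OP_call4 operand is
   redirected to the clone, so the call never has to cross a section
   boundary. */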
8556
8557 /* Traverse the DIE and set up additional .debug_types or .debug_info
8558 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8559 section. */
8560
8561 static void
8562 break_out_comdat_types (dw_die_ref die)
8563 {
8564 dw_die_ref c;
8565 dw_die_ref first;
8566 dw_die_ref prev = NULL;
8567 dw_die_ref next = NULL;
8568 dw_die_ref unit = NULL;
8569
8570 first = c = die->die_child;
8571 if (c)
8572 next = c->die_sib;
8573 if (c) do {
8574 if (prev == NULL || prev->die_sib == c)
8575 prev = c;
8576 c = next;
8577 next = (c == first ? NULL : c->die_sib);
8578 if (should_move_die_to_comdat (c))
8579 {
8580 dw_die_ref replacement;
8581 comdat_type_node *type_node;
8582
8583 /* Break out nested types into their own type units. */
8584 break_out_comdat_types (c);
8585
8586 /* Create a new type unit DIE as the root for the new tree, and
8587 add it to the list of comdat types. */
8588 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8589 add_AT_unsigned (unit, DW_AT_language,
8590 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8591 type_node = ggc_cleared_alloc<comdat_type_node> ();
8592 type_node->root_die = unit;
8593 type_node->next = comdat_type_list;
8594 comdat_type_list = type_node;
8595
8596 /* Generate the type signature. */
8597 generate_type_signature (c, type_node);
8598
8599 /* Copy the declaration context, attributes, and children of the
8600 declaration into the new type unit DIE, then remove this DIE
8601 from the main CU (or replace it with a skeleton if necessary). */
8602 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8603 type_node->skeleton_die = replacement;
8604
8605 /* Add the DIE to the new compunit. */
8606 add_child_die (unit, c);
8607
8608 /* Types can reference DWARF procedures for type size or data location
8609 expressions. Calls in DWARF expressions cannot target procedures
8610 that are not in the same section. So we must copy DWARF procedures
8611 along with this type and then rewrite references to them. */
8612 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8613 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8614
8615 if (replacement != NULL)
8616 c = replacement;
8617 }
8618 else if (c->die_tag == DW_TAG_namespace
8619 || c->die_tag == DW_TAG_class_type
8620 || c->die_tag == DW_TAG_structure_type
8621 || c->die_tag == DW_TAG_union_type)
8622 {
8623 /* Look for nested types that can be broken out. */
8624 break_out_comdat_types (c);
8625 }
8626 } while (next != NULL);
8627 }
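
/* For illustration: given a compile unit containing
   namespace N { struct S { ... }; } plus functions that use S, this pass
   leaves the functions and the namespace in the main CU, builds a
   DW_TAG_type_unit tree holding the full definition of S together with
   the signature computed by generate_type_signature, and leaves either
   nothing or a skeleton in S's old place, so that the main CU ends up
   referring to S through its signature rather than a direct offset. */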
8628
8629 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8630 Enter all the cloned children into the hash table decl_table. */
8631
8632 static dw_die_ref
8633 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8634 {
8635 dw_die_ref c;
8636 dw_die_ref clone;
8637 struct decl_table_entry *entry;
8638 decl_table_entry **slot;
8639
8640 if (die->die_tag == DW_TAG_subprogram)
8641 clone = clone_as_declaration (die);
8642 else
8643 clone = clone_die (die);
8644
8645 slot = decl_table->find_slot_with_hash (die,
8646 htab_hash_pointer (die), INSERT);
8647
8648 /* Assert that DIE isn't in the hash table yet. If it were already there,
8649 its ancestors would necessarily be there as well, and clone_tree_partial
8650 would not have been called. */
8651 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8652
8653 entry = XCNEW (struct decl_table_entry);
8654 entry->orig = die;
8655 entry->copy = clone;
8656 *slot = entry;
8657
8658 if (die->die_tag != DW_TAG_subprogram)
8659 FOR_EACH_CHILD (die, c,
8660 add_child_die (clone, clone_tree_partial (c, decl_table)));
8661
8662 return clone;
8663 }
8664
8665 /* Walk the DIE and its children, looking for references to incomplete
8666 or trivial types that are unmarked (i.e., that are not in the current
8667 type_unit). */
8668
8669 static void
8670 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8671 {
8672 dw_die_ref c;
8673 dw_attr_node *a;
8674 unsigned ix;
8675
8676 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8677 {
8678 if (AT_class (a) == dw_val_class_die_ref)
8679 {
8680 dw_die_ref targ = AT_ref (a);
8681 decl_table_entry **slot;
8682 struct decl_table_entry *entry;
8683
8684 if (targ->die_mark != 0 || targ->comdat_type_p)
8685 continue;
8686
8687 slot = decl_table->find_slot_with_hash (targ,
8688 htab_hash_pointer (targ),
8689 INSERT);
8690
8691 if (*slot != HTAB_EMPTY_ENTRY)
8692 {
8693 /* TARG has already been copied, so we just need to
8694 modify the reference to point to the copy. */
8695 entry = *slot;
8696 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8697 }
8698 else
8699 {
8700 dw_die_ref parent = unit;
8701 dw_die_ref copy = clone_die (targ);
8702
8703 /* Record in DECL_TABLE that TARG has been copied.
8704 Need to do this now, before the recursive call,
8705 because DECL_TABLE may be expanded and SLOT
8706 would no longer be a valid pointer. */
8707 entry = XCNEW (struct decl_table_entry);
8708 entry->orig = targ;
8709 entry->copy = copy;
8710 *slot = entry;
8711
8712 /* If TARG is not a declaration DIE, we need to copy its
8713 children. */
8714 if (!is_declaration_die (targ))
8715 {
8716 FOR_EACH_CHILD (
8717 targ, c,
8718 add_child_die (copy,
8719 clone_tree_partial (c, decl_table)));
8720 }
8721
8722 /* Make sure the cloned tree is marked as part of the
8723 type unit. */
8724 mark_dies (copy);
8725
8726 /* If TARG has surrounding context, copy its ancestor tree
8727 into the new type unit. */
8728 if (targ->die_parent != NULL
8729 && !is_unit_die (targ->die_parent))
8730 parent = copy_ancestor_tree (unit, targ->die_parent,
8731 decl_table);
8732
8733 add_child_die (parent, copy);
8734 a->dw_attr_val.v.val_die_ref.die = copy;
8735
8736 /* Make sure the newly-copied DIE is walked. If it was
8737 installed in a previously-added context, it won't
8738 get visited otherwise. */
8739 if (parent != unit)
8740 {
8741 /* Find the highest point of the newly-added tree,
8742 mark each node along the way, and walk from there. */
8743 parent->die_mark = 1;
8744 while (parent->die_parent
8745 && parent->die_parent->die_mark == 0)
8746 {
8747 parent = parent->die_parent;
8748 parent->die_mark = 1;
8749 }
8750 copy_decls_walk (unit, parent, decl_table);
8751 }
8752 }
8753 }
8754 }
8755
8756 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8757 }
8758
8759 /* Copy declarations for "unworthy" types into the new comdat section.
8760 Incomplete types, modified types, and certain other types aren't broken
8761 out into comdat sections of their own, so they don't have a signature,
8762 and we need to copy the declaration into the same section so that we
8763 don't have an external reference. */
8764
8765 static void
8766 copy_decls_for_unworthy_types (dw_die_ref unit)
8767 {
8768 mark_dies (unit);
8769 decl_hash_type decl_table (10);
8770 copy_decls_walk (unit, unit, &decl_table);
8771 unmark_dies (unit);
8772 }
8773
8774 /* Traverse the DIE and add a sibling attribute if it may have the
8775 effect of speeding up access to siblings. To save some space,
8776 avoid generating sibling attributes for DIEs without children. */
8777
8778 static void
8779 add_sibling_attributes (dw_die_ref die)
8780 {
8781 dw_die_ref c;
8782
8783 if (! die->die_child)
8784 return;
8785
8786 if (die->die_parent && die != die->die_parent->die_child)
8787 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8788
8789 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8790 }
8791
8792 /* Output all location lists for the DIE and its children. */
8793
8794 static void
8795 output_location_lists (dw_die_ref die)
8796 {
8797 dw_die_ref c;
8798 dw_attr_node *a;
8799 unsigned ix;
8800
8801 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8802 if (AT_class (a) == dw_val_class_loc_list)
8803 output_loc_list (AT_loc_list (a));
8804
8805 FOR_EACH_CHILD (die, c, output_location_lists (c));
8806 }
8807
8808 /* During assign_location_list_indexes and output_loclists_offsets this is
8809 the current index; afterwards it is the number of assigned indexes (i.e. how
8810 large the .debug_loclists* offset table should be). */
8811 static unsigned int loc_list_idx;
8812
8813 /* Output all location list offsets for the DIE and its children. */
8814
8815 static void
8816 output_loclists_offsets (dw_die_ref die)
8817 {
8818 dw_die_ref c;
8819 dw_attr_node *a;
8820 unsigned ix;
8821
8822 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8823 if (AT_class (a) == dw_val_class_loc_list)
8824 {
8825 dw_loc_list_ref l = AT_loc_list (a);
8826 if (l->offset_emitted)
8827 continue;
8828 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8829 loc_section_label, NULL);
8830 gcc_assert (l->hash == loc_list_idx);
8831 loc_list_idx++;
8832 l->offset_emitted = true;
8833 }
8834
8835 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8836 }
8837
8838 /* Recursively set indexes of location lists. */
8839
8840 static void
8841 assign_location_list_indexes (dw_die_ref die)
8842 {
8843 dw_die_ref c;
8844 dw_attr_node *a;
8845 unsigned ix;
8846
8847 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8848 if (AT_class (a) == dw_val_class_loc_list)
8849 {
8850 dw_loc_list_ref list = AT_loc_list (a);
8851 if (!list->num_assigned)
8852 {
8853 list->num_assigned = true;
8854 list->hash = loc_list_idx++;
8855 }
8856 }
8857
8858 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8859 }
8860
8861 /* We want to limit the number of external references, because they are
8862 larger than local references: a relocation takes multiple words, and
8863 even a sig8 reference is always eight bytes, whereas a local reference
8864 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8865 So if we encounter multiple external references to the same type DIE, we
8866 make a local typedef stub for it and redirect all references there.
8867
8868 This is the element of the hash table for keeping track of these
8869 references. */
8870
8871 struct external_ref
8872 {
8873 dw_die_ref type;
8874 dw_die_ref stub;
8875 unsigned n_refs;
8876 };
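
/* For illustration: if this CU contains many DW_AT_type references to a
   "struct big" whose DIE lives in another unit, a single local stub is
   built for it -- a DIE of the same tag carrying just DW_AT_signature when
   the target is a COMDAT type, otherwise an unnamed DW_TAG_typedef with
   DW_AT_type -- and the references are redirected to that stub, so only
   one of them has to pay the external-reference cost. */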
8877
8878 /* Hashtable helpers. */
8879
8880 struct external_ref_hasher : free_ptr_hash <external_ref>
8881 {
8882 static inline hashval_t hash (const external_ref *);
8883 static inline bool equal (const external_ref *, const external_ref *);
8884 };
8885
8886 inline hashval_t
8887 external_ref_hasher::hash (const external_ref *r)
8888 {
8889 dw_die_ref die = r->type;
8890 hashval_t h = 0;
8891
8892 /* We can't use the address of the DIE for hashing, because
8893 that will make the order of the stub DIEs non-deterministic. */
8894 if (! die->comdat_type_p)
8895 /* We have a symbol; use it to compute a hash. */
8896 h = htab_hash_string (die->die_id.die_symbol);
8897 else
8898 {
8899 /* We have a type signature; use a subset of the bits as the hash.
8900 The 8-byte signature is at least as large as hashval_t. */
8901 comdat_type_node *type_node = die->die_id.die_type_node;
8902 memcpy (&h, type_node->signature, sizeof (h));
8903 }
8904 return h;
8905 }
8906
8907 inline bool
8908 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8909 {
8910 return r1->type == r2->type;
8911 }
8912
8913 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8914
8915 /* Return a pointer to the external_ref for references to DIE. */
8916
8917 static struct external_ref *
8918 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8919 {
8920 struct external_ref ref, *ref_p;
8921 external_ref **slot;
8922
8923 ref.type = die;
8924 slot = map->find_slot (&ref, INSERT);
8925 if (*slot != HTAB_EMPTY_ENTRY)
8926 return *slot;
8927
8928 ref_p = XCNEW (struct external_ref);
8929 ref_p->type = die;
8930 *slot = ref_p;
8931 return ref_p;
8932 }
8933
8934 /* Subroutine of optimize_external_refs, below.
8935
8936 If we see a type skeleton, record it as our stub. If we see external
8937 references, remember how many we've seen. */
8938
8939 static void
8940 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8941 {
8942 dw_die_ref c;
8943 dw_attr_node *a;
8944 unsigned ix;
8945 struct external_ref *ref_p;
8946
8947 if (is_type_die (die)
8948 && (c = get_AT_ref (die, DW_AT_signature)))
8949 {
8950 /* This is a local skeleton; use it for local references. */
8951 ref_p = lookup_external_ref (map, c);
8952 ref_p->stub = die;
8953 }
8954
8955 /* Scan the DIE references, and remember any that refer to DIEs from
8956 other CUs (i.e. those which are not marked). */
8957 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8958 if (AT_class (a) == dw_val_class_die_ref
8959 && (c = AT_ref (a))->die_mark == 0
8960 && is_type_die (c))
8961 {
8962 ref_p = lookup_external_ref (map, c);
8963 ref_p->n_refs++;
8964 }
8965
8966 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8967 }
8968
8969 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8970 points to an external_ref, DATA is the CU we're processing. If we don't
8971 already have a local stub, and we have multiple refs, build a stub. */
8972
8973 int
8974 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8975 {
8976 struct external_ref *ref_p = *slot;
8977
8978 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8979 {
8980 /* We have multiple references to this type, so build a small stub.
8981 Both of these forms are a bit dodgy from the perspective of the
8982 DWARF standard, since technically they should have names. */
8983 dw_die_ref cu = data;
8984 dw_die_ref type = ref_p->type;
8985 dw_die_ref stub = NULL;
8986
8987 if (type->comdat_type_p)
8988 {
8989 /* If we refer to this type via sig8, use AT_signature. */
8990 stub = new_die (type->die_tag, cu, NULL_TREE);
8991 add_AT_die_ref (stub, DW_AT_signature, type);
8992 }
8993 else
8994 {
8995 /* Otherwise, use a typedef with no name. */
8996 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8997 add_AT_die_ref (stub, DW_AT_type, type);
8998 }
8999
9000 stub->die_mark++;
9001 ref_p->stub = stub;
9002 }
9003 return 1;
9004 }
9005
9006 /* DIE is a unit; look through all the DIE references to see if there are
9007 any external references to types, and if so, create local stubs for
9008 them which will be applied in build_abbrev_table. This is useful because
9009 references to local DIEs are smaller. */
9010
9011 static external_ref_hash_type *
9012 optimize_external_refs (dw_die_ref die)
9013 {
9014 external_ref_hash_type *map = new external_ref_hash_type (10);
9015 optimize_external_refs_1 (die, map);
9016 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9017 return map;
9018 }
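/* A sketch of the effect (illustrative only): if three attributes in this
   unit refer to a type DIE that lives in another CU, each would otherwise
   need a relocated DW_FORM_ref_addr or an 8-byte DW_FORM_ref_sig8.  After
   this pass they all point at a single local stub, either

     DW_TAG_<original tag>  with  DW_AT_signature -> <type signature>
   or
     DW_TAG_typedef         with  DW_AT_type -> <external type DIE>

   and each of the three references becomes a small CU-local ref.  */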
9019
9020 /* The following variables are temporaries that are set up around the
9021 build_abbrev_table call and used and released during the following
9022 optimize_abbrev_table call. */
9023
9024 /* First abbrev_id that can be optimized based on usage. */
9025 static unsigned int abbrev_opt_start;
9026
9027 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9028 abbrev_id smaller than this, because they must be already sized
9029 during build_abbrev_table). */
9030 static unsigned int abbrev_opt_base_type_end;
9031
9032 /* Vector of usage counts during build_abbrev_table. Indexed by
9033 abbrev_id - abbrev_opt_start. */
9034 static vec<unsigned int> abbrev_usage_count;
9035
9036 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9037 static vec<dw_die_ref> sorted_abbrev_dies;
9038
9039 /* The format of each DIE (and its attribute value pairs) is encoded in an
9040 abbreviation table. This routine builds the abbreviation table and assigns
9041 a unique abbreviation id for each abbreviation entry. The children of each
9042 die are visited recursively. */
9043
9044 static void
9045 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9046 {
9047 unsigned int abbrev_id = 0;
9048 dw_die_ref c;
9049 dw_attr_node *a;
9050 unsigned ix;
9051 dw_die_ref abbrev;
9052
9053 /* Scan the DIE references, and replace any that refer to
9054 DIEs from other CUs (i.e. those which are not marked) with
9055 the local stubs we built in optimize_external_refs. */
9056 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9057 if (AT_class (a) == dw_val_class_die_ref
9058 && (c = AT_ref (a))->die_mark == 0)
9059 {
9060 struct external_ref *ref_p;
9061 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9062
9063 ref_p = lookup_external_ref (extern_map, c);
9064 if (ref_p->stub && ref_p->stub != die)
9065 change_AT_die_ref (a, ref_p->stub);
9066 else
9067 /* We aren't changing this reference, so mark it external. */
9068 set_AT_ref_external (a, 1);
9069 }
9070
9071 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9072 {
9073 dw_attr_node *die_a, *abbrev_a;
9074 unsigned ix;
9075 bool ok = true;
9076
9077 if (abbrev_id == 0)
9078 continue;
9079 if (abbrev->die_tag != die->die_tag)
9080 continue;
9081 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9082 continue;
9083
9084 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9085 continue;
9086
9087 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9088 {
9089 abbrev_a = &(*abbrev->die_attr)[ix];
9090 if ((abbrev_a->dw_attr != die_a->dw_attr)
9091 || (value_format (abbrev_a) != value_format (die_a)))
9092 {
9093 ok = false;
9094 break;
9095 }
9096 }
9097 if (ok)
9098 break;
9099 }
9100
9101 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9102 {
9103 vec_safe_push (abbrev_die_table, die);
9104 if (abbrev_opt_start)
9105 abbrev_usage_count.safe_push (0);
9106 }
9107 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9108 {
9109 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9110 sorted_abbrev_dies.safe_push (die);
9111 }
9112
9113 die->die_abbrev = abbrev_id;
9114 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9115 }
9116
9117 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9118 by die_abbrev's usage count, from the most commonly used
9119 abbreviation to the least. */
9120
9121 static int
9122 die_abbrev_cmp (const void *p1, const void *p2)
9123 {
9124 dw_die_ref die1 = *(const dw_die_ref *) p1;
9125 dw_die_ref die2 = *(const dw_die_ref *) p2;
9126
9127 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9128 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9129
9130 if (die1->die_abbrev >= abbrev_opt_base_type_end
9131 && die2->die_abbrev >= abbrev_opt_base_type_end)
9132 {
9133 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9134 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9135 return -1;
9136 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9137 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9138 return 1;
9139 }
9140
9141 /* Stabilize the sort. */
9142 if (die1->die_abbrev < die2->die_abbrev)
9143 return -1;
9144 if (die1->die_abbrev > die2->die_abbrev)
9145 return 1;
9146
9147 return 0;
9148 }
9149
9150 /* Convert dw_val_class_const, dw_val_class_unsigned_const and dw_val_class_file
9151 class attributes of the DIEs between sorted_abbrev_dies[first_id] and
9152 sorted_abbrev_dies[end - 1] into dw_val_class_const_implicit,
9153 dw_val_class_unsigned_const_implicit or dw_val_class_file_implicit. */
9154
9155 static void
9156 optimize_implicit_const (unsigned int first_id, unsigned int end,
9157 vec<bool> &implicit_consts)
9158 {
9159 /* It never makes sense if there is just one DIE using the abbreviation. */
9160 if (end < first_id + 2)
9161 return;
9162
9163 dw_attr_node *a;
9164 unsigned ix, i;
9165 dw_die_ref die = sorted_abbrev_dies[first_id];
9166 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9167 if (implicit_consts[ix])
9168 {
9169 enum dw_val_class new_class = dw_val_class_none;
9170 switch (AT_class (a))
9171 {
9172 case dw_val_class_unsigned_const:
9173 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9174 continue;
9175
9176 /* The .debug_abbrev section will grow by
9177 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9178 in all the DIEs using that abbreviation. */
9179 if (constant_size (AT_unsigned (a)) * (end - first_id)
9180 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9181 continue;
9182
9183 new_class = dw_val_class_unsigned_const_implicit;
9184 break;
9185
9186 case dw_val_class_const:
9187 new_class = dw_val_class_const_implicit;
9188 break;
9189
9190 case dw_val_class_file:
9191 new_class = dw_val_class_file_implicit;
9192 break;
9193
9194 default:
9195 continue;
9196 }
9197 for (i = first_id; i < end; i++)
9198 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9199 = new_class;
9200 }
9201 }
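/* For instance (a rough sketch of the trade-off): if many DIEs share an
   abbreviation whose DW_AT_decl_file value is always the same, storing that
   value once in .debug_abbrev as DW_FORM_implicit_const removes the per-DIE
   data bytes from .debug_info at the cost of one extra sleb128 in the
   abbreviation entry; for unsigned constants the size check above only
   allows this when the .debug_info savings exceed that cost.  */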
9202
9203 /* Attempt to optimize the abbreviation table for abbreviations with id
9204 abbrev_opt_start and above. */
9205
9206 static void
9207 optimize_abbrev_table (void)
9208 {
9209 if (abbrev_opt_start
9210 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9211 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9212 {
9213 auto_vec<bool, 32> implicit_consts;
9214 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9215
9216 unsigned int abbrev_id = abbrev_opt_start - 1;
9217 unsigned int first_id = ~0U;
9218 unsigned int last_abbrev_id = 0;
9219 unsigned int i;
9220 dw_die_ref die;
9221 if (abbrev_opt_base_type_end > abbrev_opt_start)
9222 abbrev_id = abbrev_opt_base_type_end - 1;
9223 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9224 most commonly used abbreviations come first. */
9225 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9226 {
9227 dw_attr_node *a;
9228 unsigned ix;
9229
9230 /* If calc_base_type_die_sizes has been called, the CU and
9231 base types after it can't be optimized, because we've already
9232 calculated their DIE offsets. We've sorted them first. */
9233 if (die->die_abbrev < abbrev_opt_base_type_end)
9234 continue;
9235 if (die->die_abbrev != last_abbrev_id)
9236 {
9237 last_abbrev_id = die->die_abbrev;
9238 if (dwarf_version >= 5 && first_id != ~0U)
9239 optimize_implicit_const (first_id, i, implicit_consts);
9240 abbrev_id++;
9241 (*abbrev_die_table)[abbrev_id] = die;
9242 if (dwarf_version >= 5)
9243 {
9244 first_id = i;
9245 implicit_consts.truncate (0);
9246
9247 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9248 switch (AT_class (a))
9249 {
9250 case dw_val_class_const:
9251 case dw_val_class_unsigned_const:
9252 case dw_val_class_file:
9253 implicit_consts.safe_push (true);
9254 break;
9255 default:
9256 implicit_consts.safe_push (false);
9257 break;
9258 }
9259 }
9260 }
9261 else if (dwarf_version >= 5)
9262 {
9263 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9264 if (!implicit_consts[ix])
9265 continue;
9266 else
9267 {
9268 dw_attr_node *other_a
9269 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9270 if (!dw_val_equal_p (&a->dw_attr_val,
9271 &other_a->dw_attr_val))
9272 implicit_consts[ix] = false;
9273 }
9274 }
9275 die->die_abbrev = abbrev_id;
9276 }
9277 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9278 if (dwarf_version >= 5 && first_id != ~0U)
9279 optimize_implicit_const (first_id, i, implicit_consts);
9280 }
9281
9282 abbrev_opt_start = 0;
9283 abbrev_opt_base_type_end = 0;
9284 abbrev_usage_count.release ();
9285 sorted_abbrev_dies.release ();
9286 }
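/* The payoff of the reordering above comes from the uleb128 encoding of
   abbreviation codes: codes 1-127 take one byte in .debug_info, 128-16383
   take two, and so on.  Handing the smallest codes to the most frequently
   used abbreviations therefore tends to minimize the total size of the
   encoded DIEs (a heuristic, not an exact optimum).  */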
9287 \f
9288 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9289
9290 static int
9291 constant_size (unsigned HOST_WIDE_INT value)
9292 {
9293 int log;
9294
9295 if (value == 0)
9296 log = 0;
9297 else
9298 log = floor_log2 (value);
9299
9300 log = log / 8;
9301 log = 1 << (floor_log2 (log) + 1);
9302
9303 return log;
9304 }
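/* A few worked examples of the rounding above: constant_size (0) and
   constant_size (0xff) are 1, constant_size (0x100) is 2,
   constant_size (0x10000) is 4, constant_size (0xffffffff) is 4, and
   anything needing more than 32 bits yields 8; the result is always
   one of 1, 2, 4 or 8.  */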
9305
9306 /* Return the size of a DIE as it is represented in the
9307 .debug_info section. */
9308
9309 static unsigned long
9310 size_of_die (dw_die_ref die)
9311 {
9312 unsigned long size = 0;
9313 dw_attr_node *a;
9314 unsigned ix;
9315 enum dwarf_form form;
9316
9317 size += size_of_uleb128 (die->die_abbrev);
9318 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9319 {
9320 switch (AT_class (a))
9321 {
9322 case dw_val_class_addr:
9323 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9324 {
9325 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9326 size += size_of_uleb128 (AT_index (a));
9327 }
9328 else
9329 size += DWARF2_ADDR_SIZE;
9330 break;
9331 case dw_val_class_offset:
9332 size += DWARF_OFFSET_SIZE;
9333 break;
9334 case dw_val_class_loc:
9335 {
9336 unsigned long lsize = size_of_locs (AT_loc (a));
9337
9338 /* Block length. */
9339 if (dwarf_version >= 4)
9340 size += size_of_uleb128 (lsize);
9341 else
9342 size += constant_size (lsize);
9343 size += lsize;
9344 }
9345 break;
9346 case dw_val_class_loc_list:
9347 case dw_val_class_view_list:
9348 if (dwarf_split_debug_info && dwarf_version >= 5)
9349 {
9350 gcc_assert (AT_loc_list (a)->num_assigned);
9351 size += size_of_uleb128 (AT_loc_list (a)->hash);
9352 }
9353 else
9354 size += DWARF_OFFSET_SIZE;
9355 break;
9356 case dw_val_class_range_list:
9357 if (value_format (a) == DW_FORM_rnglistx)
9358 {
9359 gcc_assert (rnglist_idx);
9360 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9361 size += size_of_uleb128 (r->idx);
9362 }
9363 else
9364 size += DWARF_OFFSET_SIZE;
9365 break;
9366 case dw_val_class_const:
9367 size += size_of_sleb128 (AT_int (a));
9368 break;
9369 case dw_val_class_unsigned_const:
9370 {
9371 int csize = constant_size (AT_unsigned (a));
9372 if (dwarf_version == 3
9373 && a->dw_attr == DW_AT_data_member_location
9374 && csize >= 4)
9375 size += size_of_uleb128 (AT_unsigned (a));
9376 else
9377 size += csize;
9378 }
9379 break;
9380 case dw_val_class_symview:
9381 if (symview_upper_bound <= 0xff)
9382 size += 1;
9383 else if (symview_upper_bound <= 0xffff)
9384 size += 2;
9385 else if (symview_upper_bound <= 0xffffffff)
9386 size += 4;
9387 else
9388 size += 8;
9389 break;
9390 case dw_val_class_const_implicit:
9391 case dw_val_class_unsigned_const_implicit:
9392 case dw_val_class_file_implicit:
9393 /* These occupy no size in the DIE, just an extra sleb128 in
9394 .debug_abbrev. */
9395 break;
9396 case dw_val_class_const_double:
9397 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9398 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9399 size++; /* block */
9400 break;
9401 case dw_val_class_wide_int:
9402 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9403 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9404 if (get_full_len (*a->dw_attr_val.v.val_wide)
9405 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9406 size++; /* block */
9407 break;
9408 case dw_val_class_vec:
9409 size += constant_size (a->dw_attr_val.v.val_vec.length
9410 * a->dw_attr_val.v.val_vec.elt_size)
9411 + a->dw_attr_val.v.val_vec.length
9412 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9413 break;
9414 case dw_val_class_flag:
9415 if (dwarf_version >= 4)
9416 /* Currently all add_AT_flag calls pass in 1 as last argument,
9417 so DW_FORM_flag_present can be used. If that ever changes,
9418 we'll need to use DW_FORM_flag and have some optimization
9419 in build_abbrev_table that will change those to
9420 DW_FORM_flag_present if it is set to 1 in all DIEs using
9421 the same abbrev entry. */
9422 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9423 else
9424 size += 1;
9425 break;
9426 case dw_val_class_die_ref:
9427 if (AT_ref_external (a))
9428 {
9429 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9430 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9431 is sized by target address length, whereas in DWARF3
9432 it's always sized as an offset. */
9433 if (use_debug_types)
9434 size += DWARF_TYPE_SIGNATURE_SIZE;
9435 else if (dwarf_version == 2)
9436 size += DWARF2_ADDR_SIZE;
9437 else
9438 size += DWARF_OFFSET_SIZE;
9439 }
9440 else
9441 size += DWARF_OFFSET_SIZE;
9442 break;
9443 case dw_val_class_fde_ref:
9444 size += DWARF_OFFSET_SIZE;
9445 break;
9446 case dw_val_class_lbl_id:
9447 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9448 {
9449 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9450 size += size_of_uleb128 (AT_index (a));
9451 }
9452 else
9453 size += DWARF2_ADDR_SIZE;
9454 break;
9455 case dw_val_class_lineptr:
9456 case dw_val_class_macptr:
9457 case dw_val_class_loclistsptr:
9458 size += DWARF_OFFSET_SIZE;
9459 break;
9460 case dw_val_class_str:
9461 form = AT_string_form (a);
9462 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9463 size += DWARF_OFFSET_SIZE;
9464 else if (form == dwarf_FORM (DW_FORM_strx))
9465 size += size_of_uleb128 (AT_index (a));
9466 else
9467 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9468 break;
9469 case dw_val_class_file:
9470 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9471 break;
9472 case dw_val_class_data8:
9473 size += 8;
9474 break;
9475 case dw_val_class_vms_delta:
9476 size += DWARF_OFFSET_SIZE;
9477 break;
9478 case dw_val_class_high_pc:
9479 size += DWARF2_ADDR_SIZE;
9480 break;
9481 case dw_val_class_discr_value:
9482 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9483 break;
9484 case dw_val_class_discr_list:
9485 {
9486 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9487
9488 /* This is a block, so we have the block length and then its
9489 data. */
9490 size += constant_size (block_size) + block_size;
9491 }
9492 break;
9493 default:
9494 gcc_unreachable ();
9495 }
9496 }
9497
9498 return size;
9499 }
9500
9501 /* Size the debugging information associated with a given DIE. Visits the
9502 DIE's children recursively. Updates the global variable next_die_offset
9503 each time through. Uses the current value of next_die_offset to update the
9504 die_offset field in each DIE. */
9505
9506 static void
9507 calc_die_sizes (dw_die_ref die)
9508 {
9509 dw_die_ref c;
9510
9511 gcc_assert (die->die_offset == 0
9512 || (unsigned long int) die->die_offset == next_die_offset);
9513 die->die_offset = next_die_offset;
9514 next_die_offset += size_of_die (die);
9515
9516 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9517
9518 if (die->die_child != NULL)
9519 /* Count the null byte used to terminate sibling lists. */
9520 next_die_offset += 1;
9521 }
9522
9523 /* Size just the base type children at the start of the CU.
9524 This is needed because build_abbrev_table needs to size location
9525 descriptions, and sizing of type-based stack ops needs to know die_offset
9526 values for the base types. */
9527
9528 static void
9529 calc_base_type_die_sizes (void)
9530 {
9531 unsigned long die_offset = (dwarf_split_debug_info
9532 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9533 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9534 unsigned int i;
9535 dw_die_ref base_type;
9536 #if ENABLE_ASSERT_CHECKING
9537 dw_die_ref prev = comp_unit_die ()->die_child;
9538 #endif
9539
9540 die_offset += size_of_die (comp_unit_die ());
9541 for (i = 0; base_types.iterate (i, &base_type); i++)
9542 {
9543 #if ENABLE_ASSERT_CHECKING
9544 gcc_assert (base_type->die_offset == 0
9545 && prev->die_sib == base_type
9546 && base_type->die_child == NULL
9547 && base_type->die_abbrev);
9548 prev = base_type;
9549 #endif
9550 if (abbrev_opt_start
9551 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9552 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9553 base_type->die_offset = die_offset;
9554 die_offset += size_of_die (base_type);
9555 }
9556 }
9557
9558 /* Set the marks for a die and its children. We do this so
9559 that we know whether or not a reference needs to use FORM_ref_addr; only
9560 DIEs in the same CU will be marked. We used to clear out the offset
9561 and use that as the flag, but ran into ordering problems. */
9562
9563 static void
9564 mark_dies (dw_die_ref die)
9565 {
9566 dw_die_ref c;
9567
9568 gcc_assert (!die->die_mark);
9569
9570 die->die_mark = 1;
9571 FOR_EACH_CHILD (die, c, mark_dies (c));
9572 }
9573
9574 /* Clear the marks for a die and its children. */
9575
9576 static void
9577 unmark_dies (dw_die_ref die)
9578 {
9579 dw_die_ref c;
9580
9581 if (! use_debug_types)
9582 gcc_assert (die->die_mark);
9583
9584 die->die_mark = 0;
9585 FOR_EACH_CHILD (die, c, unmark_dies (c));
9586 }
9587
9588 /* Clear the marks for a die, its children and referred dies. */
9589
9590 static void
9591 unmark_all_dies (dw_die_ref die)
9592 {
9593 dw_die_ref c;
9594 dw_attr_node *a;
9595 unsigned ix;
9596
9597 if (!die->die_mark)
9598 return;
9599 die->die_mark = 0;
9600
9601 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9602
9603 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9604 if (AT_class (a) == dw_val_class_die_ref)
9605 unmark_all_dies (AT_ref (a));
9606 }
9607
9608 /* Calculate if the entry should appear in the final output file. It may be
9609 from a pruned type. */
9610
9611 static bool
9612 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9613 {
9614 /* By limiting gnu pubnames to definitions only, gold can generate a
9615 gdb index without entries for declarations, which don't include
9616 enough information to be useful. */
9617 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9618 return false;
9619
9620 if (table == pubname_table)
9621 {
9622 /* Enumerator names are part of the pubname table, but the
9623 parent DW_TAG_enumeration_type die may have been pruned.
9624 Don't output them if that is the case. */
9625 if (p->die->die_tag == DW_TAG_enumerator &&
9626 (p->die->die_parent == NULL
9627 || !p->die->die_parent->die_perennial_p))
9628 return false;
9629
9630 /* Everything else in the pubname table is included. */
9631 return true;
9632 }
9633
9634 /* The pubtypes table shouldn't include types that have been
9635 pruned. */
9636 return (p->die->die_offset != 0
9637 || !flag_eliminate_unused_debug_types);
9638 }
9639
9640 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9641 generated for the compilation unit. */
9642
9643 static unsigned long
9644 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9645 {
9646 unsigned long size;
9647 unsigned i;
9648 pubname_entry *p;
9649 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9650
9651 size = DWARF_PUBNAMES_HEADER_SIZE;
9652 FOR_EACH_VEC_ELT (*names, i, p)
9653 if (include_pubname_in_output (names, p))
9654 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9655
9656 size += DWARF_OFFSET_SIZE;
9657 return size;
9658 }
9659
9660 /* Return the size of the information in the .debug_aranges section. */
9661
9662 static unsigned long
9663 size_of_aranges (void)
9664 {
9665 unsigned long size;
9666
9667 size = DWARF_ARANGES_HEADER_SIZE;
9668
9669 /* Count the address/length pair for this compilation unit. */
9670 if (text_section_used)
9671 size += 2 * DWARF2_ADDR_SIZE;
9672 if (cold_text_section_used)
9673 size += 2 * DWARF2_ADDR_SIZE;
9674 if (have_multiple_function_sections)
9675 {
9676 unsigned fde_idx;
9677 dw_fde_ref fde;
9678
9679 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9680 {
9681 if (DECL_IGNORED_P (fde->decl))
9682 continue;
9683 if (!fde->in_std_section)
9684 size += 2 * DWARF2_ADDR_SIZE;
9685 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9686 size += 2 * DWARF2_ADDR_SIZE;
9687 }
9688 }
9689
9690 /* Count the two zero words used to terminate the address range table. */
9691 size += 2 * DWARF2_ADDR_SIZE;
9692 return size;
9693 }
9694 \f
9695 /* Select the encoding of an attribute value. */
9696
9697 static enum dwarf_form
9698 value_format (dw_attr_node *a)
9699 {
9700 switch (AT_class (a))
9701 {
9702 case dw_val_class_addr:
9703 /* Only very few attributes allow DW_FORM_addr. */
9704 switch (a->dw_attr)
9705 {
9706 case DW_AT_low_pc:
9707 case DW_AT_high_pc:
9708 case DW_AT_entry_pc:
9709 case DW_AT_trampoline:
9710 return (AT_index (a) == NOT_INDEXED
9711 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9712 default:
9713 break;
9714 }
9715 switch (DWARF2_ADDR_SIZE)
9716 {
9717 case 1:
9718 return DW_FORM_data1;
9719 case 2:
9720 return DW_FORM_data2;
9721 case 4:
9722 return DW_FORM_data4;
9723 case 8:
9724 return DW_FORM_data8;
9725 default:
9726 gcc_unreachable ();
9727 }
9728 case dw_val_class_loc_list:
9729 case dw_val_class_view_list:
9730 if (dwarf_split_debug_info
9731 && dwarf_version >= 5
9732 && AT_loc_list (a)->num_assigned)
9733 return DW_FORM_loclistx;
9734 /* FALLTHRU */
9735 case dw_val_class_range_list:
9736 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9737 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9738 care about sizes of .debug* sections in shared libraries and
9739 executables and don't take into account relocations that affect just
9740 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9741 table in the .debug_rnglists section. */
9742 if (dwarf_split_debug_info
9743 && dwarf_version >= 5
9744 && AT_class (a) == dw_val_class_range_list
9745 && rnglist_idx
9746 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9747 return DW_FORM_rnglistx;
9748 if (dwarf_version >= 4)
9749 return DW_FORM_sec_offset;
9750 /* FALLTHRU */
9751 case dw_val_class_vms_delta:
9752 case dw_val_class_offset:
9753 switch (DWARF_OFFSET_SIZE)
9754 {
9755 case 4:
9756 return DW_FORM_data4;
9757 case 8:
9758 return DW_FORM_data8;
9759 default:
9760 gcc_unreachable ();
9761 }
9762 case dw_val_class_loc:
9763 if (dwarf_version >= 4)
9764 return DW_FORM_exprloc;
9765 switch (constant_size (size_of_locs (AT_loc (a))))
9766 {
9767 case 1:
9768 return DW_FORM_block1;
9769 case 2:
9770 return DW_FORM_block2;
9771 case 4:
9772 return DW_FORM_block4;
9773 default:
9774 gcc_unreachable ();
9775 }
9776 case dw_val_class_const:
9777 return DW_FORM_sdata;
9778 case dw_val_class_unsigned_const:
9779 switch (constant_size (AT_unsigned (a)))
9780 {
9781 case 1:
9782 return DW_FORM_data1;
9783 case 2:
9784 return DW_FORM_data2;
9785 case 4:
9786 /* In DWARF3 DW_AT_data_member_location with
9787 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9788 constant, so we need to use DW_FORM_udata if we need
9789 a large constant. */
9790 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9791 return DW_FORM_udata;
9792 return DW_FORM_data4;
9793 case 8:
9794 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9795 return DW_FORM_udata;
9796 return DW_FORM_data8;
9797 default:
9798 gcc_unreachable ();
9799 }
9800 case dw_val_class_const_implicit:
9801 case dw_val_class_unsigned_const_implicit:
9802 case dw_val_class_file_implicit:
9803 return DW_FORM_implicit_const;
9804 case dw_val_class_const_double:
9805 switch (HOST_BITS_PER_WIDE_INT)
9806 {
9807 case 8:
9808 return DW_FORM_data2;
9809 case 16:
9810 return DW_FORM_data4;
9811 case 32:
9812 return DW_FORM_data8;
9813 case 64:
9814 if (dwarf_version >= 5)
9815 return DW_FORM_data16;
9816 /* FALLTHRU */
9817 default:
9818 return DW_FORM_block1;
9819 }
9820 case dw_val_class_wide_int:
9821 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9822 {
9823 case 8:
9824 return DW_FORM_data1;
9825 case 16:
9826 return DW_FORM_data2;
9827 case 32:
9828 return DW_FORM_data4;
9829 case 64:
9830 return DW_FORM_data8;
9831 case 128:
9832 if (dwarf_version >= 5)
9833 return DW_FORM_data16;
9834 /* FALLTHRU */
9835 default:
9836 return DW_FORM_block1;
9837 }
9838 case dw_val_class_symview:
9839 /* ??? We might use uleb128, but then we'd have to compute
9840 .debug_info offsets in the assembler. */
9841 if (symview_upper_bound <= 0xff)
9842 return DW_FORM_data1;
9843 else if (symview_upper_bound <= 0xffff)
9844 return DW_FORM_data2;
9845 else if (symview_upper_bound <= 0xffffffff)
9846 return DW_FORM_data4;
9847 else
9848 return DW_FORM_data8;
9849 case dw_val_class_vec:
9850 switch (constant_size (a->dw_attr_val.v.val_vec.length
9851 * a->dw_attr_val.v.val_vec.elt_size))
9852 {
9853 case 1:
9854 return DW_FORM_block1;
9855 case 2:
9856 return DW_FORM_block2;
9857 case 4:
9858 return DW_FORM_block4;
9859 default:
9860 gcc_unreachable ();
9861 }
9862 case dw_val_class_flag:
9863 if (dwarf_version >= 4)
9864 {
9865 /* Currently all add_AT_flag calls pass in 1 as last argument,
9866 so DW_FORM_flag_present can be used. If that ever changes,
9867 we'll need to use DW_FORM_flag and have some optimization
9868 in build_abbrev_table that will change those to
9869 DW_FORM_flag_present if it is set to 1 in all DIEs using
9870 the same abbrev entry. */
9871 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9872 return DW_FORM_flag_present;
9873 }
9874 return DW_FORM_flag;
9875 case dw_val_class_die_ref:
9876 if (AT_ref_external (a))
9877 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9878 else
9879 return DW_FORM_ref;
9880 case dw_val_class_fde_ref:
9881 return DW_FORM_data;
9882 case dw_val_class_lbl_id:
9883 return (AT_index (a) == NOT_INDEXED
9884 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9885 case dw_val_class_lineptr:
9886 case dw_val_class_macptr:
9887 case dw_val_class_loclistsptr:
9888 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9889 case dw_val_class_str:
9890 return AT_string_form (a);
9891 case dw_val_class_file:
9892 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9893 {
9894 case 1:
9895 return DW_FORM_data1;
9896 case 2:
9897 return DW_FORM_data2;
9898 case 4:
9899 return DW_FORM_data4;
9900 default:
9901 gcc_unreachable ();
9902 }
9903
9904 case dw_val_class_data8:
9905 return DW_FORM_data8;
9906
9907 case dw_val_class_high_pc:
9908 switch (DWARF2_ADDR_SIZE)
9909 {
9910 case 1:
9911 return DW_FORM_data1;
9912 case 2:
9913 return DW_FORM_data2;
9914 case 4:
9915 return DW_FORM_data4;
9916 case 8:
9917 return DW_FORM_data8;
9918 default:
9919 gcc_unreachable ();
9920 }
9921
9922 case dw_val_class_discr_value:
9923 return (a->dw_attr_val.v.val_discr_value.pos
9924 ? DW_FORM_udata
9925 : DW_FORM_sdata);
9926 case dw_val_class_discr_list:
9927 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9928 {
9929 case 1:
9930 return DW_FORM_block1;
9931 case 2:
9932 return DW_FORM_block2;
9933 case 4:
9934 return DW_FORM_block4;
9935 default:
9936 gcc_unreachable ();
9937 }
9938
9939 default:
9940 gcc_unreachable ();
9941 }
9942 }
9943
9944 /* Output the encoding of an attribute value. */
9945
9946 static void
9947 output_value_format (dw_attr_node *a)
9948 {
9949 enum dwarf_form form = value_format (a);
9950
9951 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9952 }
9953
9954 /* Given a die and id, produce the appropriate abbreviations. */
9955
9956 static void
9957 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9958 {
9959 unsigned ix;
9960 dw_attr_node *a_attr;
9961
9962 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9963 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9964 dwarf_tag_name (abbrev->die_tag));
9965
9966 if (abbrev->die_child != NULL)
9967 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9968 else
9969 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9970
9971 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9972 {
9973 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9974 dwarf_attr_name (a_attr->dw_attr));
9975 output_value_format (a_attr);
9976 if (value_format (a_attr) == DW_FORM_implicit_const)
9977 {
9978 if (AT_class (a_attr) == dw_val_class_file_implicit)
9979 {
9980 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9981 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9982 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9983 }
9984 else
9985 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9986 }
9987 }
9988
9989 dw2_asm_output_data (1, 0, NULL);
9990 dw2_asm_output_data (1, 0, NULL);
9991 }
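/* Roughly, the assembly emitted above for one abbreviation looks like
   (an illustrative sketch, names abbreviated):

     .uleb128 <code>             ; abbrev code
     .uleb128 <DW_TAG_...>       ; TAG
     .byte    DW_children_yes/no
     .uleb128 <DW_AT_...> ; .uleb128 <DW_FORM_...>  ; one pair per attribute,
                                                    ; plus an sleb128 value
                                                    ; for implicit consts
     .byte 0 ; .byte 0           ; end of attribute list  */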
9992
9993
9994 /* Output the .debug_abbrev section which defines the DIE abbreviation
9995 table. */
9996
9997 static void
9998 output_abbrev_section (void)
9999 {
10000 unsigned int abbrev_id;
10001 dw_die_ref abbrev;
10002
10003 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10004 if (abbrev_id != 0)
10005 output_die_abbrevs (abbrev_id, abbrev);
10006
10007 /* Terminate the table. */
10008 dw2_asm_output_data (1, 0, NULL);
10009 }
10010
10011 /* Return a new location list, given the begin and end range, and the
10012 expression. */
10013
10014 static inline dw_loc_list_ref
10015 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10016 const char *end, var_loc_view vend,
10017 const char *section)
10018 {
10019 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10020
10021 retlist->begin = begin;
10022 retlist->begin_entry = NULL;
10023 retlist->end = end;
10024 retlist->expr = expr;
10025 retlist->section = section;
10026 retlist->vbegin = vbegin;
10027 retlist->vend = vend;
10028
10029 return retlist;
10030 }
10031
10032 /* Return true iff there's any nonzero view number in the loc list.
10033
10034 ??? When views are not enabled, we'll often extend a single range
10035 to the entire function, so that we emit a single location
10036 expression rather than a location list. With views, even with a
10037 single range, we'll output a list if start or end have a nonzero
10038 view. If we change this, we may want to stop splitting a single
10039 range in dw_loc_list just because of a nonzero view, even if it
10040 straddles across hot/cold partitions. */
10041
10042 static bool
10043 loc_list_has_views (dw_loc_list_ref list)
10044 {
10045 if (!debug_variable_location_views)
10046 return false;
10047
10048 for (dw_loc_list_ref loc = list;
10049 loc != NULL; loc = loc->dw_loc_next)
10050 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10051 return true;
10052
10053 return false;
10054 }
10055
10056 /* Generate a new internal symbol for this location list node, if it
10057 hasn't got one yet. */
10058
10059 static inline void
10060 gen_llsym (dw_loc_list_ref list)
10061 {
10062 gcc_assert (!list->ll_symbol);
10063 list->ll_symbol = gen_internal_sym ("LLST");
10064
10065 if (!loc_list_has_views (list))
10066 return;
10067
10068 if (dwarf2out_locviews_in_attribute ())
10069 {
10070 /* Use the same label_num for the view list. */
10071 label_num--;
10072 list->vl_symbol = gen_internal_sym ("LVUS");
10073 }
10074 else
10075 list->vl_symbol = list->ll_symbol;
10076 }
10077
10078 /* Generate a symbol for the list, but only if we really want to emit
10079 it as a list. */
10080
10081 static inline void
10082 maybe_gen_llsym (dw_loc_list_ref list)
10083 {
10084 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10085 return;
10086
10087 gen_llsym (list);
10088 }
10089
10090 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10091 NULL, don't consider size of the location expression. If we're not
10092 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10093 representation in *SIZEP. */
10094
10095 static bool
10096 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10097 {
10098 /* Don't output an entry that starts and ends at the same address. */
10099 if (strcmp (curr->begin, curr->end) == 0
10100 && curr->vbegin == curr->vend && !curr->force)
10101 return true;
10102
10103 if (!sizep)
10104 return false;
10105
10106 unsigned long size = size_of_locs (curr->expr);
10107
10108 /* If the expression is too large, drop it on the floor. We could
10109 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10110 in the expression, but >= 64KB expressions for a single value
10111 in a single range are unlikely to be very useful. */
10112 if (dwarf_version < 5 && size > 0xffff)
10113 return true;
10114
10115 *sizep = size;
10116
10117 return false;
10118 }
10119
10120 /* Output a view pair loclist entry for CURR, if it requires one. */
10121
10122 static void
10123 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10124 {
10125 if (!dwarf2out_locviews_in_loclist ())
10126 return;
10127
10128 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10129 return;
10130
10131 #ifdef DW_LLE_view_pair
10132 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10133
10134 if (dwarf2out_as_locview_support)
10135 {
10136 if (ZERO_VIEW_P (curr->vbegin))
10137 dw2_asm_output_data_uleb128 (0, "Location view begin");
10138 else
10139 {
10140 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10141 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10142 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10143 }
10144
10145 if (ZERO_VIEW_P (curr->vend))
10146 dw2_asm_output_data_uleb128 (0, "Location view end");
10147 else
10148 {
10149 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10150 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10151 dw2_asm_output_symname_uleb128 (label, "Location view end");
10152 }
10153 }
10154 else
10155 {
10156 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10157 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10158 }
10159 #endif /* DW_LLE_view_pair */
10160
10161 return;
10162 }
10163
10164 /* Output the location list given to us. */
10165
10166 static void
10167 output_loc_list (dw_loc_list_ref list_head)
10168 {
10169 int vcount = 0, lcount = 0;
10170
10171 if (list_head->emitted)
10172 return;
10173 list_head->emitted = true;
10174
10175 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10176 {
10177 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10178
10179 for (dw_loc_list_ref curr = list_head; curr != NULL;
10180 curr = curr->dw_loc_next)
10181 {
10182 unsigned long size;
10183
10184 if (skip_loc_list_entry (curr, &size))
10185 continue;
10186
10187 vcount++;
10188
10189 /* ?? dwarf_split_debug_info? */
10190 if (dwarf2out_as_locview_support)
10191 {
10192 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10193
10194 if (!ZERO_VIEW_P (curr->vbegin))
10195 {
10196 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10197 dw2_asm_output_symname_uleb128 (label,
10198 "View list begin (%s)",
10199 list_head->vl_symbol);
10200 }
10201 else
10202 dw2_asm_output_data_uleb128 (0,
10203 "View list begin (%s)",
10204 list_head->vl_symbol);
10205
10206 if (!ZERO_VIEW_P (curr->vend))
10207 {
10208 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10209 dw2_asm_output_symname_uleb128 (label,
10210 "View list end (%s)",
10211 list_head->vl_symbol);
10212 }
10213 else
10214 dw2_asm_output_data_uleb128 (0,
10215 "View list end (%s)",
10216 list_head->vl_symbol);
10217 }
10218 else
10219 {
10220 dw2_asm_output_data_uleb128 (curr->vbegin,
10221 "View list begin (%s)",
10222 list_head->vl_symbol);
10223 dw2_asm_output_data_uleb128 (curr->vend,
10224 "View list end (%s)",
10225 list_head->vl_symbol);
10226 }
10227 }
10228 }
10229
10230 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10231
10232 const char *last_section = NULL;
10233 const char *base_label = NULL;
10234
10235 /* Walk the location list, and output each range + expression. */
10236 for (dw_loc_list_ref curr = list_head; curr != NULL;
10237 curr = curr->dw_loc_next)
10238 {
10239 unsigned long size;
10240
10241 /* Skip this entry? If we skip it here, we must skip it in the
10242 view list above as well. */
10243 if (skip_loc_list_entry (curr, &size))
10244 continue;
10245
10246 lcount++;
10247
10248 if (dwarf_version >= 5)
10249 {
10250 if (dwarf_split_debug_info)
10251 {
10252 dwarf2out_maybe_output_loclist_view_pair (curr);
10253 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10254 uleb128 index into .debug_addr and uleb128 length. */
10255 dw2_asm_output_data (1, DW_LLE_startx_length,
10256 "DW_LLE_startx_length (%s)",
10257 list_head->ll_symbol);
10258 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10259 "Location list range start index "
10260 "(%s)", curr->begin);
10261 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10262 For that case we probably need to emit DW_LLE_startx_endx,
10263 but we'd need 2 .debug_addr entries rather than just one. */
10264 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10265 "Location list length (%s)",
10266 list_head->ll_symbol);
10267 }
10268 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10269 {
10270 dwarf2out_maybe_output_loclist_view_pair (curr);
10271 /* If all code is in .text section, the base address is
10272 already provided by the CU attributes. Use
10273 DW_LLE_offset_pair where both addresses are uleb128 encoded
10274 offsets against that base. */
10275 dw2_asm_output_data (1, DW_LLE_offset_pair,
10276 "DW_LLE_offset_pair (%s)",
10277 list_head->ll_symbol);
10278 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10279 "Location list begin address (%s)",
10280 list_head->ll_symbol);
10281 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10282 "Location list end address (%s)",
10283 list_head->ll_symbol);
10284 }
10285 else if (HAVE_AS_LEB128)
10286 {
10287 /* Otherwise, find out how many consecutive entries could share
10288 the same base entry. If just one, emit DW_LLE_start_length,
10289 otherwise emit DW_LLE_base_address for the base address
10290 followed by a series of DW_LLE_offset_pair. */
10291 if (last_section == NULL || curr->section != last_section)
10292 {
10293 dw_loc_list_ref curr2;
10294 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10295 curr2 = curr2->dw_loc_next)
10296 {
10297 if (strcmp (curr2->begin, curr2->end) == 0
10298 && !curr2->force)
10299 continue;
10300 break;
10301 }
10302 if (curr2 == NULL || curr->section != curr2->section)
10303 last_section = NULL;
10304 else
10305 {
10306 last_section = curr->section;
10307 base_label = curr->begin;
10308 dw2_asm_output_data (1, DW_LLE_base_address,
10309 "DW_LLE_base_address (%s)",
10310 list_head->ll_symbol);
10311 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10312 "Base address (%s)",
10313 list_head->ll_symbol);
10314 }
10315 }
10316 /* Only one entry with the same base address. Use
10317 DW_LLE_start_length with absolute address and uleb128
10318 length. */
10319 if (last_section == NULL)
10320 {
10321 dwarf2out_maybe_output_loclist_view_pair (curr);
10322 dw2_asm_output_data (1, DW_LLE_start_length,
10323 "DW_LLE_start_length (%s)",
10324 list_head->ll_symbol);
10325 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10326 "Location list begin address (%s)",
10327 list_head->ll_symbol);
10328 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10329 "Location list length "
10330 "(%s)", list_head->ll_symbol);
10331 }
10332 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10333 DW_LLE_base_address. */
10334 else
10335 {
10336 dwarf2out_maybe_output_loclist_view_pair (curr);
10337 dw2_asm_output_data (1, DW_LLE_offset_pair,
10338 "DW_LLE_offset_pair (%s)",
10339 list_head->ll_symbol);
10340 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10341 "Location list begin address "
10342 "(%s)", list_head->ll_symbol);
10343 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10344 "Location list end address "
10345 "(%s)", list_head->ll_symbol);
10346 }
10347 }
10348 /* The assembler does not support the .uleb128 directive. Emit
10349 DW_LLE_start_end with a pair of absolute addresses. */
10350 else
10351 {
10352 dwarf2out_maybe_output_loclist_view_pair (curr);
10353 dw2_asm_output_data (1, DW_LLE_start_end,
10354 "DW_LLE_start_end (%s)",
10355 list_head->ll_symbol);
10356 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10357 "Location list begin address (%s)",
10358 list_head->ll_symbol);
10359 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10360 "Location list end address (%s)",
10361 list_head->ll_symbol);
10362 }
10363 }
10364 else if (dwarf_split_debug_info)
10365 {
10366 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10367 and 4 byte length. */
10368 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10369 "Location list start/length entry (%s)",
10370 list_head->ll_symbol);
10371 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10372 "Location list range start index (%s)",
10373 curr->begin);
10374 /* The length field is 4 bytes. If we ever need to support
10375 an 8-byte length, we can add a new DW_LLE code or fall back
10376 to DW_LLE_GNU_start_end_entry. */
10377 dw2_asm_output_delta (4, curr->end, curr->begin,
10378 "Location list range length (%s)",
10379 list_head->ll_symbol);
10380 }
10381 else if (!have_multiple_function_sections)
10382 {
10383 /* Pair of relative addresses against start of text section. */
10384 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10385 "Location list begin address (%s)",
10386 list_head->ll_symbol);
10387 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10388 "Location list end address (%s)",
10389 list_head->ll_symbol);
10390 }
10391 else
10392 {
10393 /* Pair of absolute addresses. */
10394 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10395 "Location list begin address (%s)",
10396 list_head->ll_symbol);
10397 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10398 "Location list end address (%s)",
10399 list_head->ll_symbol);
10400 }
10401
10402 /* Output the block length for this list of location operations. */
10403 if (dwarf_version >= 5)
10404 dw2_asm_output_data_uleb128 (size, "Location expression size");
10405 else
10406 {
10407 gcc_assert (size <= 0xffff);
10408 dw2_asm_output_data (2, size, "Location expression size");
10409 }
10410
10411 output_loc_sequence (curr->expr, -1);
10412 }
10413
10414 /* And finally list termination. */
10415 if (dwarf_version >= 5)
10416 dw2_asm_output_data (1, DW_LLE_end_of_list,
10417 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10418 else if (dwarf_split_debug_info)
10419 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10420 "Location list terminator (%s)",
10421 list_head->ll_symbol);
10422 else
10423 {
10424 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10425 "Location list terminator begin (%s)",
10426 list_head->ll_symbol);
10427 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10428 "Location list terminator end (%s)",
10429 list_head->ll_symbol);
10430 }
10431
10432 gcc_assert (!list_head->vl_symbol
10433 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10434 }
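/* A sketch of what the DWARF 5 branch above emits for a two-range list in a
   single-text-section function when the assembler supports .uleb128 (labels
   below are assumed, not actual output):

     .LLST0: .byte DW_LLE_offset_pair
             .uleb128 begin1-.Ltext0 ; .uleb128 end1-.Ltext0
             .uleb128 <expr size> ; <location expression bytes>
             .byte DW_LLE_offset_pair
             .uleb128 begin2-.Ltext0 ; .uleb128 end2-.Ltext0
             .uleb128 <expr size> ; <location expression bytes>
             .byte DW_LLE_end_of_list  */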
10435
10436 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10437 section. Emit a relocated reference if val_entry is NULL, otherwise,
10438 emit an indirect reference. */
10439
10440 static void
10441 output_range_list_offset (dw_attr_node *a)
10442 {
10443 const char *name = dwarf_attr_name (a->dw_attr);
10444
10445 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10446 {
10447 if (dwarf_version >= 5)
10448 {
10449 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10450 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10451 debug_ranges_section, "%s", name);
10452 }
10453 else
10454 {
10455 char *p = strchr (ranges_section_label, '\0');
10456 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10457 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10458 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10459 debug_ranges_section, "%s", name);
10460 *p = '\0';
10461 }
10462 }
10463 else if (dwarf_version >= 5)
10464 {
10465 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10466 gcc_assert (rnglist_idx);
10467 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10468 }
10469 else
10470 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10471 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10472 "%s (offset from %s)", name, ranges_section_label);
10473 }
10474
10475 /* Output the offset into the debug_loc section. */
10476
10477 static void
10478 output_loc_list_offset (dw_attr_node *a)
10479 {
10480 char *sym = AT_loc_list (a)->ll_symbol;
10481
10482 gcc_assert (sym);
10483 if (!dwarf_split_debug_info)
10484 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10485 "%s", dwarf_attr_name (a->dw_attr));
10486 else if (dwarf_version >= 5)
10487 {
10488 gcc_assert (AT_loc_list (a)->num_assigned);
10489 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10490 dwarf_attr_name (a->dw_attr),
10491 sym);
10492 }
10493 else
10494 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10495 "%s", dwarf_attr_name (a->dw_attr));
10496 }
10497
10498 /* Output the view list offset into the debug_loc section. */
10499
10500 static void
10501 output_view_list_offset (dw_attr_node *a)
10502 {
10503 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10504
10505 gcc_assert (sym);
10506 if (dwarf_split_debug_info)
10507 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10508 "%s", dwarf_attr_name (a->dw_attr));
10509 else
10510 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10511 "%s", dwarf_attr_name (a->dw_attr));
10512 }
10513
10514 /* Output an attribute's index or value appropriately. */
10515
10516 static void
10517 output_attr_index_or_value (dw_attr_node *a)
10518 {
10519 const char *name = dwarf_attr_name (a->dw_attr);
10520
10521 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10522 {
10523 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10524 return;
10525 }
10526 switch (AT_class (a))
10527 {
10528 case dw_val_class_addr:
10529 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10530 break;
10531 case dw_val_class_high_pc:
10532 case dw_val_class_lbl_id:
10533 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10534 break;
10535 default:
10536 gcc_unreachable ();
10537 }
10538 }
10539
10540 /* Output a type signature. */
10541
10542 static inline void
10543 output_signature (const char *sig, const char *name)
10544 {
10545 int i;
10546
10547 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10548 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10549 }
10550
10551 /* Output a discriminant value. */
10552
10553 static inline void
10554 output_discr_value (dw_discr_value *discr_value, const char *name)
10555 {
10556 if (discr_value->pos)
10557 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10558 else
10559 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10560 }
10561
10562 /* Output the DIE and its attributes. Called recursively to generate
10563 the definitions of each child DIE. */
10564
10565 static void
10566 output_die (dw_die_ref die)
10567 {
10568 dw_attr_node *a;
10569 dw_die_ref c;
10570 unsigned long size;
10571 unsigned ix;
10572
10573 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10574 (unsigned long)die->die_offset,
10575 dwarf_tag_name (die->die_tag));
10576
10577 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10578 {
10579 const char *name = dwarf_attr_name (a->dw_attr);
10580
10581 switch (AT_class (a))
10582 {
10583 case dw_val_class_addr:
10584 output_attr_index_or_value (a);
10585 break;
10586
10587 case dw_val_class_offset:
10588 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10589 "%s", name);
10590 break;
10591
10592 case dw_val_class_range_list:
10593 output_range_list_offset (a);
10594 break;
10595
10596 case dw_val_class_loc:
10597 size = size_of_locs (AT_loc (a));
10598
10599 /* Output the block length for this list of location operations. */
10600 if (dwarf_version >= 4)
10601 dw2_asm_output_data_uleb128 (size, "%s", name);
10602 else
10603 dw2_asm_output_data (constant_size (size), size, "%s", name);
10604
10605 output_loc_sequence (AT_loc (a), -1);
10606 break;
10607
10608 case dw_val_class_const:
10609 /* ??? It would be slightly more efficient to use a scheme like the one
10610 used for unsigned constants below, but gdb 4.x does not sign
10611 extend. Gdb 5.x does sign extend. */
10612 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10613 break;
10614
10615 case dw_val_class_unsigned_const:
10616 {
10617 int csize = constant_size (AT_unsigned (a));
10618 if (dwarf_version == 3
10619 && a->dw_attr == DW_AT_data_member_location
10620 && csize >= 4)
10621 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10622 else
10623 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10624 }
10625 break;
10626
10627 case dw_val_class_symview:
10628 {
10629 int vsize;
10630 if (symview_upper_bound <= 0xff)
10631 vsize = 1;
10632 else if (symview_upper_bound <= 0xffff)
10633 vsize = 2;
10634 else if (symview_upper_bound <= 0xffffffff)
10635 vsize = 4;
10636 else
10637 vsize = 8;
10638 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10639 "%s", name);
10640 }
10641 break;
10642
10643 case dw_val_class_const_implicit:
10644 if (flag_debug_asm)
10645 fprintf (asm_out_file, "\t\t\t%s %s ("
10646 HOST_WIDE_INT_PRINT_DEC ")\n",
10647 ASM_COMMENT_START, name, AT_int (a));
10648 break;
10649
10650 case dw_val_class_unsigned_const_implicit:
10651 if (flag_debug_asm)
10652 fprintf (asm_out_file, "\t\t\t%s %s ("
10653 HOST_WIDE_INT_PRINT_HEX ")\n",
10654 ASM_COMMENT_START, name, AT_unsigned (a));
10655 break;
10656
10657 case dw_val_class_const_double:
10658 {
10659 unsigned HOST_WIDE_INT first, second;
10660
10661 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10662 dw2_asm_output_data (1,
10663 HOST_BITS_PER_DOUBLE_INT
10664 / HOST_BITS_PER_CHAR,
10665 NULL);
10666
10667 if (WORDS_BIG_ENDIAN)
10668 {
10669 first = a->dw_attr_val.v.val_double.high;
10670 second = a->dw_attr_val.v.val_double.low;
10671 }
10672 else
10673 {
10674 first = a->dw_attr_val.v.val_double.low;
10675 second = a->dw_attr_val.v.val_double.high;
10676 }
10677
10678 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10679 first, "%s", name);
10680 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10681 second, NULL);
10682 }
10683 break;
10684
10685 case dw_val_class_wide_int:
10686 {
10687 int i;
10688 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10689 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10690 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10691 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10692 * l, NULL);
10693
10694 if (WORDS_BIG_ENDIAN)
10695 for (i = len - 1; i >= 0; --i)
10696 {
10697 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10698 "%s", name);
10699 name = "";
10700 }
10701 else
10702 for (i = 0; i < len; ++i)
10703 {
10704 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10705 "%s", name);
10706 name = "";
10707 }
10708 }
10709 break;
10710
10711 case dw_val_class_vec:
10712 {
10713 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10714 unsigned int len = a->dw_attr_val.v.val_vec.length;
10715 unsigned int i;
10716 unsigned char *p;
10717
10718 dw2_asm_output_data (constant_size (len * elt_size),
10719 len * elt_size, "%s", name);
10720 if (elt_size > sizeof (HOST_WIDE_INT))
10721 {
10722 elt_size /= 2;
10723 len *= 2;
10724 }
10725 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10726 i < len;
10727 i++, p += elt_size)
10728 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10729 "fp or vector constant word %u", i);
10730 break;
10731 }
10732
10733 case dw_val_class_flag:
10734 if (dwarf_version >= 4)
10735 {
10736 /* Currently all add_AT_flag calls pass in 1 as last argument,
10737 so DW_FORM_flag_present can be used. If that ever changes,
10738 we'll need to use DW_FORM_flag and have some optimization
10739 in build_abbrev_table that will change those to
10740 DW_FORM_flag_present if it is set to 1 in all DIEs using
10741 the same abbrev entry. */
10742 gcc_assert (AT_flag (a) == 1);
10743 if (flag_debug_asm)
10744 fprintf (asm_out_file, "\t\t\t%s %s\n",
10745 ASM_COMMENT_START, name);
10746 break;
10747 }
10748 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10749 break;
10750
10751 case dw_val_class_loc_list:
10752 output_loc_list_offset (a);
10753 break;
10754
10755 case dw_val_class_view_list:
10756 output_view_list_offset (a);
10757 break;
10758
10759 case dw_val_class_die_ref:
10760 if (AT_ref_external (a))
10761 {
10762 if (AT_ref (a)->comdat_type_p)
10763 {
10764 comdat_type_node *type_node
10765 = AT_ref (a)->die_id.die_type_node;
10766
10767 gcc_assert (type_node);
10768 output_signature (type_node->signature, name);
10769 }
10770 else
10771 {
10772 const char *sym = AT_ref (a)->die_id.die_symbol;
10773 int size;
10774
10775 gcc_assert (sym);
10776 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10777 length, whereas in DWARF3 it's always sized as an
10778 offset. */
10779 if (dwarf_version == 2)
10780 size = DWARF2_ADDR_SIZE;
10781 else
10782 size = DWARF_OFFSET_SIZE;
10783 	    /* ??? We cannot unconditionally output die_offset if
10784 	       non-zero - others might create references to those
10785 	       DIEs via symbols.
10786 	       And we do not clear its DIE offset after outputting it
10787 	       (and the label refers to the actual DIE, not to the
10788 	       DWARF CU header, which is what the label would have to
10789 	       point at for label + offset to be correct).
10790 	       ??? This is the reason for the with_offset flag.  */
10791 if (AT_ref (a)->with_offset)
10792 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10793 debug_info_section, "%s", name);
10794 else
10795 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10796 name);
10797 }
10798 }
10799 else
10800 {
10801 gcc_assert (AT_ref (a)->die_offset);
10802 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10803 "%s", name);
10804 }
10805 break;
10806
10807 case dw_val_class_fde_ref:
10808 {
10809 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10810
10811 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10812 a->dw_attr_val.v.val_fde_index * 2);
10813 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10814 "%s", name);
10815 }
10816 break;
10817
10818 case dw_val_class_vms_delta:
10819 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10820 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10821 AT_vms_delta2 (a), AT_vms_delta1 (a),
10822 "%s", name);
10823 #else
10824 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10825 AT_vms_delta2 (a), AT_vms_delta1 (a),
10826 "%s", name);
10827 #endif
10828 break;
10829
10830 case dw_val_class_lbl_id:
10831 output_attr_index_or_value (a);
10832 break;
10833
10834 case dw_val_class_lineptr:
10835 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10836 debug_line_section, "%s", name);
10837 break;
10838
10839 case dw_val_class_macptr:
10840 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10841 debug_macinfo_section, "%s", name);
10842 break;
10843
10844 case dw_val_class_loclistsptr:
10845 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10846 debug_loc_section, "%s", name);
10847 break;
10848
10849 case dw_val_class_str:
10850 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10851 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10852 a->dw_attr_val.v.val_str->label,
10853 debug_str_section,
10854 "%s: \"%s\"", name, AT_string (a));
10855 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10856 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10857 a->dw_attr_val.v.val_str->label,
10858 debug_line_str_section,
10859 "%s: \"%s\"", name, AT_string (a));
10860 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10861 dw2_asm_output_data_uleb128 (AT_index (a),
10862 "%s: \"%s\"", name, AT_string (a));
10863 else
10864 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10865 break;
10866
10867 case dw_val_class_file:
10868 {
10869 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10870
10871 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10872 a->dw_attr_val.v.val_file->filename);
10873 break;
10874 }
10875
10876 case dw_val_class_file_implicit:
10877 if (flag_debug_asm)
10878 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10879 ASM_COMMENT_START, name,
10880 maybe_emit_file (a->dw_attr_val.v.val_file),
10881 a->dw_attr_val.v.val_file->filename);
10882 break;
10883
10884 case dw_val_class_data8:
10885 {
10886 int i;
10887
10888 for (i = 0; i < 8; i++)
10889 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10890 i == 0 ? "%s" : NULL, name);
10891 break;
10892 }
10893
10894 case dw_val_class_high_pc:
10895 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10896 get_AT_low_pc (die), "DW_AT_high_pc");
10897 break;
10898
10899 case dw_val_class_discr_value:
10900 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10901 break;
10902
10903 case dw_val_class_discr_list:
10904 {
10905 dw_discr_list_ref list = AT_discr_list (a);
10906 const int size = size_of_discr_list (list);
10907
10908 /* This is a block, so output its length first. */
10909 dw2_asm_output_data (constant_size (size), size,
10910 "%s: block size", name);
10911
10912 for (; list != NULL; list = list->dw_discr_next)
10913 {
10914 /* One byte for the discriminant value descriptor, and then as
10915 many LEB128 numbers as required. */
10916 if (list->dw_discr_range)
10917 dw2_asm_output_data (1, DW_DSC_range,
10918 "%s: DW_DSC_range", name);
10919 else
10920 dw2_asm_output_data (1, DW_DSC_label,
10921 "%s: DW_DSC_label", name);
10922
10923 output_discr_value (&list->dw_discr_lower_bound, name);
10924 if (list->dw_discr_range)
10925 output_discr_value (&list->dw_discr_upper_bound, name);
10926 }
10927 break;
10928 }
10929
10930 default:
10931 gcc_unreachable ();
10932 }
10933 }
10934
10935 FOR_EACH_CHILD (die, c, output_die (c));
10936
10937 /* Add null byte to terminate sibling list. */
10938 if (die->die_child != NULL)
10939 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10940 (unsigned long) die->die_offset);
10941 }
10942
10943 /* Output the dwarf version number. */
10944
10945 static void
10946 output_dwarf_version ()
10947 {
10948 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10949 views in loclist. That will change eventually. */
10950 if (dwarf_version == 6)
10951 {
10952 static bool once;
10953 if (!once)
10954 {
10955 warning (0,
10956 "-gdwarf-6 is output as version 5 with incompatibilities");
10957 once = true;
10958 }
10959 dw2_asm_output_data (2, 5, "DWARF version number");
10960 }
10961 else
10962 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10963 }
10964
10965 /* Output the compilation unit that appears at the beginning of the
10966 .debug_info section, and precedes the DIE descriptions. */
10967
10968 static void
10969 output_compilation_unit_header (enum dwarf_unit_type ut)
10970 {
10971 if (!XCOFF_DEBUGGING_INFO)
10972 {
10973 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10974 dw2_asm_output_data (4, 0xffffffff,
10975 "Initial length escape value indicating 64-bit DWARF extension");
10976 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10977 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10978 "Length of Compilation Unit Info");
10979 }
10980
10981 output_dwarf_version ();
10982 if (dwarf_version >= 5)
10983 {
10984 const char *name;
10985 switch (ut)
10986 {
10987 case DW_UT_compile: name = "DW_UT_compile"; break;
10988 case DW_UT_type: name = "DW_UT_type"; break;
10989 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10990 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10991 default: gcc_unreachable ();
10992 }
10993 dw2_asm_output_data (1, ut, "%s", name);
10994 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10995 }
10996 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10997 debug_abbrev_section,
10998 "Offset Into Abbrev. Section");
10999 if (dwarf_version < 5)
11000 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11001 }
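
/* Editor's illustrative sketch -- not part of GCC.  A compact picture of the
   unit header bytes emitted by output_compilation_unit_header above for
   32-bit DWARF on non-XCOFF targets (DWARF_OFFSET_SIZE == 4; the 0xffffffff
   escape word only appears for 64-bit DWARF):

     DWARF 5:    [length:4][version:2][unit_type:1][addr_size:1][abbrev_off:4]
     DWARF 2-4:  [length:4][version:2][abbrev_off:4][addr_size:1]

   The helper below merely restates those sizes for documentation purposes.  */

static inline int
sketch_cu_header_size_32bit (int version)
{
  /* length (4) + version (2) + abbrev offset (4) + address size (1),
     plus one extra byte for the DW_UT_* unit type in DWARF 5.  */
  return 4 + 2 + 4 + 1 + (version >= 5 ? 1 : 0);
}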
11002
11003 /* Output the compilation unit DIE and its children. */
11004
11005 static void
11006 output_comp_unit (dw_die_ref die, int output_if_empty,
11007 const unsigned char *dwo_id)
11008 {
11009 const char *secname, *oldsym;
11010 char *tmp;
11011
11012   /* Unless we are outputting the main CU, we may throw away empty ones.  */
11013 if (!output_if_empty && die->die_child == NULL)
11014 return;
11015
11016 /* Even if there are no children of this DIE, we must output the information
11017 about the compilation unit. Otherwise, on an empty translation unit, we
11018 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11019 will then complain when examining the file. First mark all the DIEs in
11020 this CU so we know which get local refs. */
11021 mark_dies (die);
11022
11023 external_ref_hash_type *extern_map = optimize_external_refs (die);
11024
11025   /* For now, optimize only the main CU; in order to optimize the rest
11026      we'd need to see all of them earlier.  Leave the rest for post-linking
11027      tools like DWZ.  */
11028 if (die == comp_unit_die ())
11029 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11030
11031 build_abbrev_table (die, extern_map);
11032
11033 optimize_abbrev_table ();
11034
11035 delete extern_map;
11036
11037 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11038 next_die_offset = (dwo_id
11039 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11040 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11041 calc_die_sizes (die);
11042
11043 oldsym = die->die_id.die_symbol;
11044 if (oldsym && die->comdat_type_p)
11045 {
11046 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11047
11048 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11049 secname = tmp;
11050 die->die_id.die_symbol = NULL;
11051 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11052 }
11053 else
11054 {
11055 switch_to_section (debug_info_section);
11056 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11057 info_section_emitted = true;
11058 }
11059
11060 /* For LTO cross unit DIE refs we want a symbol on the start of the
11061 debuginfo section, not on the CU DIE. */
11062 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11063 {
11064 /* ??? No way to get visibility assembled without a decl. */
11065 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11066 get_identifier (oldsym), char_type_node);
11067 TREE_PUBLIC (decl) = true;
11068 TREE_STATIC (decl) = true;
11069 DECL_ARTIFICIAL (decl) = true;
11070 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11071 DECL_VISIBILITY_SPECIFIED (decl) = true;
11072 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11073 #ifdef ASM_WEAKEN_LABEL
11074 /* We prefer a .weak because that handles duplicates from duplicate
11075 archive members in a graceful way. */
11076 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11077 #else
11078 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11079 #endif
11080 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11081 }
11082
11083 /* Output debugging information. */
11084 output_compilation_unit_header (dwo_id
11085 ? DW_UT_split_compile : DW_UT_compile);
11086 if (dwarf_version >= 5)
11087 {
11088 if (dwo_id != NULL)
11089 for (int i = 0; i < 8; i++)
11090 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11091 }
11092 output_die (die);
11093
11094 /* Leave the marks on the main CU, so we can check them in
11095 output_pubnames. */
11096 if (oldsym)
11097 {
11098 unmark_dies (die);
11099 die->die_id.die_symbol = oldsym;
11100 }
11101 }
11102
11103 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11104 and .debug_pubtypes. This is configured per-target, but can be
11105 overridden by the -gpubnames or -gno-pubnames options. */
11106
11107 static inline bool
11108 want_pubnames (void)
11109 {
11110 if (debug_info_level <= DINFO_LEVEL_TERSE)
11111 return false;
11112 if (debug_generate_pub_sections != -1)
11113 return debug_generate_pub_sections;
11114 return targetm.want_debug_pub_sections;
11115 }
11116
11117 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11118
11119 static void
11120 add_AT_pubnames (dw_die_ref die)
11121 {
11122 if (want_pubnames ())
11123 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11124 }
11125
11126 /* Add a string attribute value to a skeleton DIE. */
11127
11128 static inline void
11129 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11130 const char *str)
11131 {
11132 dw_attr_node attr;
11133 struct indirect_string_node *node;
11134
11135 if (! skeleton_debug_str_hash)
11136 skeleton_debug_str_hash
11137 = hash_table<indirect_string_hasher>::create_ggc (10);
11138
11139 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11140 find_string_form (node);
11141 if (node->form == dwarf_FORM (DW_FORM_strx))
11142 node->form = DW_FORM_strp;
11143
11144 attr.dw_attr = attr_kind;
11145 attr.dw_attr_val.val_class = dw_val_class_str;
11146 attr.dw_attr_val.val_entry = NULL;
11147 attr.dw_attr_val.v.val_str = node;
11148 add_dwarf_attr (die, &attr);
11149 }
11150
11151 /* Helper function to generate top-level dies for skeleton debug_info and
11152 debug_types. */
11153
11154 static void
11155 add_top_level_skeleton_die_attrs (dw_die_ref die)
11156 {
11157 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11158 const char *comp_dir = comp_dir_string ();
11159
11160 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11161 if (comp_dir != NULL)
11162 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11163 add_AT_pubnames (die);
11164 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11165 }
11166
11167 /* Output skeleton debug sections that point to the dwo file. */
11168
11169 static void
11170 output_skeleton_debug_sections (dw_die_ref comp_unit,
11171 const unsigned char *dwo_id)
11172 {
11173 /* These attributes will be found in the full debug_info section. */
11174 remove_AT (comp_unit, DW_AT_producer);
11175 remove_AT (comp_unit, DW_AT_language);
11176
11177 switch_to_section (debug_skeleton_info_section);
11178 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11179
11180   /* Produce the skeleton compilation-unit header.  This one differs enough
11181      from a normal CU header that it's better not to call
11182      output_compilation_unit_header.  */
11183 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11184 dw2_asm_output_data (4, 0xffffffff,
11185 "Initial length escape value indicating 64-bit "
11186 "DWARF extension");
11187
11188 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11189 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11190 - DWARF_INITIAL_LENGTH_SIZE
11191 + size_of_die (comp_unit),
11192 "Length of Compilation Unit Info");
11193 output_dwarf_version ();
11194 if (dwarf_version >= 5)
11195 {
11196 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11197 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11198 }
11199 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11200 debug_skeleton_abbrev_section,
11201 "Offset Into Abbrev. Section");
11202 if (dwarf_version < 5)
11203 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11204 else
11205 for (int i = 0; i < 8; i++)
11206 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11207
11208 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11209 output_die (comp_unit);
11210
11211 /* Build the skeleton debug_abbrev section. */
11212 switch_to_section (debug_skeleton_abbrev_section);
11213 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11214
11215 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11216
11217 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11218 }
11219
11220 /* Output a comdat type unit DIE and its children. */
11221
11222 static void
11223 output_comdat_type_unit (comdat_type_node *node)
11224 {
11225 const char *secname;
11226 char *tmp;
11227 int i;
11228 #if defined (OBJECT_FORMAT_ELF)
11229 tree comdat_key;
11230 #endif
11231
11232 /* First mark all the DIEs in this CU so we know which get local refs. */
11233 mark_dies (node->root_die);
11234
11235 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11236
11237 build_abbrev_table (node->root_die, extern_map);
11238
11239 delete extern_map;
11240 extern_map = NULL;
11241
11242 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11243 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11244 calc_die_sizes (node->root_die);
11245
11246 #if defined (OBJECT_FORMAT_ELF)
11247 if (dwarf_version >= 5)
11248 {
11249 if (!dwarf_split_debug_info)
11250 secname = ".debug_info";
11251 else
11252 secname = ".debug_info.dwo";
11253 }
11254 else if (!dwarf_split_debug_info)
11255 secname = ".debug_types";
11256 else
11257 secname = ".debug_types.dwo";
11258
11259 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11260 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11261 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11262 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11263 comdat_key = get_identifier (tmp);
11264 targetm.asm_out.named_section (secname,
11265 SECTION_DEBUG | SECTION_LINKONCE,
11266 comdat_key);
11267 #else
11268 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11269 sprintf (tmp, (dwarf_version >= 5
11270 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11271 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11272 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11273 secname = tmp;
11274 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11275 #endif
11276
11277 /* Output debugging information. */
11278 output_compilation_unit_header (dwarf_split_debug_info
11279 ? DW_UT_split_type : DW_UT_type);
11280 output_signature (node->signature, "Type Signature");
11281 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11282 "Offset to Type DIE");
11283 output_die (node->root_die);
11284
11285 unmark_dies (node->root_die);
11286 }
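
/* Editor's note (illustrative, not part of GCC): the comdat key / section
   suffix built above is "wi." (DWARF 5, where type units live in
   .debug_info) or "wt." (.debug_types) followed by the 8-byte type
   signature in hex.  For example, the signature
   { 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0 } yields the ELF comdat
   key "wi.123456789abcdef0", or the section name
   ".gnu.linkonce.wi.123456789abcdef0" on non-ELF targets.  */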
11287
11288 /* Return the DWARF2/3 pubname associated with a decl. */
11289
11290 static const char *
11291 dwarf2_name (tree decl, int scope)
11292 {
11293 if (DECL_NAMELESS (decl))
11294 return NULL;
11295 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11296 }
11297
11298 /* Add a new entry to .debug_pubnames if appropriate. */
11299
11300 static void
11301 add_pubname_string (const char *str, dw_die_ref die)
11302 {
11303 pubname_entry e;
11304
11305 e.die = die;
11306 e.name = xstrdup (str);
11307 vec_safe_push (pubname_table, e);
11308 }
11309
11310 static void
11311 add_pubname (tree decl, dw_die_ref die)
11312 {
11313 if (!want_pubnames ())
11314 return;
11315
11316 /* Don't add items to the table when we expect that the consumer will have
11317 just read the enclosing die. For example, if the consumer is looking at a
11318 class_member, it will either be inside the class already, or will have just
11319 looked up the class to find the member. Either way, searching the class is
11320 faster than searching the index. */
11321 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11322 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11323 {
11324 const char *name = dwarf2_name (decl, 1);
11325
11326 if (name)
11327 add_pubname_string (name, die);
11328 }
11329 }
11330
11331 /* Add an enumerator to the pubnames section. */
11332
11333 static void
11334 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11335 {
11336 pubname_entry e;
11337
11338 gcc_assert (scope_name);
11339 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11340 e.die = die;
11341 vec_safe_push (pubname_table, e);
11342 }
11343
11344 /* Add a new entry to .debug_pubtypes if appropriate. */
11345
11346 static void
11347 add_pubtype (tree decl, dw_die_ref die)
11348 {
11349 pubname_entry e;
11350
11351 if (!want_pubnames ())
11352 return;
11353
11354 if ((TREE_PUBLIC (decl)
11355 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11356 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11357 {
11358 tree scope = NULL;
11359 const char *scope_name = "";
11360 const char *sep = is_cxx () ? "::" : ".";
11361 const char *name;
11362
11363 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11364 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11365 {
11366 scope_name = lang_hooks.dwarf_name (scope, 1);
11367 if (scope_name != NULL && scope_name[0] != '\0')
11368 scope_name = concat (scope_name, sep, NULL);
11369 else
11370 scope_name = "";
11371 }
11372
11373 if (TYPE_P (decl))
11374 name = type_tag (decl);
11375 else
11376 name = lang_hooks.dwarf_name (decl, 1);
11377
11378 /* If we don't have a name for the type, there's no point in adding
11379 it to the table. */
11380 if (name != NULL && name[0] != '\0')
11381 {
11382 e.die = die;
11383 e.name = concat (scope_name, name, NULL);
11384 vec_safe_push (pubtype_table, e);
11385 }
11386
11387 /* Although it might be more consistent to add the pubinfo for the
11388 enumerators as their dies are created, they should only be added if the
11389 enum type meets the criteria above. So rather than re-check the parent
11390 enum type whenever an enumerator die is created, just output them all
11391 here. This isn't protected by the name conditional because anonymous
11392 enums don't have names. */
11393 if (die->die_tag == DW_TAG_enumeration_type)
11394 {
11395 dw_die_ref c;
11396
11397 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11398 }
11399 }
11400 }
11401
11402 /* Output a single entry in the pubnames table. */
11403
11404 static void
11405 output_pubname (dw_offset die_offset, pubname_entry *entry)
11406 {
11407 dw_die_ref die = entry->die;
11408 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11409
11410 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11411
11412 if (debug_generate_pub_sections == 2)
11413 {
11414 /* This logic follows gdb's method for determining the value of the flag
11415 byte. */
11416 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11417 switch (die->die_tag)
11418 {
11419 case DW_TAG_typedef:
11420 case DW_TAG_base_type:
11421 case DW_TAG_subrange_type:
11422 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11423 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11424 break;
11425 case DW_TAG_enumerator:
11426 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11427 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11428 if (!is_cxx ())
11429 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11430 break;
11431 case DW_TAG_subprogram:
11432 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11433 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11434 if (!is_ada ())
11435 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11436 break;
11437 case DW_TAG_constant:
11438 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11439 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11440 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11441 break;
11442 case DW_TAG_variable:
11443 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11444 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11445 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11446 break;
11447 case DW_TAG_namespace:
11448 case DW_TAG_imported_declaration:
11449 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11450 break;
11451 case DW_TAG_class_type:
11452 case DW_TAG_interface_type:
11453 case DW_TAG_structure_type:
11454 case DW_TAG_union_type:
11455 case DW_TAG_enumeration_type:
11456 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11457 if (!is_cxx ())
11458 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11459 break;
11460 default:
11461 /* An unusual tag. Leave the flag-byte empty. */
11462 break;
11463 }
11464 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11465 "GDB-index flags");
11466 }
11467
11468 dw2_asm_output_nstring (entry->name, -1, "external name");
11469 }
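
/* Editor's note (illustrative, not part of GCC): the GDB_INDEX_SYMBOL_*
   macros above pack the symbol kind and the static/global bit into the
   bits above the low GDB_INDEX_CU_BITSIZE bits that the gdb-index format
   reserves for a CU number, which is why the single flag byte written
   here is simply flags >> GDB_INDEX_CU_BITSIZE.  */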
11470
11471
11472 /* Output the public names table used to speed up access to externally
11473 visible names; or the public types table used to find type definitions. */
11474
11475 static void
11476 output_pubnames (vec<pubname_entry, va_gc> *names)
11477 {
11478 unsigned i;
11479 unsigned long pubnames_length = size_of_pubnames (names);
11480 pubname_entry *pub;
11481
11482 if (!XCOFF_DEBUGGING_INFO)
11483 {
11484 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11485 dw2_asm_output_data (4, 0xffffffff,
11486 "Initial length escape value indicating 64-bit DWARF extension");
11487 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11488 "Pub Info Length");
11489 }
11490
11491 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11492 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11493
11494 if (dwarf_split_debug_info)
11495 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11496 debug_skeleton_info_section,
11497 "Offset of Compilation Unit Info");
11498 else
11499 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11500 debug_info_section,
11501 "Offset of Compilation Unit Info");
11502 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11503 "Compilation Unit Length");
11504
11505 FOR_EACH_VEC_ELT (*names, i, pub)
11506 {
11507 if (include_pubname_in_output (names, pub))
11508 {
11509 dw_offset die_offset = pub->die->die_offset;
11510
11511 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11512 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11513 gcc_assert (pub->die->die_mark);
11514
11515 /* If we're putting types in their own .debug_types sections,
11516 the .debug_pubtypes table will still point to the compile
11517 unit (not the type unit), so we want to use the offset of
11518 the skeleton DIE (if there is one). */
11519 if (pub->die->comdat_type_p && names == pubtype_table)
11520 {
11521 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11522
11523 if (type_node != NULL)
11524 die_offset = (type_node->skeleton_die != NULL
11525 ? type_node->skeleton_die->die_offset
11526 : comp_unit_die ()->die_offset);
11527 }
11528
11529 output_pubname (die_offset, pub);
11530 }
11531 }
11532
11533 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11534 }
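
/* Editor's illustrative sketch -- not part of GCC.  The on-disk size of a
   single entry as written by output_pubname above: a DWARF_OFFSET_SIZE-byte
   DIE offset, an optional one-byte gdb-index flag (only when
   debug_generate_pub_sections == 2, i.e. -ggnu-pubnames), and the
   NUL-terminated name.  The table is terminated by a zero DIE offset.  */

static inline unsigned long
sketch_pubname_entry_size (const char *entry_name, int gnu_pubnames)
{
  return DWARF_OFFSET_SIZE		/* DIE offset.  */
	 + (gnu_pubnames ? 1 : 0)	/* gdb-index flag byte.  */
	 + strlen (entry_name) + 1;	/* Name plus terminating NUL.  */
}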
11535
11536 /* Output public names and types tables if necessary. */
11537
11538 static void
11539 output_pubtables (void)
11540 {
11541 if (!want_pubnames () || !info_section_emitted)
11542 return;
11543
11544 switch_to_section (debug_pubnames_section);
11545 output_pubnames (pubname_table);
11546 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11547 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11548 simply won't look for the section. */
11549 switch_to_section (debug_pubtypes_section);
11550 output_pubnames (pubtype_table);
11551 }
11552
11553
11554 /* Output the information that goes into the .debug_aranges table.
11555 Namely, define the beginning and ending address range of the
11556 text section generated for this compilation unit. */
11557
11558 static void
11559 output_aranges (void)
11560 {
11561 unsigned i;
11562 unsigned long aranges_length = size_of_aranges ();
11563
11564 if (!XCOFF_DEBUGGING_INFO)
11565 {
11566 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11567 dw2_asm_output_data (4, 0xffffffff,
11568 "Initial length escape value indicating 64-bit DWARF extension");
11569 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11570 "Length of Address Ranges Info");
11571 }
11572
11573 /* Version number for aranges is still 2, even up to DWARF5. */
11574 dw2_asm_output_data (2, 2, "DWARF aranges version");
11575 if (dwarf_split_debug_info)
11576 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11577 debug_skeleton_info_section,
11578 "Offset of Compilation Unit Info");
11579 else
11580 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11581 debug_info_section,
11582 "Offset of Compilation Unit Info");
11583 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11584 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11585
11586 /* We need to align to twice the pointer size here. */
11587 if (DWARF_ARANGES_PAD_SIZE)
11588 {
11589       /* Pad using 2-byte words so that the padding is correct for any
11590 	 pointer size.  */
11591 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11592 2 * DWARF2_ADDR_SIZE);
11593 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11594 dw2_asm_output_data (2, 0, NULL);
11595 }
11596
11597   /* It is necessary not to output these entries if the sections were
11598      not used; in that case the length will be 0 and the address may
11599      end up as 0 if the section is discarded by ld --gc-sections,
11600      leaving an invalid (0, 0) entry that can be confused with the
11601      terminator.  */
11602 if (text_section_used)
11603 {
11604 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11605 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11606 text_section_label, "Length");
11607 }
11608 if (cold_text_section_used)
11609 {
11610 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11611 "Address");
11612 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11613 cold_text_section_label, "Length");
11614 }
11615
11616 if (have_multiple_function_sections)
11617 {
11618 unsigned fde_idx;
11619 dw_fde_ref fde;
11620
11621 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11622 {
11623 if (DECL_IGNORED_P (fde->decl))
11624 continue;
11625 if (!fde->in_std_section)
11626 {
11627 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11628 "Address");
11629 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11630 fde->dw_fde_begin, "Length");
11631 }
11632 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11633 {
11634 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11635 "Address");
11636 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11637 fde->dw_fde_second_begin, "Length");
11638 }
11639 }
11640 }
11641
11642 /* Output the terminator words. */
11643 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11644 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11645 }
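
/* Editor's note (illustrative, not part of GCC): the .debug_aranges layout
   produced above, shown for 32-bit DWARF on non-XCOFF targets:

     [length:4][version=2:2][CU offset:4][addr_size:1][seg_size=0:1]
     <padding up to 2 * DWARF2_ADDR_SIZE>
     { [address:DWARF2_ADDR_SIZE][length:DWARF2_ADDR_SIZE] } ...
     [0:DWARF2_ADDR_SIZE][0:DWARF2_ADDR_SIZE]    <- terminator  */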
11646
11647 /* Add a new entry to .debug_ranges. Return its index into
11648 ranges_table vector. */
11649
11650 static unsigned int
11651 add_ranges_num (int num, bool maybe_new_sec)
11652 {
11653 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11654 vec_safe_push (ranges_table, r);
11655 return vec_safe_length (ranges_table) - 1;
11656 }
11657
11658 /* Add a new entry to .debug_ranges corresponding to a block, or a
11659 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11660 this entry might be in a different section from previous range. */
11661
11662 static unsigned int
11663 add_ranges (const_tree block, bool maybe_new_sec)
11664 {
11665 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11666 }
11667
11668 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11669    chain, or a middle entry of a chain that will be directly referred to.  */
11670
11671 static void
11672 note_rnglist_head (unsigned int offset)
11673 {
11674 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11675 return;
11676 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11677 }
11678
11679 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11680 When using dwarf_split_debug_info, address attributes in dies destined
11681 for the final executable should be direct references--setting the
11682 parameter force_direct ensures this behavior. */
11683
11684 static void
11685 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11686 bool *added, bool force_direct)
11687 {
11688 unsigned int in_use = vec_safe_length (ranges_by_label);
11689 unsigned int offset;
11690 dw_ranges_by_label rbl = { begin, end };
11691 vec_safe_push (ranges_by_label, rbl);
11692 offset = add_ranges_num (-(int)in_use - 1, true);
11693 if (!*added)
11694 {
11695 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11696 *added = true;
11697 note_rnglist_head (offset);
11698 }
11699 }
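
/* Editor's illustrative sketch -- not part of GCC.  How a ranges_by_label
   index is packed into the signed "num" field of a ranges_table entry by
   add_ranges_by_labels above, and how output_ranges / output_rnglists
   recover it: positive values are BLOCK numbers, zero terminates a range
   list, and negative values index ranges_by_label.  */

static inline int
sketch_encode_label_idx (unsigned int in_use)
{
  return -(int) in_use - 1;	/* 0 -> -1, 1 -> -2, ...  */
}

static inline int
sketch_decode_label_idx (int block_num)
{
  return -block_num - 1;	/* -1 -> 0, -2 -> 1, ...  */
}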
11700
11701 /* Emit .debug_ranges section. */
11702
11703 static void
11704 output_ranges (void)
11705 {
11706 unsigned i;
11707 static const char *const start_fmt = "Offset %#x";
11708 const char *fmt = start_fmt;
11709 dw_ranges *r;
11710
11711 switch_to_section (debug_ranges_section);
11712 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11713 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11714 {
11715 int block_num = r->num;
11716
11717 if (block_num > 0)
11718 {
11719 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11720 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11721
11722 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11723 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11724
11725 /* If all code is in the text section, then the compilation
11726 unit base address defaults to DW_AT_low_pc, which is the
11727 base of the text section. */
11728 if (!have_multiple_function_sections)
11729 {
11730 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11731 text_section_label,
11732 fmt, i * 2 * DWARF2_ADDR_SIZE);
11733 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11734 text_section_label, NULL);
11735 }
11736
11737 /* Otherwise, the compilation unit base address is zero,
11738 which allows us to use absolute addresses, and not worry
11739 about whether the target supports cross-section
11740 arithmetic. */
11741 else
11742 {
11743 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11744 fmt, i * 2 * DWARF2_ADDR_SIZE);
11745 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11746 }
11747
11748 fmt = NULL;
11749 }
11750
11751 /* Negative block_num stands for an index into ranges_by_label. */
11752 else if (block_num < 0)
11753 {
11754 int lab_idx = - block_num - 1;
11755
11756 if (!have_multiple_function_sections)
11757 {
11758 gcc_unreachable ();
11759 #if 0
11760 /* If we ever use add_ranges_by_labels () for a single
11761 function section, all we have to do is to take out
11762 the #if 0 above. */
11763 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11764 (*ranges_by_label)[lab_idx].begin,
11765 text_section_label,
11766 fmt, i * 2 * DWARF2_ADDR_SIZE);
11767 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11768 (*ranges_by_label)[lab_idx].end,
11769 text_section_label, NULL);
11770 #endif
11771 }
11772 else
11773 {
11774 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11775 (*ranges_by_label)[lab_idx].begin,
11776 fmt, i * 2 * DWARF2_ADDR_SIZE);
11777 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11778 (*ranges_by_label)[lab_idx].end,
11779 NULL);
11780 }
11781 }
11782 else
11783 {
11784 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11785 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11786 fmt = start_fmt;
11787 }
11788 }
11789 }
11790
11791 /* Non-zero if .debug_line_str should be used for .debug_line section
11792 strings or strings that are likely shareable with those. */
11793 #define DWARF5_USE_DEBUG_LINE_STR \
11794 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11795 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11796 /* FIXME: there is no .debug_line_str.dwo section, \
11797 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11798 && !dwarf_split_debug_info)
11799
11800 /* Assign .debug_rnglists indexes. */
11801
11802 static void
11803 index_rnglists (void)
11804 {
11805 unsigned i;
11806 dw_ranges *r;
11807
11808 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11809 if (r->label)
11810 r->idx = rnglist_idx++;
11811 }
11812
11813 /* Emit .debug_rnglists section. */
11814
11815 static void
11816 output_rnglists (unsigned generation)
11817 {
11818 unsigned i;
11819 dw_ranges *r;
11820 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11821 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11822 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11823
11824 switch_to_section (debug_ranges_section);
11825 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11826 /* There are up to 4 unique ranges labels per generation.
11827 See also init_sections_and_labels. */
11828 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11829 2 + generation * 4);
11830 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11831 3 + generation * 4);
11832 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11833 dw2_asm_output_data (4, 0xffffffff,
11834 "Initial length escape value indicating "
11835 "64-bit DWARF extension");
11836 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11837 "Length of Range Lists");
11838 ASM_OUTPUT_LABEL (asm_out_file, l1);
11839 output_dwarf_version ();
11840 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11841 dw2_asm_output_data (1, 0, "Segment Size");
11842 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11843 about relocation sizes and primarily care about the size of .debug*
11844 sections in linked shared libraries and executables, then
11845 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11846 into it are usually larger than just DW_FORM_sec_offset offsets
11847 into the .debug_rnglists section. */
11848 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11849 "Offset Entry Count");
11850 if (dwarf_split_debug_info)
11851 {
11852 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11853 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11854 if (r->label)
11855 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11856 ranges_base_label, NULL);
11857 }
11858
11859 const char *lab = "";
11860 unsigned int len = vec_safe_length (ranges_table);
11861 const char *base = NULL;
11862 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11863 {
11864 int block_num = r->num;
11865
11866 if (r->label)
11867 {
11868 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11869 lab = r->label;
11870 }
11871 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11872 base = NULL;
11873 if (block_num > 0)
11874 {
11875 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11876 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11877
11878 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11879 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11880
11881 if (HAVE_AS_LEB128)
11882 {
11883 /* If all code is in the text section, then the compilation
11884 unit base address defaults to DW_AT_low_pc, which is the
11885 base of the text section. */
11886 if (!have_multiple_function_sections)
11887 {
11888 dw2_asm_output_data (1, DW_RLE_offset_pair,
11889 "DW_RLE_offset_pair (%s)", lab);
11890 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11891 "Range begin address (%s)", lab);
11892 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11893 "Range end address (%s)", lab);
11894 continue;
11895 }
11896 if (base == NULL)
11897 {
11898 dw_ranges *r2 = NULL;
11899 if (i < len - 1)
11900 r2 = &(*ranges_table)[i + 1];
11901 if (r2
11902 && r2->num != 0
11903 && r2->label == NULL
11904 && !r2->maybe_new_sec)
11905 {
11906 dw2_asm_output_data (1, DW_RLE_base_address,
11907 "DW_RLE_base_address (%s)", lab);
11908 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11909 "Base address (%s)", lab);
11910 strcpy (basebuf, blabel);
11911 base = basebuf;
11912 }
11913 }
11914 if (base)
11915 {
11916 dw2_asm_output_data (1, DW_RLE_offset_pair,
11917 "DW_RLE_offset_pair (%s)", lab);
11918 dw2_asm_output_delta_uleb128 (blabel, base,
11919 "Range begin address (%s)", lab);
11920 dw2_asm_output_delta_uleb128 (elabel, base,
11921 "Range end address (%s)", lab);
11922 continue;
11923 }
11924 dw2_asm_output_data (1, DW_RLE_start_length,
11925 "DW_RLE_start_length (%s)", lab);
11926 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11927 "Range begin address (%s)", lab);
11928 dw2_asm_output_delta_uleb128 (elabel, blabel,
11929 "Range length (%s)", lab);
11930 }
11931 else
11932 {
11933 dw2_asm_output_data (1, DW_RLE_start_end,
11934 "DW_RLE_start_end (%s)", lab);
11935 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11936 "Range begin address (%s)", lab);
11937 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11938 "Range end address (%s)", lab);
11939 }
11940 }
11941
11942 /* Negative block_num stands for an index into ranges_by_label. */
11943 else if (block_num < 0)
11944 {
11945 int lab_idx = - block_num - 1;
11946 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11947 const char *elabel = (*ranges_by_label)[lab_idx].end;
11948
11949 if (!have_multiple_function_sections)
11950 gcc_unreachable ();
11951 if (HAVE_AS_LEB128)
11952 {
11953 dw2_asm_output_data (1, DW_RLE_start_length,
11954 "DW_RLE_start_length (%s)", lab);
11955 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11956 "Range begin address (%s)", lab);
11957 dw2_asm_output_delta_uleb128 (elabel, blabel,
11958 "Range length (%s)", lab);
11959 }
11960 else
11961 {
11962 dw2_asm_output_data (1, DW_RLE_start_end,
11963 "DW_RLE_start_end (%s)", lab);
11964 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11965 "Range begin address (%s)", lab);
11966 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11967 "Range end address (%s)", lab);
11968 }
11969 }
11970 else
11971 dw2_asm_output_data (1, DW_RLE_end_of_list,
11972 "DW_RLE_end_of_list (%s)", lab);
11973 }
11974 ASM_OUTPUT_LABEL (asm_out_file, l2);
11975 }
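
/* Editor's illustrative sketch -- not part of GCC.  Which DW_RLE_* entry
   kind output_rnglists picks for a BLOCK range, assuming an assembler with
   .uleb128 support (HAVE_AS_LEB128).  When a run of ranges shares a text
   section, a DW_RLE_base_address entry is emitted first and the ranges then
   use offset pairs; without LEB128 support, DW_RLE_start_end with two
   absolute addresses is used instead.  */

static inline int
sketch_rle_kind_for_block (bool single_text_section, bool have_base_address)
{
  if (single_text_section || have_base_address)
    /* Two uleb128 offsets, relative to the text section label or to the
       most recent DW_RLE_base_address entry.  */
    return DW_RLE_offset_pair;
  /* Absolute begin address plus a uleb128 length.  */
  return DW_RLE_start_length;
}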
11976
11977 /* Data structure containing information about input files. */
11978 struct file_info
11979 {
11980 const char *path; /* Complete file name. */
11981 const char *fname; /* File name part. */
11982 int length; /* Length of entire string. */
11983 struct dwarf_file_data * file_idx; /* Index in input file table. */
11984 int dir_idx; /* Index in directory table. */
11985 };
11986
11987 /* Data structure containing information about directories with source
11988 files. */
11989 struct dir_info
11990 {
11991 const char *path; /* Path including directory name. */
11992 int length; /* Path length. */
11993 int prefix; /* Index of directory entry which is a prefix. */
11994 int count; /* Number of files in this directory. */
11995 int dir_idx; /* Index of directory used as base. */
11996 };
11997
11998 /* Callback function for file_info comparison. We sort by looking at
11999 the directories in the path. */
12000
12001 static int
12002 file_info_cmp (const void *p1, const void *p2)
12003 {
12004 const struct file_info *const s1 = (const struct file_info *) p1;
12005 const struct file_info *const s2 = (const struct file_info *) p2;
12006 const unsigned char *cp1;
12007 const unsigned char *cp2;
12008
12009 /* Take care of file names without directories. We need to make sure that
12010 we return consistent values to qsort since some will get confused if
12011 we return the same value when identical operands are passed in opposite
12012 orders. So if neither has a directory, return 0 and otherwise return
12013 1 or -1 depending on which one has the directory. We want the one with
12014 the directory to sort after the one without, so all no directory files
12015 are at the start (normally only the compilation unit file). */
12016 if ((s1->path == s1->fname || s2->path == s2->fname))
12017 return (s2->path == s2->fname) - (s1->path == s1->fname);
12018
12019 cp1 = (const unsigned char *) s1->path;
12020 cp2 = (const unsigned char *) s2->path;
12021
12022 while (1)
12023 {
12024 ++cp1;
12025 ++cp2;
12026 /* Reached the end of the first path? If so, handle like above,
12027 but now we want longer directory prefixes before shorter ones. */
12028 if ((cp1 == (const unsigned char *) s1->fname)
12029 || (cp2 == (const unsigned char *) s2->fname))
12030 return ((cp1 == (const unsigned char *) s1->fname)
12031 - (cp2 == (const unsigned char *) s2->fname));
12032
12033 /* Character of current path component the same? */
12034 else if (*cp1 != *cp2)
12035 return *cp1 - *cp2;
12036 }
12037 }
12038
12039 struct file_name_acquire_data
12040 {
12041 struct file_info *files;
12042 int used_files;
12043 int max_files;
12044 };
12045
12046 /* Traversal function for the hash table. */
12047
12048 int
12049 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12050 {
12051 struct dwarf_file_data *d = *slot;
12052 struct file_info *fi;
12053 const char *f;
12054
12055 gcc_assert (fnad->max_files >= d->emitted_number);
12056
12057 if (! d->emitted_number)
12058 return 1;
12059
12060 gcc_assert (fnad->max_files != fnad->used_files);
12061
12062 fi = fnad->files + fnad->used_files++;
12063
12064 /* Skip all leading "./". */
12065 f = d->filename;
12066 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12067 f += 2;
12068
12069 /* Create a new array entry. */
12070 fi->path = f;
12071 fi->length = strlen (f);
12072 fi->file_idx = d;
12073
12074 /* Search for the file name part. */
12075 f = strrchr (f, DIR_SEPARATOR);
12076 #if defined (DIR_SEPARATOR_2)
12077 {
12078 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12079
12080 if (g != NULL)
12081 {
12082 if (f == NULL || f < g)
12083 f = g;
12084 }
12085 }
12086 #endif
12087
12088 fi->fname = f == NULL ? fi->path : f + 1;
12089 return 1;
12090 }
12091
12092 /* Helper function for output_file_names.  Emit a FORM-encoded
12093    string STR, using ENTRY_KIND and index IDX in the assembly
12094    comment.  */
12095
12096 static void
12097 output_line_string (enum dwarf_form form, const char *str,
12098 const char *entry_kind, unsigned int idx)
12099 {
12100 switch (form)
12101 {
12102 case DW_FORM_string:
12103 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12104 break;
12105 case DW_FORM_line_strp:
12106 if (!debug_line_str_hash)
12107 debug_line_str_hash
12108 = hash_table<indirect_string_hasher>::create_ggc (10);
12109
12110 struct indirect_string_node *node;
12111 node = find_AT_string_in_table (str, debug_line_str_hash);
12112 set_indirect_string (node);
12113 node->form = form;
12114 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12115 debug_line_str_section, "%s: %#x: \"%s\"",
12116 entry_kind, 0, node->str);
12117 break;
12118 default:
12119 gcc_unreachable ();
12120 }
12121 }
12122
12123 /* Output the directory table and the file name table. We try to minimize
12124 the total amount of memory needed. A heuristic is used to avoid large
12125 slowdowns with many input files. */
12126
12127 static void
12128 output_file_names (void)
12129 {
12130 struct file_name_acquire_data fnad;
12131 int numfiles;
12132 struct file_info *files;
12133 struct dir_info *dirs;
12134 int *saved;
12135 int *savehere;
12136 int *backmap;
12137 int ndirs;
12138 int idx_offset;
12139 int i;
12140
12141 if (!last_emitted_file)
12142 {
12143 if (dwarf_version >= 5)
12144 {
12145 dw2_asm_output_data (1, 0, "Directory entry format count");
12146 dw2_asm_output_data_uleb128 (0, "Directories count");
12147 dw2_asm_output_data (1, 0, "File name entry format count");
12148 dw2_asm_output_data_uleb128 (0, "File names count");
12149 }
12150 else
12151 {
12152 dw2_asm_output_data (1, 0, "End directory table");
12153 dw2_asm_output_data (1, 0, "End file name table");
12154 }
12155 return;
12156 }
12157
12158 numfiles = last_emitted_file->emitted_number;
12159
12160 /* Allocate the various arrays we need. */
12161 files = XALLOCAVEC (struct file_info, numfiles);
12162 dirs = XALLOCAVEC (struct dir_info, numfiles);
12163
12164 fnad.files = files;
12165 fnad.used_files = 0;
12166 fnad.max_files = numfiles;
12167 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12168 gcc_assert (fnad.used_files == fnad.max_files);
12169
12170 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12171
12172 /* Find all the different directories used. */
12173 dirs[0].path = files[0].path;
12174 dirs[0].length = files[0].fname - files[0].path;
12175 dirs[0].prefix = -1;
12176 dirs[0].count = 1;
12177 dirs[0].dir_idx = 0;
12178 files[0].dir_idx = 0;
12179 ndirs = 1;
12180
12181 for (i = 1; i < numfiles; i++)
12182 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12183 && memcmp (dirs[ndirs - 1].path, files[i].path,
12184 dirs[ndirs - 1].length) == 0)
12185 {
12186 /* Same directory as last entry. */
12187 files[i].dir_idx = ndirs - 1;
12188 ++dirs[ndirs - 1].count;
12189 }
12190 else
12191 {
12192 int j;
12193
12194 /* This is a new directory. */
12195 dirs[ndirs].path = files[i].path;
12196 dirs[ndirs].length = files[i].fname - files[i].path;
12197 dirs[ndirs].count = 1;
12198 dirs[ndirs].dir_idx = ndirs;
12199 files[i].dir_idx = ndirs;
12200
12201 /* Search for a prefix. */
12202 dirs[ndirs].prefix = -1;
12203 for (j = 0; j < ndirs; j++)
12204 if (dirs[j].length < dirs[ndirs].length
12205 && dirs[j].length > 1
12206 && (dirs[ndirs].prefix == -1
12207 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12208 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12209 dirs[ndirs].prefix = j;
12210
12211 ++ndirs;
12212 }
12213
12214   /* Now to the actual work.  We have to find a subset of the directories
12215      which allows expressing the file names, via references to the directory
12216      table, with the fewest characters.  We do not do an exhaustive search,
12217      which would require checking every combination of every single possible
12218      prefix.  Instead we use a heuristic which provides nearly optimal
12219      results in most cases and is never far off.  */
12220 saved = XALLOCAVEC (int, ndirs);
12221 savehere = XALLOCAVEC (int, ndirs);
12222
12223 memset (saved, '\0', ndirs * sizeof (saved[0]));
12224 for (i = 0; i < ndirs; i++)
12225 {
12226 int j;
12227 int total;
12228
12229 /* We can always save some space for the current directory. But this
12230 does not mean it will be enough to justify adding the directory. */
12231 savehere[i] = dirs[i].length;
12232 total = (savehere[i] - saved[i]) * dirs[i].count;
12233
12234 for (j = i + 1; j < ndirs; j++)
12235 {
12236 savehere[j] = 0;
12237 if (saved[j] < dirs[i].length)
12238 {
12239 /* Determine whether the dirs[i] path is a prefix of the
12240 dirs[j] path. */
12241 int k;
12242
12243 k = dirs[j].prefix;
12244 while (k != -1 && k != (int) i)
12245 k = dirs[k].prefix;
12246
12247 if (k == (int) i)
12248 {
12249 /* Yes it is. We can possibly save some memory by
12250 writing the filenames in dirs[j] relative to
12251 dirs[i]. */
12252 savehere[j] = dirs[i].length;
12253 total += (savehere[j] - saved[j]) * dirs[j].count;
12254 }
12255 }
12256 }
12257
12258 /* Check whether we can save enough to justify adding the dirs[i]
12259 directory. */
12260 if (total > dirs[i].length + 1)
12261 {
12262 /* It's worthwhile adding. */
12263 for (j = i; j < ndirs; j++)
12264 if (savehere[j] > 0)
12265 {
12266 /* Remember how much we saved for this directory so far. */
12267 saved[j] = savehere[j];
12268
12269 /* Remember the prefix directory. */
12270 dirs[j].dir_idx = i;
12271 }
12272 }
12273 }
12274
12275 /* Emit the directory name table. */
12276 idx_offset = dirs[0].length > 0 ? 1 : 0;
12277 enum dwarf_form str_form = DW_FORM_string;
12278 enum dwarf_form idx_form = DW_FORM_udata;
12279 if (dwarf_version >= 5)
12280 {
12281 const char *comp_dir = comp_dir_string ();
12282 if (comp_dir == NULL)
12283 comp_dir = "";
12284 dw2_asm_output_data (1, 1, "Directory entry format count");
12285 if (DWARF5_USE_DEBUG_LINE_STR)
12286 str_form = DW_FORM_line_strp;
12287 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12288 dw2_asm_output_data_uleb128 (str_form, "%s",
12289 get_DW_FORM_name (str_form));
12290 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12291 if (str_form == DW_FORM_string)
12292 {
12293 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12294 for (i = 1 - idx_offset; i < ndirs; i++)
12295 dw2_asm_output_nstring (dirs[i].path,
12296 dirs[i].length
12297 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12298 "Directory Entry: %#x", i + idx_offset);
12299 }
12300 else
12301 {
12302 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12303 for (i = 1 - idx_offset; i < ndirs; i++)
12304 {
12305 const char *str
12306 = ggc_alloc_string (dirs[i].path,
12307 dirs[i].length
12308 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12309 output_line_string (str_form, str, "Directory Entry",
12310 (unsigned) i + idx_offset);
12311 }
12312 }
12313 }
12314 else
12315 {
12316 for (i = 1 - idx_offset; i < ndirs; i++)
12317 dw2_asm_output_nstring (dirs[i].path,
12318 dirs[i].length
12319 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12320 "Directory Entry: %#x", i + idx_offset);
12321
12322 dw2_asm_output_data (1, 0, "End directory table");
12323 }
12324
12325 /* We have to emit them in the order of emitted_number since that's
12326 used in the debug info generation. To do this efficiently we
12327 generate a back-mapping of the indices first. */
12328 backmap = XALLOCAVEC (int, numfiles);
12329 for (i = 0; i < numfiles; i++)
12330 backmap[files[i].file_idx->emitted_number - 1] = i;
12331
12332 if (dwarf_version >= 5)
12333 {
12334 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12335 if (filename0 == NULL)
12336 filename0 = "";
12337       /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12338 	 DW_FORM_data2.  Choose one based on the number of directories
12339 	 and how much space the indexes would occupy in each encoding.
12340 	 If we have at most 256 directories, all indexes fit into
12341 	 a single byte, so DW_FORM_data1 is the most compact (if there
12342 	 are at most 128 directories, DW_FORM_udata would be just as
12343 	 compact, but no shorter and slower to decode).  */
12344 if (ndirs + idx_offset <= 256)
12345 idx_form = DW_FORM_data1;
12346       /* If there are more than 65536 directories, we have to use
12347 	 DW_FORM_udata; DW_FORM_data2 can't refer to them.
12348 	 Otherwise, compute how much space all the indexes would occupy
12349 	 if they used DW_FORM_udata (sum), compare that to the size of
12350 	 the DW_FORM_data2 encoding, and pick the more efficient one.  */
12351 else if (ndirs + idx_offset <= 65536)
12352 {
12353 unsigned HOST_WIDE_INT sum = 1;
12354 for (i = 0; i < numfiles; i++)
12355 {
12356 int file_idx = backmap[i];
12357 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12358 sum += size_of_uleb128 (dir_idx);
12359 }
12360 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12361 idx_form = DW_FORM_data2;
12362 }
12363 #ifdef VMS_DEBUGGING_INFO
12364 dw2_asm_output_data (1, 4, "File name entry format count");
12365 #else
12366 dw2_asm_output_data (1, 2, "File name entry format count");
12367 #endif
12368 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12369 dw2_asm_output_data_uleb128 (str_form, "%s",
12370 get_DW_FORM_name (str_form));
12371 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12372 "DW_LNCT_directory_index");
12373 dw2_asm_output_data_uleb128 (idx_form, "%s",
12374 get_DW_FORM_name (idx_form));
12375 #ifdef VMS_DEBUGGING_INFO
12376 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12377 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12378 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12379 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12380 #endif
12381 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12382
12383 output_line_string (str_form, filename0, "File Entry", 0);
12384
12385 /* Include directory index. */
12386 if (idx_form != DW_FORM_udata)
12387 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12388 0, NULL);
12389 else
12390 dw2_asm_output_data_uleb128 (0, NULL);
12391
12392 #ifdef VMS_DEBUGGING_INFO
12393 dw2_asm_output_data_uleb128 (0, NULL);
12394 dw2_asm_output_data_uleb128 (0, NULL);
12395 #endif
12396 }
12397
12398 /* Now write all the file names. */
12399 for (i = 0; i < numfiles; i++)
12400 {
12401 int file_idx = backmap[i];
12402 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12403
12404 #ifdef VMS_DEBUGGING_INFO
12405 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12406
12407 /* Setting these fields can lead to debugger miscomparisons,
12408 but VMS Debug requires them to be set correctly. */
12409
12410 int ver;
12411 long long cdt;
12412 long siz;
12413 int maxfilelen = (strlen (files[file_idx].path)
12414 + dirs[dir_idx].length
12415 + MAX_VMS_VERSION_LEN + 1);
12416 char *filebuf = XALLOCAVEC (char, maxfilelen);
12417
12418 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12419 snprintf (filebuf, maxfilelen, "%s;%d",
12420 files[file_idx].path + dirs[dir_idx].length, ver);
12421
12422 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12423
12424 /* Include directory index. */
12425 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12426 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12427 dir_idx + idx_offset, NULL);
12428 else
12429 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12430
12431 /* Modification time. */
12432 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12433 &cdt, 0, 0, 0) == 0)
12434 ? cdt : 0, NULL);
12435
12436 /* File length in bytes. */
12437 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12438 0, &siz, 0, 0) == 0)
12439 ? siz : 0, NULL);
12440 #else
12441 output_line_string (str_form,
12442 files[file_idx].path + dirs[dir_idx].length,
12443 "File Entry", (unsigned) i + 1);
12444
12445 /* Include directory index. */
12446 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12447 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12448 dir_idx + idx_offset, NULL);
12449 else
12450 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12451
12452 if (dwarf_version >= 5)
12453 continue;
12454
12455 /* Modification time. */
12456 dw2_asm_output_data_uleb128 (0, NULL);
12457
12458 /* File length in bytes. */
12459 dw2_asm_output_data_uleb128 (0, NULL);
12460 #endif /* VMS_DEBUGGING_INFO */
12461 }
12462
12463 if (dwarf_version < 5)
12464 dw2_asm_output_data (1, 0, "End file name table");
12465 }
12466
12467
12468 /* Output one line number table into the .debug_line section. */
12469
12470 static void
12471 output_one_line_info_table (dw_line_info_table *table)
12472 {
12473 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12474 unsigned int current_line = 1;
12475 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12476 dw_line_info_entry *ent, *prev_addr;
12477 size_t i;
12478 unsigned int view;
12479
12480 view = 0;
12481
12482 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12483 {
12484 switch (ent->opcode)
12485 {
12486 case LI_set_address:
12487 /* ??? Unfortunately, we have little choice here currently, and
12488 must always use the most general form. GCC does not know the
12489 address delta itself, so we can't use DW_LNS_advance_pc. Many
12490 ports do have length attributes which will give an upper bound
12491 on the address range. We could perhaps use length attributes
12492 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12493 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12494
12495 view = 0;
12496
12497 /* This can handle any delta. This takes
12498 3+DWARF2_ADDR_SIZE bytes. */
12499 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12500 debug_variable_location_views
12501 ? ", reset view to 0" : "");
12502 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12503 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12504 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
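/* For example, with an 8-byte address size the bytes just emitted are:
   0x00 (extended opcode escape), 0x09 (operand length, 1 + DWARF2_ADDR_SIZE),
   0x02 (DW_LNE_set_address), followed by the 8 relocated address bytes of
   LINE_LABEL. */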
12505
12506 prev_addr = ent;
12507 break;
12508
12509 case LI_adv_address:
12510 {
12511 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12512 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12513 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12514
12515 view++;
12516
12517 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12518 dw2_asm_output_delta (2, line_label, prev_label,
12519 "from %s to %s", prev_label, line_label);
12520
12521 prev_addr = ent;
12522 break;
12523 }
12524
12525 case LI_set_line:
12526 if (ent->val == current_line)
12527 {
12528 /* We still need to start a new row, so output a copy insn. */
12529 dw2_asm_output_data (1, DW_LNS_copy,
12530 "copy line %u", current_line);
12531 }
12532 else
12533 {
12534 int line_offset = ent->val - current_line;
12535 int line_delta = line_offset - DWARF_LINE_BASE;
12536
12537 current_line = ent->val;
12538 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12539 {
12540 /* This can handle deltas from -10 to 234, using the current
12541 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12542 This takes 1 byte. */
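/* A worked example, assuming the default DWARF_LINE_BASE of -10 mentioned
   above: advancing from line 7 to line 9 gives line_offset = 2 and
   line_delta = 12, so the whole row is emitted as the single opcode byte
   DWARF_LINE_OPCODE_BASE + 12. */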
12543 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12544 "line %u", current_line);
12545 }
12546 else
12547 {
12548 /* This can handle any delta. This takes at least 4 bytes,
12549 depending on the value being encoded. */
12550 dw2_asm_output_data (1, DW_LNS_advance_line,
12551 "advance to line %u", current_line);
12552 dw2_asm_output_data_sleb128 (line_offset, NULL);
12553 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12554 }
12555 }
12556 break;
12557
12558 case LI_set_file:
12559 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12560 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12561 break;
12562
12563 case LI_set_column:
12564 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12565 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12566 break;
12567
12568 case LI_negate_stmt:
12569 current_is_stmt = !current_is_stmt;
12570 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12571 "is_stmt %d", current_is_stmt);
12572 break;
12573
12574 case LI_set_prologue_end:
12575 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12576 "set prologue end");
12577 break;
12578
12579 case LI_set_epilogue_begin:
12580 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12581 "set epilogue begin");
12582 break;
12583
12584 case LI_set_discriminator:
12585 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12586 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12587 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12588 dw2_asm_output_data_uleb128 (ent->val, NULL);
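/* For a discriminator below 128 this emits four bytes, 0x00 0x02 0x04 <val>:
   the extended opcode escape, the operand length, DW_LNE_set_discriminator,
   and the uleb128-encoded value. */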
12589 break;
12590 }
12591 }
12592
12593 /* Emit debug info for the address of the end of the table. */
12594 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12595 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12596 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12597 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12598
12599 dw2_asm_output_data (1, 0, "end sequence");
12600 dw2_asm_output_data_uleb128 (1, NULL);
12601 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12602 }
12603
12604 /* Output the source line number correspondence information. This
12605 information goes into the .debug_line section. */
12606
12607 static void
12608 output_line_info (bool prologue_only)
12609 {
12610 static unsigned int generation;
12611 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12612 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12613 bool saw_one = false;
12614 int opc;
12615
12616 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12617 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12618 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12619 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12620
12621 if (!XCOFF_DEBUGGING_INFO)
12622 {
12623 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12624 dw2_asm_output_data (4, 0xffffffff,
12625 "Initial length escape value indicating 64-bit DWARF extension");
12626 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12627 "Length of Source Line Info");
12628 }
12629
12630 ASM_OUTPUT_LABEL (asm_out_file, l1);
12631
12632 output_dwarf_version ();
12633 if (dwarf_version >= 5)
12634 {
12635 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12636 dw2_asm_output_data (1, 0, "Segment Size");
12637 }
12638 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12639 ASM_OUTPUT_LABEL (asm_out_file, p1);
12640
12641 /* Define the architecture-dependent minimum instruction length (in bytes).
12642 In this implementation of DWARF, this field is used for information
12643 purposes only. Since GCC generates assembly language, we have no
12644 a priori knowledge of how many instruction bytes are generated for each
12645 source line, and therefore can use only the DW_LNE_set_address and
12646 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12647 this as '1', which is "correct enough" for all architectures,
12648 and don't let the target override. */
12649 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12650
12651 if (dwarf_version >= 4)
12652 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12653 "Maximum Operations Per Instruction");
12654 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12655 "Default is_stmt_start flag");
12656 dw2_asm_output_data (1, DWARF_LINE_BASE,
12657 "Line Base Value (Special Opcodes)");
12658 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12659 "Line Range Value (Special Opcodes)");
12660 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12661 "Special Opcode Base");
12662
12663 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12664 {
12665 int n_op_args;
12666 switch (opc)
12667 {
12668 case DW_LNS_advance_pc:
12669 case DW_LNS_advance_line:
12670 case DW_LNS_set_file:
12671 case DW_LNS_set_column:
12672 case DW_LNS_fixed_advance_pc:
12673 case DW_LNS_set_isa:
12674 n_op_args = 1;
12675 break;
12676 default:
12677 n_op_args = 0;
12678 break;
12679 }
12680
12681 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12682 opc, n_op_args);
12683 }
12684
12685 /* Write out the information about the files we use. */
12686 output_file_names ();
12687 ASM_OUTPUT_LABEL (asm_out_file, p2);
12688 if (prologue_only)
12689 {
12690 /* Output the marker for the end of the line number info. */
12691 ASM_OUTPUT_LABEL (asm_out_file, l2);
12692 return;
12693 }
12694
12695 if (separate_line_info)
12696 {
12697 dw_line_info_table *table;
12698 size_t i;
12699
12700 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12701 if (table->in_use)
12702 {
12703 output_one_line_info_table (table);
12704 saw_one = true;
12705 }
12706 }
12707 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12708 {
12709 output_one_line_info_table (cold_text_section_line_info);
12710 saw_one = true;
12711 }
12712
12713 /* ??? Some Darwin linkers crash on a .debug_line section with no
12714 sequences. Further, merely a DW_LNE_end_sequence entry is not
12715 sufficient -- the address column must also be initialized.
12716 Make sure to output at least one set_address/end_sequence pair,
12717 choosing .text since that section is always present. */
12718 if (text_section_line_info->in_use || !saw_one)
12719 output_one_line_info_table (text_section_line_info);
12720
12721 /* Output the marker for the end of the line number info. */
12722 ASM_OUTPUT_LABEL (asm_out_file, l2);
12723 }
12724 \f
12725 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12726
12727 static inline bool
12728 need_endianity_attribute_p (bool reverse)
12729 {
12730 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12731 }
12732
12733 /* Given a pointer to a tree node for some base type, return a pointer to
12734 a DIE that describes the given type. REVERSE is true if the type is
12735 to be interpreted in the reverse storage order wrt the target order.
12736
12737 This routine must only be called for GCC type nodes that correspond to
12738 Dwarf base (fundamental) types. */
12739
12740 static dw_die_ref
12741 base_type_die (tree type, bool reverse)
12742 {
12743 dw_die_ref base_type_result;
12744 enum dwarf_type encoding;
12745 bool fpt_used = false;
12746 struct fixed_point_type_info fpt_info;
12747 tree type_bias = NULL_TREE;
12748
12749 /* If this is a subtype that should not be emitted as a subrange type,
12750 use the base type. See subrange_type_for_debug_p. */
12751 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12752 type = TREE_TYPE (type);
12753
12754 switch (TREE_CODE (type))
12755 {
12756 case INTEGER_TYPE:
12757 if ((dwarf_version >= 4 || !dwarf_strict)
12758 && TYPE_NAME (type)
12759 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12760 && DECL_IS_BUILTIN (TYPE_NAME (type))
12761 && DECL_NAME (TYPE_NAME (type)))
12762 {
12763 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12764 if (strcmp (name, "char16_t") == 0
12765 || strcmp (name, "char32_t") == 0)
12766 {
12767 encoding = DW_ATE_UTF;
12768 break;
12769 }
12770 }
12771 if ((dwarf_version >= 3 || !dwarf_strict)
12772 && lang_hooks.types.get_fixed_point_type_info)
12773 {
12774 memset (&fpt_info, 0, sizeof (fpt_info));
12775 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12776 {
12777 fpt_used = true;
12778 encoding = ((TYPE_UNSIGNED (type))
12779 ? DW_ATE_unsigned_fixed
12780 : DW_ATE_signed_fixed);
12781 break;
12782 }
12783 }
12784 if (TYPE_STRING_FLAG (type))
12785 {
12786 if (TYPE_UNSIGNED (type))
12787 encoding = DW_ATE_unsigned_char;
12788 else
12789 encoding = DW_ATE_signed_char;
12790 }
12791 else if (TYPE_UNSIGNED (type))
12792 encoding = DW_ATE_unsigned;
12793 else
12794 encoding = DW_ATE_signed;
12795
12796 if (!dwarf_strict
12797 && lang_hooks.types.get_type_bias)
12798 type_bias = lang_hooks.types.get_type_bias (type);
12799 break;
12800
12801 case REAL_TYPE:
12802 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12803 {
12804 if (dwarf_version >= 3 || !dwarf_strict)
12805 encoding = DW_ATE_decimal_float;
12806 else
12807 encoding = DW_ATE_lo_user;
12808 }
12809 else
12810 encoding = DW_ATE_float;
12811 break;
12812
12813 case FIXED_POINT_TYPE:
12814 if (!(dwarf_version >= 3 || !dwarf_strict))
12815 encoding = DW_ATE_lo_user;
12816 else if (TYPE_UNSIGNED (type))
12817 encoding = DW_ATE_unsigned_fixed;
12818 else
12819 encoding = DW_ATE_signed_fixed;
12820 break;
12821
12822 /* Dwarf2 doesn't know anything about complex ints, so use
12823 a user defined type for it. */
12824 case COMPLEX_TYPE:
12825 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12826 encoding = DW_ATE_complex_float;
12827 else
12828 encoding = DW_ATE_lo_user;
12829 break;
12830
12831 case BOOLEAN_TYPE:
12832 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12833 encoding = DW_ATE_boolean;
12834 break;
12835
12836 default:
12837 /* No other TREE_CODEs are Dwarf fundamental types. */
12838 gcc_unreachable ();
12839 }
12840
12841 base_type_result = new_die_raw (DW_TAG_base_type);
12842
12843 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12844 int_size_in_bytes (type));
12845 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12846
12847 if (need_endianity_attribute_p (reverse))
12848 add_AT_unsigned (base_type_result, DW_AT_endianity,
12849 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
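/* Note the inversion above: a scalar stored in reverse order on a big-endian
   target is described as DW_END_little, and vice versa on a little-endian
   target. */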
12850
12851 add_alignment_attribute (base_type_result, type);
12852
12853 if (fpt_used)
12854 {
12855 switch (fpt_info.scale_factor_kind)
12856 {
12857 case fixed_point_scale_factor_binary:
12858 add_AT_int (base_type_result, DW_AT_binary_scale,
12859 fpt_info.scale_factor.binary);
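/* Per the DWARF definition, a binary scale factor S means the object's value
   is its integer representation times 2**S, so e.g. a scale of -4 divides
   the stored integer by 16. */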
12860 break;
12861
12862 case fixed_point_scale_factor_decimal:
12863 add_AT_int (base_type_result, DW_AT_decimal_scale,
12864 fpt_info.scale_factor.decimal);
12865 break;
12866
12867 case fixed_point_scale_factor_arbitrary:
12868 /* Arbitrary scale factors cannot be described in standard DWARF,
12869 yet. */
12870 if (!dwarf_strict)
12871 {
12872 /* Describe the scale factor as a rational constant. */
12873 const dw_die_ref scale_factor
12874 = new_die (DW_TAG_constant, comp_unit_die (), type);
12875
12876 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12877 fpt_info.scale_factor.arbitrary.numerator);
12878 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12879 fpt_info.scale_factor.arbitrary.denominator);
12880
12881 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12882 }
12883 break;
12884
12885 default:
12886 gcc_unreachable ();
12887 }
12888 }
12889
12890 if (type_bias)
12891 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12892 dw_scalar_form_constant
12893 | dw_scalar_form_exprloc
12894 | dw_scalar_form_reference,
12895 NULL);
12896
12897 return base_type_result;
12898 }
12899
12900 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12901 named 'auto' in its type: return true for it, false otherwise. */
12902
12903 static inline bool
12904 is_cxx_auto (tree type)
12905 {
12906 if (is_cxx ())
12907 {
12908 tree name = TYPE_IDENTIFIER (type);
12909 if (name == get_identifier ("auto")
12910 || name == get_identifier ("decltype(auto)"))
12911 return true;
12912 }
12913 return false;
12914 }
12915
12916 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12917 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12918
12919 static inline int
12920 is_base_type (tree type)
12921 {
12922 switch (TREE_CODE (type))
12923 {
12924 case INTEGER_TYPE:
12925 case REAL_TYPE:
12926 case FIXED_POINT_TYPE:
12927 case COMPLEX_TYPE:
12928 case BOOLEAN_TYPE:
12929 return 1;
12930
12931 case VOID_TYPE:
12932 case ARRAY_TYPE:
12933 case RECORD_TYPE:
12934 case UNION_TYPE:
12935 case QUAL_UNION_TYPE:
12936 case ENUMERAL_TYPE:
12937 case FUNCTION_TYPE:
12938 case METHOD_TYPE:
12939 case POINTER_TYPE:
12940 case REFERENCE_TYPE:
12941 case NULLPTR_TYPE:
12942 case OFFSET_TYPE:
12943 case LANG_TYPE:
12944 case VECTOR_TYPE:
12945 return 0;
12946
12947 default:
12948 if (is_cxx_auto (type))
12949 return 0;
12950 gcc_unreachable ();
12951 }
12952
12953 return 0;
12954 }
12955
12956 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12957 node, return the size in bits for the type if it is a constant, or else
12958 return the alignment for the type if the type's size is not constant, or
12959 else return BITS_PER_WORD if the type actually turns out to be an
12960 ERROR_MARK node. */
12961
12962 static inline unsigned HOST_WIDE_INT
12963 simple_type_size_in_bits (const_tree type)
12964 {
12965 if (TREE_CODE (type) == ERROR_MARK)
12966 return BITS_PER_WORD;
12967 else if (TYPE_SIZE (type) == NULL_TREE)
12968 return 0;
12969 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12970 return tree_to_uhwi (TYPE_SIZE (type));
12971 else
12972 return TYPE_ALIGN (type);
12973 }
12974
12975 /* Similarly, but return an offset_int instead of UHWI. */
12976
12977 static inline offset_int
12978 offset_int_type_size_in_bits (const_tree type)
12979 {
12980 if (TREE_CODE (type) == ERROR_MARK)
12981 return BITS_PER_WORD;
12982 else if (TYPE_SIZE (type) == NULL_TREE)
12983 return 0;
12984 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12985 return wi::to_offset (TYPE_SIZE (type));
12986 else
12987 return TYPE_ALIGN (type);
12988 }
12989
12990 /* Given a pointer to a tree node for a subrange type, return a pointer
12991 to a DIE that describes the given type. */
12992
12993 static dw_die_ref
12994 subrange_type_die (tree type, tree low, tree high, tree bias,
12995 dw_die_ref context_die)
12996 {
12997 dw_die_ref subrange_die;
12998 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12999
13000 if (context_die == NULL)
13001 context_die = comp_unit_die ();
13002
13003 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13004
13005 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13006 {
13007 /* The size of the subrange type and its base type do not match,
13008 so we need to generate a size attribute for the subrange type. */
13009 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13010 }
13011
13012 add_alignment_attribute (subrange_die, type);
13013
13014 if (low)
13015 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13016 if (high)
13017 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13018 if (bias && !dwarf_strict)
13019 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13020 dw_scalar_form_constant
13021 | dw_scalar_form_exprloc
13022 | dw_scalar_form_reference,
13023 NULL);
13024
13025 return subrange_die;
13026 }
13027
13028 /* Returns the (const and/or volatile) cv_qualifiers associated with
13029 the decl node. This will normally be augmented with the
13030 cv_qualifiers of the underlying type in add_type_attribute. */
13031
13032 static int
13033 decl_quals (const_tree decl)
13034 {
13035 return ((TREE_READONLY (decl)
13036 /* The C++ front-end correctly marks reference-typed
13037 variables as readonly, but from a language (and debug
13038 info) standpoint they are not const-qualified. */
13039 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13040 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13041 | (TREE_THIS_VOLATILE (decl)
13042 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13043 }
13044
13045 /* Determine the TYPE whose qualifiers match the largest strict subset
13046 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13047 qualifiers outside QUAL_MASK. */
13048
13049 static int
13050 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13051 {
13052 tree t;
13053 int best_rank = 0, best_qual = 0, max_rank;
13054
13055 type_quals &= qual_mask;
13056 max_rank = popcount_hwi (type_quals) - 1;
13057
13058 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13059 t = TYPE_NEXT_VARIANT (t))
13060 {
13061 int q = TYPE_QUALS (t) & qual_mask;
13062
13063 if ((q & type_quals) == q && q != type_quals
13064 && check_base_type (t, type))
13065 {
13066 int rank = popcount_hwi (q);
13067
13068 if (rank > best_rank)
13069 {
13070 best_rank = rank;
13071 best_qual = q;
13072 }
13073 }
13074 }
13075
13076 return best_qual;
13077 }
13078
13079 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13080 static const dwarf_qual_info_t dwarf_qual_info[] =
13081 {
13082 { TYPE_QUAL_CONST, DW_TAG_const_type },
13083 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13084 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13085 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13086 };
13087 static const unsigned int dwarf_qual_info_size
13088 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13089
13090 /* If DIE is a qualified DIE of some base DIE with the same parent,
13091 return the base DIE, otherwise return NULL. Set MASK to the
13092 qualifiers added compared to the returned DIE. */
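/* For instance, a DW_TAG_const_type DIE whose only attribute is a DW_AT_type
   pointing at a sibling base type DIE yields that base DIE, with
   TYPE_QUAL_CONST ORed into *MASK; with a nonzero DEPTH, chains such as
   const -> volatile -> base are collapsed the same way. */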
13093
13094 static dw_die_ref
13095 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13096 {
13097 unsigned int i;
13098 for (i = 0; i < dwarf_qual_info_size; i++)
13099 if (die->die_tag == dwarf_qual_info[i].t)
13100 break;
13101 if (i == dwarf_qual_info_size)
13102 return NULL;
13103 if (vec_safe_length (die->die_attr) != 1)
13104 return NULL;
13105 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13106 if (type == NULL || type->die_parent != die->die_parent)
13107 return NULL;
13108 *mask |= dwarf_qual_info[i].q;
13109 if (depth)
13110 {
13111 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13112 if (ret)
13113 return ret;
13114 }
13115 return type;
13116 }
13117
13118 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13119 entry that chains the modifiers specified by CV_QUALS in front of the
13120 given type. REVERSE is true if the type is to be interpreted in the
13121 reverse storage order wrt the target order. */
13122
13123 static dw_die_ref
13124 modified_type_die (tree type, int cv_quals, bool reverse,
13125 dw_die_ref context_die)
13126 {
13127 enum tree_code code = TREE_CODE (type);
13128 dw_die_ref mod_type_die;
13129 dw_die_ref sub_die = NULL;
13130 tree item_type = NULL;
13131 tree qualified_type;
13132 tree name, low, high;
13133 dw_die_ref mod_scope;
13134 /* Only these cv-qualifiers are currently handled. */
13135 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13136 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13137 ENCODE_QUAL_ADDR_SPACE(~0U));
13138 const bool reverse_base_type
13139 = need_endianity_attribute_p (reverse) && is_base_type (type);
13140
13141 if (code == ERROR_MARK)
13142 return NULL;
13143
13144 if (lang_hooks.types.get_debug_type)
13145 {
13146 tree debug_type = lang_hooks.types.get_debug_type (type);
13147
13148 if (debug_type != NULL_TREE && debug_type != type)
13149 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13150 }
13151
13152 cv_quals &= cv_qual_mask;
13153
13154 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13155 tag modifier (and not an attribute) that old consumers won't be
13156 able to handle. */
13157 if (dwarf_version < 3)
13158 cv_quals &= ~TYPE_QUAL_RESTRICT;
13159
13160 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13161 if (dwarf_version < 5)
13162 cv_quals &= ~TYPE_QUAL_ATOMIC;
13163
13164 /* See if we already have the appropriately qualified variant of
13165 this type. */
13166 qualified_type = get_qualified_type (type, cv_quals);
13167
13168 if (qualified_type == sizetype)
13169 {
13170 /* Try not to expose the internal sizetype type's name. */
13171 if (TYPE_NAME (qualified_type)
13172 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13173 {
13174 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13175
13176 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13177 && (TYPE_PRECISION (t)
13178 == TYPE_PRECISION (qualified_type))
13179 && (TYPE_UNSIGNED (t)
13180 == TYPE_UNSIGNED (qualified_type)));
13181 qualified_type = t;
13182 }
13183 else if (qualified_type == sizetype
13184 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13185 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13186 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13187 qualified_type = size_type_node;
13188 }
13189
13190 /* If we do, then we can just use its DIE, if it exists. */
13191 if (qualified_type)
13192 {
13193 mod_type_die = lookup_type_die (qualified_type);
13194
13195 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13196 dealt with specially: the DIE with the attribute, if it exists, is
13197 placed immediately after the regular DIE for the same base type. */
13198 if (mod_type_die
13199 && (!reverse_base_type
13200 || ((mod_type_die = mod_type_die->die_sib) != NULL
13201 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13202 return mod_type_die;
13203 }
13204
13205 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13206
13207 /* Handle C typedef types. */
13208 if (name
13209 && TREE_CODE (name) == TYPE_DECL
13210 && DECL_ORIGINAL_TYPE (name)
13211 && !DECL_ARTIFICIAL (name))
13212 {
13213 tree dtype = TREE_TYPE (name);
13214
13215 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13216 if (qualified_type == dtype && !reverse_base_type)
13217 {
13218 tree origin = decl_ultimate_origin (name);
13219
13220 /* Typedef variants that have an abstract origin don't get their own
13221 type DIE (see gen_typedef_die), so fall back on the ultimate
13222 abstract origin instead. */
13223 if (origin != NULL && origin != name)
13224 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13225 context_die);
13226
13227 /* For a named type, use the typedef. */
13228 gen_type_die (qualified_type, context_die);
13229 return lookup_type_die (qualified_type);
13230 }
13231 else
13232 {
13233 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13234 dquals &= cv_qual_mask;
13235 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13236 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13237 /* cv-unqualified version of named type. Just use
13238 the unnamed type to which it refers. */
13239 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13240 reverse, context_die);
13241 /* Else cv-qualified version of named type; fall through. */
13242 }
13243 }
13244
13245 mod_scope = scope_die_for (type, context_die);
13246
13247 if (cv_quals)
13248 {
13249 int sub_quals = 0, first_quals = 0;
13250 unsigned i;
13251 dw_die_ref first = NULL, last = NULL;
13252
13253 /* Determine a lesser qualified type that most closely matches
13254 this one. Then generate DW_TAG_* entries for the remaining
13255 qualifiers. */
13256 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13257 cv_qual_mask);
13258 if (sub_quals && use_debug_types)
13259 {
13260 bool needed = false;
13261 /* If emitting type units, make sure the order of qualifiers
13262 is canonical. Thus, start from unqualified type if
13263 an earlier qualifier is missing in sub_quals, but some later
13264 one is present there. */
13265 for (i = 0; i < dwarf_qual_info_size; i++)
13266 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13267 needed = true;
13268 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13269 {
13270 sub_quals = 0;
13271 break;
13272 }
13273 }
13274 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13275 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13276 {
13277 /* As not all intermediate qualified DIEs have corresponding
13278 tree types, ensure that qualified DIEs in the same scope
13279 as their DW_AT_type are emitted after their DW_AT_type,
13280 only with other qualified DIEs for the same type possibly
13281 in between them. Determine the range of such qualified
13282 DIEs now (first being the base type, last being corresponding
13283 last qualified DIE for it). */
13284 unsigned int count = 0;
13285 first = qualified_die_p (mod_type_die, &first_quals,
13286 dwarf_qual_info_size);
13287 if (first == NULL)
13288 first = mod_type_die;
13289 gcc_assert ((first_quals & ~sub_quals) == 0);
13290 for (count = 0, last = first;
13291 count < (1U << dwarf_qual_info_size);
13292 count++, last = last->die_sib)
13293 {
13294 int quals = 0;
13295 if (last == mod_scope->die_child)
13296 break;
13297 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13298 != first)
13299 break;
13300 }
13301 }
13302
13303 for (i = 0; i < dwarf_qual_info_size; i++)
13304 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13305 {
13306 dw_die_ref d;
13307 if (first && first != last)
13308 {
13309 for (d = first->die_sib; ; d = d->die_sib)
13310 {
13311 int quals = 0;
13312 qualified_die_p (d, &quals, dwarf_qual_info_size);
13313 if (quals == (first_quals | dwarf_qual_info[i].q))
13314 break;
13315 if (d == last)
13316 {
13317 d = NULL;
13318 break;
13319 }
13320 }
13321 if (d)
13322 {
13323 mod_type_die = d;
13324 continue;
13325 }
13326 }
13327 if (first)
13328 {
13329 d = new_die_raw (dwarf_qual_info[i].t);
13330 add_child_die_after (mod_scope, d, last);
13331 last = d;
13332 }
13333 else
13334 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13335 if (mod_type_die)
13336 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13337 mod_type_die = d;
13338 first_quals |= dwarf_qual_info[i].q;
13339 }
13340 }
13341 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13342 {
13343 dwarf_tag tag = DW_TAG_pointer_type;
13344 if (code == REFERENCE_TYPE)
13345 {
13346 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13347 tag = DW_TAG_rvalue_reference_type;
13348 else
13349 tag = DW_TAG_reference_type;
13350 }
13351 mod_type_die = new_die (tag, mod_scope, type);
13352
13353 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13354 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13355 add_alignment_attribute (mod_type_die, type);
13356 item_type = TREE_TYPE (type);
13357
13358 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13359 if (!ADDR_SPACE_GENERIC_P (as))
13360 {
13361 int action = targetm.addr_space.debug (as);
13362 if (action >= 0)
13363 {
13364 /* Positive values indicate an address_class. */
13365 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13366 }
13367 else
13368 {
13369 /* Negative values indicate an (inverted) segment base reg. */
13370 dw_loc_descr_ref d
13371 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13372 add_AT_loc (mod_type_die, DW_AT_segment, d);
13373 }
13374 }
13375 }
13376 else if (code == INTEGER_TYPE
13377 && TREE_TYPE (type) != NULL_TREE
13378 && subrange_type_for_debug_p (type, &low, &high))
13379 {
13380 tree bias = NULL_TREE;
13381 if (lang_hooks.types.get_type_bias)
13382 bias = lang_hooks.types.get_type_bias (type);
13383 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13384 item_type = TREE_TYPE (type);
13385 }
13386 else if (is_base_type (type))
13387 {
13388 mod_type_die = base_type_die (type, reverse);
13389
13390 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13391 if (reverse_base_type)
13392 {
13393 dw_die_ref after_die
13394 = modified_type_die (type, cv_quals, false, context_die);
13395 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13396 }
13397 else
13398 add_child_die (comp_unit_die (), mod_type_die);
13399
13400 add_pubtype (type, mod_type_die);
13401 }
13402 else
13403 {
13404 gen_type_die (type, context_die);
13405
13406 /* We have to get the type_main_variant here (and pass that to the
13407 `lookup_type_die' routine) because the ..._TYPE node we have
13408 might simply be a *copy* of some original type node (where the
13409 copy was created to help us keep track of typedef names) and
13410 that copy might have a different TYPE_UID from the original
13411 ..._TYPE node. */
13412 if (TREE_CODE (type) == FUNCTION_TYPE
13413 || TREE_CODE (type) == METHOD_TYPE)
13414 {
13415 /* For function/method types, we can't just use type_main_variant here,
13416 because that can have different ref-qualifiers for C++,
13417 but try to canonicalize. */
13418 tree main = TYPE_MAIN_VARIANT (type);
13419 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13420 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13421 && check_base_type (t, main)
13422 && check_lang_type (t, type))
13423 return lookup_type_die (t);
13424 return lookup_type_die (type);
13425 }
13426 else if (TREE_CODE (type) != VECTOR_TYPE
13427 && TREE_CODE (type) != ARRAY_TYPE)
13428 return lookup_type_die (type_main_variant (type));
13429 else
13430 /* Vectors have the debugging information in the type,
13431 not the main variant. */
13432 return lookup_type_die (type);
13433 }
13434
13435 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13436 don't output a DW_TAG_typedef, since there isn't one in the
13437 user's program; just attach a DW_AT_name to the type.
13438 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13439 if the base type already has the same name. */
13440 if (name
13441 && ((TREE_CODE (name) != TYPE_DECL
13442 && (qualified_type == TYPE_MAIN_VARIANT (type)
13443 || (cv_quals == TYPE_UNQUALIFIED)))
13444 || (TREE_CODE (name) == TYPE_DECL
13445 && TREE_TYPE (name) == qualified_type
13446 && DECL_NAME (name))))
13447 {
13448 if (TREE_CODE (name) == TYPE_DECL)
13449 /* Could just call add_name_and_src_coords_attributes here,
13450 but since this is a builtin type it doesn't have any
13451 useful source coordinates anyway. */
13452 name = DECL_NAME (name);
13453 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13454 }
13455 /* This probably indicates a bug. */
13456 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13457 {
13458 name = TYPE_IDENTIFIER (type);
13459 add_name_attribute (mod_type_die,
13460 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13461 }
13462
13463 if (qualified_type && !reverse_base_type)
13464 equate_type_number_to_die (qualified_type, mod_type_die);
13465
13466 if (item_type)
13467 /* We must do this after the equate_type_number_to_die call, in case
13468 this is a recursive type. This ensures that the modified_type_die
13469 recursion will terminate even if the type is recursive. Recursive
13470 types are possible in Ada. */
13471 sub_die = modified_type_die (item_type,
13472 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13473 reverse,
13474 context_die);
13475
13476 if (sub_die != NULL)
13477 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13478
13479 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13480 if (TYPE_ARTIFICIAL (type))
13481 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13482
13483 return mod_type_die;
13484 }
13485
13486 /* Generate DIEs for the generic parameters of T.
13487 T must be either a generic type or a generic function.
13488 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13489
13490 static void
13491 gen_generic_params_dies (tree t)
13492 {
13493 tree parms, args;
13494 int parms_num, i;
13495 dw_die_ref die = NULL;
13496 int non_default;
13497
13498 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13499 return;
13500
13501 if (TYPE_P (t))
13502 die = lookup_type_die (t);
13503 else if (DECL_P (t))
13504 die = lookup_decl_die (t);
13505
13506 gcc_assert (die);
13507
13508 parms = lang_hooks.get_innermost_generic_parms (t);
13509 if (!parms)
13510 /* T has no generic parameter. It means T is neither a generic type
13511 nor a generic function. End of story. */
13512 return;
13513
13514 parms_num = TREE_VEC_LENGTH (parms);
13515 args = lang_hooks.get_innermost_generic_args (t);
13516 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13517 non_default = int_cst_value (TREE_CHAIN (args));
13518 else
13519 non_default = TREE_VEC_LENGTH (args);
13520 for (i = 0; i < parms_num; i++)
13521 {
13522 tree parm, arg, arg_pack_elems;
13523 dw_die_ref parm_die;
13524
13525 parm = TREE_VEC_ELT (parms, i);
13526 arg = TREE_VEC_ELT (args, i);
13527 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13528 gcc_assert (parm && TREE_VALUE (parm) && arg);
13529
13530 if (parm && TREE_VALUE (parm) && arg)
13531 {
13532 /* If PARM represents a template parameter pack,
13533 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13534 by DW_TAG_template_*_parameter DIEs for the argument
13535 pack elements of ARG. Note that ARG would then be
13536 an argument pack. */
13537 if (arg_pack_elems)
13538 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13539 arg_pack_elems,
13540 die);
13541 else
13542 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13543 true /* emit name */, die);
13544 if (i >= non_default)
13545 add_AT_flag (parm_die, DW_AT_default_value, 1);
13546 }
13547 }
13548 }
13549
13550 /* Create and return a DIE for PARM which should be
13551 the representation of a generic type parameter.
13552 For instance, in the C++ front end, PARM would be a template parameter.
13553 ARG is the argument to PARM.
13554 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set
13555 to the name of PARM.
13556 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13557 as a child node. */
13558
13559 static dw_die_ref
13560 generic_parameter_die (tree parm, tree arg,
13561 bool emit_name_p,
13562 dw_die_ref parent_die)
13563 {
13564 dw_die_ref tmpl_die = NULL;
13565 const char *name = NULL;
13566
13567 if (!parm || !DECL_NAME (parm) || !arg)
13568 return NULL;
13569
13570 /* We support non-type generic parameters and arguments,
13571 type generic parameters and arguments, as well as
13572 generic generic parameters (a.k.a. template template parameters in C++)
13573 and arguments. */
13574 if (TREE_CODE (parm) == PARM_DECL)
13575 /* PARM is a nontype generic parameter */
13576 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13577 else if (TREE_CODE (parm) == TYPE_DECL)
13578 /* PARM is a type generic parameter. */
13579 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13580 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13581 /* PARM is a generic generic parameter.
13582 Its DIE is a GNU extension. It shall have a
13583 DW_AT_name attribute to represent the name of the template template
13584 parameter, and a DW_AT_GNU_template_name attribute to represent the
13585 name of the template template argument. */
13586 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13587 parent_die, parm);
13588 else
13589 gcc_unreachable ();
13590
13591 if (tmpl_die)
13592 {
13593 tree tmpl_type;
13594
13595 /* If PARM is a generic parameter pack, it means we are
13596 emitting debug info for a template argument pack element.
13597 In other terms, ARG is a template argument pack element.
13598 In that case, we don't emit any DW_AT_name attribute for
13599 the die. */
13600 if (emit_name_p)
13601 {
13602 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13603 gcc_assert (name);
13604 add_AT_string (tmpl_die, DW_AT_name, name);
13605 }
13606
13607 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13608 {
13609 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13610 TMPL_DIE should have a child DW_AT_type attribute that is set
13611 to the type of the argument to PARM, which is ARG.
13612 If PARM is a type generic parameter, TMPL_DIE should have a
13613 child DW_AT_type that is set to ARG. */
13614 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13615 add_type_attribute (tmpl_die, tmpl_type,
13616 (TREE_THIS_VOLATILE (tmpl_type)
13617 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13618 false, parent_die);
13619 }
13620 else
13621 {
13622 /* So TMPL_DIE is a DIE representing a generic generic template
13623 parameter, a.k.a. a template template parameter in C++,
13624 and ARG is a template. */
13625
13626 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13627 to the name of the argument. */
13628 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13629 if (name)
13630 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13631 }
13632
13633 if (TREE_CODE (parm) == PARM_DECL)
13634 /* So PARM is a non-type generic parameter.
13635 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13636 attribute of TMPL_DIE whose value represents the value
13637 of ARG.
13638 We must be careful here:
13639 the value of ARG might reference some function decls.
13640 We might currently be emitting debug info for a generic
13641 type, and since types are emitted before function decls, we
13642 don't know whether the function decls referenced by ARG will
13643 actually be emitted after the cgraph computations.
13644 So we must defer the generation of the DW_AT_const_value
13645 until after cgraph is ready. */
13646 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13647 }
13648
13649 return tmpl_die;
13650 }
13651
13652 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13653 PARM_PACK, which must be a template parameter pack. The returned DIE
13654 will be a child DIE of PARENT_DIE. */
13655
13656 static dw_die_ref
13657 template_parameter_pack_die (tree parm_pack,
13658 tree parm_pack_args,
13659 dw_die_ref parent_die)
13660 {
13661 dw_die_ref die;
13662 int j;
13663
13664 gcc_assert (parent_die && parm_pack);
13665
13666 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13667 add_name_and_src_coords_attributes (die, parm_pack);
13668 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13669 generic_parameter_die (parm_pack,
13670 TREE_VEC_ELT (parm_pack_args, j),
13671 false /* Don't emit DW_AT_name */,
13672 die);
13673 return die;
13674 }
13675
13676 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13677 an enumerated type. */
13678
13679 static inline int
13680 type_is_enum (const_tree type)
13681 {
13682 return TREE_CODE (type) == ENUMERAL_TYPE;
13683 }
13684
13685 /* Return the DBX register number described by a given RTL node. */
13686
13687 static unsigned int
13688 dbx_reg_number (const_rtx rtl)
13689 {
13690 unsigned regno = REGNO (rtl);
13691
13692 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13693
13694 #ifdef LEAF_REG_REMAP
13695 if (crtl->uses_only_leaf_regs)
13696 {
13697 int leaf_reg = LEAF_REG_REMAP (regno);
13698 if (leaf_reg != -1)
13699 regno = (unsigned) leaf_reg;
13700 }
13701 #endif
13702
13703 regno = DBX_REGISTER_NUMBER (regno);
13704 gcc_assert (regno != INVALID_REGNUM);
13705 return regno;
13706 }
13707
13708 /* Optionally add a DW_OP_piece term to a location description expression.
13709 DW_OP_piece is only added if the location description expression doesn't
13710 already end with DW_OP_piece. */
13711
13712 static void
13713 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13714 {
13715 dw_loc_descr_ref loc;
13716
13717 if (*list_head != NULL)
13718 {
13719 /* Find the end of the chain. */
13720 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13721 ;
13722
13723 if (loc->dw_loc_opc != DW_OP_piece)
13724 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13725 }
13726 }
13727
13728 /* Return a location descriptor that designates a machine register or
13729 zero if there is none. */
13730
13731 static dw_loc_descr_ref
13732 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13733 {
13734 rtx regs;
13735
13736 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13737 return 0;
13738
13739 /* We only use "frame base" when we're sure we're talking about the
13740 post-prologue local stack frame. We do this by *not* running
13741 register elimination until this point, and recognizing the special
13742 argument pointer and soft frame pointer rtx's.
13743 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13744 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13745 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13746 {
13747 dw_loc_descr_ref result = NULL;
13748
13749 if (dwarf_version >= 4 || !dwarf_strict)
13750 {
13751 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13752 initialized);
13753 if (result)
13754 add_loc_descr (&result,
13755 new_loc_descr (DW_OP_stack_value, 0, 0));
13756 }
13757 return result;
13758 }
13759
13760 regs = targetm.dwarf_register_span (rtl);
13761
13762 if (REG_NREGS (rtl) > 1 || regs)
13763 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13764 else
13765 {
13766 unsigned int dbx_regnum = dbx_reg_number (rtl);
13767 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13768 return 0;
13769 return one_reg_loc_descriptor (dbx_regnum, initialized);
13770 }
13771 }
13772
13773 /* Return a location descriptor that designates a machine register for
13774 a given hard register number. */
13775
13776 static dw_loc_descr_ref
13777 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13778 {
13779 dw_loc_descr_ref reg_loc_descr;
13780
13781 if (regno <= 31)
13782 reg_loc_descr
13783 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13784 else
13785 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13786
13787 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13788 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13789
13790 return reg_loc_descr;
13791 }
13792
13793 /* Given an RTL of a register, return a location descriptor that
13794 designates a value that spans more than one register. */
13795
13796 static dw_loc_descr_ref
13797 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13798 enum var_init_status initialized)
13799 {
13800 int size, i;
13801 dw_loc_descr_ref loc_result = NULL;
13802
13803 /* Simple, contiguous registers. */
13804 if (regs == NULL_RTX)
13805 {
13806 unsigned reg = REGNO (rtl);
13807 int nregs;
13808
13809 #ifdef LEAF_REG_REMAP
13810 if (crtl->uses_only_leaf_regs)
13811 {
13812 int leaf_reg = LEAF_REG_REMAP (reg);
13813 if (leaf_reg != -1)
13814 reg = (unsigned) leaf_reg;
13815 }
13816 #endif
13817
13818 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13819 nregs = REG_NREGS (rtl);
13820
13821 /* At present we only track constant-sized pieces. */
13822 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13823 return NULL;
13824 size /= nregs;
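/* Illustratively, a 16-byte value held in two consecutive 8-byte hard
   registers comes out of the loop below as
   <reg r> DW_OP_piece 8 <reg r+1> DW_OP_piece 8, where each <reg> is
   DW_OP_reg0+n or DW_OP_regx depending on the DBX register number. */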
13825
13826 loc_result = NULL;
13827 while (nregs--)
13828 {
13829 dw_loc_descr_ref t;
13830
13831 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13832 VAR_INIT_STATUS_INITIALIZED);
13833 add_loc_descr (&loc_result, t);
13834 add_loc_descr_op_piece (&loc_result, size);
13835 ++reg;
13836 }
13837 return loc_result;
13838 }
13839
13840 /* Now onto stupid register sets in non-contiguous locations. */
13841
13842 gcc_assert (GET_CODE (regs) == PARALLEL);
13843
13844 /* At present we only track constant-sized pieces. */
13845 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13846 return NULL;
13847 loc_result = NULL;
13848
13849 for (i = 0; i < XVECLEN (regs, 0); ++i)
13850 {
13851 dw_loc_descr_ref t;
13852
13853 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13854 VAR_INIT_STATUS_INITIALIZED);
13855 add_loc_descr (&loc_result, t);
13856 add_loc_descr_op_piece (&loc_result, size);
13857 }
13858
13859 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13860 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13861 return loc_result;
13862 }
13863
13864 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13865
13866 /* Return a location descriptor that designates a constant i,
13867 as a compound operation from constant (i >> shift), constant shift
13868 and DW_OP_shl. */
13869
13870 static dw_loc_descr_ref
13871 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13872 {
13873 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13874 add_loc_descr (&ret, int_loc_descriptor (shift));
13875 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13876 return ret;
13877 }
13878
13879 /* Return a location descriptor that designates constant POLY_I. */
13880
13881 static dw_loc_descr_ref
13882 int_loc_descriptor (poly_int64 poly_i)
13883 {
13884 enum dwarf_location_atom op;
13885
13886 HOST_WIDE_INT i;
13887 if (!poly_i.is_constant (&i))
13888 {
13889 /* Create location descriptions for the non-constant part and
13890 add any constant offset at the end. */
13891 dw_loc_descr_ref ret = NULL;
13892 HOST_WIDE_INT constant = poly_i.coeffs[0];
13893 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13894 {
13895 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13896 if (coeff != 0)
13897 {
13898 dw_loc_descr_ref start = ret;
13899 unsigned int factor;
13900 int bias;
13901 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13902 (j, &factor, &bias);
13903
13904 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13905 add COEFF * (REGNO / FACTOR) now and subtract
13906 COEFF * BIAS from the final constant part. */
13907 constant -= coeff * bias;
13908 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13909 if (coeff % factor == 0)
13910 coeff /= factor;
13911 else
13912 {
13913 int amount = exact_log2 (factor);
13914 gcc_assert (amount >= 0);
13915 add_loc_descr (&ret, int_loc_descriptor (amount));
13916 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13917 }
13918 if (coeff != 1)
13919 {
13920 add_loc_descr (&ret, int_loc_descriptor (coeff));
13921 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13922 }
13923 if (start)
13924 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13925 }
13926 }
13927 loc_descr_plus_const (&ret, constant);
13928 return ret;
13929 }
13930
13931 /* Pick the smallest representation of a constant, rather than just
13932 defaulting to the LEB encoding. */
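/* A worked example, assuming a 64-bit HOST_WIDE_INT: i = 0x300000 (3 << 20)
   has clz == 42 and ctz == 20, so it is emitted below as
   DW_OP_lit24 DW_OP_lit17 DW_OP_shl (3 bytes) instead of DW_OP_const4u
   (5 bytes). */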
13933 if (i >= 0)
13934 {
13935 int clz = clz_hwi (i);
13936 int ctz = ctz_hwi (i);
13937 if (i <= 31)
13938 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13939 else if (i <= 0xff)
13940 op = DW_OP_const1u;
13941 else if (i <= 0xffff)
13942 op = DW_OP_const2u;
13943 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13944 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13945 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13946 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13947 while DW_OP_const4u is 5 bytes. */
13948 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13949 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13950 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13951 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13952 while DW_OP_const4u is 5 bytes. */
13953 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13954
13955 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13956 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13957 <= 4)
13958 {
13959 /* As i >= 2**31, the double cast above will yield a negative number.
13960 Since wrapping is defined in DWARF expressions we can output big
13961 positive integers as small negative ones, regardless of the size
13962 of host wide ints.
13963
13964 Here, since the evaluator will handle 32-bit values and since i >=
13965 2**31, we know it's going to be interpreted as a negative literal:
13966 store it this way if we can do better than 5 bytes this way. */
13967 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13968 }
13969 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13970 op = DW_OP_const4u;
13971
13972 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13973 least 6 bytes: see if we can do better before falling back to it. */
13974 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13975 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13976 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13977 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13978 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13979 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13980 >= HOST_BITS_PER_WIDE_INT)
13981 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13982 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13983 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13984 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13985 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13986 && size_of_uleb128 (i) > 6)
13987 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13988 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13989 else
13990 op = DW_OP_constu;
13991 }
13992 else
13993 {
13994 if (i >= -0x80)
13995 op = DW_OP_const1s;
13996 else if (i >= -0x8000)
13997 op = DW_OP_const2s;
13998 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13999 {
14000 if (size_of_int_loc_descriptor (i) < 5)
14001 {
14002 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14003 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14004 return ret;
14005 }
14006 op = DW_OP_const4s;
14007 }
14008 else
14009 {
14010 if (size_of_int_loc_descriptor (i)
14011 < (unsigned long) 1 + size_of_sleb128 (i))
14012 {
14013 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14014 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14015 return ret;
14016 }
14017 op = DW_OP_consts;
14018 }
14019 }
14020
14021 return new_loc_descr (op, i, 0);
14022 }
14023
14024 /* Likewise, for unsigned constants. */
14025
14026 static dw_loc_descr_ref
14027 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14028 {
14029 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14030 const unsigned HOST_WIDE_INT max_uint
14031 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14032
14033 /* If possible, use the clever signed constants handling. */
14034 if (i <= max_int)
14035 return int_loc_descriptor ((HOST_WIDE_INT) i);
14036
14037 /* Here, we are left with positive numbers that cannot be represented as
14038 HOST_WIDE_INT, i.e.:
14039 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14040
14041 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
14042 bytes, whereas it may be better to output a negative integer: thanks to
14043 integer wrapping, we know that:
14044 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14045 = x - 2 * (max (HOST_WIDE_INT) + 1)
14046 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14047 small negative integers. Let's try that in cases where it will clearly
14048 improve the encoding: there is no gain in turning DW_OP_const4u into
14049 DW_OP_const4s. */
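/* A concrete illustration, assuming both DWARF2_ADDR_SIZE == 8 and a 64-bit
   HOST_WIDE_INT: i == 0xfffffffffffffff0 wraps to -16, so the code below
   emits DW_OP_const1s -16 (2 bytes) instead of DW_OP_const8u (9 bytes). */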
14050 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14051 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14052 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14053 {
14054 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14055
14056 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14057 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14058 const HOST_WIDE_INT second_shift
14059 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14060
14061 /* So we finally have:
14062 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14063 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14064 return int_loc_descriptor (second_shift);
14065 }
14066
14067 /* Last chance: fallback to a simple constant operation. */
14068 return new_loc_descr
14069 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14070 ? DW_OP_const4u
14071 : DW_OP_const8u,
14072 i, 0);
14073 }
14074
14075 /* Generate and return a location description that computes the unsigned
14076 comparison of the two stack top entries (a OP b where b is the top-most
14077 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14078 LE_EXPR, GT_EXPR or GE_EXPR. */
14079
14080 static dw_loc_descr_ref
14081 uint_comparison_loc_list (enum tree_code kind)
14082 {
14083 enum dwarf_location_atom op, flip_op;
14084 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14085
14086 switch (kind)
14087 {
14088 case LT_EXPR:
14089 op = DW_OP_lt;
14090 break;
14091 case LE_EXPR:
14092 op = DW_OP_le;
14093 break;
14094 case GT_EXPR:
14095 op = DW_OP_gt;
14096 break;
14097 case GE_EXPR:
14098 op = DW_OP_ge;
14099 break;
14100 default:
14101 gcc_unreachable ();
14102 }
14103
14104 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14105 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14106
14107 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14108 possible to perform unsigned comparisons: we just have to distinguish
14109 two cases:
14110
14111 1. when a and b have the same sign (as signed integers); then we should
14112 return: a OP(signed) b;
14113
14114 2. when a is a negative signed integer while b is a positive one, then a
14115 is a greater unsigned integer than b; likewise when a and b's roles
14116 are flipped.
14117
14118 So first, compare the sign of the two operands. */
14119 ret = new_loc_descr (DW_OP_over, 0, 0);
14120 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14121 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14122 /* If they have different signs (i.e. they have different sign bits), then
14123 the stack top value now has the sign bit set and thus it's smaller than
14124 zero. */
14125 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14126 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14127 add_loc_descr (&ret, bra_node);
14128
14129 /* We are in case 1. At this point, we know both operands have the same
14130 sign, so it is safe to use the built-in signed comparison. */
14131 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14132 add_loc_descr (&ret, jmp_node);
14133
14134 /* We are in case 2. Here, we know both operands do not have the same sign,
14135 so we have to flip the signed comparison. */
14136 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14137 tmp = new_loc_descr (flip_op, 0, 0);
14138 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14139 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14140 add_loc_descr (&ret, tmp);
14141
14142 /* This dummy operation is necessary to make the two branches join. */
14143 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14144 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14145 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14146 add_loc_descr (&ret, tmp);
14147
14148 return ret;
14149 }
14150
14151 /* Likewise, but takes the location description lists (might be destructive on
14152 them). Return NULL if either is NULL or if concatenation fails. */
14153
14154 static dw_loc_list_ref
14155 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14156 enum tree_code kind)
14157 {
14158 if (left == NULL || right == NULL)
14159 return NULL;
14160
14161 add_loc_list (&left, right);
14162 if (left == NULL)
14163 return NULL;
14164
14165 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14166 return left;
14167 }
14168
14169 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14170 without actually allocating it. */
14171
14172 static unsigned long
14173 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14174 {
14175 return size_of_int_loc_descriptor (i >> shift)
14176 + size_of_int_loc_descriptor (shift)
14177 + 1;
14178 }
14179
14180 /* Return size_of_locs (int_loc_descriptor (i)) without
14181 actually allocating it. */
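/* As an example (on a 64-bit HOST_WIDE_INT host), the result for 0x10000 is
   3, because that constant can be emitted as DW_OP_lit16 DW_OP_lit12
   DW_OP_shl rather than as a 5-byte DW_OP_const4u.  */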
14182
14183 static unsigned long
14184 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14185 {
14186 unsigned long s;
14187
14188 if (i >= 0)
14189 {
14190 int clz, ctz;
14191 if (i <= 31)
14192 return 1;
14193 else if (i <= 0xff)
14194 return 2;
14195 else if (i <= 0xffff)
14196 return 3;
14197 clz = clz_hwi (i);
14198 ctz = ctz_hwi (i);
14199 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14200 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14201 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14202 - clz - 5);
14203 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14204 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14205 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14206 - clz - 8);
14207 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14208 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14209 <= 4)
14210 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14211 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14212 return 5;
14213 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14214 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14215 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14216 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14217 - clz - 8);
14218 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14219 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14220 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14221 - clz - 16);
14222 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14223 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14224 && s > 6)
14225 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14226 - clz - 32);
14227 else
14228 return 1 + s;
14229 }
14230 else
14231 {
14232 if (i >= -0x80)
14233 return 2;
14234 else if (i >= -0x8000)
14235 return 3;
14236 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14237 {
14238 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14239 {
14240 s = size_of_int_loc_descriptor (-i) + 1;
14241 if (s < 5)
14242 return s;
14243 }
14244 return 5;
14245 }
14246 else
14247 {
14248 unsigned long r = 1 + size_of_sleb128 (i);
14249 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14250 {
14251 s = size_of_int_loc_descriptor (-i) + 1;
14252 if (s < r)
14253 return s;
14254 }
14255 return r;
14256 }
14257 }
14258 }
14259
14260 /* Return loc description representing "address" of integer value.
14261 This can appear only as a toplevel expression. */
14262
14263 static dw_loc_descr_ref
14264 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14265 {
14266 int litsize;
14267 dw_loc_descr_ref loc_result = NULL;
14268
14269 if (!(dwarf_version >= 4 || !dwarf_strict))
14270 return NULL;
14271
14272 litsize = size_of_int_loc_descriptor (i);
14273 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14274 is more compact. For DW_OP_stack_value we need:
14275 litsize + 1 (DW_OP_stack_value)
14276 and for DW_OP_implicit_value:
14277 1 (DW_OP_implicit_value) + 1 (length) + size. */
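/* For example, the value 5 stored in a 4-byte object can be described as
   DW_OP_lit5 DW_OP_stack_value (2 bytes), which beats the 6-byte
   DW_OP_implicit_value form (1 opcode byte + 1 length byte + 4 data bytes),
   so the DW_OP_stack_value branch below is taken, provided
   DWARF2_ADDR_SIZE >= 4.  */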
14278 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14279 {
14280 loc_result = int_loc_descriptor (i);
14281 add_loc_descr (&loc_result,
14282 new_loc_descr (DW_OP_stack_value, 0, 0));
14283 return loc_result;
14284 }
14285
14286 loc_result = new_loc_descr (DW_OP_implicit_value,
14287 size, 0);
14288 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14289 loc_result->dw_loc_oprnd2.v.val_int = i;
14290 return loc_result;
14291 }
14292
14293 /* Return a location descriptor that designates a base+offset location. */
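/* For example, a variable sitting at offset -8 from the eliminated frame/arg
   pointer typically comes out as DW_OP_fbreg -8, other base registers yield
   DW_OP_breg<n> <offset>, and a VAR_INIT_STATUS_UNINITIALIZED variable gets
   a trailing DW_OP_GNU_uninit.  */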
14294
14295 static dw_loc_descr_ref
14296 based_loc_descr (rtx reg, poly_int64 offset,
14297 enum var_init_status initialized)
14298 {
14299 unsigned int regno;
14300 dw_loc_descr_ref result;
14301 dw_fde_ref fde = cfun->fde;
14302
14303 /* We only use "frame base" when we're sure we're talking about the
14304 post-prologue local stack frame. We do this by *not* running
14305 register elimination until this point, and recognizing the special
14306 argument pointer and soft frame pointer rtx's. */
14307 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14308 {
14309 rtx elim = (ira_use_lra_p
14310 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14311 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14312
14313 if (elim != reg)
14314 {
14315 elim = strip_offset_and_add (elim, &offset);
14316 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14317 && (elim == hard_frame_pointer_rtx
14318 || elim == stack_pointer_rtx))
14319 || elim == (frame_pointer_needed
14320 ? hard_frame_pointer_rtx
14321 : stack_pointer_rtx));
14322
14323 /* If drap register is used to align stack, use frame
14324 pointer + offset to access stack variables. If stack
14325 is aligned without drap, use stack pointer + offset to
14326 access stack variables. */
14327 if (crtl->stack_realign_tried
14328 && reg == frame_pointer_rtx)
14329 {
14330 int base_reg
14331 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14332 ? HARD_FRAME_POINTER_REGNUM
14333 : REGNO (elim));
14334 return new_reg_loc_descr (base_reg, offset);
14335 }
14336
14337 gcc_assert (frame_pointer_fb_offset_valid);
14338 offset += frame_pointer_fb_offset;
14339 HOST_WIDE_INT const_offset;
14340 if (offset.is_constant (&const_offset))
14341 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14342 else
14343 {
14344 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14345 loc_descr_plus_const (&ret, offset);
14346 return ret;
14347 }
14348 }
14349 }
14350
14351 regno = REGNO (reg);
14352 #ifdef LEAF_REG_REMAP
14353 if (crtl->uses_only_leaf_regs)
14354 {
14355 int leaf_reg = LEAF_REG_REMAP (regno);
14356 if (leaf_reg != -1)
14357 regno = (unsigned) leaf_reg;
14358 }
14359 #endif
14360 regno = DWARF_FRAME_REGNUM (regno);
14361
14362 HOST_WIDE_INT const_offset;
14363 if (!optimize && fde
14364 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14365 && offset.is_constant (&const_offset))
14366 {
14367 /* Use cfa+offset to represent the location of arguments passed
14368 on the stack when drap is used to align the stack.
14369 Only do this when not optimizing; for optimized code, var-tracking
14370 is supposed to track where the arguments live, and the register
14371 used as vdrap or drap in some spot might be used for something
14372 else in another part of the routine. */
14373 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14374 }
14375
14376 result = new_reg_loc_descr (regno, offset);
14377
14378 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14379 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14380
14381 return result;
14382 }
14383
14384 /* Return true if this RTL expression describes a base+offset calculation. */
14385
14386 static inline int
14387 is_based_loc (const_rtx rtl)
14388 {
14389 return (GET_CODE (rtl) == PLUS
14390 && ((REG_P (XEXP (rtl, 0))
14391 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14392 && CONST_INT_P (XEXP (rtl, 1)))));
14393 }
14394
14395 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14396 failed. */
14397
14398 static dw_loc_descr_ref
14399 tls_mem_loc_descriptor (rtx mem)
14400 {
14401 tree base;
14402 dw_loc_descr_ref loc_result;
14403
14404 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14405 return NULL;
14406
14407 base = get_base_address (MEM_EXPR (mem));
14408 if (base == NULL
14409 || !VAR_P (base)
14410 || !DECL_THREAD_LOCAL_P (base))
14411 return NULL;
14412
14413 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14414 if (loc_result == NULL)
14415 return NULL;
14416
14417 if (maybe_ne (MEM_OFFSET (mem), 0))
14418 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14419
14420 return loc_result;
14421 }
14422
14423 /* Output debug info about the reason why we failed to expand an expression
14424 as a dwarf expression. */
14425
14426 static void
14427 expansion_failed (tree expr, rtx rtl, char const *reason)
14428 {
14429 if (dump_file && (dump_flags & TDF_DETAILS))
14430 {
14431 fprintf (dump_file, "Failed to expand as dwarf: ");
14432 if (expr)
14433 print_generic_expr (dump_file, expr, dump_flags);
14434 if (rtl)
14435 {
14436 fprintf (dump_file, "\n");
14437 print_rtl (dump_file, rtl);
14438 }
14439 fprintf (dump_file, "\nReason: %s\n", reason);
14440 }
14441 }
14442
14443 /* Helper function for const_ok_for_output. */
14444
14445 static bool
14446 const_ok_for_output_1 (rtx rtl)
14447 {
14448 if (targetm.const_not_ok_for_debug_p (rtl))
14449 {
14450 if (GET_CODE (rtl) != UNSPEC)
14451 {
14452 expansion_failed (NULL_TREE, rtl,
14453 "Expression rejected for debug by the backend.\n");
14454 return false;
14455 }
14456
14457 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14458 the target hook doesn't explicitly allow it in debug info, assume
14459 we can't express it in the debug info. */
14460 /* Don't complain about TLS UNSPECs, those are just too hard to
14461 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14462 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14463 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14464 if (flag_checking
14465 && (XVECLEN (rtl, 0) == 0
14466 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14467 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14468 inform (current_function_decl
14469 ? DECL_SOURCE_LOCATION (current_function_decl)
14470 : UNKNOWN_LOCATION,
14471 #if NUM_UNSPEC_VALUES > 0
14472 "non-delegitimized UNSPEC %s (%d) found in variable location",
14473 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14474 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14475 XINT (rtl, 1));
14476 #else
14477 "non-delegitimized UNSPEC %d found in variable location",
14478 XINT (rtl, 1));
14479 #endif
14480 expansion_failed (NULL_TREE, rtl,
14481 "UNSPEC hasn't been delegitimized.\n");
14482 return false;
14483 }
14484
14485 if (CONST_POLY_INT_P (rtl))
14486 return false;
14487
14488 if (targetm.const_not_ok_for_debug_p (rtl))
14489 {
14490 expansion_failed (NULL_TREE, rtl,
14491 "Expression rejected for debug by the backend.\n");
14492 return false;
14493 }
14494
14495 /* FIXME: Refer to PR60655. It is possible for simplification
14496 of rtl expressions in var tracking to produce such expressions.
14497 We should really identify / validate expressions
14498 enclosed in CONST that can be handled by assemblers on various
14499 targets and only handle legitimate cases here. */
14500 switch (GET_CODE (rtl))
14501 {
14502 case SYMBOL_REF:
14503 break;
14504 case NOT:
14505 case NEG:
14506 return false;
14507 default:
14508 return true;
14509 }
14510
14511 if (CONSTANT_POOL_ADDRESS_P (rtl))
14512 {
14513 bool marked;
14514 get_pool_constant_mark (rtl, &marked);
14515 /* If all references to this pool constant were optimized away,
14516 it was not output and thus we can't represent it. */
14517 if (!marked)
14518 {
14519 expansion_failed (NULL_TREE, rtl,
14520 "Constant was removed from constant pool.\n");
14521 return false;
14522 }
14523 }
14524
14525 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14526 return false;
14527
14528 /* Avoid references to external symbols in debug info: on several targets
14529 the linker might even refuse to link when linking a shared library, and in
14530 many other cases the relocations for .debug_info/.debug_loc are dropped, so
14531 the address becomes zero anyway. Hidden symbols, which are guaranteed to be
14532 defined within the same shared library or executable, are fine. */
14533 if (SYMBOL_REF_EXTERNAL_P (rtl))
14534 {
14535 tree decl = SYMBOL_REF_DECL (rtl);
14536
14537 if (decl == NULL || !targetm.binds_local_p (decl))
14538 {
14539 expansion_failed (NULL_TREE, rtl,
14540 "Symbol not defined in current TU.\n");
14541 return false;
14542 }
14543 }
14544
14545 return true;
14546 }
14547
14548 /* Return true if constant RTL can be emitted in DW_OP_addr or
14549 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14550 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14551
14552 static bool
14553 const_ok_for_output (rtx rtl)
14554 {
14555 if (GET_CODE (rtl) == SYMBOL_REF)
14556 return const_ok_for_output_1 (rtl);
14557
14558 if (GET_CODE (rtl) == CONST)
14559 {
14560 subrtx_var_iterator::array_type array;
14561 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14562 if (!const_ok_for_output_1 (*iter))
14563 return false;
14564 return true;
14565 }
14566
14567 return true;
14568 }
14569
14570 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14571 if possible, NULL otherwise. */
14572
14573 static dw_die_ref
14574 base_type_for_mode (machine_mode mode, bool unsignedp)
14575 {
14576 dw_die_ref type_die;
14577 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14578
14579 if (type == NULL)
14580 return NULL;
14581 switch (TREE_CODE (type))
14582 {
14583 case INTEGER_TYPE:
14584 case REAL_TYPE:
14585 break;
14586 default:
14587 return NULL;
14588 }
14589 type_die = lookup_type_die (type);
14590 if (!type_die)
14591 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14592 comp_unit_die ());
14593 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14594 return NULL;
14595 return type_die;
14596 }
14597
14598 /* For the OP descriptor, assumed to be in unsigned MODE, convert it to an
14599 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14600 DWARF2_ADDR_SIZE, to the untyped (generic) type. Return NULL if the
14601 conversion is not possible. */
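/* Concretely: for modes no wider than DWARF2_ADDR_SIZE a bare DW_OP_convert
   with no type operand is appended, converting back to the untyped (generic)
   type; for wider modes a DW_OP_convert referencing the unsigned
   DW_TAG_base_type for MODE is used instead.  */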
14602
14603 static dw_loc_descr_ref
14604 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14605 {
14606 machine_mode outer_mode = mode;
14607 dw_die_ref type_die;
14608 dw_loc_descr_ref cvt;
14609
14610 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14611 {
14612 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14613 return op;
14614 }
14615 type_die = base_type_for_mode (outer_mode, 1);
14616 if (type_die == NULL)
14617 return NULL;
14618 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14619 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14620 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14621 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14622 add_loc_descr (&op, cvt);
14623 return op;
14624 }
14625
14626 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14627
14628 static dw_loc_descr_ref
14629 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14630 dw_loc_descr_ref op1)
14631 {
14632 dw_loc_descr_ref ret = op0;
14633 add_loc_descr (&ret, op1);
14634 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14635 if (STORE_FLAG_VALUE != 1)
14636 {
14637 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14638 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14639 }
14640 return ret;
14641 }
14642
14643 /* Subroutine of scompare_loc_descriptor for the case in which we're
14644 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14645 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14646
14647 static dw_loc_descr_ref
14648 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14649 scalar_int_mode op_mode,
14650 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14651 {
14652 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14653 dw_loc_descr_ref cvt;
14654
14655 if (type_die == NULL)
14656 return NULL;
14657 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14658 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14659 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14660 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14661 add_loc_descr (&op0, cvt);
14662 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14663 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14664 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14665 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14666 add_loc_descr (&op1, cvt);
14667 return compare_loc_descriptor (op, op0, op1);
14668 }
14669
14670 /* Subroutine of scompare_loc_descriptor for the case in which we're
14671 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14672 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14673
14674 static dw_loc_descr_ref
14675 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14676 scalar_int_mode op_mode,
14677 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14678 {
14679 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14680 /* For eq/ne, if the operands are known to be zero-extended,
14681 there is no need to do the fancy shifting up. */
14682 if (op == DW_OP_eq || op == DW_OP_ne)
14683 {
14684 dw_loc_descr_ref last0, last1;
14685 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14686 ;
14687 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14688 ;
14689 /* deref_size zero extends, and for constants we can check
14690 whether they are zero extended or not. */
14691 if (((last0->dw_loc_opc == DW_OP_deref_size
14692 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14693 || (CONST_INT_P (XEXP (rtl, 0))
14694 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14695 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14696 && ((last1->dw_loc_opc == DW_OP_deref_size
14697 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14698 || (CONST_INT_P (XEXP (rtl, 1))
14699 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14700 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14701 return compare_loc_descriptor (op, op0, op1);
14702
14703 /* EQ/NE comparison against constant in narrower type than
14704 DWARF2_ADDR_SIZE can be performed either as
14705 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14706 DW_OP_{eq,ne}
14707 or
14708 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14709 DW_OP_{eq,ne}. Pick whatever is shorter. */
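/* E.g. when comparing a QImode value against 5 with DWARF2_ADDR_SIZE == 8,
   the mask form (DW_OP_const1u 255; DW_OP_and; DW_OP_lit5) takes 4 bytes
   versus 7 for shifting both sides left by 56 bits, so the mask form is
   picked below.  */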
14710 if (CONST_INT_P (XEXP (rtl, 1))
14711 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14712 && (size_of_int_loc_descriptor (shift) + 1
14713 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14714 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14715 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14716 & GET_MODE_MASK (op_mode))))
14717 {
14718 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14719 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14720 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14721 & GET_MODE_MASK (op_mode));
14722 return compare_loc_descriptor (op, op0, op1);
14723 }
14724 }
14725 add_loc_descr (&op0, int_loc_descriptor (shift));
14726 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14727 if (CONST_INT_P (XEXP (rtl, 1)))
14728 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14729 else
14730 {
14731 add_loc_descr (&op1, int_loc_descriptor (shift));
14732 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14733 }
14734 return compare_loc_descriptor (op, op0, op1);
14735 }
14736
14737 /* Return location descriptor for signed comparison OP RTL. */
14738
14739 static dw_loc_descr_ref
14740 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14741 machine_mode mem_mode)
14742 {
14743 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14744 dw_loc_descr_ref op0, op1;
14745
14746 if (op_mode == VOIDmode)
14747 op_mode = GET_MODE (XEXP (rtl, 1));
14748 if (op_mode == VOIDmode)
14749 return NULL;
14750
14751 scalar_int_mode int_op_mode;
14752 if (dwarf_strict
14753 && dwarf_version < 5
14754 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14755 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14756 return NULL;
14757
14758 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14759 VAR_INIT_STATUS_INITIALIZED);
14760 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14761 VAR_INIT_STATUS_INITIALIZED);
14762
14763 if (op0 == NULL || op1 == NULL)
14764 return NULL;
14765
14766 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14767 {
14768 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14769 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14770
14771 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14772 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14773 }
14774 return compare_loc_descriptor (op, op0, op1);
14775 }
14776
14777 /* Return location descriptor for unsigned comparison OP RTL. */
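/* Two strategies are used below: operands narrower than DWARF2_ADDR_SIZE are
   masked down to their mode, while operands exactly DWARF2_ADDR_SIZE wide are
   biased with DW_OP_plus_uconst by 2**(N-1) (N being the address size in
   bits), which turns the unsigned comparison into an equivalent signed
   one.  */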
14778
14779 static dw_loc_descr_ref
14780 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14781 machine_mode mem_mode)
14782 {
14783 dw_loc_descr_ref op0, op1;
14784
14785 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14786 if (test_op_mode == VOIDmode)
14787 test_op_mode = GET_MODE (XEXP (rtl, 1));
14788
14789 scalar_int_mode op_mode;
14790 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14791 return NULL;
14792
14793 if (dwarf_strict
14794 && dwarf_version < 5
14795 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14796 return NULL;
14797
14798 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14799 VAR_INIT_STATUS_INITIALIZED);
14800 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14801 VAR_INIT_STATUS_INITIALIZED);
14802
14803 if (op0 == NULL || op1 == NULL)
14804 return NULL;
14805
14806 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14807 {
14808 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14809 dw_loc_descr_ref last0, last1;
14810 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14811 ;
14812 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14813 ;
14814 if (CONST_INT_P (XEXP (rtl, 0)))
14815 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14816 /* deref_size zero extends, so no need to mask it again. */
14817 else if (last0->dw_loc_opc != DW_OP_deref_size
14818 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14819 {
14820 add_loc_descr (&op0, int_loc_descriptor (mask));
14821 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14822 }
14823 if (CONST_INT_P (XEXP (rtl, 1)))
14824 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14825 /* deref_size zero extends, so no need to mask it again. */
14826 else if (last1->dw_loc_opc != DW_OP_deref_size
14827 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14828 {
14829 add_loc_descr (&op1, int_loc_descriptor (mask));
14830 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14831 }
14832 }
14833 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14834 {
14835 HOST_WIDE_INT bias = 1;
14836 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14837 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14838 if (CONST_INT_P (XEXP (rtl, 1)))
14839 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14840 + INTVAL (XEXP (rtl, 1)));
14841 else
14842 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14843 bias, 0));
14844 }
14845 return compare_loc_descriptor (op, op0, op1);
14846 }
14847
14848 /* Return location descriptor for {U,S}{MIN,MAX}. */
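/* Ignoring the mode-adjustment fixups, the generated sequence is
     <op0> DW_OP_dup <op1> DW_OP_swap DW_OP_over DW_OP_lt/DW_OP_gt
     DW_OP_bra <L1> DW_OP_swap L1: DW_OP_drop
   i.e. both operands are kept on the stack and whichever one loses the
   comparison is dropped, leaving the minimum or maximum on top.  */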
14849
14850 static dw_loc_descr_ref
14851 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14852 machine_mode mem_mode)
14853 {
14854 enum dwarf_location_atom op;
14855 dw_loc_descr_ref op0, op1, ret;
14856 dw_loc_descr_ref bra_node, drop_node;
14857
14858 scalar_int_mode int_mode;
14859 if (dwarf_strict
14860 && dwarf_version < 5
14861 && (!is_a <scalar_int_mode> (mode, &int_mode)
14862 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14863 return NULL;
14864
14865 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14866 VAR_INIT_STATUS_INITIALIZED);
14867 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14868 VAR_INIT_STATUS_INITIALIZED);
14869
14870 if (op0 == NULL || op1 == NULL)
14871 return NULL;
14872
14873 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14874 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14875 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14876 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14877 {
14878 /* Checked by the caller. */
14879 int_mode = as_a <scalar_int_mode> (mode);
14880 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14881 {
14882 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14883 add_loc_descr (&op0, int_loc_descriptor (mask));
14884 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14885 add_loc_descr (&op1, int_loc_descriptor (mask));
14886 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14887 }
14888 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14889 {
14890 HOST_WIDE_INT bias = 1;
14891 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14892 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14893 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14894 }
14895 }
14896 else if (is_a <scalar_int_mode> (mode, &int_mode)
14897 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14898 {
14899 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14900 add_loc_descr (&op0, int_loc_descriptor (shift));
14901 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14902 add_loc_descr (&op1, int_loc_descriptor (shift));
14903 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14904 }
14905 else if (is_a <scalar_int_mode> (mode, &int_mode)
14906 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14907 {
14908 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14909 dw_loc_descr_ref cvt;
14910 if (type_die == NULL)
14911 return NULL;
14912 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14913 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14914 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14915 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14916 add_loc_descr (&op0, cvt);
14917 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14918 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14919 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14920 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14921 add_loc_descr (&op1, cvt);
14922 }
14923
14924 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14925 op = DW_OP_lt;
14926 else
14927 op = DW_OP_gt;
14928 ret = op0;
14929 add_loc_descr (&ret, op1);
14930 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14931 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14932 add_loc_descr (&ret, bra_node);
14933 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14934 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14935 add_loc_descr (&ret, drop_node);
14936 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14937 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14938 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14939 && is_a <scalar_int_mode> (mode, &int_mode)
14940 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14941 ret = convert_descriptor_to_mode (int_mode, ret);
14942 return ret;
14943 }
14944
14945 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14946 after converting both arguments to the type given by TYPE_DIE, then
14947 convert the result back to an unsigned (or untyped) value of MODE. */
14948
14949 static dw_loc_descr_ref
14950 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14951 scalar_int_mode mode, machine_mode mem_mode)
14952 {
14953 dw_loc_descr_ref cvt, op0, op1;
14954
14955 if (type_die == NULL)
14956 return NULL;
14957 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14958 VAR_INIT_STATUS_INITIALIZED);
14959 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14960 VAR_INIT_STATUS_INITIALIZED);
14961 if (op0 == NULL || op1 == NULL)
14962 return NULL;
14963 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14964 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14965 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14966 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14967 add_loc_descr (&op0, cvt);
14968 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14969 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14970 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14971 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14972 add_loc_descr (&op1, cvt);
14973 add_loc_descr (&op0, op1);
14974 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14975 return convert_descriptor_to_mode (mode, op0);
14976 }
14977
14978 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14979 const0 is DW_OP_lit0 or corresponding typed constant,
14980 const1 is DW_OP_lit1 or corresponding typed constant
14981 and constMSB is constant with just the MSB bit set
14982 for the mode):
14983 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14984 L1: const0 DW_OP_swap
14985 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14986 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14987 L3: DW_OP_drop
14988 L4: DW_OP_nop
14989
14990 CTZ is similar:
14991 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14992 L1: const0 DW_OP_swap
14993 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14994 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14995 L3: DW_OP_drop
14996 L4: DW_OP_nop
14997
14998 FFS is similar:
14999 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15000 L1: const1 DW_OP_swap
15001 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15002 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15003 L3: DW_OP_drop
15004 L4: DW_OP_nop */
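/* For example, CLZ of the value 1 in a 32-bit mode runs the L2 loop 31 times
   (shifting left and bumping the counter until the MSB becomes set) and so
   yields 31, while a zero input short-circuits to constV at the very first
   DW_OP_bra.  */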
15005
15006 static dw_loc_descr_ref
15007 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15008 machine_mode mem_mode)
15009 {
15010 dw_loc_descr_ref op0, ret, tmp;
15011 HOST_WIDE_INT valv;
15012 dw_loc_descr_ref l1jump, l1label;
15013 dw_loc_descr_ref l2jump, l2label;
15014 dw_loc_descr_ref l3jump, l3label;
15015 dw_loc_descr_ref l4jump, l4label;
15016 rtx msb;
15017
15018 if (GET_MODE (XEXP (rtl, 0)) != mode)
15019 return NULL;
15020
15021 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15022 VAR_INIT_STATUS_INITIALIZED);
15023 if (op0 == NULL)
15024 return NULL;
15025 ret = op0;
15026 if (GET_CODE (rtl) == CLZ)
15027 {
15028 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15029 valv = GET_MODE_BITSIZE (mode);
15030 }
15031 else if (GET_CODE (rtl) == FFS)
15032 valv = 0;
15033 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15034 valv = GET_MODE_BITSIZE (mode);
15035 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15036 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15037 add_loc_descr (&ret, l1jump);
15038 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15039 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15040 VAR_INIT_STATUS_INITIALIZED);
15041 if (tmp == NULL)
15042 return NULL;
15043 add_loc_descr (&ret, tmp);
15044 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15045 add_loc_descr (&ret, l4jump);
15046 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15047 ? const1_rtx : const0_rtx,
15048 mode, mem_mode,
15049 VAR_INIT_STATUS_INITIALIZED);
15050 if (l1label == NULL)
15051 return NULL;
15052 add_loc_descr (&ret, l1label);
15053 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15054 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15055 add_loc_descr (&ret, l2label);
15056 if (GET_CODE (rtl) != CLZ)
15057 msb = const1_rtx;
15058 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15059 msb = GEN_INT (HOST_WIDE_INT_1U
15060 << (GET_MODE_BITSIZE (mode) - 1));
15061 else
15062 msb = immed_wide_int_const
15063 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15064 GET_MODE_PRECISION (mode)), mode);
15065 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15066 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15067 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15068 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15069 else
15070 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15071 VAR_INIT_STATUS_INITIALIZED);
15072 if (tmp == NULL)
15073 return NULL;
15074 add_loc_descr (&ret, tmp);
15075 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15076 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15077 add_loc_descr (&ret, l3jump);
15078 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15079 VAR_INIT_STATUS_INITIALIZED);
15080 if (tmp == NULL)
15081 return NULL;
15082 add_loc_descr (&ret, tmp);
15083 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15084 ? DW_OP_shl : DW_OP_shr, 0, 0));
15085 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15086 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15087 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15088 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15089 add_loc_descr (&ret, l2jump);
15090 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15091 add_loc_descr (&ret, l3label);
15092 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15093 add_loc_descr (&ret, l4label);
15094 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15095 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15096 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15097 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15098 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15099 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15100 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15101 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15102 return ret;
15103 }
15104
15105 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15106 const1 is DW_OP_lit1 or corresponding typed constant):
15107 const0 DW_OP_swap
15108 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15109 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15110 L2: DW_OP_drop
15111
15112 PARITY is similar:
15113 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15114 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15115 L2: DW_OP_drop */
15116
15117 static dw_loc_descr_ref
15118 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15119 machine_mode mem_mode)
15120 {
15121 dw_loc_descr_ref op0, ret, tmp;
15122 dw_loc_descr_ref l1jump, l1label;
15123 dw_loc_descr_ref l2jump, l2label;
15124
15125 if (GET_MODE (XEXP (rtl, 0)) != mode)
15126 return NULL;
15127
15128 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15129 VAR_INIT_STATUS_INITIALIZED);
15130 if (op0 == NULL)
15131 return NULL;
15132 ret = op0;
15133 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15134 VAR_INIT_STATUS_INITIALIZED);
15135 if (tmp == NULL)
15136 return NULL;
15137 add_loc_descr (&ret, tmp);
15138 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15139 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15140 add_loc_descr (&ret, l1label);
15141 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15142 add_loc_descr (&ret, l2jump);
15143 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15144 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15145 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15146 VAR_INIT_STATUS_INITIALIZED);
15147 if (tmp == NULL)
15148 return NULL;
15149 add_loc_descr (&ret, tmp);
15150 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15151 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15152 ? DW_OP_plus : DW_OP_xor, 0, 0));
15153 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15154 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15155 VAR_INIT_STATUS_INITIALIZED);
15156 add_loc_descr (&ret, tmp);
15157 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15158 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15159 add_loc_descr (&ret, l1jump);
15160 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15161 add_loc_descr (&ret, l2label);
15162 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15163 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15164 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15165 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15166 return ret;
15167 }
15168
15169 /* BSWAP (constS is initial shift count, either 56 or 24):
15170 constS const0
15171 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15172 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15173 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15174 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15175 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
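/* For instance, byte-swapping the SImode value 0x11223344 starts with
   constS = 24, runs the loop with shift counts 24, 16, 8 and 0, and
   accumulates 0x44332211 before the cleanup at L2.  */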
15176
15177 static dw_loc_descr_ref
15178 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15179 machine_mode mem_mode)
15180 {
15181 dw_loc_descr_ref op0, ret, tmp;
15182 dw_loc_descr_ref l1jump, l1label;
15183 dw_loc_descr_ref l2jump, l2label;
15184
15185 if (BITS_PER_UNIT != 8
15186 || (GET_MODE_BITSIZE (mode) != 32
15187 && GET_MODE_BITSIZE (mode) != 64))
15188 return NULL;
15189
15190 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15191 VAR_INIT_STATUS_INITIALIZED);
15192 if (op0 == NULL)
15193 return NULL;
15194
15195 ret = op0;
15196 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15197 mode, mem_mode,
15198 VAR_INIT_STATUS_INITIALIZED);
15199 if (tmp == NULL)
15200 return NULL;
15201 add_loc_descr (&ret, tmp);
15202 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15203 VAR_INIT_STATUS_INITIALIZED);
15204 if (tmp == NULL)
15205 return NULL;
15206 add_loc_descr (&ret, tmp);
15207 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15208 add_loc_descr (&ret, l1label);
15209 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15210 mode, mem_mode,
15211 VAR_INIT_STATUS_INITIALIZED);
15212 add_loc_descr (&ret, tmp);
15213 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15214 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15216 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15217 VAR_INIT_STATUS_INITIALIZED);
15218 if (tmp == NULL)
15219 return NULL;
15220 add_loc_descr (&ret, tmp);
15221 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15222 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15223 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15224 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15225 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15226 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15227 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15228 VAR_INIT_STATUS_INITIALIZED);
15229 add_loc_descr (&ret, tmp);
15230 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15231 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15232 add_loc_descr (&ret, l2jump);
15233 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15234 VAR_INIT_STATUS_INITIALIZED);
15235 add_loc_descr (&ret, tmp);
15236 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15237 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15238 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15239 add_loc_descr (&ret, l1jump);
15240 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15241 add_loc_descr (&ret, l2label);
15242 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15243 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15244 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15245 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15246 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15247 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15248 return ret;
15249 }
15250
15251 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15252 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15253 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15254 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15255
15256 ROTATERT is similar:
15257 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15258 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15259 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
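/* Both sequences implement the usual identities
     rotate-left (x, n)  == (x << n) | (x >> (bitsize - n))
     rotate-right (x, n) == (x >> n) | (x << (bitsize - n))
   with the DW_OP_neg/DW_OP_plus_uconst pair computing bitsize - n and the
   optional constMASK steps keeping narrower-than-address modes within their
   mode mask.  */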
15260
15261 static dw_loc_descr_ref
15262 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15263 machine_mode mem_mode)
15264 {
15265 rtx rtlop1 = XEXP (rtl, 1);
15266 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15267 int i;
15268
15269 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15270 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15271 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15272 VAR_INIT_STATUS_INITIALIZED);
15273 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15274 VAR_INIT_STATUS_INITIALIZED);
15275 if (op0 == NULL || op1 == NULL)
15276 return NULL;
15277 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15278 for (i = 0; i < 2; i++)
15279 {
15280 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15281 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15282 mode, mem_mode,
15283 VAR_INIT_STATUS_INITIALIZED);
15284 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15285 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15286 ? DW_OP_const4u
15287 : HOST_BITS_PER_WIDE_INT == 64
15288 ? DW_OP_const8u : DW_OP_constu,
15289 GET_MODE_MASK (mode), 0);
15290 else
15291 mask[i] = NULL;
15292 if (mask[i] == NULL)
15293 return NULL;
15294 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15295 }
15296 ret = op0;
15297 add_loc_descr (&ret, op1);
15298 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15299 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15300 if (GET_CODE (rtl) == ROTATERT)
15301 {
15302 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15303 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15304 GET_MODE_BITSIZE (mode), 0));
15305 }
15306 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15307 if (mask[0] != NULL)
15308 add_loc_descr (&ret, mask[0]);
15309 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15310 if (mask[1] != NULL)
15311 {
15312 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15313 add_loc_descr (&ret, mask[1]);
15314 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15315 }
15316 if (GET_CODE (rtl) == ROTATE)
15317 {
15318 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15319 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15320 GET_MODE_BITSIZE (mode), 0));
15321 }
15322 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15323 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15324 return ret;
15325 }
15326
15327 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15328 for DEBUG_PARAMETER_REF RTL. */
15329
15330 static dw_loc_descr_ref
15331 parameter_ref_descriptor (rtx rtl)
15332 {
15333 dw_loc_descr_ref ret;
15334 dw_die_ref ref;
15335
15336 if (dwarf_strict)
15337 return NULL;
15338 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15339 /* With LTO during LTRANS we get the late DIE that refers to the early
15340 DIE, thus we add another indirection here. This seems to confuse
15341 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15342 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15343 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15344 if (ref)
15345 {
15346 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15347 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15348 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15349 }
15350 else
15351 {
15352 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15353 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15354 }
15355 return ret;
15356 }
15357
15358 /* The following routine converts the RTL for a variable or parameter
15359 (resident in memory) into an equivalent Dwarf representation of a
15360 mechanism for getting the address of that same variable onto the top of a
15361 hypothetical "address evaluation" stack.
15362
15363 When creating memory location descriptors, we are effectively transforming
15364 the RTL for a memory-resident object into its Dwarf postfix expression
15365 equivalent. This routine recursively descends an RTL tree, turning
15366 it into Dwarf postfix code as it goes.
15367
15368 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15369
15370 MEM_MODE is the mode of the memory reference, needed to handle some
15371 autoincrement addressing modes.
15372
15373 Return 0 if we can't represent the location. */
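/* For example, a hard register operand no wider than the address size goes
   through based_loc_descr and typically becomes DW_OP_breg<n> 0 (pushing the
   register's contents), while a MEM first recurses on its address and then
   appends a DW_OP_deref or DW_OP_deref_size of the appropriate width.  */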
15374
15375 dw_loc_descr_ref
15376 mem_loc_descriptor (rtx rtl, machine_mode mode,
15377 machine_mode mem_mode,
15378 enum var_init_status initialized)
15379 {
15380 dw_loc_descr_ref mem_loc_result = NULL;
15381 enum dwarf_location_atom op;
15382 dw_loc_descr_ref op0, op1;
15383 rtx inner = NULL_RTX;
15384 poly_int64 offset;
15385
15386 if (mode == VOIDmode)
15387 mode = GET_MODE (rtl);
15388
15389 /* Note that for a dynamically sized array, the location we will generate a
15390 description of here will be the lowest numbered location which is
15391 actually within the array. That's *not* necessarily the same as the
15392 zeroth element of the array. */
15393
15394 rtl = targetm.delegitimize_address (rtl);
15395
15396 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15397 return NULL;
15398
15399 scalar_int_mode int_mode, inner_mode, op1_mode;
15400 switch (GET_CODE (rtl))
15401 {
15402 case POST_INC:
15403 case POST_DEC:
15404 case POST_MODIFY:
15405 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15406
15407 case SUBREG:
15408 /* The case of a subreg may arise when we have a local (register)
15409 variable or a formal (register) parameter which doesn't quite fill
15410 up an entire register. For now, just assume that it is
15411 legitimate to make the Dwarf info refer to the whole register which
15412 contains the given subreg. */
15413 if (!subreg_lowpart_p (rtl))
15414 break;
15415 inner = SUBREG_REG (rtl);
15416 /* FALLTHRU */
15417 case TRUNCATE:
15418 if (inner == NULL_RTX)
15419 inner = XEXP (rtl, 0);
15420 if (is_a <scalar_int_mode> (mode, &int_mode)
15421 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15422 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15423 #ifdef POINTERS_EXTEND_UNSIGNED
15424 || (int_mode == Pmode && mem_mode != VOIDmode)
15425 #endif
15426 )
15427 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15428 {
15429 mem_loc_result = mem_loc_descriptor (inner,
15430 inner_mode,
15431 mem_mode, initialized);
15432 break;
15433 }
15434 if (dwarf_strict && dwarf_version < 5)
15435 break;
15436 if (is_a <scalar_int_mode> (mode, &int_mode)
15437 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15438 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15439 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15440 {
15441 dw_die_ref type_die;
15442 dw_loc_descr_ref cvt;
15443
15444 mem_loc_result = mem_loc_descriptor (inner,
15445 GET_MODE (inner),
15446 mem_mode, initialized);
15447 if (mem_loc_result == NULL)
15448 break;
15449 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15450 if (type_die == NULL)
15451 {
15452 mem_loc_result = NULL;
15453 break;
15454 }
15455 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15456 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15457 else
15458 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15459 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15460 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15461 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15462 add_loc_descr (&mem_loc_result, cvt);
15463 if (is_a <scalar_int_mode> (mode, &int_mode)
15464 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15465 {
15466 /* Convert it to untyped afterwards. */
15467 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15468 add_loc_descr (&mem_loc_result, cvt);
15469 }
15470 }
15471 break;
15472
15473 case REG:
15474 if (!is_a <scalar_int_mode> (mode, &int_mode)
15475 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15476 && rtl != arg_pointer_rtx
15477 && rtl != frame_pointer_rtx
15478 #ifdef POINTERS_EXTEND_UNSIGNED
15479 && (int_mode != Pmode || mem_mode == VOIDmode)
15480 #endif
15481 ))
15482 {
15483 dw_die_ref type_die;
15484 unsigned int dbx_regnum;
15485
15486 if (dwarf_strict && dwarf_version < 5)
15487 break;
15488 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15489 break;
15490 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15491 if (type_die == NULL)
15492 break;
15493
15494 dbx_regnum = dbx_reg_number (rtl);
15495 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15496 break;
15497 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15498 dbx_regnum, 0);
15499 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15500 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15501 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15502 break;
15503 }
15504 /* Whenever a register number forms a part of the description of the
15505 method for calculating the (dynamic) address of a memory resident
15506 object, DWARF rules require the register number be referred to as
15507 a "base register". This distinction is not based in any way upon
15508 what category of register the hardware believes the given register
15509 belongs to. This is strictly DWARF terminology we're dealing with
15510 here. Note that in cases where the location of a memory-resident
15511 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15512 OP_CONST (0)) the actual DWARF location descriptor that we generate
15513 may just be OP_BASEREG (basereg). This may look deceptively like
15514 the object in question was allocated to a register (rather than in
15515 memory) so DWARF consumers need to be aware of the subtle
15516 distinction between OP_REG and OP_BASEREG. */
15517 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15518 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15519 else if (stack_realign_drap
15520 && crtl->drap_reg
15521 && crtl->args.internal_arg_pointer == rtl
15522 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15523 {
15524 /* If RTL is internal_arg_pointer, which has been optimized
15525 out, use DRAP instead. */
15526 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15527 VAR_INIT_STATUS_INITIALIZED);
15528 }
15529 break;
15530
15531 case SIGN_EXTEND:
15532 case ZERO_EXTEND:
15533 if (!is_a <scalar_int_mode> (mode, &int_mode)
15534 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15535 break;
15536 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15537 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15538 if (op0 == 0)
15539 break;
15540 else if (GET_CODE (rtl) == ZERO_EXTEND
15541 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15542 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15543 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15544 to expand zero extend as two shifts instead of
15545 masking. */
15546 && GET_MODE_SIZE (inner_mode) <= 4)
15547 {
15548 mem_loc_result = op0;
15549 add_loc_descr (&mem_loc_result,
15550 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15551 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15552 }
15553 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15554 {
15555 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15556 shift *= BITS_PER_UNIT;
15557 if (GET_CODE (rtl) == SIGN_EXTEND)
15558 op = DW_OP_shra;
15559 else
15560 op = DW_OP_shr;
15561 mem_loc_result = op0;
15562 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15563 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15564 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15565 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15566 }
15567 else if (!dwarf_strict || dwarf_version >= 5)
15568 {
15569 dw_die_ref type_die1, type_die2;
15570 dw_loc_descr_ref cvt;
15571
15572 type_die1 = base_type_for_mode (inner_mode,
15573 GET_CODE (rtl) == ZERO_EXTEND);
15574 if (type_die1 == NULL)
15575 break;
15576 type_die2 = base_type_for_mode (int_mode, 1);
15577 if (type_die2 == NULL)
15578 break;
15579 mem_loc_result = op0;
15580 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15581 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15582 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15583 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15584 add_loc_descr (&mem_loc_result, cvt);
15585 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15586 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15587 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15588 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15589 add_loc_descr (&mem_loc_result, cvt);
15590 }
15591 break;
15592
15593 case MEM:
15594 {
15595 rtx new_rtl = avoid_constant_pool_reference (rtl);
15596 if (new_rtl != rtl)
15597 {
15598 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15599 initialized);
15600 if (mem_loc_result != NULL)
15601 return mem_loc_result;
15602 }
15603 }
15604 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15605 get_address_mode (rtl), mode,
15606 VAR_INIT_STATUS_INITIALIZED);
15607 if (mem_loc_result == NULL)
15608 mem_loc_result = tls_mem_loc_descriptor (rtl);
15609 if (mem_loc_result != NULL)
15610 {
15611 if (!is_a <scalar_int_mode> (mode, &int_mode)
15612 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15613 {
15614 dw_die_ref type_die;
15615 dw_loc_descr_ref deref;
15616 HOST_WIDE_INT size;
15617
15618 if (dwarf_strict && dwarf_version < 5)
15619 return NULL;
15620 if (!GET_MODE_SIZE (mode).is_constant (&size))
15621 return NULL;
15622 type_die
15623 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15624 if (type_die == NULL)
15625 return NULL;
15626 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15627 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15628 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15629 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15630 add_loc_descr (&mem_loc_result, deref);
15631 }
15632 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15633 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15634 else
15635 add_loc_descr (&mem_loc_result,
15636 new_loc_descr (DW_OP_deref_size,
15637 GET_MODE_SIZE (int_mode), 0));
15638 }
15639 break;
15640
15641 case LO_SUM:
15642 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15643
15644 case LABEL_REF:
15645 /* Some ports can transform a symbol ref into a label ref, because
15646 the symbol ref is too far away and has to be dumped into a constant
15647 pool. */
15648 case CONST:
15649 case SYMBOL_REF:
15650 if (!is_a <scalar_int_mode> (mode, &int_mode)
15651 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15652 #ifdef POINTERS_EXTEND_UNSIGNED
15653 && (int_mode != Pmode || mem_mode == VOIDmode)
15654 #endif
15655 ))
15656 break;
15657 if (GET_CODE (rtl) == SYMBOL_REF
15658 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15659 {
15660 dw_loc_descr_ref temp;
15661
15662 /* If this is not defined, we have no way to emit the data. */
15663 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15664 break;
15665
15666 temp = new_addr_loc_descr (rtl, dtprel_true);
15667
15668 /* We check for DWARF 5 here because gdb did not implement
15669 DW_OP_form_tls_address until after 7.12. */
15670 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15671 ? DW_OP_form_tls_address
15672 : DW_OP_GNU_push_tls_address),
15673 0, 0);
15674 add_loc_descr (&mem_loc_result, temp);
15675
15676 break;
15677 }
15678
15679 if (!const_ok_for_output (rtl))
15680 {
15681 if (GET_CODE (rtl) == CONST)
15682 switch (GET_CODE (XEXP (rtl, 0)))
15683 {
15684 case NOT:
15685 op = DW_OP_not;
15686 goto try_const_unop;
15687 case NEG:
15688 op = DW_OP_neg;
15689 goto try_const_unop;
15690 try_const_unop:
15691 rtx arg;
15692 arg = XEXP (XEXP (rtl, 0), 0);
15693 if (!CONSTANT_P (arg))
15694 arg = gen_rtx_CONST (int_mode, arg);
15695 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15696 initialized);
15697 if (op0)
15698 {
15699 mem_loc_result = op0;
15700 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15701 }
15702 break;
15703 default:
15704 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15705 mem_mode, initialized);
15706 break;
15707 }
15708 break;
15709 }
15710
15711 symref:
15712 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15713 vec_safe_push (used_rtx_array, rtl);
15714 break;
15715
15716 case CONCAT:
15717 case CONCATN:
15718 case VAR_LOCATION:
15719 case DEBUG_IMPLICIT_PTR:
15720 expansion_failed (NULL_TREE, rtl,
15721 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15722 return 0;
15723
15724 case ENTRY_VALUE:
15725 if (dwarf_strict && dwarf_version < 5)
15726 return NULL;
15727 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15728 {
15729 if (!is_a <scalar_int_mode> (mode, &int_mode)
15730 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15731 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15732 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15733 else
15734 {
15735 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15736 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15737 return NULL;
15738 op0 = one_reg_loc_descriptor (dbx_regnum,
15739 VAR_INIT_STATUS_INITIALIZED);
15740 }
15741 }
15742 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15743 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15744 {
15745 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15746 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15747 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15748 return NULL;
15749 }
15750 else
15751 gcc_unreachable ();
15752 if (op0 == NULL)
15753 return NULL;
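/* Wrap the location computed above in a DW_OP_entry_value operation
   whose block operand OP0 is to be evaluated by the consumer in the
   state the function had upon entry.  */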
15754 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15755 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15756 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15757 break;
15758
15759 case DEBUG_PARAMETER_REF:
15760 mem_loc_result = parameter_ref_descriptor (rtl);
15761 break;
15762
15763 case PRE_MODIFY:
15764 /* Extract the PLUS expression nested inside and fall into
15765 PLUS code below. */
15766 rtl = XEXP (rtl, 1);
15767 goto plus;
15768
15769 case PRE_INC:
15770 case PRE_DEC:
15771 /* Turn these into a PLUS expression and fall into the PLUS code
15772 below. */
15773 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15774 gen_int_mode (GET_CODE (rtl) == PRE_INC
15775 ? GET_MODE_UNIT_SIZE (mem_mode)
15776 : -GET_MODE_UNIT_SIZE (mem_mode),
15777 mode));
15778
15779 /* fall through */
15780
15781 case PLUS:
15782 plus:
15783 if (is_based_loc (rtl)
15784 && is_a <scalar_int_mode> (mode, &int_mode)
15785 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15786 || XEXP (rtl, 0) == arg_pointer_rtx
15787 || XEXP (rtl, 0) == frame_pointer_rtx))
15788 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15789 INTVAL (XEXP (rtl, 1)),
15790 VAR_INIT_STATUS_INITIALIZED);
15791 else
15792 {
15793 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15794 VAR_INIT_STATUS_INITIALIZED);
15795 if (mem_loc_result == 0)
15796 break;
15797
15798 if (CONST_INT_P (XEXP (rtl, 1))
15799 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15800 <= DWARF2_ADDR_SIZE))
15801 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15802 else
15803 {
15804 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15805 VAR_INIT_STATUS_INITIALIZED);
15806 if (op1 == 0)
15807 return NULL;
15808 add_loc_descr (&mem_loc_result, op1);
15809 add_loc_descr (&mem_loc_result,
15810 new_loc_descr (DW_OP_plus, 0, 0));
15811 }
15812 }
15813 break;
15814
15815 /* If a pseudo-reg is optimized away, it is possible for it to
15816 be replaced with a MEM containing a multiply or shift. */
15817 case MINUS:
15818 op = DW_OP_minus;
15819 goto do_binop;
15820
15821 case MULT:
15822 op = DW_OP_mul;
15823 goto do_binop;
15824
15825 case DIV:
15826 if ((!dwarf_strict || dwarf_version >= 5)
15827 && is_a <scalar_int_mode> (mode, &int_mode)
15828 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15829 {
15830 mem_loc_result = typed_binop (DW_OP_div, rtl,
15831 base_type_for_mode (mode, 0),
15832 int_mode, mem_mode);
15833 break;
15834 }
15835 op = DW_OP_div;
15836 goto do_binop;
15837
15838 case UMOD:
15839 op = DW_OP_mod;
15840 goto do_binop;
15841
15842 case ASHIFT:
15843 op = DW_OP_shl;
15844 goto do_shift;
15845
15846 case ASHIFTRT:
15847 op = DW_OP_shra;
15848 goto do_shift;
15849
15850 case LSHIFTRT:
15851 op = DW_OP_shr;
15852 goto do_shift;
15853
15854 do_shift:
15855 if (!is_a <scalar_int_mode> (mode, &int_mode))
15856 break;
15857 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15858 VAR_INIT_STATUS_INITIALIZED);
15859 {
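/* The shift count may be described in a narrower mode than the shifted
   value; zero-extend it so that both stack operands are evaluated in
   INT_MODE.  */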
15860 rtx rtlop1 = XEXP (rtl, 1);
15861 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15862 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15863 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15864 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15865 VAR_INIT_STATUS_INITIALIZED);
15866 }
15867
15868 if (op0 == 0 || op1 == 0)
15869 break;
15870
15871 mem_loc_result = op0;
15872 add_loc_descr (&mem_loc_result, op1);
15873 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15874 break;
15875
15876 case AND:
15877 op = DW_OP_and;
15878 goto do_binop;
15879
15880 case IOR:
15881 op = DW_OP_or;
15882 goto do_binop;
15883
15884 case XOR:
15885 op = DW_OP_xor;
15886 goto do_binop;
15887
15888 do_binop:
15889 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15890 VAR_INIT_STATUS_INITIALIZED);
15891 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15892 VAR_INIT_STATUS_INITIALIZED);
15893
15894 if (op0 == 0 || op1 == 0)
15895 break;
15896
15897 mem_loc_result = op0;
15898 add_loc_descr (&mem_loc_result, op1);
15899 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15900 break;
15901
15902 case MOD:
15903 if ((!dwarf_strict || dwarf_version >= 5)
15904 && is_a <scalar_int_mode> (mode, &int_mode)
15905 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15906 {
15907 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15908 base_type_for_mode (mode, 0),
15909 int_mode, mem_mode);
15910 break;
15911 }
15912
15913 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15914 VAR_INIT_STATUS_INITIALIZED);
15915 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15916 VAR_INIT_STATUS_INITIALIZED);
15917
15918 if (op0 == 0 || op1 == 0)
15919 break;
15920
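/* Open-code the remainder: with OP0 below OP1 on the stack, the sequence
   DW_OP_over DW_OP_over DW_OP_div DW_OP_mul DW_OP_minus leaves
   op0 - (op0 / op1) * op1, i.e. the remainder with the sign behavior
   of DW_OP_div.  */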
15921 mem_loc_result = op0;
15922 add_loc_descr (&mem_loc_result, op1);
15923 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15924 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15925 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15926 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15927 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15928 break;
15929
15930 case UDIV:
15931 if ((!dwarf_strict || dwarf_version >= 5)
15932 && is_a <scalar_int_mode> (mode, &int_mode))
15933 {
15934 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15935 {
15936 op = DW_OP_div;
15937 goto do_binop;
15938 }
15939 mem_loc_result = typed_binop (DW_OP_div, rtl,
15940 base_type_for_mode (int_mode, 1),
15941 int_mode, mem_mode);
15942 }
15943 break;
15944
15945 case NOT:
15946 op = DW_OP_not;
15947 goto do_unop;
15948
15949 case ABS:
15950 op = DW_OP_abs;
15951 goto do_unop;
15952
15953 case NEG:
15954 op = DW_OP_neg;
15955 goto do_unop;
15956
15957 do_unop:
15958 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15959 VAR_INIT_STATUS_INITIALIZED);
15960
15961 if (op0 == 0)
15962 break;
15963
15964 mem_loc_result = op0;
15965 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15966 break;
15967
15968 case CONST_INT:
15969 if (!is_a <scalar_int_mode> (mode, &int_mode)
15970 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15971 #ifdef POINTERS_EXTEND_UNSIGNED
15972 || (int_mode == Pmode
15973 && mem_mode != VOIDmode
15974 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15975 #endif
15976 )
15977 {
15978 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15979 break;
15980 }
15981 if ((!dwarf_strict || dwarf_version >= 5)
15982 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15983 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15984 {
15985 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15986 scalar_int_mode amode;
15987 if (type_die == NULL)
15988 return NULL;
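/* For a non-negative constant that also fits into an address-sized
   integer, an untyped constant followed by DW_OP_convert can be smaller
   than DW_OP_const_type, whose constant operand always occupies
   GET_MODE_SIZE (int_mode) bytes; the size comparison below picks the
   shorter encoding.  */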
15989 if (INTVAL (rtl) >= 0
15990 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15991 .exists (&amode))
15992 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15993 /* const DW_OP_convert <XXX> vs.
15994 DW_OP_const_type <XXX, 1, const>. */
15995 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15996 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15997 {
15998 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15999 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16000 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16001 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16002 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16003 add_loc_descr (&mem_loc_result, op0);
16004 return mem_loc_result;
16005 }
16006 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16007 INTVAL (rtl));
16008 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16009 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16010 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16011 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16012 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16013 else
16014 {
16015 mem_loc_result->dw_loc_oprnd2.val_class
16016 = dw_val_class_const_double;
16017 mem_loc_result->dw_loc_oprnd2.v.val_double
16018 = double_int::from_shwi (INTVAL (rtl));
16019 }
16020 }
16021 break;
16022
16023 case CONST_DOUBLE:
16024 if (!dwarf_strict || dwarf_version >= 5)
16025 {
16026 dw_die_ref type_die;
16027
16028 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16029 CONST_DOUBLE rtx could represent either a large integer
16030 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16031 the value is always a floating point constant.
16032
16033 When it is an integer, a CONST_DOUBLE is used whenever
16034 the constant requires 2 HWIs to be adequately represented.
16035 We output CONST_DOUBLEs as blocks. */
16036 if (mode == VOIDmode
16037 || (GET_MODE (rtl) == VOIDmode
16038 && maybe_ne (GET_MODE_BITSIZE (mode),
16039 HOST_BITS_PER_DOUBLE_INT)))
16040 break;
16041 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16042 if (type_die == NULL)
16043 return NULL;
16044 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16045 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16046 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16047 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16048 #if TARGET_SUPPORTS_WIDE_INT == 0
16049 if (!SCALAR_FLOAT_MODE_P (mode))
16050 {
16051 mem_loc_result->dw_loc_oprnd2.val_class
16052 = dw_val_class_const_double;
16053 mem_loc_result->dw_loc_oprnd2.v.val_double
16054 = rtx_to_double_int (rtl);
16055 }
16056 else
16057 #endif
16058 {
16059 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16060 unsigned int length = GET_MODE_SIZE (float_mode);
16061 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16062
16063 insert_float (rtl, array);
16064 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16065 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16066 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16067 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16068 }
16069 }
16070 break;
16071
16072 case CONST_WIDE_INT:
16073 if (!dwarf_strict || dwarf_version >= 5)
16074 {
16075 dw_die_ref type_die;
16076
16077 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16078 if (type_die == NULL)
16079 return NULL;
16080 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16081 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16082 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16083 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16084 mem_loc_result->dw_loc_oprnd2.val_class
16085 = dw_val_class_wide_int;
16086 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16087 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16088 }
16089 break;
16090
16091 case CONST_POLY_INT:
16092 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16093 break;
16094
16095 case EQ:
16096 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16097 break;
16098
16099 case GE:
16100 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16101 break;
16102
16103 case GT:
16104 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16105 break;
16106
16107 case LE:
16108 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16109 break;
16110
16111 case LT:
16112 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16113 break;
16114
16115 case NE:
16116 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16117 break;
16118
16119 case GEU:
16120 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16121 break;
16122
16123 case GTU:
16124 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16125 break;
16126
16127 case LEU:
16128 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16129 break;
16130
16131 case LTU:
16132 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16133 break;
16134
16135 case UMIN:
16136 case UMAX:
16137 if (!SCALAR_INT_MODE_P (mode))
16138 break;
16139 /* FALLTHRU */
16140 case SMIN:
16141 case SMAX:
16142 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16143 break;
16144
16145 case ZERO_EXTRACT:
16146 case SIGN_EXTRACT:
16147 if (CONST_INT_P (XEXP (rtl, 1))
16148 && CONST_INT_P (XEXP (rtl, 2))
16149 && is_a <scalar_int_mode> (mode, &int_mode)
16150 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16151 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16152 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16153 && ((unsigned) INTVAL (XEXP (rtl, 1))
16154 + (unsigned) INTVAL (XEXP (rtl, 2))
16155 <= GET_MODE_BITSIZE (int_mode)))
16156 {
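/* Open-code the extraction: shift the selected field towards the most
   significant end of an address-sized value and then shift it back
   down, arithmetically for SIGN_EXTRACT and logically for
   ZERO_EXTRACT.  */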
16157 int shift, size;
16158 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16159 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16160 if (op0 == 0)
16161 break;
16162 if (GET_CODE (rtl) == SIGN_EXTRACT)
16163 op = DW_OP_shra;
16164 else
16165 op = DW_OP_shr;
16166 mem_loc_result = op0;
16167 size = INTVAL (XEXP (rtl, 1));
16168 shift = INTVAL (XEXP (rtl, 2));
16169 if (BITS_BIG_ENDIAN)
16170 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16171 if (shift + size != (int) DWARF2_ADDR_SIZE)
16172 {
16173 add_loc_descr (&mem_loc_result,
16174 int_loc_descriptor (DWARF2_ADDR_SIZE
16175 - shift - size));
16176 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16177 }
16178 if (size != (int) DWARF2_ADDR_SIZE)
16179 {
16180 add_loc_descr (&mem_loc_result,
16181 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16182 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16183 }
16184 }
16185 break;
16186
16187 case IF_THEN_ELSE:
16188 {
16189 dw_loc_descr_ref op2, bra_node, drop_node;
16190 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16191 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16192 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16193 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16194 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16195 VAR_INIT_STATUS_INITIALIZED);
16196 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16197 VAR_INIT_STATUS_INITIALIZED);
16198 if (op0 == NULL || op1 == NULL || op2 == NULL)
16199 break;
16200
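/* Push the THEN value, the ELSE value and the condition.  DW_OP_bra pops
   the condition and, when it is nonzero, skips the DW_OP_swap so that
   the final DW_OP_drop discards the ELSE value; when the condition is
   zero, the swap makes the drop discard the THEN value instead.  */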
16201 mem_loc_result = op1;
16202 add_loc_descr (&mem_loc_result, op2);
16203 add_loc_descr (&mem_loc_result, op0);
16204 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16205 add_loc_descr (&mem_loc_result, bra_node);
16206 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16207 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16208 add_loc_descr (&mem_loc_result, drop_node);
16209 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16210 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16211 }
16212 break;
16213
16214 case FLOAT_EXTEND:
16215 case FLOAT_TRUNCATE:
16216 case FLOAT:
16217 case UNSIGNED_FLOAT:
16218 case FIX:
16219 case UNSIGNED_FIX:
16220 if (!dwarf_strict || dwarf_version >= 5)
16221 {
16222 dw_die_ref type_die;
16223 dw_loc_descr_ref cvt;
16224
16225 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16226 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16227 if (op0 == NULL)
16228 break;
16229 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16230 && (GET_CODE (rtl) == FLOAT
16231 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16232 {
16233 type_die = base_type_for_mode (int_mode,
16234 GET_CODE (rtl) == UNSIGNED_FLOAT);
16235 if (type_die == NULL)
16236 break;
16237 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16238 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16239 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16240 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16241 add_loc_descr (&op0, cvt);
16242 }
16243 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16244 if (type_die == NULL)
16245 break;
16246 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16247 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16248 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16249 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16250 add_loc_descr (&op0, cvt);
16251 if (is_a <scalar_int_mode> (mode, &int_mode)
16252 && (GET_CODE (rtl) == FIX
16253 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16254 {
16255 op0 = convert_descriptor_to_mode (int_mode, op0);
16256 if (op0 == NULL)
16257 break;
16258 }
16259 mem_loc_result = op0;
16260 }
16261 break;
16262
16263 case CLZ:
16264 case CTZ:
16265 case FFS:
16266 if (is_a <scalar_int_mode> (mode, &int_mode))
16267 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16268 break;
16269
16270 case POPCOUNT:
16271 case PARITY:
16272 if (is_a <scalar_int_mode> (mode, &int_mode))
16273 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16274 break;
16275
16276 case BSWAP:
16277 if (is_a <scalar_int_mode> (mode, &int_mode))
16278 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16279 break;
16280
16281 case ROTATE:
16282 case ROTATERT:
16283 if (is_a <scalar_int_mode> (mode, &int_mode))
16284 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16285 break;
16286
16287 case COMPARE:
16288 /* In theory, we could implement the above. */
16289 /* DWARF cannot represent the unsigned compare operations
16290 natively. */
16291 case SS_MULT:
16292 case US_MULT:
16293 case SS_DIV:
16294 case US_DIV:
16295 case SS_PLUS:
16296 case US_PLUS:
16297 case SS_MINUS:
16298 case US_MINUS:
16299 case SS_NEG:
16300 case US_NEG:
16301 case SS_ABS:
16302 case SS_ASHIFT:
16303 case US_ASHIFT:
16304 case SS_TRUNCATE:
16305 case US_TRUNCATE:
16306 case UNORDERED:
16307 case ORDERED:
16308 case UNEQ:
16309 case UNGE:
16310 case UNGT:
16311 case UNLE:
16312 case UNLT:
16313 case LTGT:
16314 case FRACT_CONVERT:
16315 case UNSIGNED_FRACT_CONVERT:
16316 case SAT_FRACT:
16317 case UNSIGNED_SAT_FRACT:
16318 case SQRT:
16319 case ASM_OPERANDS:
16320 case VEC_MERGE:
16321 case VEC_SELECT:
16322 case VEC_CONCAT:
16323 case VEC_DUPLICATE:
16324 case VEC_SERIES:
16325 case UNSPEC:
16326 case HIGH:
16327 case FMA:
16328 case STRICT_LOW_PART:
16329 case CONST_VECTOR:
16330 case CONST_FIXED:
16331 case CLRSB:
16332 case CLOBBER:
16333 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16334 can't express it in the debug info. This can happen e.g. with some
16335 TLS UNSPECs. */
16336 break;
16337
16338 case CONST_STRING:
16339 resolve_one_addr (&rtl);
16340 goto symref;
16341
16342 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16343 the expression. An UNSPEC rtx represents a raw DWARF operation;
16344 new_loc_descr is called for it to build the operation directly.
16345 Otherwise mem_loc_descriptor is called recursively. */
16346 case PARALLEL:
16347 {
16348 int index = 0;
16349 dw_loc_descr_ref exp_result = NULL;
16350
16351 for (; index < XVECLEN (rtl, 0); index++)
16352 {
16353 rtx elem = XVECEXP (rtl, 0, index);
16354 if (GET_CODE (elem) == UNSPEC)
16355 {
16356 /* Each DWARF operation UNSPEC contains two operands; if
16357 an operand is not used for the operation, const0_rtx is
16358 passed. */
16359 gcc_assert (XVECLEN (elem, 0) == 2);
16360
16361 HOST_WIDE_INT dw_op = XINT (elem, 1);
16362 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16363 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16364 exp_result
16365 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16366 oprnd2);
16367 }
16368 else
16369 exp_result
16370 = mem_loc_descriptor (elem, mode, mem_mode,
16371 VAR_INIT_STATUS_INITIALIZED);
16372
16373 if (!mem_loc_result)
16374 mem_loc_result = exp_result;
16375 else
16376 add_loc_descr (&mem_loc_result, exp_result);
16377 }
16378
16379 break;
16380 }
16381
16382 default:
16383 if (flag_checking)
16384 {
16385 print_rtl (stderr, rtl);
16386 gcc_unreachable ();
16387 }
16388 break;
16389 }
16390
16391 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16392 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16393
16394 return mem_loc_result;
16395 }
16396
16397 /* Return a descriptor that describes the concatenation of two locations.
16398 This is typically a complex variable. */
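/* For example, a complex value whose real and imaginary parts live in two
   registers is typically described as
     <loc of X0> DW_OP_piece size0  <loc of X1> DW_OP_piece size1
   and the consumer reassembles the object from the two pieces.  */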
16399
16400 static dw_loc_descr_ref
16401 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16402 {
16403 /* At present we only track constant-sized pieces. */
16404 unsigned int size0, size1;
16405 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16406 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16407 return 0;
16408
16409 dw_loc_descr_ref cc_loc_result = NULL;
16410 dw_loc_descr_ref x0_ref
16411 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16412 dw_loc_descr_ref x1_ref
16413 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16414
16415 if (x0_ref == 0 || x1_ref == 0)
16416 return 0;
16417
16418 cc_loc_result = x0_ref;
16419 add_loc_descr_op_piece (&cc_loc_result, size0);
16420
16421 add_loc_descr (&cc_loc_result, x1_ref);
16422 add_loc_descr_op_piece (&cc_loc_result, size1);
16423
16424 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16425 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16426
16427 return cc_loc_result;
16428 }
16429
16430 /* Return a descriptor that describes the concatenation of N
16431 locations. */
16432
16433 static dw_loc_descr_ref
16434 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16435 {
16436 unsigned int i;
16437 dw_loc_descr_ref cc_loc_result = NULL;
16438 unsigned int n = XVECLEN (concatn, 0);
16439 unsigned int size;
16440
16441 for (i = 0; i < n; ++i)
16442 {
16443 dw_loc_descr_ref ref;
16444 rtx x = XVECEXP (concatn, 0, i);
16445
16446 /* At present we only track constant-sized pieces. */
16447 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16448 return NULL;
16449
16450 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16451 if (ref == NULL)
16452 return NULL;
16453
16454 add_loc_descr (&cc_loc_result, ref);
16455 add_loc_descr_op_piece (&cc_loc_result, size);
16456 }
16457
16458 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16459 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16460
16461 return cc_loc_result;
16462 }
16463
16464 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16465 for DEBUG_IMPLICIT_PTR RTL. */
16466
16467 static dw_loc_descr_ref
16468 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16469 {
16470 dw_loc_descr_ref ret;
16471 dw_die_ref ref;
16472
16473 if (dwarf_strict && dwarf_version < 5)
16474 return NULL;
16475 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16476 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16477 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16478 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16479 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16480 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16481 if (ref)
16482 {
16483 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16484 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16485 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16486 }
16487 else
16488 {
16489 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16490 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16491 }
16492 return ret;
16493 }
16494
16495 /* Output a proper Dwarf location descriptor for a variable or parameter
16496 which is either allocated in a register or in a memory location. For a
16497 register, we just generate an OP_REG and the register number. For a
16498 memory location we provide a Dwarf postfix expression describing how to
16499 generate the (dynamic) address of the object onto the address stack.
16500
16501 MODE is the mode of the decl if this loc_descriptor is going to be used in
16502 the .debug_loc section, where DW_OP_stack_value and DW_OP_implicit_value are
16503 allowed; VOIDmode otherwise.
16504
16505 If we don't know how to describe it, return 0. */
16506
16507 static dw_loc_descr_ref
16508 loc_descriptor (rtx rtl, machine_mode mode,
16509 enum var_init_status initialized)
16510 {
16511 dw_loc_descr_ref loc_result = NULL;
16512 scalar_int_mode int_mode;
16513
16514 switch (GET_CODE (rtl))
16515 {
16516 case SUBREG:
16517 /* The case of a subreg may arise when we have a local (register)
16518 variable or a formal (register) parameter which doesn't quite fill
16519 up an entire register. For now, just assume that it is
16520 legitimate to make the Dwarf info refer to the whole register which
16521 contains the given subreg. */
16522 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16523 loc_result = loc_descriptor (SUBREG_REG (rtl),
16524 GET_MODE (SUBREG_REG (rtl)), initialized);
16525 else
16526 goto do_default;
16527 break;
16528
16529 case REG:
16530 loc_result = reg_loc_descriptor (rtl, initialized);
16531 break;
16532
16533 case MEM:
16534 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16535 GET_MODE (rtl), initialized);
16536 if (loc_result == NULL)
16537 loc_result = tls_mem_loc_descriptor (rtl);
16538 if (loc_result == NULL)
16539 {
16540 rtx new_rtl = avoid_constant_pool_reference (rtl);
16541 if (new_rtl != rtl)
16542 loc_result = loc_descriptor (new_rtl, mode, initialized);
16543 }
16544 break;
16545
16546 case CONCAT:
16547 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16548 initialized);
16549 break;
16550
16551 case CONCATN:
16552 loc_result = concatn_loc_descriptor (rtl, initialized);
16553 break;
16554
16555 case VAR_LOCATION:
16556 /* Single part. */
16557 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16558 {
16559 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16560 if (GET_CODE (loc) == EXPR_LIST)
16561 loc = XEXP (loc, 0);
16562 loc_result = loc_descriptor (loc, mode, initialized);
16563 break;
16564 }
16565
16566 rtl = XEXP (rtl, 1);
16567 /* FALLTHRU */
16568
16569 case PARALLEL:
16570 {
16571 rtvec par_elems = XVEC (rtl, 0);
16572 int num_elem = GET_NUM_ELEM (par_elems);
16573 machine_mode mode;
16574 int i, size;
16575
16576 /* Create the first one, so we have something to add to. */
16577 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16578 VOIDmode, initialized);
16579 if (loc_result == NULL)
16580 return NULL;
16581 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16582 /* At present we only track constant-sized pieces. */
16583 if (!GET_MODE_SIZE (mode).is_constant (&size))
16584 return NULL;
16585 add_loc_descr_op_piece (&loc_result, size);
16586 for (i = 1; i < num_elem; i++)
16587 {
16588 dw_loc_descr_ref temp;
16589
16590 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16591 VOIDmode, initialized);
16592 if (temp == NULL)
16593 return NULL;
16594 add_loc_descr (&loc_result, temp);
16595 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16596 /* At present we only track constant-sized pieces. */
16597 if (!GET_MODE_SIZE (mode).is_constant (&size))
16598 return NULL;
16599 add_loc_descr_op_piece (&loc_result, size);
16600 }
16601 }
16602 break;
16603
16604 case CONST_INT:
16605 if (mode != VOIDmode && mode != BLKmode)
16606 {
16607 int_mode = as_a <scalar_int_mode> (mode);
16608 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16609 INTVAL (rtl));
16610 }
16611 break;
16612
16613 case CONST_DOUBLE:
16614 if (mode == VOIDmode)
16615 mode = GET_MODE (rtl);
16616
16617 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16618 {
16619 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16620
16621 /* Note that a CONST_DOUBLE rtx could represent either an integer
16622 or a floating-point constant. A CONST_DOUBLE is used whenever
16623 the constant requires more than one word in order to be
16624 adequately represented. We output CONST_DOUBLEs as blocks. */
16625 scalar_mode smode = as_a <scalar_mode> (mode);
16626 loc_result = new_loc_descr (DW_OP_implicit_value,
16627 GET_MODE_SIZE (smode), 0);
16628 #if TARGET_SUPPORTS_WIDE_INT == 0
16629 if (!SCALAR_FLOAT_MODE_P (smode))
16630 {
16631 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16632 loc_result->dw_loc_oprnd2.v.val_double
16633 = rtx_to_double_int (rtl);
16634 }
16635 else
16636 #endif
16637 {
16638 unsigned int length = GET_MODE_SIZE (smode);
16639 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16640
16641 insert_float (rtl, array);
16642 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16643 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16644 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16645 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16646 }
16647 }
16648 break;
16649
16650 case CONST_WIDE_INT:
16651 if (mode == VOIDmode)
16652 mode = GET_MODE (rtl);
16653
16654 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16655 {
16656 int_mode = as_a <scalar_int_mode> (mode);
16657 loc_result = new_loc_descr (DW_OP_implicit_value,
16658 GET_MODE_SIZE (int_mode), 0);
16659 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16660 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16661 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16662 }
16663 break;
16664
16665 case CONST_VECTOR:
16666 if (mode == VOIDmode)
16667 mode = GET_MODE (rtl);
16668
16669 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16670 {
16671 unsigned int length;
16672 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16673 return NULL;
16674
16675 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16676 unsigned char *array
16677 = ggc_vec_alloc<unsigned char> (length * elt_size);
16678 unsigned int i;
16679 unsigned char *p;
16680 machine_mode imode = GET_MODE_INNER (mode);
16681
16682 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16683 switch (GET_MODE_CLASS (mode))
16684 {
16685 case MODE_VECTOR_INT:
16686 for (i = 0, p = array; i < length; i++, p += elt_size)
16687 {
16688 rtx elt = CONST_VECTOR_ELT (rtl, i);
16689 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16690 }
16691 break;
16692
16693 case MODE_VECTOR_FLOAT:
16694 for (i = 0, p = array; i < length; i++, p += elt_size)
16695 {
16696 rtx elt = CONST_VECTOR_ELT (rtl, i);
16697 insert_float (elt, p);
16698 }
16699 break;
16700
16701 default:
16702 gcc_unreachable ();
16703 }
16704
16705 loc_result = new_loc_descr (DW_OP_implicit_value,
16706 length * elt_size, 0);
16707 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16708 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16709 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16710 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16711 }
16712 break;
16713
16714 case CONST:
16715 if (mode == VOIDmode
16716 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16717 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16718 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16719 {
16720 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16721 break;
16722 }
16723 /* FALLTHROUGH */
16724 case SYMBOL_REF:
16725 if (!const_ok_for_output (rtl))
16726 break;
16727 /* FALLTHROUGH */
16728 case LABEL_REF:
16729 if (is_a <scalar_int_mode> (mode, &int_mode)
16730 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16731 && (dwarf_version >= 4 || !dwarf_strict))
16732 {
16733 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16734 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16735 vec_safe_push (used_rtx_array, rtl);
16736 }
16737 break;
16738
16739 case DEBUG_IMPLICIT_PTR:
16740 loc_result = implicit_ptr_descriptor (rtl, 0);
16741 break;
16742
16743 case PLUS:
16744 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16745 && CONST_INT_P (XEXP (rtl, 1)))
16746 {
16747 loc_result
16748 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16749 break;
16750 }
16751 /* FALLTHRU */
16752 do_default:
16753 default:
16754 if ((is_a <scalar_int_mode> (mode, &int_mode)
16755 && GET_MODE (rtl) == int_mode
16756 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16757 && dwarf_version >= 4)
16758 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16759 {
16760 /* Value expression. */
16761 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16762 if (loc_result)
16763 add_loc_descr (&loc_result,
16764 new_loc_descr (DW_OP_stack_value, 0, 0));
16765 }
16766 break;
16767 }
16768
16769 return loc_result;
16770 }
16771
16772 /* We need to figure out what section we should use as the base for the
16773 address ranges where a given location is valid.
16774 1. If this particular DECL has a section associated with it, use that.
16775 2. If this function has a section associated with it, use that.
16776 3. Otherwise, use the text section.
16777 XXX: If you split a variable across multiple sections, we won't notice. */
16778
16779 static const char *
16780 secname_for_decl (const_tree decl)
16781 {
16782 const char *secname;
16783
16784 if (VAR_OR_FUNCTION_DECL_P (decl)
16785 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16786 && DECL_SECTION_NAME (decl))
16787 secname = DECL_SECTION_NAME (decl);
16788 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16789 secname = DECL_SECTION_NAME (current_function_decl);
16790 else if (cfun && in_cold_section_p)
16791 secname = crtl->subsections.cold_section_label;
16792 else
16793 secname = text_section_label;
16794
16795 return secname;
16796 }
16797
16798 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16799
16800 static bool
16801 decl_by_reference_p (tree decl)
16802 {
16803 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16804 || VAR_P (decl))
16805 && DECL_BY_REFERENCE (decl));
16806 }
16807
16808 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16809 for VARLOC. */
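/* As used below, WANT_ADDRESS == 0 requests an expression computing the
   value, a nonzero value requests its address, and 2 additionally allows
   falling back to an implicit DW_OP_stack_value location when no address
   is available.  */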
16810
16811 static dw_loc_descr_ref
16812 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16813 enum var_init_status initialized)
16814 {
16815 int have_address = 0;
16816 dw_loc_descr_ref descr;
16817 machine_mode mode;
16818
16819 if (want_address != 2)
16820 {
16821 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16822 /* Single part. */
16823 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16824 {
16825 varloc = PAT_VAR_LOCATION_LOC (varloc);
16826 if (GET_CODE (varloc) == EXPR_LIST)
16827 varloc = XEXP (varloc, 0);
16828 mode = GET_MODE (varloc);
16829 if (MEM_P (varloc))
16830 {
16831 rtx addr = XEXP (varloc, 0);
16832 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16833 mode, initialized);
16834 if (descr)
16835 have_address = 1;
16836 else
16837 {
16838 rtx x = avoid_constant_pool_reference (varloc);
16839 if (x != varloc)
16840 descr = mem_loc_descriptor (x, mode, VOIDmode,
16841 initialized);
16842 }
16843 }
16844 else
16845 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16846 }
16847 else
16848 return 0;
16849 }
16850 else
16851 {
16852 if (GET_CODE (varloc) == VAR_LOCATION)
16853 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16854 else
16855 mode = DECL_MODE (loc);
16856 descr = loc_descriptor (varloc, mode, initialized);
16857 have_address = 1;
16858 }
16859
16860 if (!descr)
16861 return 0;
16862
16863 if (want_address == 2 && !have_address
16864 && (dwarf_version >= 4 || !dwarf_strict))
16865 {
16866 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16867 {
16868 expansion_failed (loc, NULL_RTX,
16869 "DWARF address size mismatch");
16870 return 0;
16871 }
16872 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16873 have_address = 1;
16874 }
16875 /* Show if we can't fill the request for an address. */
16876 if (want_address && !have_address)
16877 {
16878 expansion_failed (loc, NULL_RTX,
16879 "Want address and only have value");
16880 return 0;
16881 }
16882
16883 /* If we've got an address and don't want one, dereference. */
16884 if (!want_address && have_address)
16885 {
16886 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16887 enum dwarf_location_atom op;
16888
16889 if (size > DWARF2_ADDR_SIZE || size == -1)
16890 {
16891 expansion_failed (loc, NULL_RTX,
16892 "DWARF address size mismatch");
16893 return 0;
16894 }
16895 else if (size == DWARF2_ADDR_SIZE)
16896 op = DW_OP_deref;
16897 else
16898 op = DW_OP_deref_size;
16899
16900 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16901 }
16902
16903 return descr;
16904 }
16905
16906 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16907 if it is not possible. */
16908
16909 static dw_loc_descr_ref
16910 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16911 {
16912 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16913 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16914 else if (dwarf_version >= 3 || !dwarf_strict)
16915 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16916 else
16917 return NULL;
16918 }
16919
16920 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16921 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16922
16923 static dw_loc_descr_ref
16924 dw_sra_loc_expr (tree decl, rtx loc)
16925 {
16926 rtx p;
16927 unsigned HOST_WIDE_INT padsize = 0;
16928 dw_loc_descr_ref descr, *descr_tail;
16929 unsigned HOST_WIDE_INT decl_size;
16930 rtx varloc;
16931 enum var_init_status initialized;
16932
16933 if (DECL_SIZE (decl) == NULL
16934 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16935 return NULL;
16936
16937 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16938 descr = NULL;
16939 descr_tail = &descr;
16940
16941 for (p = loc; p; p = XEXP (p, 1))
16942 {
16943 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16944 rtx loc_note = *decl_piece_varloc_ptr (p);
16945 dw_loc_descr_ref cur_descr;
16946 dw_loc_descr_ref *tail, last = NULL;
16947 unsigned HOST_WIDE_INT opsize = 0;
16948
16949 if (loc_note == NULL_RTX
16950 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16951 {
16952 padsize += bitsize;
16953 continue;
16954 }
16955 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16956 varloc = NOTE_VAR_LOCATION (loc_note);
16957 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16958 if (cur_descr == NULL)
16959 {
16960 padsize += bitsize;
16961 continue;
16962 }
16963
16964 /* Check that cur_descr either doesn't use
16965 DW_OP_*piece operations, or their sum is equal
16966 to bitsize. Otherwise we can't embed it. */
16967 for (tail = &cur_descr; *tail != NULL;
16968 tail = &(*tail)->dw_loc_next)
16969 if ((*tail)->dw_loc_opc == DW_OP_piece)
16970 {
16971 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16972 * BITS_PER_UNIT;
16973 last = *tail;
16974 }
16975 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16976 {
16977 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16978 last = *tail;
16979 }
16980
16981 if (last != NULL && opsize != bitsize)
16982 {
16983 padsize += bitsize;
16984 /* Discard the current piece of the descriptor and release any
16985 addr_table entries it uses. */
16986 remove_loc_list_addr_table_entries (cur_descr);
16987 continue;
16988 }
16989
16990 /* If there is a hole, add DW_OP_*piece after empty DWARF
16991 expression, which means that those bits are optimized out. */
16992 if (padsize)
16993 {
16994 if (padsize > decl_size)
16995 {
16996 remove_loc_list_addr_table_entries (cur_descr);
16997 goto discard_descr;
16998 }
16999 decl_size -= padsize;
17000 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17001 if (*descr_tail == NULL)
17002 {
17003 remove_loc_list_addr_table_entries (cur_descr);
17004 goto discard_descr;
17005 }
17006 descr_tail = &(*descr_tail)->dw_loc_next;
17007 padsize = 0;
17008 }
17009 *descr_tail = cur_descr;
17010 descr_tail = tail;
17011 if (bitsize > decl_size)
17012 goto discard_descr;
17013 decl_size -= bitsize;
17014 if (last == NULL)
17015 {
17016 HOST_WIDE_INT offset = 0;
17017 if (GET_CODE (varloc) == VAR_LOCATION
17018 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17019 {
17020 varloc = PAT_VAR_LOCATION_LOC (varloc);
17021 if (GET_CODE (varloc) == EXPR_LIST)
17022 varloc = XEXP (varloc, 0);
17023 }
17024 do
17025 {
17026 if (GET_CODE (varloc) == CONST
17027 || GET_CODE (varloc) == SIGN_EXTEND
17028 || GET_CODE (varloc) == ZERO_EXTEND)
17029 varloc = XEXP (varloc, 0);
17030 else if (GET_CODE (varloc) == SUBREG)
17031 varloc = SUBREG_REG (varloc);
17032 else
17033 break;
17034 }
17035 while (1);
17036 /* The DW_OP_bit_piece offset should be zero for register
17037 or implicit location descriptions and for empty location
17038 descriptions, but for memory addresses it needs big-endian
17039 adjustment. */
17040 if (MEM_P (varloc))
17041 {
17042 unsigned HOST_WIDE_INT memsize;
17043 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17044 goto discard_descr;
17045 memsize *= BITS_PER_UNIT;
17046 if (memsize != bitsize)
17047 {
17048 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17049 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17050 goto discard_descr;
17051 if (memsize < bitsize)
17052 goto discard_descr;
17053 if (BITS_BIG_ENDIAN)
17054 offset = memsize - bitsize;
17055 }
17056 }
17057
17058 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17059 if (*descr_tail == NULL)
17060 goto discard_descr;
17061 descr_tail = &(*descr_tail)->dw_loc_next;
17062 }
17063 }
17064
17065 /* If there were any non-empty expressions, add padding till the end of
17066 the decl. */
17067 if (descr != NULL && decl_size != 0)
17068 {
17069 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17070 if (*descr_tail == NULL)
17071 goto discard_descr;
17072 }
17073 return descr;
17074
17075 discard_descr:
17076 /* Discard the descriptor and release any addr_table entries it uses. */
17077 remove_loc_list_addr_table_entries (descr);
17078 return NULL;
17079 }
17080
17081 /* Return the dwarf representation of the location list LOC_LIST of
17082 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17083 function. */
17084
17085 static dw_loc_list_ref
17086 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17087 {
17088 const char *endname, *secname;
17089 var_loc_view endview;
17090 rtx varloc;
17091 enum var_init_status initialized;
17092 struct var_loc_node *node;
17093 dw_loc_descr_ref descr;
17094 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17095 dw_loc_list_ref list = NULL;
17096 dw_loc_list_ref *listp = &list;
17097
17098 /* Now that we know what section we are using for a base,
17099 actually construct the list of locations.
17100 The first location information is what is passed to the
17101 function that creates the location list, and the remaining
17102 locations just get added on to that list.
17103 Note that we only know the start address for a location
17104 (IE location changes), so to build the range, we use
17105 the range [current location start, next location start].
17106 This means we have to special case the last node, and generate
17107 a range of [last location start, end of function label]. */
17108
17109 if (cfun && crtl->has_bb_partition)
17110 {
17111 bool save_in_cold_section_p = in_cold_section_p;
17112 in_cold_section_p = first_function_block_is_cold;
17113 if (loc_list->last_before_switch == NULL)
17114 in_cold_section_p = !in_cold_section_p;
17115 secname = secname_for_decl (decl);
17116 in_cold_section_p = save_in_cold_section_p;
17117 }
17118 else
17119 secname = secname_for_decl (decl);
17120
17121 for (node = loc_list->first; node; node = node->next)
17122 {
17123 bool range_across_switch = false;
17124 if (GET_CODE (node->loc) == EXPR_LIST
17125 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17126 {
17127 if (GET_CODE (node->loc) == EXPR_LIST)
17128 {
17129 descr = NULL;
17130 /* This requires DW_OP_{,bit_}piece, which is not usable
17131 inside DWARF expressions. */
17132 if (want_address == 2)
17133 descr = dw_sra_loc_expr (decl, node->loc);
17134 }
17135 else
17136 {
17137 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17138 varloc = NOTE_VAR_LOCATION (node->loc);
17139 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17140 }
17141 if (descr)
17142 {
17143 /* If a section switch happens between node->label
17144 and node->next->label (or the end of the function) and
17145 we can't emit it as a single entry list,
17146 emit two ranges: the first one ending at the end
17147 of the first partition and the second one starting at the
17148 beginning of the second partition. */
17149 if (node == loc_list->last_before_switch
17150 && (node != loc_list->first || loc_list->first->next
17151 /* If we are to emit a view number, we will emit
17152 a loclist rather than a single location
17153 expression for the entire function (see
17154 loc_list_has_views), so we have to split the
17155 range that straddles across partitions. */
17156 || !ZERO_VIEW_P (node->view))
17157 && current_function_decl)
17158 {
17159 endname = cfun->fde->dw_fde_end;
17160 endview = 0;
17161 range_across_switch = true;
17162 }
17163 /* The variable has a location between NODE->LABEL and
17164 NODE->NEXT->LABEL. */
17165 else if (node->next)
17166 endname = node->next->label, endview = node->next->view;
17167 /* If the variable has a location at the last label
17168 it keeps its location until the end of function. */
17169 else if (!current_function_decl)
17170 endname = text_end_label, endview = 0;
17171 else
17172 {
17173 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17174 current_function_funcdef_no);
17175 endname = ggc_strdup (label_id);
17176 endview = 0;
17177 }
17178
17179 *listp = new_loc_list (descr, node->label, node->view,
17180 endname, endview, secname);
17181 if (TREE_CODE (decl) == PARM_DECL
17182 && node == loc_list->first
17183 && NOTE_P (node->loc)
17184 && strcmp (node->label, endname) == 0)
17185 (*listp)->force = true;
17186 listp = &(*listp)->dw_loc_next;
17187 }
17188 }
17189
17190 if (cfun
17191 && crtl->has_bb_partition
17192 && node == loc_list->last_before_switch)
17193 {
17194 bool save_in_cold_section_p = in_cold_section_p;
17195 in_cold_section_p = !first_function_block_is_cold;
17196 secname = secname_for_decl (decl);
17197 in_cold_section_p = save_in_cold_section_p;
17198 }
17199
17200 if (range_across_switch)
17201 {
17202 if (GET_CODE (node->loc) == EXPR_LIST)
17203 descr = dw_sra_loc_expr (decl, node->loc);
17204 else
17205 {
17206 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17207 varloc = NOTE_VAR_LOCATION (node->loc);
17208 descr = dw_loc_list_1 (decl, varloc, want_address,
17209 initialized);
17210 }
17211 gcc_assert (descr);
17212 /* The variable has a location between NODE->LABEL and
17213 NODE->NEXT->LABEL. */
17214 if (node->next)
17215 endname = node->next->label, endview = node->next->view;
17216 else
17217 endname = cfun->fde->dw_fde_second_end, endview = 0;
17218 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17219 endname, endview, secname);
17220 listp = &(*listp)->dw_loc_next;
17221 }
17222 }
17223
17224 /* Try to avoid the overhead of a location list by emitting a location
17225 expression instead, but only if we didn't have more than one
17226 location entry in the first place. If some entries were not
17227 representable, we don't want to pretend that a single entry that was
17228 representable applies to the entire scope in which the variable is
17229 available. */
17230 if (list && loc_list->first->next)
17231 gen_llsym (list);
17232 else
17233 maybe_gen_llsym (list);
17234
17235 return list;
17236 }
17237
17238 /* Return true if the loc_list has only a single element and thus can be
17239 represented as a location description. */
17240
17241 static bool
17242 single_element_loc_list_p (dw_loc_list_ref list)
17243 {
17244 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17245 return !list->ll_symbol;
17246 }
17247
17248 /* Duplicate a single element of a location list. */
17249
17250 static inline dw_loc_descr_ref
17251 copy_loc_descr (dw_loc_descr_ref ref)
17252 {
17253 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17254 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17255 return copy;
17256 }
17257
17258 /* To each location in list LIST append loc descr REF. */
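/* The first list element takes REF itself; every following element
   receives a deep copy of the chain so that the location expressions
   stay independent of each other.  */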
17259
17260 static void
17261 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17262 {
17263 dw_loc_descr_ref copy;
17264 add_loc_descr (&list->expr, ref);
17265 list = list->dw_loc_next;
17266 while (list)
17267 {
17268 copy = copy_loc_descr (ref);
17269 add_loc_descr (&list->expr, copy);
17270 while (copy->dw_loc_next)
17271 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17272 list = list->dw_loc_next;
17273 }
17274 }
17275
17276 /* To each location in list LIST prepend loc descr REF. */
17277
17278 static void
17279 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17280 {
17281 dw_loc_descr_ref copy;
17282 dw_loc_descr_ref ref_end = list->expr;
17283 add_loc_descr (&ref, list->expr);
17284 list->expr = ref;
17285 list = list->dw_loc_next;
17286 while (list)
17287 {
17288 dw_loc_descr_ref end = list->expr;
17289 list->expr = copy = copy_loc_descr (ref);
17290 while (copy->dw_loc_next != ref_end)
17291 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17292 copy->dw_loc_next = end;
17293 list = list->dw_loc_next;
17294 }
17295 }
17296
17297 /* Given two lists RET and LIST,
17298 produce a location list that is the result of adding the expression in LIST
17299 to the expression in RET at each position in the program.
17300 Might be destructive on both RET and LIST.
17301
17302 TODO: We handle only the simple cases of RET or LIST having at most one
17303 element. The general case would involve sorting the lists in program order
17304 and merging them, which will need some additional work.
17305 Adding that would improve the quality of debug info, especially for SRA-ed
17306 structures. */
17307
17308 static void
17309 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17310 {
17311 if (!list)
17312 return;
17313 if (!*ret)
17314 {
17315 *ret = list;
17316 return;
17317 }
17318 if (!list->dw_loc_next)
17319 {
17320 add_loc_descr_to_each (*ret, list->expr);
17321 return;
17322 }
17323 if (!(*ret)->dw_loc_next)
17324 {
17325 prepend_loc_descr_to_each (list, (*ret)->expr);
17326 *ret = list;
17327 return;
17328 }
17329 expansion_failed (NULL_TREE, NULL_RTX,
17330 "Don't know how to merge two non-trivial"
17331 " location lists.\n");
17332 *ret = NULL;
17333 return;
17334 }
17335
17336 /* LOC is a constant expression. Try our luck: look it up in the constant
17337 pool and return a loc_descr for its address. */
17338
17339 static dw_loc_descr_ref
17340 cst_pool_loc_descr (tree loc)
17341 {
17342 /* Get an RTL for this, if something has been emitted. */
17343 rtx rtl = lookup_constant_def (loc);
17344
17345 if (!rtl || !MEM_P (rtl))
17346 {
17347 gcc_assert (!rtl);
17348 return 0;
17349 }
17350 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17351
17352 /* TODO: We might get more coverage if we were actually delaying expansion
17353 of all expressions till the end of compilation, when constant pools are
17354 fully populated. */
17355 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17356 {
17357 expansion_failed (loc, NULL_RTX,
17358 "CST value in constant pool but not marked.");
17359 return 0;
17360 }
17361 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17362 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17363 }
17364
17365 /* Return a dw_loc_list representing the address of the addr_expr LOC
17366 by looking for an inner INDIRECT_REF expression and turning
17367 it into simple arithmetic.
17368
17369 See loc_list_from_tree for the meaning of CONTEXT. */
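/* E.g. for &ptr->field this reduces to the value of ptr plus the byte
   offset of field, emitted as a DW_OP_stack_value expression when TOPLEV
   (or simply to the location of ptr when there is no offset).  */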
17370
17371 static dw_loc_list_ref
17372 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17373 loc_descr_context *context)
17374 {
17375 tree obj, offset;
17376 poly_int64 bitsize, bitpos, bytepos;
17377 machine_mode mode;
17378 int unsignedp, reversep, volatilep = 0;
17379 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17380
17381 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17382 &bitsize, &bitpos, &offset, &mode,
17383 &unsignedp, &reversep, &volatilep);
17384 STRIP_NOPS (obj);
17385 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17386 {
17387 expansion_failed (loc, NULL_RTX, "bitfield access");
17388 return 0;
17389 }
17390 if (!INDIRECT_REF_P (obj))
17391 {
17392 expansion_failed (obj,
17393 NULL_RTX, "no indirect ref in inner reference");
17394 return 0;
17395 }
17396 if (!offset && known_eq (bitpos, 0))
17397 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17398 context);
17399 else if (toplev
17400 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17401 && (dwarf_version >= 4 || !dwarf_strict))
17402 {
17403 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17404 if (!list_ret)
17405 return 0;
17406 if (offset)
17407 {
17408 /* Variable offset. */
17409 list_ret1 = loc_list_from_tree (offset, 0, context);
17410 if (list_ret1 == 0)
17411 return 0;
17412 add_loc_list (&list_ret, list_ret1);
17413 if (!list_ret)
17414 return 0;
17415 add_loc_descr_to_each (list_ret,
17416 new_loc_descr (DW_OP_plus, 0, 0));
17417 }
17418 HOST_WIDE_INT value;
17419 if (bytepos.is_constant (&value) && value > 0)
17420 add_loc_descr_to_each (list_ret,
17421 new_loc_descr (DW_OP_plus_uconst, value, 0));
17422 else if (maybe_ne (bytepos, 0))
17423 loc_list_plus_const (list_ret, bytepos);
17424 add_loc_descr_to_each (list_ret,
17425 new_loc_descr (DW_OP_stack_value, 0, 0));
17426 }
17427 return list_ret;
17428 }
17429
17430 /* Set LOC to the next operation that is not a DW_OP_nop operation. If
17431 all operations from LOC are nops, move to the last one. Insert into NOPS all
17432 operations that are skipped. */
17433
17434 static void
17435 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17436 hash_set<dw_loc_descr_ref> &nops)
17437 {
17438 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17439 {
17440 nops.add (loc);
17441 loc = loc->dw_loc_next;
17442 }
17443 }
17444
17445 /* Helper for loc_descr_without_nops: free the location description operation
17446 LOC. */
17447
17448 bool
17449 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17450 {
17451 ggc_free (loc);
17452 return true;
17453 }
17454
17455 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17456 finishes LOC. */
17457
17458 static void
17459 loc_descr_without_nops (dw_loc_descr_ref &loc)
17460 {
17461 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17462 return;
17463
17464 /* Set of all DW_OP_nop operations we remove. */
17465 hash_set<dw_loc_descr_ref> nops;
17466
17467 /* First, strip all prefix NOP operations in order to keep the head of the
17468 operations list. */
17469 loc_descr_to_next_no_nop (loc, nops);
17470
17471 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17472 {
17473 /* For control flow operations: strip "prefix" nops in destination
17474 labels. */
17475 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17476 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17477 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17478 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17479
17480 /* Do the same for the operations that follow, then move to the next
17481 iteration. */
17482 if (cur->dw_loc_next != NULL)
17483 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17484 cur = cur->dw_loc_next;
17485 }
17486
17487 nops.traverse<void *, free_loc_descr> (NULL);
17488 }
17489
17490
17491 struct dwarf_procedure_info;
17492
17493 /* Helper structure for location descriptions generation. */
17494 struct loc_descr_context
17495 {
17496 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17497 NULL_TREE if DW_OP_push_object_address is invalid for this location
17498 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17499 tree context_type;
17500 /* The ..._DECL node that should be translated as a
17501 DW_OP_push_object_address operation. */
17502 tree base_decl;
17503 /* Information about the DWARF procedure we are currently generating. NULL if
17504 we are not generating a DWARF procedure. */
17505 struct dwarf_procedure_info *dpi;
17506 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17507 by consumer. Used for DW_TAG_generic_subrange attributes. */
17508 bool placeholder_arg;
17509 /* True if PLACEHOLDER_EXPR has been seen. */
17510 bool placeholder_seen;
17511 };
17512
17513 /* DWARF procedures generation
17514
17515 DWARF expressions (a.k.a. location descriptions) are used to encode variable
17516 quantities such as sizes or offsets. Such computations can have redundant parts
17517 that can be factorized in order to reduce the size of the output debug
17518 information. This is the whole point of DWARF procedures.
17519
17520 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17521 already factorized into functions ("size functions") in order to handle very
17522 big and complex types. Such functions are quite simple: they have integral
17523 arguments, they return an integral result and their body contains only a
17524 return statement with arithmetic expressions. This is the only kind of
17525 function we are interested in translating into DWARF procedures here.
17526
17527 DWARF expressions and DWARF procedures are executed using a stack, so we have
17528 to define some calling convention for them to interact. Let's say that:
17529
17530 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17531 all arguments in reverse order (right-to-left) so that when the DWARF
17532 procedure execution starts, the first argument is the top of the stack.
17533
17534 - Then, when returning, the DWARF procedure must have consumed all arguments
17535 on the stack, must have pushed the result and touched nothing else.
17536
17537 - Each integral argument and the result have integral types that can be held
17538 in a single stack slot.
17539
17540 - We call "frame offset" the number of stack slots that are "under DWARF
17541 procedure control": it includes the arguments slots, the temporaries and
17542 the result slot. Thus, it is equal to the number of arguments when the
17543 procedure execution starts and must be equal to one (the result) when it
17544 returns. */
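/* Purely illustrative sketch of this convention (not generated from any
   particular source): a two-argument DWARF procedure P computing ARG0 + ARG1
   could be called and defined as

     caller:    <push ARG1> <push ARG0> DW_OP_call4 <DIE of P>
     P body:    DW_OP_plus

   so the frame offset is 2 when P starts executing and 1 (the result) when
   it returns, matching the 1 - args_count stack usage recorded below. */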
17545
17546 /* Helper structure used when generating operations for a DWARF procedure. */
17547 struct dwarf_procedure_info
17548 {
17549 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17550 currently translated. */
17551 tree fndecl;
17552 /* The number of arguments FNDECL takes. */
17553 unsigned args_count;
17554 };
17555
17556 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17557 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17558 equate it to this DIE. */
17559
17560 static dw_die_ref
17561 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17562 dw_die_ref parent_die)
17563 {
17564 dw_die_ref dwarf_proc_die;
17565
17566 if ((dwarf_version < 3 && dwarf_strict)
17567 || location == NULL)
17568 return NULL;
17569
17570 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17571 if (fndecl)
17572 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17573 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17574 return dwarf_proc_die;
17575 }
17576
17577 /* Return whether TYPE is a supported type as a DWARF procedure argument
17578 type or return type (we handle only scalar types and pointer types that
17579 aren't wider than the DWARF expression evaluation stack). */
17580
17581 static bool
17582 is_handled_procedure_type (tree type)
17583 {
17584 return ((INTEGRAL_TYPE_P (type)
17585 || TREE_CODE (type) == OFFSET_TYPE
17586 || TREE_CODE (type) == POINTER_TYPE)
17587 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17588 }
17589
17590 /* Helper for resolve_args_picking: do the same but stop when coming across
17591 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17592 offset *before* evaluating the corresponding operation. */
17593
17594 static bool
17595 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17596 struct dwarf_procedure_info *dpi,
17597 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17598 {
17599 /* The "frame_offset" identifier is already used to name a macro... */
17600 unsigned frame_offset_ = initial_frame_offset;
17601 dw_loc_descr_ref l;
17602
17603 for (l = loc; l != NULL;)
17604 {
17605 bool existed;
17606 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17607
17608 /* If we have already visited this node, there is nothing more to compute. */
17609 if (existed)
17610 {
17611 /* Make sure that the stack size is consistent wherever the execution
17612 flow comes from. */
17613 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17614 break;
17615 }
17616 l_frame_offset = frame_offset_;
17617
17618 /* If needed, relocate the picking offset with respect to the frame
17619 offset. */
17620 if (l->frame_offset_rel)
17621 {
17622 unsigned HOST_WIDE_INT off;
17623 switch (l->dw_loc_opc)
17624 {
17625 case DW_OP_pick:
17626 off = l->dw_loc_oprnd1.v.val_unsigned;
17627 break;
17628 case DW_OP_dup:
17629 off = 0;
17630 break;
17631 case DW_OP_over:
17632 off = 1;
17633 break;
17634 default:
17635 gcc_unreachable ();
17636 }
17637 /* frame_offset_ is the size of the current stack frame, including
17638 incoming arguments. Besides, the arguments are pushed
17639 right-to-left. Thus, in order to access the Nth argument from
17640 this operation node, the picking has to skip temporaries *plus*
17641 one stack slot per argument (0 for the first one, 1 for the second
17642 one, etc.).
17643
17644 The targeted argument number (N) is already set as the operand,
17645 and the number of temporaries can be computed with:
17646 frame_offset_ - dpi->args_count. */
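/* Illustrative numbers only: with dpi->args_count == 2 and
   frame_offset_ == 3 (two arguments plus one temporary), accessing
   argument N == 1 gives off = 1 + (3 - 2) = 2, i.e. DW_OP_pick 2. */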
17647 off += frame_offset_ - dpi->args_count;
17648
17649 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17650 if (off > 255)
17651 return false;
17652
17653 if (off == 0)
17654 {
17655 l->dw_loc_opc = DW_OP_dup;
17656 l->dw_loc_oprnd1.v.val_unsigned = 0;
17657 }
17658 else if (off == 1)
17659 {
17660 l->dw_loc_opc = DW_OP_over;
17661 l->dw_loc_oprnd1.v.val_unsigned = 0;
17662 }
17663 else
17664 {
17665 l->dw_loc_opc = DW_OP_pick;
17666 l->dw_loc_oprnd1.v.val_unsigned = off;
17667 }
17668 }
17669
17670 /* Update frame_offset according to the effect the current operation has
17671 on the stack. */
17672 switch (l->dw_loc_opc)
17673 {
17674 case DW_OP_deref:
17675 case DW_OP_swap:
17676 case DW_OP_rot:
17677 case DW_OP_abs:
17678 case DW_OP_neg:
17679 case DW_OP_not:
17680 case DW_OP_plus_uconst:
17681 case DW_OP_skip:
17682 case DW_OP_reg0:
17683 case DW_OP_reg1:
17684 case DW_OP_reg2:
17685 case DW_OP_reg3:
17686 case DW_OP_reg4:
17687 case DW_OP_reg5:
17688 case DW_OP_reg6:
17689 case DW_OP_reg7:
17690 case DW_OP_reg8:
17691 case DW_OP_reg9:
17692 case DW_OP_reg10:
17693 case DW_OP_reg11:
17694 case DW_OP_reg12:
17695 case DW_OP_reg13:
17696 case DW_OP_reg14:
17697 case DW_OP_reg15:
17698 case DW_OP_reg16:
17699 case DW_OP_reg17:
17700 case DW_OP_reg18:
17701 case DW_OP_reg19:
17702 case DW_OP_reg20:
17703 case DW_OP_reg21:
17704 case DW_OP_reg22:
17705 case DW_OP_reg23:
17706 case DW_OP_reg24:
17707 case DW_OP_reg25:
17708 case DW_OP_reg26:
17709 case DW_OP_reg27:
17710 case DW_OP_reg28:
17711 case DW_OP_reg29:
17712 case DW_OP_reg30:
17713 case DW_OP_reg31:
17714 case DW_OP_bregx:
17715 case DW_OP_piece:
17716 case DW_OP_deref_size:
17717 case DW_OP_nop:
17718 case DW_OP_bit_piece:
17719 case DW_OP_implicit_value:
17720 case DW_OP_stack_value:
17721 break;
17722
17723 case DW_OP_addr:
17724 case DW_OP_const1u:
17725 case DW_OP_const1s:
17726 case DW_OP_const2u:
17727 case DW_OP_const2s:
17728 case DW_OP_const4u:
17729 case DW_OP_const4s:
17730 case DW_OP_const8u:
17731 case DW_OP_const8s:
17732 case DW_OP_constu:
17733 case DW_OP_consts:
17734 case DW_OP_dup:
17735 case DW_OP_over:
17736 case DW_OP_pick:
17737 case DW_OP_lit0:
17738 case DW_OP_lit1:
17739 case DW_OP_lit2:
17740 case DW_OP_lit3:
17741 case DW_OP_lit4:
17742 case DW_OP_lit5:
17743 case DW_OP_lit6:
17744 case DW_OP_lit7:
17745 case DW_OP_lit8:
17746 case DW_OP_lit9:
17747 case DW_OP_lit10:
17748 case DW_OP_lit11:
17749 case DW_OP_lit12:
17750 case DW_OP_lit13:
17751 case DW_OP_lit14:
17752 case DW_OP_lit15:
17753 case DW_OP_lit16:
17754 case DW_OP_lit17:
17755 case DW_OP_lit18:
17756 case DW_OP_lit19:
17757 case DW_OP_lit20:
17758 case DW_OP_lit21:
17759 case DW_OP_lit22:
17760 case DW_OP_lit23:
17761 case DW_OP_lit24:
17762 case DW_OP_lit25:
17763 case DW_OP_lit26:
17764 case DW_OP_lit27:
17765 case DW_OP_lit28:
17766 case DW_OP_lit29:
17767 case DW_OP_lit30:
17768 case DW_OP_lit31:
17769 case DW_OP_breg0:
17770 case DW_OP_breg1:
17771 case DW_OP_breg2:
17772 case DW_OP_breg3:
17773 case DW_OP_breg4:
17774 case DW_OP_breg5:
17775 case DW_OP_breg6:
17776 case DW_OP_breg7:
17777 case DW_OP_breg8:
17778 case DW_OP_breg9:
17779 case DW_OP_breg10:
17780 case DW_OP_breg11:
17781 case DW_OP_breg12:
17782 case DW_OP_breg13:
17783 case DW_OP_breg14:
17784 case DW_OP_breg15:
17785 case DW_OP_breg16:
17786 case DW_OP_breg17:
17787 case DW_OP_breg18:
17788 case DW_OP_breg19:
17789 case DW_OP_breg20:
17790 case DW_OP_breg21:
17791 case DW_OP_breg22:
17792 case DW_OP_breg23:
17793 case DW_OP_breg24:
17794 case DW_OP_breg25:
17795 case DW_OP_breg26:
17796 case DW_OP_breg27:
17797 case DW_OP_breg28:
17798 case DW_OP_breg29:
17799 case DW_OP_breg30:
17800 case DW_OP_breg31:
17801 case DW_OP_fbreg:
17802 case DW_OP_push_object_address:
17803 case DW_OP_call_frame_cfa:
17804 case DW_OP_GNU_variable_value:
17805 ++frame_offset_;
17806 break;
17807
17808 case DW_OP_drop:
17809 case DW_OP_xderef:
17810 case DW_OP_and:
17811 case DW_OP_div:
17812 case DW_OP_minus:
17813 case DW_OP_mod:
17814 case DW_OP_mul:
17815 case DW_OP_or:
17816 case DW_OP_plus:
17817 case DW_OP_shl:
17818 case DW_OP_shr:
17819 case DW_OP_shra:
17820 case DW_OP_xor:
17821 case DW_OP_bra:
17822 case DW_OP_eq:
17823 case DW_OP_ge:
17824 case DW_OP_gt:
17825 case DW_OP_le:
17826 case DW_OP_lt:
17827 case DW_OP_ne:
17828 case DW_OP_regx:
17829 case DW_OP_xderef_size:
17830 --frame_offset_;
17831 break;
17832
17833 case DW_OP_call2:
17834 case DW_OP_call4:
17835 case DW_OP_call_ref:
17836 {
17837 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17838 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17839
17840 if (stack_usage == NULL)
17841 return false;
17842 frame_offset_ += *stack_usage;
17843 break;
17844 }
17845
17846 case DW_OP_implicit_pointer:
17847 case DW_OP_entry_value:
17848 case DW_OP_const_type:
17849 case DW_OP_regval_type:
17850 case DW_OP_deref_type:
17851 case DW_OP_convert:
17852 case DW_OP_reinterpret:
17853 case DW_OP_form_tls_address:
17854 case DW_OP_GNU_push_tls_address:
17855 case DW_OP_GNU_uninit:
17856 case DW_OP_GNU_encoded_addr:
17857 case DW_OP_GNU_implicit_pointer:
17858 case DW_OP_GNU_entry_value:
17859 case DW_OP_GNU_const_type:
17860 case DW_OP_GNU_regval_type:
17861 case DW_OP_GNU_deref_type:
17862 case DW_OP_GNU_convert:
17863 case DW_OP_GNU_reinterpret:
17864 case DW_OP_GNU_parameter_ref:
17865 /* loc_list_from_tree will probably not output these operations for
17866 size functions, so assume they will not appear here. */
17867 /* Fall through... */
17868
17869 default:
17870 gcc_unreachable ();
17871 }
17872
17873 /* Now, follow the control flow (except subroutine calls). */
17874 switch (l->dw_loc_opc)
17875 {
17876 case DW_OP_bra:
17877 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17878 frame_offsets))
17879 return false;
17880 /* Fall through. */
17881
17882 case DW_OP_skip:
17883 l = l->dw_loc_oprnd1.v.val_loc;
17884 break;
17885
17886 case DW_OP_stack_value:
17887 return true;
17888
17889 default:
17890 l = l->dw_loc_next;
17891 break;
17892 }
17893 }
17894
17895 return true;
17896 }
17897
17898 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17899 operations) in order to resolve the operand of DW_OP_pick operations that
17900 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17901 offset *before* LOC is executed. Return whether all relocations were
17902 successful. */
17903
17904 static bool
17905 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17906 struct dwarf_procedure_info *dpi)
17907 {
17908 /* Associate to all visited operations the frame offset *before* evaluating
17909 this operation. */
17910 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17911
17912 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17913 frame_offsets);
17914 }
17915
17916 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17917 Return NULL if it is not possible. */
17918
17919 static dw_die_ref
17920 function_to_dwarf_procedure (tree fndecl)
17921 {
17922 struct loc_descr_context ctx;
17923 struct dwarf_procedure_info dpi;
17924 dw_die_ref dwarf_proc_die;
17925 tree tree_body = DECL_SAVED_TREE (fndecl);
17926 dw_loc_descr_ref loc_body, epilogue;
17927
17928 tree cursor;
17929 unsigned i;
17930
17931 /* Do not generate multiple DWARF procedures for the same function
17932 declaration. */
17933 dwarf_proc_die = lookup_decl_die (fndecl);
17934 if (dwarf_proc_die != NULL)
17935 return dwarf_proc_die;
17936
17937 /* DWARF procedures are available starting with the DWARFv3 standard. */
17938 if (dwarf_version < 3 && dwarf_strict)
17939 return NULL;
17940
17941 /* We handle only functions for which we still have a body, that return a
17942 supported type and that take arguments with supported types. Note that
17943 there is no point in translating functions that return nothing. */
17944 if (tree_body == NULL_TREE
17945 || DECL_RESULT (fndecl) == NULL_TREE
17946 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17947 return NULL;
17948
17949 for (cursor = DECL_ARGUMENTS (fndecl);
17950 cursor != NULL_TREE;
17951 cursor = TREE_CHAIN (cursor))
17952 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17953 return NULL;
17954
17955 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17956 if (TREE_CODE (tree_body) != RETURN_EXPR)
17957 return NULL;
17958 tree_body = TREE_OPERAND (tree_body, 0);
17959 if (TREE_CODE (tree_body) != MODIFY_EXPR
17960 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17961 return NULL;
17962 tree_body = TREE_OPERAND (tree_body, 1);
17963
17964 /* Try to translate the body expression itself. Note that this will probably
17965 cause an infinite recursion if its call graph has a cycle. This is very
17966 unlikely for size functions, however, so don't bother with such things at
17967 the moment. */
17968 ctx.context_type = NULL_TREE;
17969 ctx.base_decl = NULL_TREE;
17970 ctx.dpi = &dpi;
17971 ctx.placeholder_arg = false;
17972 ctx.placeholder_seen = false;
17973 dpi.fndecl = fndecl;
17974 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17975 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17976 if (!loc_body)
17977 return NULL;
17978
17979 /* After evaluating all operands in "loc_body", we should still have on the
17980 stack all arguments plus the desired function result (top of the stack).
17981 Generate code in order to keep only the result in our stack frame. */
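/* For instance (purely illustrative), with two arguments the loop below
   builds the sequence DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop, which
   discards both argument slots while keeping the result on top. */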
17982 epilogue = NULL;
17983 for (i = 0; i < dpi.args_count; ++i)
17984 {
17985 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17986 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17987 op_couple->dw_loc_next->dw_loc_next = epilogue;
17988 epilogue = op_couple;
17989 }
17990 add_loc_descr (&loc_body, epilogue);
17991 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17992 return NULL;
17993
17994 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
17995 because they were considered useful. Now that there is an epilogue, they
17996 no longer are, so give it another try. */
17997 loc_descr_without_nops (loc_body);
17998
17999 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18000 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
18001 though, given that size functions do not come from source, so they should
18002 not have a dedicated DW_TAG_subprogram DIE. */
18003 dwarf_proc_die
18004 = new_dwarf_proc_die (loc_body, fndecl,
18005 get_context_die (DECL_CONTEXT (fndecl)));
18006
18007 /* The called DWARF procedure consumes one stack slot per argument and
18008 returns one stack slot. */
18009 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18010
18011 return dwarf_proc_die;
18012 }
18013
18014
18015 /* Generate a DWARF location list representing LOC.
18016 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned;
18017 if WANT_ADDRESS is 1, an expression computing the address of LOC is returned;
18018 if WANT_ADDRESS is 2, an expression computing an address usable in a
18019 location description is returned (i.e. DW_OP_reg can be used
18020 to refer to register values).
18021
18022 CONTEXT provides information to customize the location descriptions
18023 generation. Its context_type field specifies what type is implicitly
18024 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18025 will not be generated.
18026
18027 Its DPI field determines whether we are generating a DWARF expression for a
18028 DWARF procedure, so PARM_DECL references are processed specifically.
18029
18030 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18031 and dpi fields were null. */
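/* Rough illustration (not tied to any particular target): if the address of
   LOC can be computed as, say, DW_OP_breg6 -16, then WANT_ADDRESS >= 1
   returns that expression as-is, while WANT_ADDRESS == 0 returns it followed
   by a DW_OP_deref (or DW_OP_deref_size), as done at the end of this
   function. */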
18032
18033 static dw_loc_list_ref
18034 loc_list_from_tree_1 (tree loc, int want_address,
18035 struct loc_descr_context *context)
18036 {
18037 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18038 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18039 int have_address = 0;
18040 enum dwarf_location_atom op;
18041
18042 /* ??? Most of the time we do not take proper care to sign/zero
18043 extend the values. Hopefully this won't be a real
18044 problem... */
18045
18046 if (context != NULL
18047 && context->base_decl == loc
18048 && want_address == 0)
18049 {
18050 if (dwarf_version >= 3 || !dwarf_strict)
18051 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18052 NULL, 0, NULL, 0, NULL);
18053 else
18054 return NULL;
18055 }
18056
18057 switch (TREE_CODE (loc))
18058 {
18059 case ERROR_MARK:
18060 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18061 return 0;
18062
18063 case PLACEHOLDER_EXPR:
18064 /* This case involves extracting fields from an object to determine the
18065 position of other fields. It is supposed to appear only as the first
18066 operand of COMPONENT_REF nodes and to reference precisely the type
18067 that the context allows. */
18068 if (context != NULL
18069 && TREE_TYPE (loc) == context->context_type
18070 && want_address >= 1)
18071 {
18072 if (dwarf_version >= 3 || !dwarf_strict)
18073 {
18074 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18075 have_address = 1;
18076 break;
18077 }
18078 else
18079 return NULL;
18080 }
18081 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18082 the single argument passed by consumer. */
18083 else if (context != NULL
18084 && context->placeholder_arg
18085 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18086 && want_address == 0)
18087 {
18088 ret = new_loc_descr (DW_OP_pick, 0, 0);
18089 ret->frame_offset_rel = 1;
18090 context->placeholder_seen = true;
18091 break;
18092 }
18093 else
18094 expansion_failed (loc, NULL_RTX,
18095 "PLACEHOLDER_EXPR for an unexpected type");
18096 break;
18097
18098 case CALL_EXPR:
18099 {
18100 const int nargs = call_expr_nargs (loc);
18101 tree callee = get_callee_fndecl (loc);
18102 int i;
18103 dw_die_ref dwarf_proc;
18104
18105 if (callee == NULL_TREE)
18106 goto call_expansion_failed;
18107
18108 /* We handle only functions that return an integer. */
18109 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18110 goto call_expansion_failed;
18111
18112 dwarf_proc = function_to_dwarf_procedure (callee);
18113 if (dwarf_proc == NULL)
18114 goto call_expansion_failed;
18115
18116 /* Evaluate arguments right-to-left so that the first argument will
18117 be the top-most one on the stack. */
18118 for (i = nargs - 1; i >= 0; --i)
18119 {
18120 dw_loc_descr_ref loc_descr
18121 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18122 context);
18123
18124 if (loc_descr == NULL)
18125 goto call_expansion_failed;
18126
18127 add_loc_descr (&ret, loc_descr);
18128 }
18129
18130 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18131 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18132 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18133 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18134 add_loc_descr (&ret, ret1);
18135 break;
18136
18137 call_expansion_failed:
18138 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18139 /* We could not translate this call into a DWARF expression. */
18140 return 0;
18141 }
18142
18143 case PREINCREMENT_EXPR:
18144 case PREDECREMENT_EXPR:
18145 case POSTINCREMENT_EXPR:
18146 case POSTDECREMENT_EXPR:
18147 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18148 /* There are no opcodes for these operations. */
18149 return 0;
18150
18151 case ADDR_EXPR:
18152 /* If we already want an address, see if there is INDIRECT_REF inside
18153 e.g. for &this->field. */
18154 if (want_address)
18155 {
18156 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18157 (loc, want_address == 2, context);
18158 if (list_ret)
18159 have_address = 1;
18160 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18161 && (ret = cst_pool_loc_descr (loc)))
18162 have_address = 1;
18163 }
18164 /* Otherwise, process the argument and look for the address. */
18165 if (!list_ret && !ret)
18166 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18167 else
18168 {
18169 if (want_address)
18170 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18171 return NULL;
18172 }
18173 break;
18174
18175 case VAR_DECL:
18176 if (DECL_THREAD_LOCAL_P (loc))
18177 {
18178 rtx rtl;
18179 enum dwarf_location_atom tls_op;
18180 enum dtprel_bool dtprel = dtprel_false;
18181
18182 if (targetm.have_tls)
18183 {
18184 /* If this is not defined, we have no way to emit the
18185 data. */
18186 if (!targetm.asm_out.output_dwarf_dtprel)
18187 return 0;
18188
18189 /* The way DW_OP_GNU_push_tls_address is specified, we
18190 can only look up addresses of objects in the current
18191 module. We used DW_OP_addr as first op, but that's
18192 wrong, because DW_OP_addr is relocated by the debug
18193 info consumer, while DW_OP_GNU_push_tls_address
18194 operand shouldn't be. */
18195 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18196 return 0;
18197 dtprel = dtprel_true;
18198 /* We check for DWARF 5 here because gdb did not implement
18199 DW_OP_form_tls_address until after 7.12. */
18200 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18201 : DW_OP_GNU_push_tls_address);
18202 }
18203 else
18204 {
18205 if (!targetm.emutls.debug_form_tls_address
18206 || !(dwarf_version >= 3 || !dwarf_strict))
18207 return 0;
18208 /* We stuffed the control variable into the DECL_VALUE_EXPR
18209 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18210 no longer appear in gimple code. We used the control
18211 variable specifically so that we could pick it up here. */
18212 loc = DECL_VALUE_EXPR (loc);
18213 tls_op = DW_OP_form_tls_address;
18214 }
18215
18216 rtl = rtl_for_decl_location (loc);
18217 if (rtl == NULL_RTX)
18218 return 0;
18219
18220 if (!MEM_P (rtl))
18221 return 0;
18222 rtl = XEXP (rtl, 0);
18223 if (! CONSTANT_P (rtl))
18224 return 0;
18225
18226 ret = new_addr_loc_descr (rtl, dtprel);
18227 ret1 = new_loc_descr (tls_op, 0, 0);
18228 add_loc_descr (&ret, ret1);
18229
18230 have_address = 1;
18231 break;
18232 }
18233 /* FALLTHRU */
18234
18235 case PARM_DECL:
18236 if (context != NULL && context->dpi != NULL
18237 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18238 {
18239 /* We are generating code for a DWARF procedure and we want to access
18240 one of its arguments: find the appropriate argument offset and let
18241 the resolve_args_picking pass compute the offset that complies
18242 with the stack frame size. */
18243 unsigned i = 0;
18244 tree cursor;
18245
18246 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18247 cursor != NULL_TREE && cursor != loc;
18248 cursor = TREE_CHAIN (cursor), ++i)
18249 ;
18250 /* If we are translating a DWARF procedure, all referenced parameters
18251 must belong to the current function. */
18252 gcc_assert (cursor != NULL_TREE);
18253
18254 ret = new_loc_descr (DW_OP_pick, i, 0);
18255 ret->frame_offset_rel = 1;
18256 break;
18257 }
18258 /* FALLTHRU */
18259
18260 case RESULT_DECL:
18261 if (DECL_HAS_VALUE_EXPR_P (loc))
18262 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18263 want_address, context);
18264 /* FALLTHRU */
18265
18266 case FUNCTION_DECL:
18267 {
18268 rtx rtl;
18269 var_loc_list *loc_list = lookup_decl_loc (loc);
18270
18271 if (loc_list && loc_list->first)
18272 {
18273 list_ret = dw_loc_list (loc_list, loc, want_address);
18274 have_address = want_address != 0;
18275 break;
18276 }
18277 rtl = rtl_for_decl_location (loc);
18278 if (rtl == NULL_RTX)
18279 {
18280 if (TREE_CODE (loc) != FUNCTION_DECL
18281 && early_dwarf
18282 && current_function_decl
18283 && want_address != 1
18284 && ! DECL_IGNORED_P (loc)
18285 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18286 || POINTER_TYPE_P (TREE_TYPE (loc)))
18287 && DECL_CONTEXT (loc) == current_function_decl
18288 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18289 <= DWARF2_ADDR_SIZE))
18290 {
18291 dw_die_ref ref = lookup_decl_die (loc);
18292 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18293 if (ref)
18294 {
18295 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18296 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18297 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18298 }
18299 else
18300 {
18301 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18302 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18303 }
18304 break;
18305 }
18306 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18307 return 0;
18308 }
18309 else if (CONST_INT_P (rtl))
18310 {
18311 HOST_WIDE_INT val = INTVAL (rtl);
18312 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18313 val &= GET_MODE_MASK (DECL_MODE (loc));
18314 ret = int_loc_descriptor (val);
18315 }
18316 else if (GET_CODE (rtl) == CONST_STRING)
18317 {
18318 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18319 return 0;
18320 }
18321 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18322 ret = new_addr_loc_descr (rtl, dtprel_false);
18323 else
18324 {
18325 machine_mode mode, mem_mode;
18326
18327 /* Certain constructs can only be represented at top-level. */
18328 if (want_address == 2)
18329 {
18330 ret = loc_descriptor (rtl, VOIDmode,
18331 VAR_INIT_STATUS_INITIALIZED);
18332 have_address = 1;
18333 }
18334 else
18335 {
18336 mode = GET_MODE (rtl);
18337 mem_mode = VOIDmode;
18338 if (MEM_P (rtl))
18339 {
18340 mem_mode = mode;
18341 mode = get_address_mode (rtl);
18342 rtl = XEXP (rtl, 0);
18343 have_address = 1;
18344 }
18345 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18346 VAR_INIT_STATUS_INITIALIZED);
18347 }
18348 if (!ret)
18349 expansion_failed (loc, rtl,
18350 "failed to produce loc descriptor for rtl");
18351 }
18352 }
18353 break;
18354
18355 case MEM_REF:
18356 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18357 {
18358 have_address = 1;
18359 goto do_plus;
18360 }
18361 /* Fallthru. */
18362 case INDIRECT_REF:
18363 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18364 have_address = 1;
18365 break;
18366
18367 case TARGET_MEM_REF:
18368 case SSA_NAME:
18369 case DEBUG_EXPR_DECL:
18370 return NULL;
18371
18372 case COMPOUND_EXPR:
18373 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18374 context);
18375
18376 CASE_CONVERT:
18377 case VIEW_CONVERT_EXPR:
18378 case SAVE_EXPR:
18379 case MODIFY_EXPR:
18380 case NON_LVALUE_EXPR:
18381 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18382 context);
18383
18384 case COMPONENT_REF:
18385 case BIT_FIELD_REF:
18386 case ARRAY_REF:
18387 case ARRAY_RANGE_REF:
18388 case REALPART_EXPR:
18389 case IMAGPART_EXPR:
18390 {
18391 tree obj, offset;
18392 poly_int64 bitsize, bitpos, bytepos;
18393 machine_mode mode;
18394 int unsignedp, reversep, volatilep = 0;
18395
18396 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18397 &unsignedp, &reversep, &volatilep);
18398
18399 gcc_assert (obj != loc);
18400
18401 list_ret = loc_list_from_tree_1 (obj,
18402 want_address == 2
18403 && known_eq (bitpos, 0)
18404 && !offset ? 2 : 1,
18405 context);
18406 /* TODO: We could extract the value of a small expression via shifting even
18407 for a nonzero bitpos. */
18408 if (list_ret == 0)
18409 return 0;
18410 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18411 || !multiple_p (bitsize, BITS_PER_UNIT))
18412 {
18413 expansion_failed (loc, NULL_RTX,
18414 "bitfield access");
18415 return 0;
18416 }
18417
18418 if (offset != NULL_TREE)
18419 {
18420 /* Variable offset. */
18421 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18422 if (list_ret1 == 0)
18423 return 0;
18424 add_loc_list (&list_ret, list_ret1);
18425 if (!list_ret)
18426 return 0;
18427 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18428 }
18429
18430 HOST_WIDE_INT value;
18431 if (bytepos.is_constant (&value) && value > 0)
18432 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18433 value, 0));
18434 else if (maybe_ne (bytepos, 0))
18435 loc_list_plus_const (list_ret, bytepos);
18436
18437 have_address = 1;
18438 break;
18439 }
18440
18441 case INTEGER_CST:
18442 if ((want_address || !tree_fits_shwi_p (loc))
18443 && (ret = cst_pool_loc_descr (loc)))
18444 have_address = 1;
18445 else if (want_address == 2
18446 && tree_fits_shwi_p (loc)
18447 && (ret = address_of_int_loc_descriptor
18448 (int_size_in_bytes (TREE_TYPE (loc)),
18449 tree_to_shwi (loc))))
18450 have_address = 1;
18451 else if (tree_fits_shwi_p (loc))
18452 ret = int_loc_descriptor (tree_to_shwi (loc));
18453 else if (tree_fits_uhwi_p (loc))
18454 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18455 else
18456 {
18457 expansion_failed (loc, NULL_RTX,
18458 "Integer operand is not host integer");
18459 return 0;
18460 }
18461 break;
18462
18463 case CONSTRUCTOR:
18464 case REAL_CST:
18465 case STRING_CST:
18466 case COMPLEX_CST:
18467 if ((ret = cst_pool_loc_descr (loc)))
18468 have_address = 1;
18469 else if (TREE_CODE (loc) == CONSTRUCTOR)
18470 {
18471 tree type = TREE_TYPE (loc);
18472 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18473 unsigned HOST_WIDE_INT offset = 0;
18474 unsigned HOST_WIDE_INT cnt;
18475 constructor_elt *ce;
18476
18477 if (TREE_CODE (type) == RECORD_TYPE)
18478 {
18479 /* This is very limited, but it's enough to output
18480 pointers to member functions, as long as the
18481 referenced function is defined in the current
18482 translation unit. */
18483 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18484 {
18485 tree val = ce->value;
18486
18487 tree field = ce->index;
18488
18489 if (val)
18490 STRIP_NOPS (val);
18491
18492 if (!field || DECL_BIT_FIELD (field))
18493 {
18494 expansion_failed (loc, NULL_RTX,
18495 "bitfield in record type constructor");
18496 size = offset = (unsigned HOST_WIDE_INT)-1;
18497 ret = NULL;
18498 break;
18499 }
18500
18501 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18502 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18503 gcc_assert (pos + fieldsize <= size);
18504 if (pos < offset)
18505 {
18506 expansion_failed (loc, NULL_RTX,
18507 "out-of-order fields in record constructor");
18508 size = offset = (unsigned HOST_WIDE_INT)-1;
18509 ret = NULL;
18510 break;
18511 }
18512 if (pos > offset)
18513 {
18514 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18515 add_loc_descr (&ret, ret1);
18516 offset = pos;
18517 }
18518 if (val && fieldsize != 0)
18519 {
18520 ret1 = loc_descriptor_from_tree (val, want_address, context);
18521 if (!ret1)
18522 {
18523 expansion_failed (loc, NULL_RTX,
18524 "unsupported expression in field");
18525 size = offset = (unsigned HOST_WIDE_INT)-1;
18526 ret = NULL;
18527 break;
18528 }
18529 add_loc_descr (&ret, ret1);
18530 }
18531 if (fieldsize)
18532 {
18533 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18534 add_loc_descr (&ret, ret1);
18535 offset = pos + fieldsize;
18536 }
18537 }
18538
18539 if (offset != size)
18540 {
18541 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18542 add_loc_descr (&ret, ret1);
18543 offset = size;
18544 }
18545
18546 have_address = !!want_address;
18547 }
18548 else
18549 expansion_failed (loc, NULL_RTX,
18550 "constructor of non-record type");
18551 }
18552 else
18553 /* We can construct small constants here using int_loc_descriptor. */
18554 expansion_failed (loc, NULL_RTX,
18555 "constructor or constant not in constant pool");
18556 break;
18557
18558 case TRUTH_AND_EXPR:
18559 case TRUTH_ANDIF_EXPR:
18560 case BIT_AND_EXPR:
18561 op = DW_OP_and;
18562 goto do_binop;
18563
18564 case TRUTH_XOR_EXPR:
18565 case BIT_XOR_EXPR:
18566 op = DW_OP_xor;
18567 goto do_binop;
18568
18569 case TRUTH_OR_EXPR:
18570 case TRUTH_ORIF_EXPR:
18571 case BIT_IOR_EXPR:
18572 op = DW_OP_or;
18573 goto do_binop;
18574
18575 case FLOOR_DIV_EXPR:
18576 case CEIL_DIV_EXPR:
18577 case ROUND_DIV_EXPR:
18578 case TRUNC_DIV_EXPR:
18579 case EXACT_DIV_EXPR:
18580 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18581 return 0;
18582 op = DW_OP_div;
18583 goto do_binop;
18584
18585 case MINUS_EXPR:
18586 op = DW_OP_minus;
18587 goto do_binop;
18588
18589 case FLOOR_MOD_EXPR:
18590 case CEIL_MOD_EXPR:
18591 case ROUND_MOD_EXPR:
18592 case TRUNC_MOD_EXPR:
18593 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18594 {
18595 op = DW_OP_mod;
18596 goto do_binop;
18597 }
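/* Sketch of the signed sequence built below (A and B are the two operands
   already on the stack, top of the stack rightmost):
     A B --over--> A B A --over--> A B A B --div--> A B A/B
         --mul--> A B*(A/B) --minus--> A - B*(A/B)
   i.e. the truncated remainder. */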
18598 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18599 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18600 if (list_ret == 0 || list_ret1 == 0)
18601 return 0;
18602
18603 add_loc_list (&list_ret, list_ret1);
18604 if (list_ret == 0)
18605 return 0;
18606 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18607 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18608 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18609 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18610 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18611 break;
18612
18613 case MULT_EXPR:
18614 op = DW_OP_mul;
18615 goto do_binop;
18616
18617 case LSHIFT_EXPR:
18618 op = DW_OP_shl;
18619 goto do_binop;
18620
18621 case RSHIFT_EXPR:
18622 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18623 goto do_binop;
18624
18625 case POINTER_PLUS_EXPR:
18626 case PLUS_EXPR:
18627 do_plus:
18628 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18629 {
18630 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18631 smarter to encode their opposite. The DW_OP_plus_uconst operation
18632 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18633 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18634 bytes, Y being the size of the operation that pushes the opposite
18635 of the addend. So let's choose the smallest representation. */
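/* Illustrative example (assuming a 64-bit HOST_WIDE_INT): for an addend of
   -1, DW_OP_plus_uconst needs a 10-byte ULEB128 operand (11 bytes in
   total), whereas "DW_OP_lit1 DW_OP_minus" takes only 2 bytes. */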
18636 const tree tree_addend = TREE_OPERAND (loc, 1);
18637 offset_int wi_addend;
18638 HOST_WIDE_INT shwi_addend;
18639 dw_loc_descr_ref loc_naddend;
18640
18641 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18642 if (list_ret == 0)
18643 return 0;
18644
18645 /* Try to get the literal to push. It is the opposite of the addend,
18646 and since we rely on wrapping during DWARF evaluation, first decode
18647 the literal as a "DWARF-sized" signed number. */
18648 wi_addend = wi::to_offset (tree_addend);
18649 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18650 shwi_addend = wi_addend.to_shwi ();
18651 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18652 ? int_loc_descriptor (-shwi_addend)
18653 : NULL;
18654
18655 if (loc_naddend != NULL
18656 && ((unsigned) size_of_uleb128 (shwi_addend)
18657 > size_of_loc_descr (loc_naddend)))
18658 {
18659 add_loc_descr_to_each (list_ret, loc_naddend);
18660 add_loc_descr_to_each (list_ret,
18661 new_loc_descr (DW_OP_minus, 0, 0));
18662 }
18663 else
18664 {
18665 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18666 {
18667 loc_naddend = loc_cur;
18668 loc_cur = loc_cur->dw_loc_next;
18669 ggc_free (loc_naddend);
18670 }
18671 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18672 }
18673 break;
18674 }
18675
18676 op = DW_OP_plus;
18677 goto do_binop;
18678
18679 case LE_EXPR:
18680 op = DW_OP_le;
18681 goto do_comp_binop;
18682
18683 case GE_EXPR:
18684 op = DW_OP_ge;
18685 goto do_comp_binop;
18686
18687 case LT_EXPR:
18688 op = DW_OP_lt;
18689 goto do_comp_binop;
18690
18691 case GT_EXPR:
18692 op = DW_OP_gt;
18693 goto do_comp_binop;
18694
18695 do_comp_binop:
18696 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18697 {
18698 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18699 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18700 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18701 TREE_CODE (loc));
18702 break;
18703 }
18704 else
18705 goto do_binop;
18706
18707 case EQ_EXPR:
18708 op = DW_OP_eq;
18709 goto do_binop;
18710
18711 case NE_EXPR:
18712 op = DW_OP_ne;
18713 goto do_binop;
18714
18715 do_binop:
18716 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18717 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18718 if (list_ret == 0 || list_ret1 == 0)
18719 return 0;
18720
18721 add_loc_list (&list_ret, list_ret1);
18722 if (list_ret == 0)
18723 return 0;
18724 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18725 break;
18726
18727 case TRUTH_NOT_EXPR:
18728 case BIT_NOT_EXPR:
18729 op = DW_OP_not;
18730 goto do_unop;
18731
18732 case ABS_EXPR:
18733 op = DW_OP_abs;
18734 goto do_unop;
18735
18736 case NEGATE_EXPR:
18737 op = DW_OP_neg;
18738 goto do_unop;
18739
18740 do_unop:
18741 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18742 if (list_ret == 0)
18743 return 0;
18744
18745 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18746 break;
18747
18748 case MIN_EXPR:
18749 case MAX_EXPR:
18750 {
18751 const enum tree_code code =
18752 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18753
18754 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18755 build2 (code, integer_type_node,
18756 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18757 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18758 }
18759
18760 /* fall through */
18761
18762 case COND_EXPR:
18763 {
18764 dw_loc_descr_ref lhs
18765 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18766 dw_loc_list_ref rhs
18767 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18768 dw_loc_descr_ref bra_node, jump_node, tmp;
18769
18770 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18771 if (list_ret == 0 || lhs == 0 || rhs == 0)
18772 return 0;
18773
18774 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18775 add_loc_descr_to_each (list_ret, bra_node);
18776
18777 add_loc_list (&list_ret, rhs);
18778 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18779 add_loc_descr_to_each (list_ret, jump_node);
18780
18781 add_loc_descr_to_each (list_ret, lhs);
18782 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18783 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18784
18785 /* ??? Need a node to point the skip at. Use a nop. */
18786 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18787 add_loc_descr_to_each (list_ret, tmp);
18788 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18789 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18790 }
18791 break;
18792
18793 case FIX_TRUNC_EXPR:
18794 return 0;
18795
18796 default:
18797 /* Leave front-end specific codes as simply unknown. This comes
18798 up, for instance, with the C STMT_EXPR. */
18799 if ((unsigned int) TREE_CODE (loc)
18800 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18801 {
18802 expansion_failed (loc, NULL_RTX,
18803 "language specific tree node");
18804 return 0;
18805 }
18806
18807 /* Otherwise this is a generic code; we should just list all of
18808 these explicitly. We forgot one. */
18809 if (flag_checking)
18810 gcc_unreachable ();
18811
18812 /* In a release build, we want to degrade gracefully: better to
18813 generate incomplete debugging information than to crash. */
18814 return NULL;
18815 }
18816
18817 if (!ret && !list_ret)
18818 return 0;
18819
18820 if (want_address == 2 && !have_address
18821 && (dwarf_version >= 4 || !dwarf_strict))
18822 {
18823 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18824 {
18825 expansion_failed (loc, NULL_RTX,
18826 "DWARF address size mismatch");
18827 return 0;
18828 }
18829 if (ret)
18830 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18831 else
18832 add_loc_descr_to_each (list_ret,
18833 new_loc_descr (DW_OP_stack_value, 0, 0));
18834 have_address = 1;
18835 }
18836 /* Report if we can't fulfill the request for an address. */
18837 if (want_address && !have_address)
18838 {
18839 expansion_failed (loc, NULL_RTX,
18840 "Want address and only have value");
18841 return 0;
18842 }
18843
18844 gcc_assert (!ret || !list_ret);
18845
18846 /* If we've got an address and don't want one, dereference. */
18847 if (!want_address && have_address)
18848 {
18849 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18850
18851 if (size > DWARF2_ADDR_SIZE || size == -1)
18852 {
18853 expansion_failed (loc, NULL_RTX,
18854 "DWARF address size mismatch");
18855 return 0;
18856 }
18857 else if (size == DWARF2_ADDR_SIZE)
18858 op = DW_OP_deref;
18859 else
18860 op = DW_OP_deref_size;
18861
18862 if (ret)
18863 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18864 else
18865 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18866 }
18867 if (ret)
18868 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18869
18870 return list_ret;
18871 }
18872
18873 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18874 expressions. */
18875
18876 static dw_loc_list_ref
18877 loc_list_from_tree (tree loc, int want_address,
18878 struct loc_descr_context *context)
18879 {
18880 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18881
18882 for (dw_loc_list_ref loc_cur = result;
18883 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18884 loc_descr_without_nops (loc_cur->expr);
18885 return result;
18886 }
18887
18888 /* Same as above, but return only a single location expression. */
18889 static dw_loc_descr_ref
18890 loc_descriptor_from_tree (tree loc, int want_address,
18891 struct loc_descr_context *context)
18892 {
18893 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18894 if (!ret)
18895 return NULL;
18896 if (ret->dw_loc_next)
18897 {
18898 expansion_failed (loc, NULL_RTX,
18899 "Location list where only loc descriptor needed");
18900 return NULL;
18901 }
18902 return ret->expr;
18903 }
18904
18905 /* Given a value, round it up to the lowest multiple of `boundary'
18906 which is not less than the value itself. */
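/* For example, ceiling (5, 4) == 8 and ceiling (8, 4) == 8. */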
18907
18908 static inline HOST_WIDE_INT
18909 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18910 {
18911 return (((value + boundary - 1) / boundary) * boundary);
18912 }
18913
18914 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18915 pointer to the declared type for the relevant field variable, or return
18916 `integer_type_node' if the given node turns out to be an
18917 ERROR_MARK node. */
18918
18919 static inline tree
18920 field_type (const_tree decl)
18921 {
18922 tree type;
18923
18924 if (TREE_CODE (decl) == ERROR_MARK)
18925 return integer_type_node;
18926
18927 type = DECL_BIT_FIELD_TYPE (decl);
18928 if (type == NULL_TREE)
18929 type = TREE_TYPE (decl);
18930
18931 return type;
18932 }
18933
18934 /* Given a pointer to a tree node, return the alignment in bits for
18935 it, or else return BITS_PER_WORD if the node actually turns out to
18936 be an ERROR_MARK node. */
18937
18938 static inline unsigned
18939 simple_type_align_in_bits (const_tree type)
18940 {
18941 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18942 }
18943
18944 static inline unsigned
18945 simple_decl_align_in_bits (const_tree decl)
18946 {
18947 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18948 }
18949
18950 /* Return the result of rounding T up to ALIGN. */
18951
18952 static inline offset_int
18953 round_up_to_align (const offset_int &t, unsigned int align)
18954 {
18955 return wi::udiv_trunc (t + align - 1, align) * align;
18956 }
18957
18958 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18959 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18960 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18961 if we fail to return the size in one of these two forms. */
18962
18963 static dw_loc_descr_ref
18964 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18965 {
18966 tree tree_size;
18967 struct loc_descr_context ctx;
18968
18969 /* Return a constant integer in priority, if possible. */
18970 *cst_size = int_size_in_bytes (type);
18971 if (*cst_size != -1)
18972 return NULL;
18973
18974 ctx.context_type = const_cast<tree> (type);
18975 ctx.base_decl = NULL_TREE;
18976 ctx.dpi = NULL;
18977 ctx.placeholder_arg = false;
18978 ctx.placeholder_seen = false;
18979
18980 type = TYPE_MAIN_VARIANT (type);
18981 tree_size = TYPE_SIZE_UNIT (type);
18982 return ((tree_size != NULL_TREE)
18983 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18984 : NULL);
18985 }
18986
18987 /* Helper structure for RECORD_TYPE processing. */
18988 struct vlr_context
18989 {
18990 /* Root RECORD_TYPE. It is needed to generate data member location
18991 descriptions in variable-length records (VLR), but also to cope with
18992 variants, which are composed of nested structures multiplexed with
18993 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18994 function processing a FIELD_DECL, it is required to be non null. */
18995 tree struct_type;
18996 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18997 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18998 this variant part as part of the root record (in storage units). For
18999 regular records, it must be NULL_TREE. */
19000 tree variant_part_offset;
19001 };
19002
19003 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19004 addressed byte of the "containing object" for the given FIELD_DECL. If
19005 possible, return a native constant through CST_OFFSET (in which case NULL is
19006 returned); otherwise return a DWARF expression that computes the offset.
19007
19008 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19009 that offset is, either because the argument turns out to be a pointer to an
19010 ERROR_MARK node, or because the offset expression is too complex for us.
19011
19012 CTX is required: see the comment for VLR_CONTEXT. */
19013
19014 static dw_loc_descr_ref
19015 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19016 HOST_WIDE_INT *cst_offset)
19017 {
19018 tree tree_result;
19019 dw_loc_list_ref loc_result;
19020
19021 *cst_offset = 0;
19022
19023 if (TREE_CODE (decl) == ERROR_MARK)
19024 return NULL;
19025 else
19026 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19027
19028 /* We cannot handle variable bit offsets at the moment, so give up if that
19029 is the case. */
19030 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19031 return NULL;
19032
19033 #ifdef PCC_BITFIELD_TYPE_MATTERS
19034 /* We used to handle only constant offsets in all cases. Now, we properly
19035 handle dynamic byte offsets only when PCC bitfield type doesn't
19036 matter. */
19037 if (PCC_BITFIELD_TYPE_MATTERS
19038 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19039 {
19040 offset_int object_offset_in_bits;
19041 offset_int object_offset_in_bytes;
19042 offset_int bitpos_int;
19043 tree type;
19044 tree field_size_tree;
19045 offset_int deepest_bitpos;
19046 offset_int field_size_in_bits;
19047 unsigned int type_align_in_bits;
19048 unsigned int decl_align_in_bits;
19049 offset_int type_size_in_bits;
19050
19051 bitpos_int = wi::to_offset (bit_position (decl));
19052 type = field_type (decl);
19053 type_size_in_bits = offset_int_type_size_in_bits (type);
19054 type_align_in_bits = simple_type_align_in_bits (type);
19055
19056 field_size_tree = DECL_SIZE (decl);
19057
19058 /* The size could be unspecified if there was an error, or for
19059 a flexible array member. */
19060 if (!field_size_tree)
19061 field_size_tree = bitsize_zero_node;
19062
19063 /* If the size of the field is not constant, use the type size. */
19064 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19065 field_size_in_bits = wi::to_offset (field_size_tree);
19066 else
19067 field_size_in_bits = type_size_in_bits;
19068
19069 decl_align_in_bits = simple_decl_align_in_bits (decl);
19070
19071 /* The GCC front-end doesn't make any attempt to keep track of the
19072 starting bit offset (relative to the start of the containing
19073 structure type) of the hypothetical "containing object" for a
19074 bit-field. Thus, when computing the byte offset value for the
19075 start of the "containing object" of a bit-field, we must deduce
19076 this information on our own. This can be rather tricky to do in
19077 some cases. For example, handling the following structure type
19078 definition when compiling for an i386/i486 target (which only
19079 aligns long long's to 32-bit boundaries) can be very tricky:
19080
19081 struct S { int field1; long long field2:31; };
19082
19083 Fortunately, there is a simple rule-of-thumb which can be used
19084 in such cases. When compiling for an i386/i486, GCC will
19085 allocate 8 bytes for the structure shown above. It decides to
19086 do this based upon one simple rule for bit-field allocation.
19087 GCC allocates each "containing object" for each bit-field at
19088 the first (i.e. lowest addressed) legitimate alignment boundary
19089 (based upon the required minimum alignment for the declared
19090 type of the field) which it can possibly use, subject to the
19091 condition that there is still enough available space remaining
19092 in the containing object (when allocated at the selected point)
19093 to fully accommodate all of the bits of the bit-field itself.
19094
19095 This simple rule makes it obvious why GCC allocates 8 bytes for
19096 each object of the structure type shown above. When looking
19097 for a place to allocate the "containing object" for `field2',
19098 the compiler simply tries to allocate a 64-bit "containing
19099 object" at each successive 32-bit boundary (starting at zero)
19100 until it finds a place to allocate that 64-bit field such that
19101 at least 31 contiguous (and previously unallocated) bits remain
19102 within that selected 64 bit field. (As it turns out, for the
19103 example above, the compiler finds it is OK to allocate the
19104 "containing object" 64-bit field at bit-offset zero within the
19105 structure type.)
19106
19107 Here we attempt to work backwards from the limited set of facts
19108 we're given, and we try to deduce from those facts, where GCC
19109 must have believed that the containing object started (within
19110 the structure type). The value we deduce is then used (by the
19111 callers of this routine) to generate DW_AT_location and
19112 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19113 the case of DW_AT_location, regular fields as well). */
19114
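/* Working the i386/i486 example above through the code below (illustrative
   numbers only): for `field2', bitpos_int is 32, field_size_in_bits is 31,
   type_size_in_bits is 64 and type_align_in_bits is 32. Thus
   deepest_bitpos = 32 + 31 = 63 and object_offset_in_bits = 63 - 64 = -1,
   which rounds up to 0, so the "containing object" is deduced to start at
   byte offset 0, matching the allocation described above. */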
19115 /* Figure out the bit-distance from the start of the structure to
19116 the "deepest" bit of the bit-field. */
19117 deepest_bitpos = bitpos_int + field_size_in_bits;
19118
19119 /* This is the tricky part. Use some fancy footwork to deduce
19120 where the lowest addressed bit of the containing object must
19121 be. */
19122 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19123
19124 /* Round up to type_align by default. This works best for
19125 bitfields. */
19126 object_offset_in_bits
19127 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19128
19129 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19130 {
19131 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19132
19133 /* Round up to decl_align instead. */
19134 object_offset_in_bits
19135 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19136 }
19137
19138 object_offset_in_bytes
19139 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19140 if (ctx->variant_part_offset == NULL_TREE)
19141 {
19142 *cst_offset = object_offset_in_bytes.to_shwi ();
19143 return NULL;
19144 }
19145 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19146 }
19147 else
19148 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19149 tree_result = byte_position (decl);
19150
19151 if (ctx->variant_part_offset != NULL_TREE)
19152 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19153 ctx->variant_part_offset, tree_result);
19154
19155 /* If the byte offset is a constant, it's simpler to handle a native
19156 constant rather than a DWARF expression. */
19157 if (TREE_CODE (tree_result) == INTEGER_CST)
19158 {
19159 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19160 return NULL;
19161 }
19162 struct loc_descr_context loc_ctx = {
19163 ctx->struct_type, /* context_type */
19164 NULL_TREE, /* base_decl */
19165 NULL, /* dpi */
19166 false, /* placeholder_arg */
19167 false /* placeholder_seen */
19168 };
19169 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19170
19171 /* We want a DWARF expression: abort if we only have a location list with
19172 multiple elements. */
19173 if (!loc_result || !single_element_loc_list_p (loc_result))
19174 return NULL;
19175 else
19176 return loc_result->expr;
19177 }
19178 \f
19179 /* The following routines define various Dwarf attributes and any data
19180 associated with them. */
19181
19182 /* Add a location description attribute value to a DIE.
19183
19184 This emits location attributes suitable for whole variables and
19185 whole parameters. Note that the location attributes for struct fields are
19186 generated by the routine `data_member_location_attribute' below. */
19187
19188 static inline void
19189 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19190 dw_loc_list_ref descr)
19191 {
19192 bool check_no_locviews = true;
19193 if (descr == 0)
19194 return;
19195 if (single_element_loc_list_p (descr))
19196 add_AT_loc (die, attr_kind, descr->expr);
19197 else
19198 {
19199 add_AT_loc_list (die, attr_kind, descr);
19200 gcc_assert (descr->ll_symbol);
19201 if (attr_kind == DW_AT_location && descr->vl_symbol
19202 && dwarf2out_locviews_in_attribute ())
19203 {
19204 add_AT_view_list (die, DW_AT_GNU_locviews);
19205 check_no_locviews = false;
19206 }
19207 }
19208
19209 if (check_no_locviews)
19210 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19211 }
19212
19213 /* Add DW_AT_accessibility attribute to DIE if needed. */
19214
19215 static void
19216 add_accessibility_attribute (dw_die_ref die, tree decl)
19217 {
19218 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19219 children, otherwise the default is DW_ACCESS_public. In DWARF2
19220 the default has always been DW_ACCESS_public. */
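/* Example: with DWARF 3 or later, a private member of a plain struct
   (DW_TAG_structure_type parent) gets an explicit DW_ACCESS_private below,
   while a private member of a class does not, since that is already the
   default; conversely, a public member of a class gets an explicit
   DW_ACCESS_public. */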
19221 if (TREE_PROTECTED (decl))
19222 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19223 else if (TREE_PRIVATE (decl))
19224 {
19225 if (dwarf_version == 2
19226 || die->die_parent == NULL
19227 || die->die_parent->die_tag != DW_TAG_class_type)
19228 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19229 }
19230 else if (dwarf_version > 2
19231 && die->die_parent
19232 && die->die_parent->die_tag == DW_TAG_class_type)
19233 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19234 }
19235
19236 /* Attach the specialized form of location attribute used for data members of
19237 struct and union types. In the special case of a FIELD_DECL node which
19238 represents a bit-field, the "offset" part of this special location
19239 descriptor must indicate the distance in bytes from the lowest-addressed
19240 byte of the containing struct or union type to the lowest-addressed byte of
19241 the "containing object" for the bit-field. (See the `field_byte_offset'
19242 function above).
19243
19244 For any given bit-field, the "containing object" is a hypothetical object
19245 (of some integral or enum type) within which the given bit-field lives. The
19246 type of this hypothetical "containing object" is always the same as the
19247 declared type of the individual bit-field itself (for GCC anyway... the
19248 DWARF spec doesn't actually mandate this). Note that it is the size (in
19249 bytes) of the hypothetical "containing object" which will be given in the
19250 DW_AT_byte_size attribute for this bit-field. (See the
19251 `byte_size_attribute' function below.) It is also used when calculating the
19252 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19253 function below.)
19254
19255 CTX is required: see the comment for VLR_CONTEXT. */
19256
19257 static void
19258 add_data_member_location_attribute (dw_die_ref die,
19259 tree decl,
19260 struct vlr_context *ctx)
19261 {
19262 HOST_WIDE_INT offset;
19263 dw_loc_descr_ref loc_descr = 0;
19264
19265 if (TREE_CODE (decl) == TREE_BINFO)
19266 {
19267 /* We're working on the TAG_inheritance for a base class. */
19268 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19269 {
19270 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19271 aren't at a fixed offset from all (sub)objects of the same
19272 type. We need to extract the appropriate offset from our
19273 vtable. The following dwarf expression means
19274
19275 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19276
19277 This is specific to the V3 ABI, of course. */
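/* Illustrative only: assuming a (hypothetical) vtable slot offset of -24,
   the operations built below amount to the sequence
     DW_OP_dup, DW_OP_deref, <constant 24>, DW_OP_minus,
     DW_OP_deref, DW_OP_plus
   which computes ObAddr + *(*ObAddr - 24) as described above. */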
19278
19279 dw_loc_descr_ref tmp;
19280
19281 /* Make a copy of the object address. */
19282 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19283 add_loc_descr (&loc_descr, tmp);
19284
19285 /* Extract the vtable address. */
19286 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19287 add_loc_descr (&loc_descr, tmp);
19288
19289 /* Calculate the address of the offset. */
19290 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19291 gcc_assert (offset < 0);
19292
19293 tmp = int_loc_descriptor (-offset);
19294 add_loc_descr (&loc_descr, tmp);
19295 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19296 add_loc_descr (&loc_descr, tmp);
19297
19298 /* Extract the offset. */
19299 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19300 add_loc_descr (&loc_descr, tmp);
19301
19302 /* Add it to the object address. */
19303 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19304 add_loc_descr (&loc_descr, tmp);
19305 }
19306 else
19307 offset = tree_to_shwi (BINFO_OFFSET (decl));
19308 }
19309 else
19310 {
19311 loc_descr = field_byte_offset (decl, ctx, &offset);
19312
19313 /* If loc_descr is available then we know the field offset is dynamic.
19314 However, GDB does not handle dynamic field offsets very well at the
19315 moment. */
19316 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19317 {
19318 loc_descr = NULL;
19319 offset = 0;
19320 }
19321
19322 /* Data member location evaluation starts with the base address on the
19323 stack. Compute the field offset and add it to this base address. */
19324 else if (loc_descr != NULL)
19325 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19326 }
19327
19328 if (! loc_descr)
19329 {
19330 /* While DW_AT_data_bit_offset was already added in DWARF4,
19331 GDB, for example, only added support for it in November 2016. For DWARF5
19332 we need newer debug info consumers anyway. We might change this
19333 to dwarf_version >= 4 once most consumers have caught up. */
19334 if (dwarf_version >= 5
19335 && TREE_CODE (decl) == FIELD_DECL
19336 && DECL_BIT_FIELD_TYPE (decl))
19337 {
19338 tree off = bit_position (decl);
19339 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19340 {
19341 remove_AT (die, DW_AT_byte_size);
19342 remove_AT (die, DW_AT_bit_offset);
19343 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19344 return;
19345 }
19346 }
19347 if (dwarf_version > 2)
19348 {
19349 /* Don't need to output a location expression, just the constant. */
19350 if (offset < 0)
19351 add_AT_int (die, DW_AT_data_member_location, offset);
19352 else
19353 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19354 return;
19355 }
19356 else
19357 {
19358 enum dwarf_location_atom op;
19359
19360 /* The DWARF2 standard says that we should assume that the structure
19361 address is already on the stack, so we can specify a structure
19362 field address by using DW_OP_plus_uconst. */
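/* For instance, a field at byte offset 8 gets the expression
   DW_OP_plus_uconst 8 here, whereas the DWARF 3+ branch above can emit
   the offset as a bare constant 8. */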
19363 op = DW_OP_plus_uconst;
19364 loc_descr = new_loc_descr (op, offset, 0);
19365 }
19366 }
19367
19368 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19369 }
19370
19371 /* Writes integer values to dw_vec_const array. */
19372
19373 static void
19374 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19375 {
19376 while (size != 0)
19377 {
19378 *dest++ = val & 0xff;
19379 val >>= 8;
19380 --size;
19381 }
19382 }
19383
19384 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19385
19386 static HOST_WIDE_INT
19387 extract_int (const unsigned char *src, unsigned int size)
19388 {
19389 HOST_WIDE_INT val = 0;
19390
19391 src += size;
19392 while (size != 0)
19393 {
19394 val <<= 8;
19395 val |= *--src & 0xff;
19396 --size;
19397 }
19398 return val;
19399 }
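/* Example (illustrative): insert_int stores the value least significant
   byte first, so insert_int (0x01020304, 4, buf) leaves buf as
   { 0x04, 0x03, 0x02, 0x01 } and extract_int (buf, 4) recovers 0x01020304,
   independently of host byte order. */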
19400
19401 /* Writes wide_int values to dw_vec_const array. */
19402
19403 static void
19404 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19405 {
19406 int i;
19407
19408 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19409 {
19410 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19411 return;
19412 }
19413
19414 /* We'd have to extend this code to support odd sizes. */
19415 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19416
19417 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19418
19419 if (WORDS_BIG_ENDIAN)
19420 for (i = n - 1; i >= 0; i--)
19421 {
19422 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19423 dest += sizeof (HOST_WIDE_INT);
19424 }
19425 else
19426 for (i = 0; i < n; i++)
19427 {
19428 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19429 dest += sizeof (HOST_WIDE_INT);
19430 }
19431 }
19432
19433 /* Writes floating point values to dw_vec_const array. */
19434
19435 static void
19436 insert_float (const_rtx rtl, unsigned char *array)
19437 {
19438 long val[4];
19439 int i;
19440 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19441
19442 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19443
19444 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19445 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19446 {
19447 insert_int (val[i], 4, array);
19448 array += 4;
19449 }
19450 }
19451
19452 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19453 does not have a "location" either in memory or in a register. These
19454 things can arise in GNU C when a constant is passed as an actual parameter
19455 to an inlined function. They can also arise in C++ where declared
19456 constants do not necessarily get memory "homes". */
19457
19458 static bool
19459 add_const_value_attribute (dw_die_ref die, rtx rtl)
19460 {
19461 switch (GET_CODE (rtl))
19462 {
19463 case CONST_INT:
19464 {
19465 HOST_WIDE_INT val = INTVAL (rtl);
19466
19467 if (val < 0)
19468 add_AT_int (die, DW_AT_const_value, val);
19469 else
19470 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19471 }
19472 return true;
19473
19474 case CONST_WIDE_INT:
19475 {
19476 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19477 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19478 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19479 wide_int w = wi::zext (w1, prec);
19480 add_AT_wide (die, DW_AT_const_value, w);
19481 }
19482 return true;
19483
19484 case CONST_DOUBLE:
19485 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19486 floating-point constant. A CONST_DOUBLE is used whenever the
19487 constant requires more than one word in order to be adequately
19488 represented. */
19489 if (TARGET_SUPPORTS_WIDE_INT == 0
19490 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19491 add_AT_double (die, DW_AT_const_value,
19492 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19493 else
19494 {
19495 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19496 unsigned int length = GET_MODE_SIZE (mode);
19497 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19498
19499 insert_float (rtl, array);
19500 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19501 }
19502 return true;
19503
19504 case CONST_VECTOR:
19505 {
19506 unsigned int length;
19507 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19508 return false;
19509
19510 machine_mode mode = GET_MODE (rtl);
19511 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19512 unsigned char *array
19513 = ggc_vec_alloc<unsigned char> (length * elt_size);
19514 unsigned int i;
19515 unsigned char *p;
19516 machine_mode imode = GET_MODE_INNER (mode);
19517
19518 switch (GET_MODE_CLASS (mode))
19519 {
19520 case MODE_VECTOR_INT:
19521 for (i = 0, p = array; i < length; i++, p += elt_size)
19522 {
19523 rtx elt = CONST_VECTOR_ELT (rtl, i);
19524 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19525 }
19526 break;
19527
19528 case MODE_VECTOR_FLOAT:
19529 for (i = 0, p = array; i < length; i++, p += elt_size)
19530 {
19531 rtx elt = CONST_VECTOR_ELT (rtl, i);
19532 insert_float (elt, p);
19533 }
19534 break;
19535
19536 default:
19537 gcc_unreachable ();
19538 }
19539
19540 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19541 }
19542 return true;
19543
19544 case CONST_STRING:
19545 if (dwarf_version >= 4 || !dwarf_strict)
19546 {
19547 dw_loc_descr_ref loc_result;
19548 resolve_one_addr (&rtl);
19549 rtl_addr:
19550 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19551 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19552 add_AT_loc (die, DW_AT_location, loc_result);
19553 vec_safe_push (used_rtx_array, rtl);
19554 return true;
19555 }
19556 return false;
19557
19558 case CONST:
19559 if (CONSTANT_P (XEXP (rtl, 0)))
19560 return add_const_value_attribute (die, XEXP (rtl, 0));
19561 /* FALLTHROUGH */
19562 case SYMBOL_REF:
19563 if (!const_ok_for_output (rtl))
19564 return false;
19565 /* FALLTHROUGH */
19566 case LABEL_REF:
19567 if (dwarf_version >= 4 || !dwarf_strict)
19568 goto rtl_addr;
19569 return false;
19570
19571 case PLUS:
19572 /* In cases where an inlined instance of an inline function is passed
19573 the address of an `auto' variable (which is local to the caller) we
19574 can get a situation where the DECL_RTL of the artificial local
19575 variable (for the inlining) which acts as a stand-in for the
19576 corresponding formal parameter (of the inline function) will look
19577 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19578 exactly a compile-time constant expression, but it isn't the address
19579 of the (artificial) local variable either. Rather, it represents the
19580 *value* which the artificial local variable always has during its
19581 lifetime. We currently have no way to represent such quasi-constant
19582 values in Dwarf, so for now we just punt and generate nothing. */
19583 return false;
19584
19585 case HIGH:
19586 case CONST_FIXED:
19587 return false;
19588
19589 case MEM:
19590 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19591 && MEM_READONLY_P (rtl)
19592 && GET_MODE (rtl) == BLKmode)
19593 {
19594 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19595 return true;
19596 }
19597 return false;
19598
19599 default:
19600 /* No other kinds of rtx should be possible here. */
19601 gcc_unreachable ();
19602 }
19603 return false;
19604 }
19605
19606 /* Determine whether the evaluation of EXPR references any variables
19607 or functions which aren't otherwise used (and therefore may not be
19608 output). */
19609 static tree
19610 reference_to_unused (tree * tp, int * walk_subtrees,
19611 void * data ATTRIBUTE_UNUSED)
19612 {
19613 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19614 *walk_subtrees = 0;
19615
19616 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19617 && ! TREE_ASM_WRITTEN (*tp))
19618 return *tp;
19619 /* ??? The C++ FE emits debug information for using decls, so
19620 putting gcc_unreachable here falls over. See PR31899. For now
19621 be conservative. */
19622 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19623 return *tp;
19624 else if (VAR_P (*tp))
19625 {
19626 varpool_node *node = varpool_node::get (*tp);
19627 if (!node || !node->definition)
19628 return *tp;
19629 }
19630 else if (TREE_CODE (*tp) == FUNCTION_DECL
19631 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19632 {
19633 /* The call graph machinery must have finished analyzing,
19634 optimizing and gimplifying the CU by now.
19635 So if *TP has no call graph node associated
19636 to it, it means *TP will not be emitted. */
19637 if (!cgraph_node::get (*tp))
19638 return *tp;
19639 }
19640 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19641 return *tp;
19642
19643 return NULL_TREE;
19644 }
19645
19646 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19647 for use in a later add_const_value_attribute call. */
19648
19649 static rtx
19650 rtl_for_decl_init (tree init, tree type)
19651 {
19652 rtx rtl = NULL_RTX;
19653
19654 STRIP_NOPS (init);
19655
19656 /* If a variable is initialized with a string constant without embedded
19657 zeros, build CONST_STRING. */
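/* For example (assuming the usual 8-bit char), a hypothetical declaration
     static const char msg[3] = "hi";
   is represented here as a CONST_STRING "hi" wrapped in a read-only BLKmode
   MEM, which the MEM case of add_const_value_attribute later turns into a
   string-valued DW_AT_const_value. */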
19658 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19659 {
19660 tree enttype = TREE_TYPE (type);
19661 tree domain = TYPE_DOMAIN (type);
19662 scalar_int_mode mode;
19663
19664 if (is_int_mode (TYPE_MODE (enttype), &mode)
19665 && GET_MODE_SIZE (mode) == 1
19666 && domain
19667 && TYPE_MAX_VALUE (domain)
19668 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19669 && integer_zerop (TYPE_MIN_VALUE (domain))
19670 && compare_tree_int (TYPE_MAX_VALUE (domain),
19671 TREE_STRING_LENGTH (init) - 1) == 0
19672 && ((size_t) TREE_STRING_LENGTH (init)
19673 == strlen (TREE_STRING_POINTER (init)) + 1))
19674 {
19675 rtl = gen_rtx_CONST_STRING (VOIDmode,
19676 ggc_strdup (TREE_STRING_POINTER (init)));
19677 rtl = gen_rtx_MEM (BLKmode, rtl);
19678 MEM_READONLY_P (rtl) = 1;
19679 }
19680 }
19681 /* Other aggregates, and complex values, could be represented using
19682 CONCAT: FIXME! */
19683 else if (AGGREGATE_TYPE_P (type)
19684 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19685 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19686 || TREE_CODE (type) == COMPLEX_TYPE)
19687 ;
19688 /* Vectors only work if their mode is supported by the target.
19689 FIXME: generic vectors ought to work too. */
19690 else if (TREE_CODE (type) == VECTOR_TYPE
19691 && !VECTOR_MODE_P (TYPE_MODE (type)))
19692 ;
19693 /* If the initializer is something that we know will expand into an
19694 immediate RTL constant, expand it now. We must be careful not to
19695 reference variables which won't be output. */
19696 else if (initializer_constant_valid_p (init, type)
19697 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19698 {
19699 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19700 possible. */
19701 if (TREE_CODE (type) == VECTOR_TYPE)
19702 switch (TREE_CODE (init))
19703 {
19704 case VECTOR_CST:
19705 break;
19706 case CONSTRUCTOR:
19707 if (TREE_CONSTANT (init))
19708 {
19709 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19710 bool constant_p = true;
19711 tree value;
19712 unsigned HOST_WIDE_INT ix;
19713
19714 /* Even when ctor is constant, it might contain non-*_CST
19715 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19716 belong in VECTOR_CST nodes. */
19717 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19718 if (!CONSTANT_CLASS_P (value))
19719 {
19720 constant_p = false;
19721 break;
19722 }
19723
19724 if (constant_p)
19725 {
19726 init = build_vector_from_ctor (type, elts);
19727 break;
19728 }
19729 }
19730 /* FALLTHRU */
19731
19732 default:
19733 return NULL;
19734 }
19735
19736 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19737
19738 /* If expand_expr returns a MEM, it wasn't immediate. */
19739 gcc_assert (!rtl || !MEM_P (rtl));
19740 }
19741
19742 return rtl;
19743 }
19744
19745 /* Generate RTL for the variable DECL to represent its location. */
19746
19747 static rtx
19748 rtl_for_decl_location (tree decl)
19749 {
19750 rtx rtl;
19751
19752 /* Here we have to decide where we are going to say the parameter "lives"
19753 (as far as the debugger is concerned). We only have a couple of
19754 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19755
19756 DECL_RTL normally indicates where the parameter lives during most of the
19757 activation of the function. If optimization is enabled however, this
19758 could be either NULL or else a pseudo-reg. Both of those cases indicate
19759 that the parameter doesn't really live anywhere (as far as the code
19760 generation parts of GCC are concerned) during most of the function's
19761 activation. That will happen (for example) if the parameter is never
19762 referenced within the function.
19763
19764 We could just generate a location descriptor here for all non-NULL
19765 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19766 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19767 where DECL_RTL is NULL or is a pseudo-reg.
19768
19769 Note however that we can only get away with using DECL_INCOMING_RTL as
19770 a backup substitute for DECL_RTL in certain limited cases. In cases
19771 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19772 we can be sure that the parameter was passed using the same type as it is
19773 declared to have within the function, and that its DECL_INCOMING_RTL
19774 points us to a place where a value of that type is passed.
19775
19776 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19777 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19778 because in these cases DECL_INCOMING_RTL points us to a value of some
19779 type which is *different* from the type of the parameter itself. Thus,
19780 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19781 such cases, the debugger would end up (for example) trying to fetch a
19782 `float' from a place which actually contains the first part of a
19783 `double'. That would lead to really incorrect and confusing
19784 output at debug-time.
19785
19786 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19787 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19788 are a couple of exceptions however. On little-endian machines we can
19789 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19790 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19791 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19792 when (on a little-endian machine) a non-prototyped function has a
19793 parameter declared to be of type `short' or `char'. In such cases,
19794 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19795 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19796 passed `int' value. If the debugger then uses that address to fetch
19797 a `short' or a `char' (on a little-endian machine) the result will be
19798 the correct data, so we allow for such exceptional cases below.
19799
19800 Note that our goal here is to describe the place where the given formal
19801 parameter lives during most of the function's activation (i.e. between the
19802 end of the prologue and the start of the epilogue). We'll do that as best
19803 as we can. Note however that if the given formal parameter is modified
19804 sometime during the execution of the function, then a stack backtrace (at
19805 debug-time) will show the function as having been called with the *new*
19806 value rather than the value which was originally passed in. This happens
19807 rarely enough that it is not a major problem, but it *is* a problem, and
19808 I'd like to fix it.
19809
19810 A future version of dwarf2out.c may generate two additional attributes for
19811 any given DW_TAG_formal_parameter DIE which will describe the "passed
19812 type" and the "passed location" for the given formal parameter in addition
19813 to the attributes we now generate to indicate the "declared type" and the
19814 "active location" for each parameter. This additional set of attributes
19815 could be used by debuggers for stack backtraces. Separately, note that
19816 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19817 This happens (for example) for inlined-instances of inline function formal
19818 parameters which are never referenced. This really shouldn't be
19819 happening. All PARM_DECL nodes should get valid non-NULL
19820 DECL_INCOMING_RTL values. FIXME. */
19821
19822 /* Use DECL_RTL as the "location" unless we find something better. */
19823 rtl = DECL_RTL_IF_SET (decl);
19824
19825 /* When generating abstract instances, ignore everything except
19826 constants, symbols living in memory, and symbols living in
19827 fixed registers. */
19828 if (! reload_completed)
19829 {
19830 if (rtl
19831 && (CONSTANT_P (rtl)
19832 || (MEM_P (rtl)
19833 && CONSTANT_P (XEXP (rtl, 0)))
19834 || (REG_P (rtl)
19835 && VAR_P (decl)
19836 && TREE_STATIC (decl))))
19837 {
19838 rtl = targetm.delegitimize_address (rtl);
19839 return rtl;
19840 }
19841 rtl = NULL_RTX;
19842 }
19843 else if (TREE_CODE (decl) == PARM_DECL)
19844 {
19845 if (rtl == NULL_RTX
19846 || is_pseudo_reg (rtl)
19847 || (MEM_P (rtl)
19848 && is_pseudo_reg (XEXP (rtl, 0))
19849 && DECL_INCOMING_RTL (decl)
19850 && MEM_P (DECL_INCOMING_RTL (decl))
19851 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19852 {
19853 tree declared_type = TREE_TYPE (decl);
19854 tree passed_type = DECL_ARG_TYPE (decl);
19855 machine_mode dmode = TYPE_MODE (declared_type);
19856 machine_mode pmode = TYPE_MODE (passed_type);
19857
19858 /* This decl represents a formal parameter which was optimized out.
19859 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19860 all cases where (rtl == NULL_RTX) just below. */
19861 if (dmode == pmode)
19862 rtl = DECL_INCOMING_RTL (decl);
19863 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19864 && SCALAR_INT_MODE_P (dmode)
19865 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19866 && DECL_INCOMING_RTL (decl))
19867 {
19868 rtx inc = DECL_INCOMING_RTL (decl);
19869 if (REG_P (inc))
19870 rtl = inc;
19871 else if (MEM_P (inc))
19872 {
19873 if (BYTES_BIG_ENDIAN)
19874 rtl = adjust_address_nv (inc, dmode,
19875 GET_MODE_SIZE (pmode)
19876 - GET_MODE_SIZE (dmode));
19877 else
19878 rtl = inc;
19879 }
19880 }
19881 }
19882
19883 /* If the parm was passed in registers, but lives on the stack, then
19884 make a big endian correction if the mode of the type of the
19885 parameter is not the same as the mode of the rtl. */
19886 /* ??? This is the same series of checks that are made in dbxout.c before
19887 we reach the big endian correction code there. It isn't clear if all
19888 of these checks are necessary here, but keeping them all is the safe
19889 thing to do. */
19890 else if (MEM_P (rtl)
19891 && XEXP (rtl, 0) != const0_rtx
19892 && ! CONSTANT_P (XEXP (rtl, 0))
19893 /* Not passed in memory. */
19894 && !MEM_P (DECL_INCOMING_RTL (decl))
19895 /* Not passed by invisible reference. */
19896 && (!REG_P (XEXP (rtl, 0))
19897 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19898 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19899 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19900 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19901 #endif
19902 )
19903 /* Big endian correction check. */
19904 && BYTES_BIG_ENDIAN
19905 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19906 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19907 UNITS_PER_WORD))
19908 {
19909 machine_mode addr_mode = get_address_mode (rtl);
19910 poly_int64 offset = (UNITS_PER_WORD
19911 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19912
19913 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19914 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19915 }
19916 }
19917 else if (VAR_P (decl)
19918 && rtl
19919 && MEM_P (rtl)
19920 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19921 {
19922 machine_mode addr_mode = get_address_mode (rtl);
19923 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19924 GET_MODE (rtl));
19925
19926 /* If a variable is declared "register" yet is smaller than
19927 a register, then if we store the variable to memory, it
19928 looks like we're storing a register-sized value, when in
19929 fact we are not. We need to adjust the offset of the
19930 storage location to reflect the actual value's bytes,
19931 else gdb will not be able to display it. */
19932 if (maybe_ne (offset, 0))
19933 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19934 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19935 }
19936
19937 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19938 and will have been substituted directly into all expressions that use it.
19939 C does not have such a concept, but C++ and other languages do. */
19940 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19941 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19942
19943 if (rtl)
19944 rtl = targetm.delegitimize_address (rtl);
19945
19946 /* If we don't look past the constant pool, we risk emitting a
19947 reference to a constant pool entry that isn't referenced from
19948 code, and thus is not emitted. */
19949 if (rtl)
19950 rtl = avoid_constant_pool_reference (rtl);
19951
19952 /* Try harder to get a rtl. If this symbol ends up not being emitted
19953 in the current CU, resolve_addr will remove the expression referencing
19954 it. */
19955 if (rtl == NULL_RTX
19956 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19957 && VAR_P (decl)
19958 && !DECL_EXTERNAL (decl)
19959 && TREE_STATIC (decl)
19960 && DECL_NAME (decl)
19961 && !DECL_HARD_REGISTER (decl)
19962 && DECL_MODE (decl) != VOIDmode)
19963 {
19964 rtl = make_decl_rtl_for_debug (decl);
19965 if (!MEM_P (rtl)
19966 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19967 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19968 rtl = NULL_RTX;
19969 }
19970
19971 return rtl;
19972 }
19973
19974 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19975 returned. If so, the decl for the COMMON block is returned, and the
19976 value is the offset into the common block for the symbol. */
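/* Illustrative sketch (details depend on the Fortran front end): for source
   along the lines of
     INTEGER I
     REAL R
     COMMON /BLK/ I, R
   R's DECL_VALUE_EXPR is a COMPONENT_REF into the variable created to
   represent BLK, so for R this function would return that variable and set
   *VALUE to R's byte offset within the block (4, assuming a 4-byte
   INTEGER). */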
19977
19978 static tree
19979 fortran_common (tree decl, HOST_WIDE_INT *value)
19980 {
19981 tree val_expr, cvar;
19982 machine_mode mode;
19983 poly_int64 bitsize, bitpos;
19984 tree offset;
19985 HOST_WIDE_INT cbitpos;
19986 int unsignedp, reversep, volatilep = 0;
19987
19988 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19989 it does not have a value (the offset into the common area), or if it
19990 is thread local (as opposed to global) then it isn't common, and shouldn't
19991 be handled as such. */
19992 if (!VAR_P (decl)
19993 || !TREE_STATIC (decl)
19994 || !DECL_HAS_VALUE_EXPR_P (decl)
19995 || !is_fortran ())
19996 return NULL_TREE;
19997
19998 val_expr = DECL_VALUE_EXPR (decl);
19999 if (TREE_CODE (val_expr) != COMPONENT_REF)
20000 return NULL_TREE;
20001
20002 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20003 &unsignedp, &reversep, &volatilep);
20004
20005 if (cvar == NULL_TREE
20006 || !VAR_P (cvar)
20007 || DECL_ARTIFICIAL (cvar)
20008 || !TREE_PUBLIC (cvar)
20009 /* We don't expect to have to cope with variable offsets,
20010 since at present all static data must have a constant size. */
20011 || !bitpos.is_constant (&cbitpos))
20012 return NULL_TREE;
20013
20014 *value = 0;
20015 if (offset != NULL)
20016 {
20017 if (!tree_fits_shwi_p (offset))
20018 return NULL_TREE;
20019 *value = tree_to_shwi (offset);
20020 }
20021 if (cbitpos != 0)
20022 *value += cbitpos / BITS_PER_UNIT;
20023
20024 return cvar;
20025 }
20026
20027 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20028 data attribute for a variable or a parameter. We generate the
20029 DW_AT_const_value attribute only in those cases where the given variable
20030 or parameter does not have a true "location" either in memory or in a
20031 register. This can happen (for example) when a constant is passed as an
20032 actual argument in a call to an inline function. (It's possible that
20033 these things can crop up in other ways also.) Note that one type of
20034 constant value which can be passed into an inlined function is a constant
20035 pointer. This can happen for example if an actual argument in an inlined
20036 function call evaluates to a compile-time constant address.
20037
20038 CACHE_P is true if it is worth caching the location list for DECL,
20039 so that future calls can reuse it rather than regenerate it from scratch.
20040 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20041 since we will need to refer to them each time the function is inlined. */
20042
20043 static bool
20044 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20045 {
20046 rtx rtl;
20047 dw_loc_list_ref list;
20048 var_loc_list *loc_list;
20049 cached_dw_loc_list *cache;
20050
20051 if (early_dwarf)
20052 return false;
20053
20054 if (TREE_CODE (decl) == ERROR_MARK)
20055 return false;
20056
20057 if (get_AT (die, DW_AT_location)
20058 || get_AT (die, DW_AT_const_value))
20059 return true;
20060
20061 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20062 || TREE_CODE (decl) == RESULT_DECL);
20063
20064 /* Try to get some constant RTL for this decl, and use that as the value of
20065 the location. */
20066
20067 rtl = rtl_for_decl_location (decl);
20068 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20069 && add_const_value_attribute (die, rtl))
20070 return true;
20071
20072 /* See if we have a single-element location list that is equivalent to
20073 a constant value. In that case it is better to use add_const_value_attribute
20074 rather than expanding the constant value equivalent. */
20075 loc_list = lookup_decl_loc (decl);
20076 if (loc_list
20077 && loc_list->first
20078 && loc_list->first->next == NULL
20079 && NOTE_P (loc_list->first->loc)
20080 && NOTE_VAR_LOCATION (loc_list->first->loc)
20081 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20082 {
20083 struct var_loc_node *node;
20084
20085 node = loc_list->first;
20086 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20087 if (GET_CODE (rtl) == EXPR_LIST)
20088 rtl = XEXP (rtl, 0);
20089 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20090 && add_const_value_attribute (die, rtl))
20091 return true;
20092 }
20093 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20094 list several times. See if we've already cached the contents. */
20095 list = NULL;
20096 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20097 cache_p = false;
20098 if (cache_p)
20099 {
20100 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20101 if (cache)
20102 list = cache->loc_list;
20103 }
20104 if (list == NULL)
20105 {
20106 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20107 NULL);
20108 /* It is usually worth caching this result if the decl is from
20109 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20110 if (cache_p && list && list->dw_loc_next)
20111 {
20112 cached_dw_loc_list **slot
20113 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20114 DECL_UID (decl),
20115 INSERT);
20116 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20117 cache->decl_id = DECL_UID (decl);
20118 cache->loc_list = list;
20119 *slot = cache;
20120 }
20121 }
20122 if (list)
20123 {
20124 add_AT_location_description (die, DW_AT_location, list);
20125 return true;
20126 }
20127 /* None of that worked, so it must not really have a location;
20128 try adding a constant value attribute from the DECL_INITIAL. */
20129 return tree_add_const_value_attribute_for_decl (die, decl);
20130 }
20131
20132 /* Helper function for tree_add_const_value_attribute. Natively encode
20133 initializer INIT into an array. Return true if successful. */
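/* Illustrative example: assuming a 4-byte little-endian int, encoding the
   initializer of
     int a[3] = { 1, 2, 3 };
   into a 12-byte array yields { 1,0,0,0, 2,0,0,0, 3,0,0,0 } (element
   placement comes from the array domain, byte order from the target via
   native_encode_expr). */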
20134
20135 static bool
20136 native_encode_initializer (tree init, unsigned char *array, int size)
20137 {
20138 tree type;
20139
20140 if (init == NULL_TREE)
20141 return false;
20142
20143 STRIP_NOPS (init);
20144 switch (TREE_CODE (init))
20145 {
20146 case STRING_CST:
20147 type = TREE_TYPE (init);
20148 if (TREE_CODE (type) == ARRAY_TYPE)
20149 {
20150 tree enttype = TREE_TYPE (type);
20151 scalar_int_mode mode;
20152
20153 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20154 || GET_MODE_SIZE (mode) != 1)
20155 return false;
20156 if (int_size_in_bytes (type) != size)
20157 return false;
20158 if (size > TREE_STRING_LENGTH (init))
20159 {
20160 memcpy (array, TREE_STRING_POINTER (init),
20161 TREE_STRING_LENGTH (init));
20162 memset (array + TREE_STRING_LENGTH (init),
20163 '\0', size - TREE_STRING_LENGTH (init));
20164 }
20165 else
20166 memcpy (array, TREE_STRING_POINTER (init), size);
20167 return true;
20168 }
20169 return false;
20170 case CONSTRUCTOR:
20171 type = TREE_TYPE (init);
20172 if (int_size_in_bytes (type) != size)
20173 return false;
20174 if (TREE_CODE (type) == ARRAY_TYPE)
20175 {
20176 HOST_WIDE_INT min_index;
20177 unsigned HOST_WIDE_INT cnt;
20178 int curpos = 0, fieldsize;
20179 constructor_elt *ce;
20180
20181 if (TYPE_DOMAIN (type) == NULL_TREE
20182 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20183 return false;
20184
20185 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20186 if (fieldsize <= 0)
20187 return false;
20188
20189 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20190 memset (array, '\0', size);
20191 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20192 {
20193 tree val = ce->value;
20194 tree index = ce->index;
20195 int pos = curpos;
20196 if (index && TREE_CODE (index) == RANGE_EXPR)
20197 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20198 * fieldsize;
20199 else if (index)
20200 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20201
20202 if (val)
20203 {
20204 STRIP_NOPS (val);
20205 if (!native_encode_initializer (val, array + pos, fieldsize))
20206 return false;
20207 }
20208 curpos = pos + fieldsize;
20209 if (index && TREE_CODE (index) == RANGE_EXPR)
20210 {
20211 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20212 - tree_to_shwi (TREE_OPERAND (index, 0));
20213 while (count-- > 0)
20214 {
20215 if (val)
20216 memcpy (array + curpos, array + pos, fieldsize);
20217 curpos += fieldsize;
20218 }
20219 }
20220 gcc_assert (curpos <= size);
20221 }
20222 return true;
20223 }
20224 else if (TREE_CODE (type) == RECORD_TYPE
20225 || TREE_CODE (type) == UNION_TYPE)
20226 {
20227 tree field = NULL_TREE;
20228 unsigned HOST_WIDE_INT cnt;
20229 constructor_elt *ce;
20230
20231 if (int_size_in_bytes (type) != size)
20232 return false;
20233
20234 if (TREE_CODE (type) == RECORD_TYPE)
20235 field = TYPE_FIELDS (type);
20236
20237 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20238 {
20239 tree val = ce->value;
20240 int pos, fieldsize;
20241
20242 if (ce->index != 0)
20243 field = ce->index;
20244
20245 if (val)
20246 STRIP_NOPS (val);
20247
20248 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20249 return false;
20250
20251 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20252 && TYPE_DOMAIN (TREE_TYPE (field))
20253 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20254 return false;
20255 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20256 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20257 return false;
20258 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20259 pos = int_byte_position (field);
20260 gcc_assert (pos + fieldsize <= size);
20261 if (val && fieldsize != 0
20262 && !native_encode_initializer (val, array + pos, fieldsize))
20263 return false;
20264 }
20265 return true;
20266 }
20267 return false;
20268 case VIEW_CONVERT_EXPR:
20269 case NON_LVALUE_EXPR:
20270 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20271 default:
20272 return native_encode_expr (init, array, size) == size;
20273 }
20274 }
20275
20276 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20277 attribute is the const value T. */
20278
20279 static bool
20280 tree_add_const_value_attribute (dw_die_ref die, tree t)
20281 {
20282 tree init;
20283 tree type = TREE_TYPE (t);
20284 rtx rtl;
20285
20286 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20287 return false;
20288
20289 init = t;
20290 gcc_assert (!DECL_P (init));
20291
20292 if (TREE_CODE (init) == INTEGER_CST)
20293 {
20294 if (tree_fits_uhwi_p (init))
20295 {
20296 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20297 return true;
20298 }
20299 if (tree_fits_shwi_p (init))
20300 {
20301 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20302 return true;
20303 }
20304 }
20305 if (! early_dwarf)
20306 {
20307 rtl = rtl_for_decl_init (init, type);
20308 if (rtl)
20309 return add_const_value_attribute (die, rtl);
20310 }
20311 /* If the host and target are sane, try harder. */
20312 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20313 && initializer_constant_valid_p (init, type))
20314 {
20315 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20316 if (size > 0 && (int) size == size)
20317 {
20318 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20319
20320 if (native_encode_initializer (init, array, size))
20321 {
20322 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20323 return true;
20324 }
20325 ggc_free (array);
20326 }
20327 }
20328 return false;
20329 }
20330
20331 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20332 attribute is the const value of T, where T is an integral constant
20333 variable with static storage duration
20334 (so it can't be a PARM_DECL or a RESULT_DECL). */
20335
20336 static bool
20337 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20338 {
20339
20340 if (!decl
20341 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20342 || (VAR_P (decl) && !TREE_STATIC (decl)))
20343 return false;
20344
20345 if (TREE_READONLY (decl)
20346 && ! TREE_THIS_VOLATILE (decl)
20347 && DECL_INITIAL (decl))
20348 /* OK */;
20349 else
20350 return false;
20351
20352 /* Don't add DW_AT_const_value if abstract origin already has one. */
20353 if (get_AT (var_die, DW_AT_const_value))
20354 return false;
20355
20356 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20357 }
20358
20359 /* Convert the CFI instructions for the current function into a
20360 location list. This is used for DW_AT_frame_base when we are targeting
20361 a dwarf2 consumer that does not support the dwarf3
20362 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20363 expressions. */
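/* Rough illustration, with hypothetical registers and offsets: for a
   prologue in which the CFA is first SP + 4 and later FP + 8, the returned
   list has one entry per region, each holding the expression build_cfa_loc
   produces for that CFA (conceptually DW_OP_breg<SP> 4+OFFSET, then
   DW_OP_breg<FP> 8+OFFSET), delimited by the labels at which the CFA
   changed. */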
20364
20365 static dw_loc_list_ref
20366 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20367 {
20368 int ix;
20369 dw_fde_ref fde;
20370 dw_loc_list_ref list, *list_tail;
20371 dw_cfi_ref cfi;
20372 dw_cfa_location last_cfa, next_cfa;
20373 const char *start_label, *last_label, *section;
20374 dw_cfa_location remember;
20375
20376 fde = cfun->fde;
20377 gcc_assert (fde != NULL);
20378
20379 section = secname_for_decl (current_function_decl);
20380 list_tail = &list;
20381 list = NULL;
20382
20383 memset (&next_cfa, 0, sizeof (next_cfa));
20384 next_cfa.reg = INVALID_REGNUM;
20385 remember = next_cfa;
20386
20387 start_label = fde->dw_fde_begin;
20388
20389 /* ??? Bald assumption that the CIE opcode list does not contain
20390 advance opcodes. */
20391 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20392 lookup_cfa_1 (cfi, &next_cfa, &remember);
20393
20394 last_cfa = next_cfa;
20395 last_label = start_label;
20396
20397 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20398 {
20399 /* If the first partition contained no CFI adjustments, the
20400 CIE opcodes apply to the whole first partition. */
20401 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20402 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20403 list_tail = &(*list_tail)->dw_loc_next;
20404 start_label = last_label = fde->dw_fde_second_begin;
20405 }
20406
20407 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20408 {
20409 switch (cfi->dw_cfi_opc)
20410 {
20411 case DW_CFA_set_loc:
20412 case DW_CFA_advance_loc1:
20413 case DW_CFA_advance_loc2:
20414 case DW_CFA_advance_loc4:
20415 if (!cfa_equal_p (&last_cfa, &next_cfa))
20416 {
20417 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20418 start_label, 0, last_label, 0, section);
20419
20420 list_tail = &(*list_tail)->dw_loc_next;
20421 last_cfa = next_cfa;
20422 start_label = last_label;
20423 }
20424 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20425 break;
20426
20427 case DW_CFA_advance_loc:
20428 /* The encoding is complex enough that we should never emit this. */
20429 gcc_unreachable ();
20430
20431 default:
20432 lookup_cfa_1 (cfi, &next_cfa, &remember);
20433 break;
20434 }
20435 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20436 {
20437 if (!cfa_equal_p (&last_cfa, &next_cfa))
20438 {
20439 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20440 start_label, 0, last_label, 0, section);
20441
20442 list_tail = &(*list_tail)->dw_loc_next;
20443 last_cfa = next_cfa;
20444 start_label = last_label;
20445 }
20446 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20447 start_label, 0, fde->dw_fde_end, 0, section);
20448 list_tail = &(*list_tail)->dw_loc_next;
20449 start_label = last_label = fde->dw_fde_second_begin;
20450 }
20451 }
20452
20453 if (!cfa_equal_p (&last_cfa, &next_cfa))
20454 {
20455 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20456 start_label, 0, last_label, 0, section);
20457 list_tail = &(*list_tail)->dw_loc_next;
20458 start_label = last_label;
20459 }
20460
20461 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20462 start_label, 0,
20463 fde->dw_fde_second_begin
20464 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20465 section);
20466
20467 maybe_gen_llsym (list);
20468
20469 return list;
20470 }
20471
20472 /* Compute a displacement from the "steady-state frame pointer" to the
20473 frame base (often the same as the CFA), and store it in
20474 frame_pointer_fb_offset. OFFSET is added to the displacement
20475 before the latter is negated. */
20476
20477 static void
20478 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20479 {
20480 rtx reg, elim;
20481
20482 #ifdef FRAME_POINTER_CFA_OFFSET
20483 reg = frame_pointer_rtx;
20484 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20485 #else
20486 reg = arg_pointer_rtx;
20487 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20488 #endif
20489
20490 elim = (ira_use_lra_p
20491 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20492 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20493 elim = strip_offset_and_add (elim, &offset);
20494
20495 frame_pointer_fb_offset = -offset;
20496
20497 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20498 in which to eliminate. This is because its stack pointer isn't
20499 directly accessible as a register within the ISA. To work around
20500 this, assume that while we cannot provide a proper value for
20501 frame_pointer_fb_offset, we won't need one either. */
20502 frame_pointer_fb_offset_valid
20503 = ((SUPPORTS_STACK_ALIGNMENT
20504 && (elim == hard_frame_pointer_rtx
20505 || elim == stack_pointer_rtx))
20506 || elim == (frame_pointer_needed
20507 ? hard_frame_pointer_rtx
20508 : stack_pointer_rtx));
20509 }
20510
20511 /* Generate a DW_AT_name attribute given some string value to be included as
20512 the value of the attribute. */
20513
20514 static void
20515 add_name_attribute (dw_die_ref die, const char *name_string)
20516 {
20517 if (name_string != NULL && *name_string != 0)
20518 {
20519 if (demangle_name_func)
20520 name_string = (*demangle_name_func) (name_string);
20521
20522 add_AT_string (die, DW_AT_name, name_string);
20523 }
20524 }
20525
20526 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20527 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20528 of TYPE accordingly.
20529
20530 ??? This is a temporary measure until after we're able to generate
20531 regular DWARF for the complex Ada type system. */
20532
20533 static void
20534 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20535 dw_die_ref context_die)
20536 {
20537 tree dtype;
20538 dw_die_ref dtype_die;
20539
20540 if (!lang_hooks.types.descriptive_type)
20541 return;
20542
20543 dtype = lang_hooks.types.descriptive_type (type);
20544 if (!dtype)
20545 return;
20546
20547 dtype_die = lookup_type_die (dtype);
20548 if (!dtype_die)
20549 {
20550 gen_type_die (dtype, context_die);
20551 dtype_die = lookup_type_die (dtype);
20552 gcc_assert (dtype_die);
20553 }
20554
20555 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20556 }
20557
20558 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20559
20560 static const char *
20561 comp_dir_string (void)
20562 {
20563 const char *wd;
20564 char *wd1;
20565 static const char *cached_wd = NULL;
20566
20567 if (cached_wd != NULL)
20568 return cached_wd;
20569
20570 wd = get_src_pwd ();
20571 if (wd == NULL)
20572 return NULL;
20573
20574 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20575 {
20576 int wdlen;
20577
20578 wdlen = strlen (wd);
20579 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20580 strcpy (wd1, wd);
20581 wd1 [wdlen] = DIR_SEPARATOR;
20582 wd1 [wdlen + 1] = 0;
20583 wd = wd1;
20584 }
20585
20586 cached_wd = remap_debug_filename (wd);
20587 return cached_wd;
20588 }
20589
20590 /* Generate a DW_AT_comp_dir attribute for DIE. */
20591
20592 static void
20593 add_comp_dir_attribute (dw_die_ref die)
20594 {
20595 const char * wd = comp_dir_string ();
20596 if (wd != NULL)
20597 add_AT_string (die, DW_AT_comp_dir, wd);
20598 }
20599
20600 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20601 pointer computation, ...), output a representation for that bound according
20602 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20603 loc_list_from_tree for the meaning of CONTEXT. */
20604
20605 static void
20606 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20607 int forms, struct loc_descr_context *context)
20608 {
20609 dw_die_ref context_die, decl_die;
20610 dw_loc_list_ref list;
20611 bool strip_conversions = true;
20612 bool placeholder_seen = false;
20613
20614 while (strip_conversions)
20615 switch (TREE_CODE (value))
20616 {
20617 case ERROR_MARK:
20618 case SAVE_EXPR:
20619 return;
20620
20621 CASE_CONVERT:
20622 case VIEW_CONVERT_EXPR:
20623 value = TREE_OPERAND (value, 0);
20624 break;
20625
20626 default:
20627 strip_conversions = false;
20628 break;
20629 }
20630
20631 /* If possible and permitted, output the attribute as a constant. */
20632 if ((forms & dw_scalar_form_constant) != 0
20633 && TREE_CODE (value) == INTEGER_CST)
20634 {
20635 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20636
20637 /* If HOST_WIDE_INT is big enough then represent the bound as
20638 a constant value. We need to choose a form based on
20639 whether the type is signed or unsigned. We cannot just
20640 call add_AT_unsigned if the value itself is positive
20641 (add_AT_unsigned might add the unsigned value encoded as
20642 DW_FORM_data[1248]). Some DWARF consumers will look up the
20643 bounds type and then sign-extend any unsigned values found
20644 for signed types. This is needed only for
20645 DW_AT_{lower,upper}_bound, since for most other attributes,
20646 consumers will treat DW_FORM_data[1248] as unsigned values,
20647 regardless of the underlying type. */
20648 if (prec <= HOST_BITS_PER_WIDE_INT
20649 || tree_fits_uhwi_p (value))
20650 {
20651 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20652 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20653 else
20654 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20655 }
20656 else
20657 /* Otherwise represent the bound as an unsigned value with
20658 the precision of its type. The precision and signedness
20659 of the type will be necessary to re-interpret it
20660 unambiguously. */
20661 add_AT_wide (die, attr, wi::to_wide (value));
20662 return;
20663 }
20664
20665 /* Otherwise, if it's possible and permitted too, output a reference to
20666 another DIE. */
20667 if ((forms & dw_scalar_form_reference) != 0)
20668 {
20669 tree decl = NULL_TREE;
20670
20671 /* Some type attributes reference an outer type. For instance, the upper
20672 bound of an array may reference an embedding record (this happens in
20673 Ada). */
20674 if (TREE_CODE (value) == COMPONENT_REF
20675 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20676 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20677 decl = TREE_OPERAND (value, 1);
20678
20679 else if (VAR_P (value)
20680 || TREE_CODE (value) == PARM_DECL
20681 || TREE_CODE (value) == RESULT_DECL)
20682 decl = value;
20683
20684 if (decl != NULL_TREE)
20685 {
20686 dw_die_ref decl_die = lookup_decl_die (decl);
20687
20688 /* ??? Can this happen, or should the variable have been bound
20689 first? Probably it can, since I imagine that we try to create
20690 the types of parameters in the order in which they exist in
20691 the list, and won't have created a forward reference to a
20692 later parameter. */
20693 if (decl_die != NULL)
20694 {
20695 add_AT_die_ref (die, attr, decl_die);
20696 return;
20697 }
20698 }
20699 }
20700
20701 /* Last chance: try to create a stack operation procedure to evaluate the
20702 value. Do nothing if even that is not possible or permitted. */
20703 if ((forms & dw_scalar_form_exprloc) == 0)
20704 return;
20705
20706 list = loc_list_from_tree (value, 2, context);
20707 if (context && context->placeholder_arg)
20708 {
20709 placeholder_seen = context->placeholder_seen;
20710 context->placeholder_seen = false;
20711 }
20712 if (list == NULL || single_element_loc_list_p (list))
20713 {
20714 /* If this attribute is neither a reference nor a constant, it is
20715 a DWARF expression rather than a location description. For that,
20716 loc_list_from_tree (value, 0, context) is needed. */
20717 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20718 if (list2 && single_element_loc_list_p (list2))
20719 {
20720 if (placeholder_seen)
20721 {
20722 struct dwarf_procedure_info dpi;
20723 dpi.fndecl = NULL_TREE;
20724 dpi.args_count = 1;
20725 if (!resolve_args_picking (list2->expr, 1, &dpi))
20726 return;
20727 }
20728 add_AT_loc (die, attr, list2->expr);
20729 return;
20730 }
20731 }
20732
20733 /* If that failed to give a single element location list, fall back to
20734 outputting this as a reference... still if permitted. */
20735 if (list == NULL
20736 || (forms & dw_scalar_form_reference) == 0
20737 || placeholder_seen)
20738 return;
20739
20740 if (current_function_decl == 0)
20741 context_die = comp_unit_die ();
20742 else
20743 context_die = lookup_decl_die (current_function_decl);
20744
20745 decl_die = new_die (DW_TAG_variable, context_die, value);
20746 add_AT_flag (decl_die, DW_AT_artificial, 1);
20747 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20748 context_die);
20749 add_AT_location_description (decl_die, DW_AT_location, list);
20750 add_AT_die_ref (die, attr, decl_die);
20751 }
20752
20753 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20754 default. */
20755
20756 static int
20757 lower_bound_default (void)
20758 {
20759 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20760 {
20761 case DW_LANG_C:
20762 case DW_LANG_C89:
20763 case DW_LANG_C99:
20764 case DW_LANG_C11:
20765 case DW_LANG_C_plus_plus:
20766 case DW_LANG_C_plus_plus_11:
20767 case DW_LANG_C_plus_plus_14:
20768 case DW_LANG_ObjC:
20769 case DW_LANG_ObjC_plus_plus:
20770 return 0;
20771 case DW_LANG_Fortran77:
20772 case DW_LANG_Fortran90:
20773 case DW_LANG_Fortran95:
20774 case DW_LANG_Fortran03:
20775 case DW_LANG_Fortran08:
20776 return 1;
20777 case DW_LANG_UPC:
20778 case DW_LANG_D:
20779 case DW_LANG_Python:
20780 return dwarf_version >= 4 ? 0 : -1;
20781 case DW_LANG_Ada95:
20782 case DW_LANG_Ada83:
20783 case DW_LANG_Cobol74:
20784 case DW_LANG_Cobol85:
20785 case DW_LANG_Modula2:
20786 case DW_LANG_PLI:
20787 return dwarf_version >= 4 ? 1 : -1;
20788 default:
20789 return -1;
20790 }
20791 }
20792
20793 /* Given a tree node describing an array bound (either lower or upper) output
20794 a representation for that bound. */
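/* For example, a C99 array int a[10] has lower bound 0, which matches the
   language default returned by lower_bound_default, so only
   DW_AT_upper_bound (9) needs to be emitted; a Fortran dimension such as
   A(1:10) likewise omits its default lower bound of 1. */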
20795
20796 static void
20797 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20798 tree bound, struct loc_descr_context *context)
20799 {
20800 int dflt;
20801
20802 while (1)
20803 switch (TREE_CODE (bound))
20804 {
20805 /* Strip all conversions. */
20806 CASE_CONVERT:
20807 case VIEW_CONVERT_EXPR:
20808 bound = TREE_OPERAND (bound, 0);
20809 break;
20810
20811 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20812 are even omitted when they are the default. */
20813 case INTEGER_CST:
20814 /* If the value for this bound is the default one, we can even omit the
20815 attribute. */
20816 if (bound_attr == DW_AT_lower_bound
20817 && tree_fits_shwi_p (bound)
20818 && (dflt = lower_bound_default ()) != -1
20819 && tree_to_shwi (bound) == dflt)
20820 return;
20821
20822 /* FALLTHRU */
20823
20824 default:
20825 /* Because of the complex interactions there can be with other GNAT
20826 encodings, GDB isn't ready yet to handle a proper DWARF description
20827 for self-referential subrange bounds: let GNAT encodings do the
20828 magic in such a case. */
20829 if (is_ada ()
20830 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20831 && contains_placeholder_p (bound))
20832 return;
20833
20834 add_scalar_info (subrange_die, bound_attr, bound,
20835 dw_scalar_form_constant
20836 | dw_scalar_form_exprloc
20837 | dw_scalar_form_reference,
20838 context);
20839 return;
20840 }
20841 }
20842
20843 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20844 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20845 Note that the block of subscript information for an array type also
20846 includes information about the element type of the given array type.
20847
20848 This function reuses previously set type and bound information if
20849 available. */
20850
20851 static void
20852 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20853 {
20854 unsigned dimension_number;
20855 tree lower, upper;
20856 dw_die_ref child = type_die->die_child;
20857
20858 for (dimension_number = 0;
20859 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20860 type = TREE_TYPE (type), dimension_number++)
20861 {
20862 tree domain = TYPE_DOMAIN (type);
20863
20864 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20865 break;
20866
20867 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20868 and (in GNU C only) variable bounds. Handle all three forms
20869 here. */
20870
20871 /* Find and reuse a previously generated DW_TAG_subrange_type if
20872 available.
20873
20874 For multi-dimensional arrays, as we iterate through the
20875 various dimensions in the enclosing for loop above, we also
20876 iterate through the DIE children and pick at each
20877 DW_TAG_subrange_type previously generated (if available).
20878 Each child DW_TAG_subrange_type DIE describes the range of
20879 the current dimension. At this point we should have as many
20880 DW_TAG_subrange_type's as we have dimensions in the
20881 array. */
20882 dw_die_ref subrange_die = NULL;
20883 if (child)
20884 while (1)
20885 {
20886 child = child->die_sib;
20887 if (child->die_tag == DW_TAG_subrange_type)
20888 subrange_die = child;
20889 if (child == type_die->die_child)
20890 {
20891 /* If we wrapped around, stop looking next time. */
20892 child = NULL;
20893 break;
20894 }
20895 if (child->die_tag == DW_TAG_subrange_type)
20896 break;
20897 }
20898 if (!subrange_die)
20899 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20900
20901 if (domain)
20902 {
20903 /* We have an array type with specified bounds. */
20904 lower = TYPE_MIN_VALUE (domain);
20905 upper = TYPE_MAX_VALUE (domain);
20906
20907 /* Define the index type. */
20908 if (TREE_TYPE (domain)
20909 && !get_AT (subrange_die, DW_AT_type))
20910 {
20911 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20912 TREE_TYPE field. We can't emit debug info for this
20913 because it is an unnamed integral type. */
20914 if (TREE_CODE (domain) == INTEGER_TYPE
20915 && TYPE_NAME (domain) == NULL_TREE
20916 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20917 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20918 ;
20919 else
20920 add_type_attribute (subrange_die, TREE_TYPE (domain),
20921 TYPE_UNQUALIFIED, false, type_die);
20922 }
20923
20924 /* ??? If upper is NULL, the array has unspecified length,
20925 but it does have a lower bound. This happens with Fortran
20926 dimension arr(N:*)
20927 Since the debugger is definitely going to need to know N
20928 to produce useful results, go ahead and output the lower
20929 bound solo, and hope the debugger can cope. */
20930
20931 if (!get_AT (subrange_die, DW_AT_lower_bound))
20932 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20933 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20934 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20935 }
20936
20937 /* Otherwise we have an array type with an unspecified length. The
20938 DWARF-2 spec does not say how to handle this; let's just leave out the
20939 bounds. */
20940 }
20941 }
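
/* A sketch of the collapsing behaviour described above: for a C
   declaration such as

     int m[2][3];

   one would expect a single DW_TAG_array_type DIE with two
   DW_TAG_subrange_type children, roughly

     DW_TAG_array_type
       DW_AT_type <int>
       DW_TAG_subrange_type  DW_AT_upper_bound 1
       DW_TAG_subrange_type  DW_AT_upper_bound 2

   (the default lower bound 0 being omitted), whereas for Ada, where
   COLLAPSE_P is false, each dimension keeps its own array type DIE.  */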
20942
20943 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20944
20945 static void
20946 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20947 {
20948 dw_die_ref decl_die;
20949 HOST_WIDE_INT size;
20950 dw_loc_descr_ref size_expr = NULL;
20951
20952 switch (TREE_CODE (tree_node))
20953 {
20954 case ERROR_MARK:
20955 size = 0;
20956 break;
20957 case ENUMERAL_TYPE:
20958 case RECORD_TYPE:
20959 case UNION_TYPE:
20960 case QUAL_UNION_TYPE:
20961 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20962 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20963 {
20964 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20965 return;
20966 }
20967 size_expr = type_byte_size (tree_node, &size);
20968 break;
20969 case FIELD_DECL:
20970 /* For a data member of a struct or union, the DW_AT_byte_size is
20971 generally given as the number of bytes normally allocated for an
20972 object of the *declared* type of the member itself. This is true
20973 even for bit-fields. */
20974 size = int_size_in_bytes (field_type (tree_node));
20975 break;
20976 default:
20977 gcc_unreachable ();
20978 }
20979
20980 /* Support for dynamically-sized objects was introduced by DWARFv3.
20981 At the moment, GDB does not handle variable byte sizes very well,
20982 though. */
20983 if ((dwarf_version >= 3 || !dwarf_strict)
20984 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20985 && size_expr != NULL)
20986 add_AT_loc (die, DW_AT_byte_size, size_expr);
20987
20988 /* Note that `size' might be -1 when we get to this point. If it is, that
20989 indicates that the byte size of the entity in question is variable and
20990 that we could not generate a DWARF expression that computes it. */
20991 if (size >= 0)
20992 add_AT_unsigned (die, DW_AT_byte_size, size);
20993 }
20994
20995 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20996 alignment. */
20997
20998 static void
20999 add_alignment_attribute (dw_die_ref die, tree tree_node)
21000 {
21001 if (dwarf_version < 5 && dwarf_strict)
21002 return;
21003
21004 unsigned align;
21005
21006 if (DECL_P (tree_node))
21007 {
21008 if (!DECL_USER_ALIGN (tree_node))
21009 return;
21010
21011 align = DECL_ALIGN_UNIT (tree_node);
21012 }
21013 else if (TYPE_P (tree_node))
21014 {
21015 if (!TYPE_USER_ALIGN (tree_node))
21016 return;
21017
21018 align = TYPE_ALIGN_UNIT (tree_node);
21019 }
21020 else
21021 gcc_unreachable ();
21022
21023 add_AT_unsigned (die, DW_AT_alignment, align);
21024 }
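
/* For instance, given a user-specified alignment such as

     _Alignas (16) static char buf[32];

   the variable's DIE would be expected to carry DW_AT_alignment 16 when
   DWARF 5 is in use (or when -gstrict-dwarf is not given), while the
   default ABI alignment produces no attribute at all.  */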
21025
21026 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21027 which specifies the distance in bits from the highest order bit of the
21028 "containing object" for the bit-field to the highest order bit of the
21029 bit-field itself.
21030
21031 For any given bit-field, the "containing object" is a hypothetical object
21032 (of some integral or enum type) within which the given bit-field lives. The
21033 type of this hypothetical "containing object" is always the same as the
21034 declared type of the individual bit-field itself. The determination of the
21035 exact location of the "containing object" for a bit-field is rather
21036 complicated. It's handled by the `field_byte_offset' function (above).
21037
21038 CTX is required: see the comment for VLR_CONTEXT.
21039
21040 Note that it is the size (in bytes) of the hypothetical "containing object"
21041 which will be given in the DW_AT_byte_size attribute for this bit-field.
21042    (See `add_byte_size_attribute' above).  */
21043
21044 static inline void
21045 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21046 {
21047 HOST_WIDE_INT object_offset_in_bytes;
21048 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21049 HOST_WIDE_INT bitpos_int;
21050 HOST_WIDE_INT highest_order_object_bit_offset;
21051 HOST_WIDE_INT highest_order_field_bit_offset;
21052 HOST_WIDE_INT bit_offset;
21053
21054 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21055
21056 /* Must be a field and a bit field. */
21057 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21058
21059 /* We can't yet handle bit-fields whose offsets are variable, so if we
21060 encounter such things, just return without generating any attribute
21061 whatsoever. Likewise for variable or too large size. */
21062 if (! tree_fits_shwi_p (bit_position (decl))
21063 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21064 return;
21065
21066 bitpos_int = int_bit_position (decl);
21067
21068 /* Note that the bit offset is always the distance (in bits) from the
21069 highest-order bit of the "containing object" to the highest-order bit of
21070 the bit-field itself. Since the "high-order end" of any object or field
21071 is different on big-endian and little-endian machines, the computation
21072 below must take account of these differences. */
21073 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21074 highest_order_field_bit_offset = bitpos_int;
21075
21076 if (! BYTES_BIG_ENDIAN)
21077 {
21078 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21079 highest_order_object_bit_offset +=
21080 simple_type_size_in_bits (original_type);
21081 }
21082
21083 bit_offset
21084 = (! BYTES_BIG_ENDIAN
21085 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21086 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21087
21088 if (bit_offset < 0)
21089 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21090 else
21091 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21092 }
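
/* A worked example of the computation above, assuming a little-endian
   target with a 32-bit int and this traditional (pre-DWARF 4) bit-field
   encoding:

     struct s { unsigned int a : 3; unsigned int b : 5; };

   Both fields live in a 32-bit "containing object" at byte offset 0, so
   one would expect roughly

     a: DW_AT_byte_size 4, DW_AT_bit_size 3, DW_AT_bit_offset 29
     b: DW_AT_byte_size 4, DW_AT_bit_size 5, DW_AT_bit_offset 24

   i.e. 32 - (0 + 3) and 32 - (3 + 5) respectively.  On a big-endian
   target the distances are measured from the other end, as described in
   the comment above.  */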
21093
21094 /* For a FIELD_DECL node which represents a bit field, output an attribute
21095 which specifies the length in bits of the given field. */
21096
21097 static inline void
21098 add_bit_size_attribute (dw_die_ref die, tree decl)
21099 {
21100 /* Must be a field and a bit field. */
21101 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21102 && DECL_BIT_FIELD_TYPE (decl));
21103
21104 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21105 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21106 }
21107
21108 /* If the compiled language is ANSI C, then add a 'prototyped'
21109    attribute if argument types are given for the function's parameters.  */
21110
21111 static inline void
21112 add_prototyped_attribute (dw_die_ref die, tree func_type)
21113 {
21114 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21115 {
21116 case DW_LANG_C:
21117 case DW_LANG_C89:
21118 case DW_LANG_C99:
21119 case DW_LANG_C11:
21120 case DW_LANG_ObjC:
21121 if (prototype_p (func_type))
21122 add_AT_flag (die, DW_AT_prototyped, 1);
21123 break;
21124 default:
21125 break;
21126 }
21127 }
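
/* For example, in a C translation unit a prototyped declaration such as

     int f (int, char *);

   (or "int g (void);") is expected to get DW_AT_prototyped 1 on its
   DW_TAG_subprogram DIE, while an old-style, unprototyped "int h ();"
   gets no such attribute.  */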
21128
21129 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21130 by looking in the type declaration, the object declaration equate table or
21131 the block mapping. */
21132
21133 static inline dw_die_ref
21134 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21135 {
21136 dw_die_ref origin_die = NULL;
21137
21138 if (DECL_P (origin))
21139 {
21140 dw_die_ref c;
21141 origin_die = lookup_decl_die (origin);
21142 /* "Unwrap" the decls DIE which we put in the imported unit context.
21143 We are looking for the abstract copy here. */
21144 if (in_lto_p
21145 && origin_die
21146 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21147 /* ??? Identify this better. */
21148 && c->with_offset)
21149 origin_die = c;
21150 }
21151 else if (TYPE_P (origin))
21152 origin_die = lookup_type_die (origin);
21153 else if (TREE_CODE (origin) == BLOCK)
21154 origin_die = BLOCK_DIE (origin);
21155
21156 /* XXX: Functions that are never lowered don't always have correct block
21157    trees (in the case of Java, and of some other languages, they simply have
21158    no block tree).  For these functions, there is nothing we can really do to
21159 output correct debug info for inlined functions in all cases. Rather
21160 than die, we'll just produce deficient debug info now, in that we will
21161 have variables without a proper abstract origin. In the future, when all
21162 functions are lowered, we should re-add a gcc_assert (origin_die)
21163 here. */
21164
21165 if (origin_die)
21166 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21167 return origin_die;
21168 }
21169
21170 /* We do not currently support the pure_virtual attribute. */
21171
21172 static inline void
21173 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21174 {
21175 if (DECL_VINDEX (func_decl))
21176 {
21177 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21178
21179 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21180 add_AT_loc (die, DW_AT_vtable_elem_location,
21181 new_loc_descr (DW_OP_constu,
21182 tree_to_shwi (DECL_VINDEX (func_decl)),
21183 0));
21184
21185 /* GNU extension: Record what type this method came from originally. */
21186 if (debug_info_level > DINFO_LEVEL_TERSE
21187 && DECL_CONTEXT (func_decl))
21188 add_AT_die_ref (die, DW_AT_containing_type,
21189 lookup_type_die (DECL_CONTEXT (func_decl)));
21190 }
21191 }
21192 \f
21193 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21194    given decl.  This used to be a vendor extension until DWARF 4
21195 standardized it. */
21196
21197 static void
21198 add_linkage_attr (dw_die_ref die, tree decl)
21199 {
21200 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21201
21202 /* Mimic what assemble_name_raw does with a leading '*'. */
21203 if (name[0] == '*')
21204 name = &name[1];
21205
21206 if (dwarf_version >= 4)
21207 add_AT_string (die, DW_AT_linkage_name, name);
21208 else
21209 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21210 }
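
/* To illustrate, for a C++ declaration like

     namespace N { int v; }

   the assembler name differs from DECL_NAME (see the callers below), so
   with the Itanium C++ ABI one would expect something like
   DW_AT_linkage_name "_ZN1N1vE" for DWARF 4 and later, and
   DW_AT_MIPS_linkage_name with the same value for older DWARF
   versions.  */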
21211
21212 /* Add source coordinate attributes for the given decl. */
21213
21214 static void
21215 add_src_coords_attributes (dw_die_ref die, tree decl)
21216 {
21217 expanded_location s;
21218
21219 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21220 return;
21221 s = expand_location (DECL_SOURCE_LOCATION (decl));
21222 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21223 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21224 if (debug_column_info && s.column)
21225 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21226 }
21227
21228 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21229
21230 static void
21231 add_linkage_name_raw (dw_die_ref die, tree decl)
21232 {
21233 /* Defer until we have an assembler name set. */
21234 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21235 {
21236 limbo_die_node *asm_name;
21237
21238 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21239 asm_name->die = die;
21240 asm_name->created_for = decl;
21241 asm_name->next = deferred_asm_name;
21242 deferred_asm_name = asm_name;
21243 }
21244 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21245 add_linkage_attr (die, decl);
21246 }
21247
21248 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21249
21250 static void
21251 add_linkage_name (dw_die_ref die, tree decl)
21252 {
21253 if (debug_info_level > DINFO_LEVEL_NONE
21254 && VAR_OR_FUNCTION_DECL_P (decl)
21255 && TREE_PUBLIC (decl)
21256 && !(VAR_P (decl) && DECL_REGISTER (decl))
21257 && die->die_tag != DW_TAG_member)
21258 add_linkage_name_raw (die, decl);
21259 }
21260
21261 /* Add a DW_AT_name attribute and source coordinate attribute for the
21262 given decl, but only if it actually has a name. */
21263
21264 static void
21265 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21266 bool no_linkage_name)
21267 {
21268 tree decl_name;
21269
21270 decl_name = DECL_NAME (decl);
21271 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21272 {
21273 const char *name = dwarf2_name (decl, 0);
21274 if (name)
21275 add_name_attribute (die, name);
21276 if (! DECL_ARTIFICIAL (decl))
21277 add_src_coords_attributes (die, decl);
21278
21279 if (!no_linkage_name)
21280 add_linkage_name (die, decl);
21281 }
21282
21283 #ifdef VMS_DEBUGGING_INFO
21284 /* Get the function's name, as described by its RTL. This may be different
21285 from the DECL_NAME name used in the source file. */
21286 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21287 {
21288 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21289 XEXP (DECL_RTL (decl), 0), false);
21290 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21291 }
21292 #endif /* VMS_DEBUGGING_INFO */
21293 }
21294
21295 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21296
21297 static void
21298 add_discr_value (dw_die_ref die, dw_discr_value *value)
21299 {
21300 dw_attr_node attr;
21301
21302 attr.dw_attr = DW_AT_discr_value;
21303 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21304 attr.dw_attr_val.val_entry = NULL;
21305 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21306 if (value->pos)
21307 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21308 else
21309 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21310 add_dwarf_attr (die, &attr);
21311 }
21312
21313 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21314
21315 static void
21316 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21317 {
21318 dw_attr_node attr;
21319
21320 attr.dw_attr = DW_AT_discr_list;
21321 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21322 attr.dw_attr_val.val_entry = NULL;
21323 attr.dw_attr_val.v.val_discr_list = discr_list;
21324 add_dwarf_attr (die, &attr);
21325 }
21326
21327 static inline dw_discr_list_ref
21328 AT_discr_list (dw_attr_node *attr)
21329 {
21330 return attr->dw_attr_val.v.val_discr_list;
21331 }
21332
21333 #ifdef VMS_DEBUGGING_INFO
21334 /* Output the debug main pointer die for VMS */
21335
21336 void
21337 dwarf2out_vms_debug_main_pointer (void)
21338 {
21339 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21340 dw_die_ref die;
21341
21342 /* Allocate the VMS debug main subprogram die. */
21343 die = new_die_raw (DW_TAG_subprogram);
21344 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21345 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21346 current_function_funcdef_no);
21347 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21348
21349 /* Make it the first child of comp_unit_die (). */
21350 die->die_parent = comp_unit_die ();
21351 if (comp_unit_die ()->die_child)
21352 {
21353 die->die_sib = comp_unit_die ()->die_child->die_sib;
21354 comp_unit_die ()->die_child->die_sib = die;
21355 }
21356 else
21357 {
21358 die->die_sib = die;
21359 comp_unit_die ()->die_child = die;
21360 }
21361 }
21362 #endif /* VMS_DEBUGGING_INFO */
21363
21364 /* Push a new declaration scope. */
21365
21366 static void
21367 push_decl_scope (tree scope)
21368 {
21369 vec_safe_push (decl_scope_table, scope);
21370 }
21371
21372 /* Pop a declaration scope. */
21373
21374 static inline void
21375 pop_decl_scope (void)
21376 {
21377 decl_scope_table->pop ();
21378 }
21379
21380 /* walk_tree helper function for uses_local_type, below. */
21381
21382 static tree
21383 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21384 {
21385 if (!TYPE_P (*tp))
21386 *walk_subtrees = 0;
21387 else
21388 {
21389 tree name = TYPE_NAME (*tp);
21390 if (name && DECL_P (name) && decl_function_context (name))
21391 return *tp;
21392 }
21393 return NULL_TREE;
21394 }
21395
21396 /* If TYPE involves a function-local type (including a local typedef to a
21397 non-local type), returns that type; otherwise returns NULL_TREE. */
21398
21399 static tree
21400 uses_local_type (tree type)
21401 {
21402 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21403 return used;
21404 }
21405
21406 /* Return the DIE for the scope that immediately contains this type.
21407 Non-named types that do not involve a function-local type get global
21408 scope. Named types nested in namespaces or other types get their
21409 containing scope. All other types (i.e. function-local named types) get
21410 the current active scope. */
21411
21412 static dw_die_ref
21413 scope_die_for (tree t, dw_die_ref context_die)
21414 {
21415 dw_die_ref scope_die = NULL;
21416 tree containing_scope;
21417
21418 /* Non-types always go in the current scope. */
21419 gcc_assert (TYPE_P (t));
21420
21421 /* Use the scope of the typedef, rather than the scope of the type
21422 it refers to. */
21423 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21424 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21425 else
21426 containing_scope = TYPE_CONTEXT (t);
21427
21428 /* Use the containing namespace if there is one. */
21429 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21430 {
21431 if (context_die == lookup_decl_die (containing_scope))
21432 /* OK */;
21433 else if (debug_info_level > DINFO_LEVEL_TERSE)
21434 context_die = get_context_die (containing_scope);
21435 else
21436 containing_scope = NULL_TREE;
21437 }
21438
21439 /* Ignore function type "scopes" from the C frontend. They mean that
21440 a tagged type is local to a parmlist of a function declarator, but
21441 that isn't useful to DWARF. */
21442 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21443 containing_scope = NULL_TREE;
21444
21445 if (SCOPE_FILE_SCOPE_P (containing_scope))
21446 {
21447 /* If T uses a local type keep it local as well, to avoid references
21448 to function-local DIEs from outside the function. */
21449 if (current_function_decl && uses_local_type (t))
21450 scope_die = context_die;
21451 else
21452 scope_die = comp_unit_die ();
21453 }
21454 else if (TYPE_P (containing_scope))
21455 {
21456 /* For types, we can just look up the appropriate DIE. */
21457 if (debug_info_level > DINFO_LEVEL_TERSE)
21458 scope_die = get_context_die (containing_scope);
21459 else
21460 {
21461 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21462 if (scope_die == NULL)
21463 scope_die = comp_unit_die ();
21464 }
21465 }
21466 else
21467 scope_die = context_die;
21468
21469 return scope_die;
21470 }
21471
21472 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21473
21474 static inline int
21475 local_scope_p (dw_die_ref context_die)
21476 {
21477 for (; context_die; context_die = context_die->die_parent)
21478 if (context_die->die_tag == DW_TAG_inlined_subroutine
21479 || context_die->die_tag == DW_TAG_subprogram)
21480 return 1;
21481
21482 return 0;
21483 }
21484
21485 /* Returns nonzero if CONTEXT_DIE is a class. */
21486
21487 static inline int
21488 class_scope_p (dw_die_ref context_die)
21489 {
21490 return (context_die
21491 && (context_die->die_tag == DW_TAG_structure_type
21492 || context_die->die_tag == DW_TAG_class_type
21493 || context_die->die_tag == DW_TAG_interface_type
21494 || context_die->die_tag == DW_TAG_union_type));
21495 }
21496
21497 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21498 whether or not to treat a DIE in this context as a declaration. */
21499
21500 static inline int
21501 class_or_namespace_scope_p (dw_die_ref context_die)
21502 {
21503 return (class_scope_p (context_die)
21504 || (context_die && context_die->die_tag == DW_TAG_namespace));
21505 }
21506
21507 /* Many forms of DIEs require a "type description" attribute. This
21508 routine locates the proper "type descriptor" die for the type given
21509 by 'type' plus any additional qualifiers given by 'cv_quals', and
21510 adds a DW_AT_type attribute below the given die. */
21511
21512 static void
21513 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21514 bool reverse, dw_die_ref context_die)
21515 {
21516 enum tree_code code = TREE_CODE (type);
21517 dw_die_ref type_die = NULL;
21518
21519 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21520 or fixed-point type, use the inner type. This is because we have no
21521 support for unnamed types in base_type_die. This can happen if this is
21522      an Ada subrange type.  The correct solution is to emit a subrange type DIE.  */
21523 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21524 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21525 type = TREE_TYPE (type), code = TREE_CODE (type);
21526
21527 if (code == ERROR_MARK
21528 /* Handle a special case. For functions whose return type is void, we
21529 generate *no* type attribute. (Note that no object may have type
21530 `void', so this only applies to function return types). */
21531 || code == VOID_TYPE)
21532 return;
21533
21534 type_die = modified_type_die (type,
21535 cv_quals | TYPE_QUALS (type),
21536 reverse,
21537 context_die);
21538
21539 if (type_die != NULL)
21540 add_AT_die_ref (object_die, DW_AT_type, type_die);
21541 }
21542
21543 /* Given an object die, add the calling convention attribute for the
21544 function call type. */
21545 static void
21546 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21547 {
21548 enum dwarf_calling_convention value = DW_CC_normal;
21549
21550 value = ((enum dwarf_calling_convention)
21551 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21552
21553 if (is_fortran ()
21554 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21555 {
21556 /* DWARF 2 doesn't provide a way to identify a program's source-level
21557 entry point. DW_AT_calling_convention attributes are only meant
21558 to describe functions' calling conventions. However, lacking a
21559 better way to signal the Fortran main program, we used this for
21560 a long time, following existing custom. Now, DWARF 4 has
21561 DW_AT_main_subprogram, which we add below, but some tools still
21562 rely on the old way, which we thus keep. */
21563 value = DW_CC_program;
21564
21565 if (dwarf_version >= 4 || !dwarf_strict)
21566 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21567 }
21568
21569 /* Only add the attribute if the backend requests it, and
21570 is not DW_CC_normal. */
21571 if (value && (value != DW_CC_normal))
21572 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21573 }
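
/* Concretely, for a Fortran main program

     program p
     end program p

   gfortran names the generated function MAIN__, so its DW_TAG_subprogram
   DIE is expected to carry DW_AT_calling_convention DW_CC_program and,
   for DWARF 4 or non-strict DWARF, DW_AT_main_subprogram as well.  */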
21574
21575 /* Given a tree pointer to a struct, class, union, or enum type node, return
21576 a pointer to the (string) tag name for the given type, or zero if the type
21577 was declared without a tag. */
21578
21579 static const char *
21580 type_tag (const_tree type)
21581 {
21582 const char *name = 0;
21583
21584 if (TYPE_NAME (type) != 0)
21585 {
21586 tree t = 0;
21587
21588 /* Find the IDENTIFIER_NODE for the type name. */
21589 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21590 && !TYPE_NAMELESS (type))
21591 t = TYPE_NAME (type);
21592
21593 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21594 a TYPE_DECL node, regardless of whether or not a `typedef' was
21595 involved. */
21596 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21597 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21598 {
21599 /* We want to be extra verbose. Don't call dwarf_name if
21600 DECL_NAME isn't set. The default hook for decl_printable_name
21601 doesn't like that, and in this context it's correct to return
21602 0, instead of "<anonymous>" or the like. */
21603 if (DECL_NAME (TYPE_NAME (type))
21604 && !DECL_NAMELESS (TYPE_NAME (type)))
21605 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21606 }
21607
21608 /* Now get the name as a string, or invent one. */
21609 if (!name && t != 0)
21610 name = IDENTIFIER_POINTER (t);
21611 }
21612
21613 return (name == 0 || *name == '\0') ? 0 : name;
21614 }
21615
21616 /* Return the type associated with a data member, making a special check
21617    for bit-field types.  */
21618
21619 static inline tree
21620 member_declared_type (const_tree member)
21621 {
21622 return (DECL_BIT_FIELD_TYPE (member)
21623 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21624 }
21625
21626 /* Get the decl's label, as described by its RTL. This may be different
21627 from the DECL_NAME name used in the source file. */
21628
21629 #if 0
21630 static const char *
21631 decl_start_label (tree decl)
21632 {
21633 rtx x;
21634 const char *fnname;
21635
21636 x = DECL_RTL (decl);
21637 gcc_assert (MEM_P (x));
21638
21639 x = XEXP (x, 0);
21640 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21641
21642 fnname = XSTR (x, 0);
21643 return fnname;
21644 }
21645 #endif
21646 \f
21647 /* For variable-length arrays that have been previously generated, but
21648 may be incomplete due to missing subscript info, fill the subscript
21649 info. Return TRUE if this is one of those cases. */
21650 static bool
21651 fill_variable_array_bounds (tree type)
21652 {
21653 if (TREE_ASM_WRITTEN (type)
21654 && TREE_CODE (type) == ARRAY_TYPE
21655 && variably_modified_type_p (type, NULL))
21656 {
21657 dw_die_ref array_die = lookup_type_die (type);
21658 if (!array_die)
21659 return false;
21660 add_subscript_info (array_die, type, !is_ada ());
21661 return true;
21662 }
21663 return false;
21664 }
21665
21666 /* These routines generate the internal representation of the DIE's for
21667 the compilation unit. Debugging information is collected by walking
21668 the declaration trees passed in from dwarf2out_decl(). */
21669
21670 static void
21671 gen_array_type_die (tree type, dw_die_ref context_die)
21672 {
21673 dw_die_ref array_die;
21674
21675 /* GNU compilers represent multidimensional array types as sequences of one
21676 dimensional array types whose element types are themselves array types.
21677 We sometimes squish that down to a single array_type DIE with multiple
21678 subscripts in the Dwarf debugging info. The draft Dwarf specification
21679      says that we are allowed to do this kind of compression in C, because
21680      there is no difference between an array of arrays and a multidimensional
21681      array.  We don't do this for Ada, to remain as close as possible to the
21682      actual representation, which is especially important given the language's
21683      flexibility with regard to arrays of variable size.  */
21684
21685 bool collapse_nested_arrays = !is_ada ();
21686
21687 if (fill_variable_array_bounds (type))
21688 return;
21689
21690 dw_die_ref scope_die = scope_die_for (type, context_die);
21691 tree element_type;
21692
21693 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21694 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21695 if (TYPE_STRING_FLAG (type)
21696 && TREE_CODE (type) == ARRAY_TYPE
21697 && is_fortran ()
21698 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21699 {
21700 HOST_WIDE_INT size;
21701
21702 array_die = new_die (DW_TAG_string_type, scope_die, type);
21703 add_name_attribute (array_die, type_tag (type));
21704 equate_type_number_to_die (type, array_die);
21705 size = int_size_in_bytes (type);
21706 if (size >= 0)
21707 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21708 /* ??? We can't annotate types late, but for LTO we may not
21709 generate a location early either (gfortran.dg/save_6.f90). */
21710 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21711 && TYPE_DOMAIN (type) != NULL_TREE
21712 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21713 {
21714 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21715 tree rszdecl = szdecl;
21716
21717 size = int_size_in_bytes (TREE_TYPE (szdecl));
21718 if (!DECL_P (szdecl))
21719 {
21720 if (TREE_CODE (szdecl) == INDIRECT_REF
21721 && DECL_P (TREE_OPERAND (szdecl, 0)))
21722 {
21723 rszdecl = TREE_OPERAND (szdecl, 0);
21724 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21725 != DWARF2_ADDR_SIZE)
21726 size = 0;
21727 }
21728 else
21729 size = 0;
21730 }
21731 if (size > 0)
21732 {
21733 dw_loc_list_ref loc
21734 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21735 NULL);
21736 if (loc)
21737 {
21738 add_AT_location_description (array_die, DW_AT_string_length,
21739 loc);
21740 if (size != DWARF2_ADDR_SIZE)
21741 add_AT_unsigned (array_die, dwarf_version >= 5
21742 ? DW_AT_string_length_byte_size
21743 : DW_AT_byte_size, size);
21744 }
21745 }
21746 }
21747 return;
21748 }
21749
21750 array_die = new_die (DW_TAG_array_type, scope_die, type);
21751 add_name_attribute (array_die, type_tag (type));
21752 equate_type_number_to_die (type, array_die);
21753
21754 if (TREE_CODE (type) == VECTOR_TYPE)
21755 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21756
21757 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21758 if (is_fortran ()
21759 && TREE_CODE (type) == ARRAY_TYPE
21760 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21761 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21762 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21763
21764 #if 0
21765 /* We default the array ordering. Debuggers will probably do the right
21766 things even if DW_AT_ordering is not present. It's not even an issue
21767 until we start to get into multidimensional arrays anyway. If a debugger
21768 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21769 then we'll have to put the DW_AT_ordering attribute back in. (But if
21770 and when we find out that we need to put these in, we will only do so
21771      for multidimensional arrays.)  */
21772 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21773 #endif
21774
21775 if (TREE_CODE (type) == VECTOR_TYPE)
21776 {
21777 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21778 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21779 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21780 add_bound_info (subrange_die, DW_AT_upper_bound,
21781 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21782 }
21783 else
21784 add_subscript_info (array_die, type, collapse_nested_arrays);
21785
21786 /* Add representation of the type of the elements of this array type and
21787 emit the corresponding DIE if we haven't done it already. */
21788 element_type = TREE_TYPE (type);
21789 if (collapse_nested_arrays)
21790 while (TREE_CODE (element_type) == ARRAY_TYPE)
21791 {
21792 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21793 break;
21794 element_type = TREE_TYPE (element_type);
21795 }
21796
21797 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21798 TREE_CODE (type) == ARRAY_TYPE
21799 && TYPE_REVERSE_STORAGE_ORDER (type),
21800 context_die);
21801
21802 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21803 if (TYPE_ARTIFICIAL (type))
21804 add_AT_flag (array_die, DW_AT_artificial, 1);
21805
21806 if (get_AT (array_die, DW_AT_name))
21807 add_pubtype (type, array_die);
21808
21809 add_alignment_attribute (array_die, type);
21810 }
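
/* A brief sketch of the Fortran character special case handled above:
   for a declaration like

     character(len=10) :: s

   (default character kind) one would expect a DW_TAG_string_type DIE
   with DW_AT_byte_size 10 instead of a DW_TAG_array_type, since
   DW_TAG_string_type cannot carry a DW_AT_type attribute.  */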
21811
21812 /* This routine generates a DIE for an array with a hidden descriptor; the
21813    details are filled into *info by a langhook.  */
21814
21815 static void
21816 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21817 dw_die_ref context_die)
21818 {
21819 const dw_die_ref scope_die = scope_die_for (type, context_die);
21820 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21821 struct loc_descr_context context = { type, info->base_decl, NULL,
21822 false, false };
21823 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21824 int dim;
21825
21826 add_name_attribute (array_die, type_tag (type));
21827 equate_type_number_to_die (type, array_die);
21828
21829 if (info->ndimensions > 1)
21830 switch (info->ordering)
21831 {
21832 case array_descr_ordering_row_major:
21833 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21834 break;
21835 case array_descr_ordering_column_major:
21836 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21837 break;
21838 default:
21839 break;
21840 }
21841
21842 if (dwarf_version >= 3 || !dwarf_strict)
21843 {
21844 if (info->data_location)
21845 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21846 dw_scalar_form_exprloc, &context);
21847 if (info->associated)
21848 add_scalar_info (array_die, DW_AT_associated, info->associated,
21849 dw_scalar_form_constant
21850 | dw_scalar_form_exprloc
21851 | dw_scalar_form_reference, &context);
21852 if (info->allocated)
21853 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21854 dw_scalar_form_constant
21855 | dw_scalar_form_exprloc
21856 | dw_scalar_form_reference, &context);
21857 if (info->stride)
21858 {
21859 const enum dwarf_attribute attr
21860 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21861 const int forms
21862 = (info->stride_in_bits)
21863 ? dw_scalar_form_constant
21864 : (dw_scalar_form_constant
21865 | dw_scalar_form_exprloc
21866 | dw_scalar_form_reference);
21867
21868 add_scalar_info (array_die, attr, info->stride, forms, &context);
21869 }
21870 }
21871 if (dwarf_version >= 5)
21872 {
21873 if (info->rank)
21874 {
21875 add_scalar_info (array_die, DW_AT_rank, info->rank,
21876 dw_scalar_form_constant
21877 | dw_scalar_form_exprloc, &context);
21878 subrange_tag = DW_TAG_generic_subrange;
21879 context.placeholder_arg = true;
21880 }
21881 }
21882
21883 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21884
21885 for (dim = 0; dim < info->ndimensions; dim++)
21886 {
21887 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21888
21889 if (info->dimen[dim].bounds_type)
21890 add_type_attribute (subrange_die,
21891 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21892 false, context_die);
21893 if (info->dimen[dim].lower_bound)
21894 add_bound_info (subrange_die, DW_AT_lower_bound,
21895 info->dimen[dim].lower_bound, &context);
21896 if (info->dimen[dim].upper_bound)
21897 add_bound_info (subrange_die, DW_AT_upper_bound,
21898 info->dimen[dim].upper_bound, &context);
21899 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21900 add_scalar_info (subrange_die, DW_AT_byte_stride,
21901 info->dimen[dim].stride,
21902 dw_scalar_form_constant
21903 | dw_scalar_form_exprloc
21904 | dw_scalar_form_reference,
21905 &context);
21906 }
21907
21908 gen_type_die (info->element_type, context_die);
21909 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21910 TREE_CODE (type) == ARRAY_TYPE
21911 && TYPE_REVERSE_STORAGE_ORDER (type),
21912 context_die);
21913
21914 if (get_AT (array_die, DW_AT_name))
21915 add_pubtype (type, array_die);
21916
21917 add_alignment_attribute (array_die, type);
21918 }
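
/* As a rough sketch of what this produces: for a descriptor-based array
   such as the Fortran allocatable

     real, allocatable :: a(:,:)

   one would expect a DW_TAG_array_type whose DW_AT_data_location,
   DW_AT_allocated and per-dimension bounds are DWARF expressions reading
   the fields of the hidden descriptor described by *INFO, with one
   DW_TAG_subrange_type (or DW_TAG_generic_subrange for assumed-rank
   arrays under DWARF 5) per dimension.  */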
21919
21920 #if 0
21921 static void
21922 gen_entry_point_die (tree decl, dw_die_ref context_die)
21923 {
21924 tree origin = decl_ultimate_origin (decl);
21925 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21926
21927 if (origin != NULL)
21928 add_abstract_origin_attribute (decl_die, origin);
21929 else
21930 {
21931 add_name_and_src_coords_attributes (decl_die, decl);
21932 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21933 TYPE_UNQUALIFIED, false, context_die);
21934 }
21935
21936 if (DECL_ABSTRACT_P (decl))
21937 equate_decl_number_to_die (decl, decl_die);
21938 else
21939 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21940 }
21941 #endif
21942
21943 /* Walk through the list of incomplete types again, trying once more to
21944 emit full debugging info for them. */
21945
21946 static void
21947 retry_incomplete_types (void)
21948 {
21949 set_early_dwarf s;
21950 int i;
21951
21952 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21953 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21954 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21955 vec_safe_truncate (incomplete_types, 0);
21956 }
21957
21958 /* Determine what tag to use for a record type. */
21959
21960 static enum dwarf_tag
21961 record_type_tag (tree type)
21962 {
21963 if (! lang_hooks.types.classify_record)
21964 return DW_TAG_structure_type;
21965
21966 switch (lang_hooks.types.classify_record (type))
21967 {
21968 case RECORD_IS_STRUCT:
21969 return DW_TAG_structure_type;
21970
21971 case RECORD_IS_CLASS:
21972 return DW_TAG_class_type;
21973
21974 case RECORD_IS_INTERFACE:
21975 if (dwarf_version >= 3 || !dwarf_strict)
21976 return DW_TAG_interface_type;
21977 return DW_TAG_structure_type;
21978
21979 default:
21980 gcc_unreachable ();
21981 }
21982 }
21983
21984 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21985 include all of the information about the enumeration values also. Each
21986 enumerated type name/value is listed as a child of the enumerated type
21987 DIE. */
21988
21989 static dw_die_ref
21990 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21991 {
21992 dw_die_ref type_die = lookup_type_die (type);
21993 dw_die_ref orig_type_die = type_die;
21994
21995 if (type_die == NULL)
21996 {
21997 type_die = new_die (DW_TAG_enumeration_type,
21998 scope_die_for (type, context_die), type);
21999 equate_type_number_to_die (type, type_die);
22000 add_name_attribute (type_die, type_tag (type));
22001 if ((dwarf_version >= 4 || !dwarf_strict)
22002 && ENUM_IS_SCOPED (type))
22003 add_AT_flag (type_die, DW_AT_enum_class, 1);
22004 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22005 add_AT_flag (type_die, DW_AT_declaration, 1);
22006 if (!dwarf_strict)
22007 add_AT_unsigned (type_die, DW_AT_encoding,
22008 TYPE_UNSIGNED (type)
22009 ? DW_ATE_unsigned
22010 : DW_ATE_signed);
22011 }
22012 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22013 return type_die;
22014 else
22015 remove_AT (type_die, DW_AT_declaration);
22016
22017 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22018 given enum type is incomplete, do not generate the DW_AT_byte_size
22019 attribute or the DW_AT_element_list attribute. */
22020 if (TYPE_SIZE (type))
22021 {
22022 tree link;
22023
22024 if (!ENUM_IS_OPAQUE (type))
22025 TREE_ASM_WRITTEN (type) = 1;
22026 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22027 add_byte_size_attribute (type_die, type);
22028 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22029 add_alignment_attribute (type_die, type);
22030 if ((dwarf_version >= 3 || !dwarf_strict)
22031 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22032 {
22033 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22034 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22035 context_die);
22036 }
22037 if (TYPE_STUB_DECL (type) != NULL_TREE)
22038 {
22039 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22040 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22041 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22042 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22043 }
22044
22045 /* If the first reference to this type was as the return type of an
22046 inline function, then it may not have a parent. Fix this now. */
22047 if (type_die->die_parent == NULL)
22048 add_child_die (scope_die_for (type, context_die), type_die);
22049
22050 for (link = TYPE_VALUES (type);
22051 link != NULL; link = TREE_CHAIN (link))
22052 {
22053 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22054 tree value = TREE_VALUE (link);
22055
22056 gcc_assert (!ENUM_IS_OPAQUE (type));
22057 add_name_attribute (enum_die,
22058 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22059
22060 if (TREE_CODE (value) == CONST_DECL)
22061 value = DECL_INITIAL (value);
22062
22063 if (simple_type_size_in_bits (TREE_TYPE (value))
22064 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22065 {
22066 	      /* For constant forms created by add_AT_unsigned, DWARF
22067 		 consumers (GDB, elfutils, etc.) always zero-extend
22068 the value. Only when the actual value is negative
22069 do we need to use add_AT_int to generate a constant
22070 form that can represent negative values. */
22071 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22072 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22073 add_AT_unsigned (enum_die, DW_AT_const_value,
22074 (unsigned HOST_WIDE_INT) val);
22075 else
22076 add_AT_int (enum_die, DW_AT_const_value, val);
22077 }
22078 else
22079 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22080 that here. TODO: This should be re-worked to use correct
22081 signed/unsigned double tags for all cases. */
22082 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22083 }
22084
22085 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22086 if (TYPE_ARTIFICIAL (type)
22087 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22088 add_AT_flag (type_die, DW_AT_artificial, 1);
22089 }
22090 else
22091 add_AT_flag (type_die, DW_AT_declaration, 1);
22092
22093 add_pubtype (type, type_die);
22094
22095 return type_die;
22096 }
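
/* By way of example, a C enumeration such as

     enum color { RED = 1, BLUE = 2 };

   is expected to produce roughly

     DW_TAG_enumeration_type  DW_AT_name "color"  DW_AT_byte_size ...
       DW_TAG_enumerator  DW_AT_name "RED"   DW_AT_const_value 1
       DW_TAG_enumerator  DW_AT_name "BLUE"  DW_AT_const_value 2

   and a C++11 "enum class" additionally gets DW_AT_enum_class when
   DWARF 4 or non-strict DWARF is in use.  */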
22097
22098 /* Generate a DIE to represent either a real live formal parameter decl or to
22099 represent just the type of some formal parameter position in some function
22100 type.
22101
22102 Note that this routine is a bit unusual because its argument may be a
22103 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22104 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22105 node. If it's the former then this function is being called to output a
22106 DIE to represent a formal parameter object (or some inlining thereof). If
22107 it's the latter, then this function is only being called to output a
22108 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22109 argument type of some subprogram type.
22110 If EMIT_NAME_P is true, name and source coordinate attributes
22111 are emitted. */
22112
22113 static dw_die_ref
22114 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22115 dw_die_ref context_die)
22116 {
22117 tree node_or_origin = node ? node : origin;
22118 tree ultimate_origin;
22119 dw_die_ref parm_die = NULL;
22120
22121 if (DECL_P (node_or_origin))
22122 {
22123 parm_die = lookup_decl_die (node);
22124
22125 /* If the contexts differ, we may not be talking about the same
22126 thing.
22127 ??? When in LTO the DIE parent is the "abstract" copy and the
22128 context_die is the specification "copy". But this whole block
22129 	 should eventually no longer be needed.  */
22130 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22131 {
22132 if (!DECL_ABSTRACT_P (node))
22133 {
22134 /* This can happen when creating an inlined instance, in
22135 which case we need to create a new DIE that will get
22136 annotated with DW_AT_abstract_origin. */
22137 parm_die = NULL;
22138 }
22139 else
22140 gcc_unreachable ();
22141 }
22142
22143 if (parm_die && parm_die->die_parent == NULL)
22144 {
22145 /* Check that parm_die already has the right attributes that
22146 we would have added below. If any attributes are
22147 missing, fall through to add them. */
22148 if (! DECL_ABSTRACT_P (node_or_origin)
22149 && !get_AT (parm_die, DW_AT_location)
22150 && !get_AT (parm_die, DW_AT_const_value))
22151 /* We are missing location info, and are about to add it. */
22152 ;
22153 else
22154 {
22155 add_child_die (context_die, parm_die);
22156 return parm_die;
22157 }
22158 }
22159 }
22160
22161   /* If we have a previously generated DIE, use it, unless this is a
22162 concrete instance (origin != NULL), in which case we need a new
22163 DIE with a corresponding DW_AT_abstract_origin. */
22164 bool reusing_die;
22165 if (parm_die && origin == NULL)
22166 reusing_die = true;
22167 else
22168 {
22169 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22170 reusing_die = false;
22171 }
22172
22173 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22174 {
22175 case tcc_declaration:
22176 ultimate_origin = decl_ultimate_origin (node_or_origin);
22177 if (node || ultimate_origin)
22178 origin = ultimate_origin;
22179
22180 if (reusing_die)
22181 goto add_location;
22182
22183 if (origin != NULL)
22184 add_abstract_origin_attribute (parm_die, origin);
22185 else if (emit_name_p)
22186 add_name_and_src_coords_attributes (parm_die, node);
22187 if (origin == NULL
22188 || (! DECL_ABSTRACT_P (node_or_origin)
22189 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22190 decl_function_context
22191 (node_or_origin))))
22192 {
22193 tree type = TREE_TYPE (node_or_origin);
22194 if (decl_by_reference_p (node_or_origin))
22195 add_type_attribute (parm_die, TREE_TYPE (type),
22196 TYPE_UNQUALIFIED,
22197 false, context_die);
22198 else
22199 add_type_attribute (parm_die, type,
22200 decl_quals (node_or_origin),
22201 false, context_die);
22202 }
22203 if (origin == NULL && DECL_ARTIFICIAL (node))
22204 add_AT_flag (parm_die, DW_AT_artificial, 1);
22205 add_location:
22206 if (node && node != origin)
22207 equate_decl_number_to_die (node, parm_die);
22208 if (! DECL_ABSTRACT_P (node_or_origin))
22209 add_location_or_const_value_attribute (parm_die, node_or_origin,
22210 node == NULL);
22211
22212 break;
22213
22214 case tcc_type:
22215 /* We were called with some kind of a ..._TYPE node. */
22216 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22217 context_die);
22218 break;
22219
22220 default:
22221 gcc_unreachable ();
22222 }
22223
22224 return parm_die;
22225 }
22226
22227 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22228 children DW_TAG_formal_parameter DIEs representing the arguments of the
22229 parameter pack.
22230
22231 PARM_PACK must be a function parameter pack.
22232 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22233 must point to the subsequent arguments of the function PACK_ARG belongs to.
22234 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22235 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22236 following the last one for which a DIE was generated. */
22237
22238 static dw_die_ref
22239 gen_formal_parameter_pack_die (tree parm_pack,
22240 tree pack_arg,
22241 dw_die_ref subr_die,
22242 tree *next_arg)
22243 {
22244 tree arg;
22245 dw_die_ref parm_pack_die;
22246
22247 gcc_assert (parm_pack
22248 && lang_hooks.function_parameter_pack_p (parm_pack)
22249 && subr_die);
22250
22251 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22252 add_src_coords_attributes (parm_pack_die, parm_pack);
22253
22254 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22255 {
22256 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22257 parm_pack))
22258 break;
22259 gen_formal_parameter_die (arg, NULL,
22260 false /* Don't emit name attribute. */,
22261 parm_pack_die);
22262 }
22263 if (next_arg)
22264 *next_arg = arg;
22265 return parm_pack_die;
22266 }
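
/* For instance, an instantiation of a C++ variadic template such as

     template <typename... T> void f (T... args);

   called as f (1, 2.0) would be expected to get, as a GNU extension, a
   DW_TAG_GNU_formal_parameter_pack child under the subprogram DIE with
   one DW_TAG_formal_parameter per expanded pack element (here an int
   and a double).  */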
22267
22268 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22269 at the end of an (ANSI prototyped) formal parameters list. */
22270
22271 static void
22272 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22273 {
22274 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22275 }
22276
22277 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22278 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22279 parameters as specified in some function type specification (except for
22280 those which appear as part of a function *definition*). */
22281
22282 static void
22283 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22284 {
22285 tree link;
22286 tree formal_type = NULL;
22287 tree first_parm_type;
22288 tree arg;
22289
22290 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22291 {
22292 arg = DECL_ARGUMENTS (function_or_method_type);
22293 function_or_method_type = TREE_TYPE (function_or_method_type);
22294 }
22295 else
22296 arg = NULL_TREE;
22297
22298 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22299
22300 /* Make our first pass over the list of formal parameter types and output a
22301 DW_TAG_formal_parameter DIE for each one. */
22302 for (link = first_parm_type; link; )
22303 {
22304 dw_die_ref parm_die;
22305
22306 formal_type = TREE_VALUE (link);
22307 if (formal_type == void_type_node)
22308 break;
22309
22310 /* Output a (nameless) DIE to represent the formal parameter itself. */
22311 parm_die = gen_formal_parameter_die (formal_type, NULL,
22312 true /* Emit name attribute. */,
22313 context_die);
22314 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22315 && link == first_parm_type)
22316 {
22317 add_AT_flag (parm_die, DW_AT_artificial, 1);
22318 if (dwarf_version >= 3 || !dwarf_strict)
22319 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22320 }
22321 else if (arg && DECL_ARTIFICIAL (arg))
22322 add_AT_flag (parm_die, DW_AT_artificial, 1);
22323
22324 link = TREE_CHAIN (link);
22325 if (arg)
22326 arg = DECL_CHAIN (arg);
22327 }
22328
22329 /* If this function type has an ellipsis, add a
22330 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22331 if (formal_type != void_type_node)
22332 gen_unspecified_parameters_die (function_or_method_type, context_die);
22333
22334 /* Make our second (and final) pass over the list of formal parameter types
22335 and output DIEs to represent those types (as necessary). */
22336 for (link = TYPE_ARG_TYPES (function_or_method_type);
22337 link && TREE_VALUE (link);
22338 link = TREE_CHAIN (link))
22339 gen_type_die (TREE_VALUE (link), context_die);
22340 }
22341
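
/* As an illustration, for a function pointer type such as

     int (*fp) (int, ...);

   the DW_TAG_subroutine_type DIE is expected to get one nameless
   DW_TAG_formal_parameter child (carrying just DW_AT_type int) followed
   by a DW_TAG_unspecified_parameters child for the ellipsis, while for a
   METHOD_TYPE the first parameter (the implicit "this") is additionally
   marked DW_AT_artificial and referenced by a DW_AT_object_pointer
   attribute on the enclosing DIE.  */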
22342 /* We want to generate the DIE for TYPE so that we can generate the
22343 die for MEMBER, which has been defined; we will need to refer back
22344 to the member declaration nested within TYPE. If we're trying to
22345 generate minimal debug info for TYPE, processing TYPE won't do the
22346 trick; we need to attach the member declaration by hand. */
22347
22348 static void
22349 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22350 {
22351 gen_type_die (type, context_die);
22352
22353 /* If we're trying to avoid duplicate debug info, we may not have
22354 emitted the member decl for this function. Emit it now. */
22355 if (TYPE_STUB_DECL (type)
22356 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22357 && ! lookup_decl_die (member))
22358 {
22359 dw_die_ref type_die;
22360 gcc_assert (!decl_ultimate_origin (member));
22361
22362 push_decl_scope (type);
22363 type_die = lookup_type_die_strip_naming_typedef (type);
22364 if (TREE_CODE (member) == FUNCTION_DECL)
22365 gen_subprogram_die (member, type_die);
22366 else if (TREE_CODE (member) == FIELD_DECL)
22367 {
22368 /* Ignore the nameless fields that are used to skip bits but handle
22369 C++ anonymous unions and structs. */
22370 if (DECL_NAME (member) != NULL_TREE
22371 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22372 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22373 {
22374 struct vlr_context vlr_ctx = {
22375 DECL_CONTEXT (member), /* struct_type */
22376 NULL_TREE /* variant_part_offset */
22377 };
22378 gen_type_die (member_declared_type (member), type_die);
22379 gen_field_die (member, &vlr_ctx, type_die);
22380 }
22381 }
22382 else
22383 gen_variable_die (member, NULL_TREE, type_die);
22384
22385 pop_decl_scope ();
22386 }
22387 }
22388 \f
22389 /* Forward declare these functions, because they are mutually recursive
22390 with their set_block_* pairing functions. */
22391 static void set_decl_origin_self (tree);
22392
22393 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22394 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22395 that it points to the node itself, thus indicating that the node is its
22396 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22397 the given node is NULL, recursively descend the decl/block tree which
22398 it is the root of, and for each other ..._DECL or BLOCK node contained
22399 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22400 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22401 values to point to themselves. */
22402
22403 static void
22404 set_block_origin_self (tree stmt)
22405 {
22406 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22407 {
22408 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22409
22410 {
22411 tree local_decl;
22412
22413 for (local_decl = BLOCK_VARS (stmt);
22414 local_decl != NULL_TREE;
22415 local_decl = DECL_CHAIN (local_decl))
22416 /* Do not recurse on nested functions since the inlining status
22417 of parent and child can be different as per the DWARF spec. */
22418 if (TREE_CODE (local_decl) != FUNCTION_DECL
22419 && !DECL_EXTERNAL (local_decl))
22420 set_decl_origin_self (local_decl);
22421 }
22422
22423 {
22424 tree subblock;
22425
22426 for (subblock = BLOCK_SUBBLOCKS (stmt);
22427 subblock != NULL_TREE;
22428 subblock = BLOCK_CHAIN (subblock))
22429 set_block_origin_self (subblock); /* Recurse. */
22430 }
22431 }
22432 }
22433
22434 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22435 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22436    node so that it points to the node itself, thus indicating that the
22437 node represents its own (abstract) origin. Additionally, if the
22438 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22439    the decl/block tree of which the given node is the root, and for
22440 each other ..._DECL or BLOCK node contained therein whose
22441 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22442 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22443 point to themselves. */
22444
22445 static void
22446 set_decl_origin_self (tree decl)
22447 {
22448 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22449 {
22450 DECL_ABSTRACT_ORIGIN (decl) = decl;
22451 if (TREE_CODE (decl) == FUNCTION_DECL)
22452 {
22453 tree arg;
22454
22455 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22456 DECL_ABSTRACT_ORIGIN (arg) = arg;
22457 if (DECL_INITIAL (decl) != NULL_TREE
22458 && DECL_INITIAL (decl) != error_mark_node)
22459 set_block_origin_self (DECL_INITIAL (decl));
22460 }
22461 }
22462 }
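/* Taken together with set_block_origin_self above: after calling
   set_decl_origin_self (FNDECL) on a function, FNDECL itself, each of its
   PARM_DECLs, and every BLOCK and local ..._DECL reachable from
   DECL_INITIAL (FNDECL) that had no origin yet (nested functions and
   extern locals excepted) now point to themselves.  This is how the
   abstract instance is marked; see dwarf2out_abstract_function below.  */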
22463 \f
22464 /* Mark the early DIE for DECL as the abstract instance. */
22465
22466 static void
22467 dwarf2out_abstract_function (tree decl)
22468 {
22469 dw_die_ref old_die;
22470
22471 /* Make sure we have the actual abstract inline, not a clone. */
22472 decl = DECL_ORIGIN (decl);
22473
22474 if (DECL_IGNORED_P (decl))
22475 return;
22476
22477 old_die = lookup_decl_die (decl);
22478 /* With early debug we always have an old DIE unless we are in LTO
22479 and the user compiled without debug info but only linked with it. */
22480 if (in_lto_p && ! old_die)
22481 return;
22482 gcc_assert (old_die != NULL);
22483 if (get_AT (old_die, DW_AT_inline)
22484 || get_AT (old_die, DW_AT_abstract_origin))
22485 /* We've already generated the abstract instance. */
22486 return;
22487
22488 /* Go ahead and put DW_AT_inline on the DIE. */
22489 if (DECL_DECLARED_INLINE_P (decl))
22490 {
22491 if (cgraph_function_possibly_inlined_p (decl))
22492 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22493 else
22494 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22495 }
22496 else
22497 {
22498 if (cgraph_function_possibly_inlined_p (decl))
22499 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22500 else
22501 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22502 }
22503
22504 if (DECL_DECLARED_INLINE_P (decl)
22505 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22506 add_AT_flag (old_die, DW_AT_artificial, 1);
22507
22508 set_decl_origin_self (decl);
22509 }
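/* As a quick reference, the DW_AT_inline value chosen above maps as
   follows (derived from the code above; cgraph decides the second column):

     DECL_DECLARED_INLINE_P   possibly inlined   DW_AT_inline
     yes                      yes                DW_INL_declared_inlined
     yes                      no                 DW_INL_declared_not_inlined
     no                       yes                DW_INL_inlined
     no                       no                 DW_INL_not_inlined  */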
22510
22511 /* Helper function of premark_used_types() which gets called through
22512 htab_traverse.
22513
22514 Marks the DIE of the given TYPE as perennial, so it never gets
22515 marked as unused by prune_unused_types. */
22516
22517 bool
22518 premark_used_types_helper (tree const &type, void *)
22519 {
22520 dw_die_ref die;
22521
22522 die = lookup_type_die (type);
22523 if (die != NULL)
22524 die->die_perennial_p = 1;
22525 return true;
22526 }
22527
22528 /* Helper function of premark_types_used_by_global_vars which gets called
22529 through htab_traverse.
22530
22531 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22532 marked as unused by prune_unused_types. The DIE of the type is marked
22533 only if the global variable using the type will actually be emitted. */
22534
22535 int
22536 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22537 void *)
22538 {
22539 struct types_used_by_vars_entry *entry;
22540 dw_die_ref die;
22541
22542 entry = (struct types_used_by_vars_entry *) *slot;
22543 gcc_assert (entry->type != NULL
22544 && entry->var_decl != NULL);
22545 die = lookup_type_die (entry->type);
22546 if (die)
22547 {
22548 /* Ask cgraph if the global variable really is to be emitted.
22549 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22550 varpool_node *node = varpool_node::get (entry->var_decl);
22551 if (node && node->definition)
22552 {
22553 die->die_perennial_p = 1;
22554 /* Keep the parent DIEs as well. */
22555 while ((die = die->die_parent) && die->die_perennial_p == 0)
22556 die->die_perennial_p = 1;
22557 }
22558 }
22559 return 1;
22560 }
22561
22562 /* Mark all members of used_types_hash as perennial. */
22563
22564 static void
22565 premark_used_types (struct function *fun)
22566 {
22567 if (fun && fun->used_types_hash)
22568 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22569 }
22570
22571 /* Mark all members of types_used_by_vars_entry as perennial. */
22572
22573 static void
22574 premark_types_used_by_global_vars (void)
22575 {
22576 if (types_used_by_vars_hash)
22577 types_used_by_vars_hash
22578 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22579 }
22580
22581 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22582 for CA_LOC call arg loc node. */
22583
22584 static dw_die_ref
22585 gen_call_site_die (tree decl, dw_die_ref subr_die,
22586 struct call_arg_loc_node *ca_loc)
22587 {
22588 dw_die_ref stmt_die = NULL, die;
22589 tree block = ca_loc->block;
22590
22591 while (block
22592 && block != DECL_INITIAL (decl)
22593 && TREE_CODE (block) == BLOCK)
22594 {
22595 stmt_die = BLOCK_DIE (block);
22596 if (stmt_die)
22597 break;
22598 block = BLOCK_SUPERCONTEXT (block);
22599 }
22600 if (stmt_die == NULL)
22601 stmt_die = subr_die;
22602 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22603 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22604 if (ca_loc->tail_call_p)
22605 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22606 if (ca_loc->symbol_ref)
22607 {
22608 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22609 if (tdie)
22610 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22611 else
22612 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22613 false);
22614 }
22615 return die;
22616 }
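/* Roughly, the DIE built above looks like this (a sketch; dwarf_TAG and
   dwarf_AT map these to the GNU_ variants for pre-DWARF 5):

     DW_TAG_call_site
       DW_AT_call_return_pc    <label after the call>
       DW_AT_call_tail_call    (only for tail calls)
       DW_AT_call_origin       <DIE of the callee, if one exists,
                                otherwise the callee's address>

   The DW_TAG_call_site_parameter children are added later by the
   caller, gen_subprogram_die.  */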
22617
22618 /* Generate a DIE to represent a declared function (either file-scope or
22619 block-local). */
22620
22621 static void
22622 gen_subprogram_die (tree decl, dw_die_ref context_die)
22623 {
22624 tree origin = decl_ultimate_origin (decl);
22625 dw_die_ref subr_die;
22626 dw_die_ref old_die = lookup_decl_die (decl);
22627
22628 /* This function gets called multiple times for different stages of
22629 the debug process. For example, for func() in this code:
22630
22631 namespace S
22632 {
22633 void func() { ... }
22634 }
22635
22636 ...we get called 4 times. Twice in early debug and twice in
22637 late debug:
22638
22639 Early debug
22640 -----------
22641
22642 1. Once while generating func() within the namespace. This is
22643 the declaration. The declaration bit below is set, as the
22644 context is the namespace.
22645
22646 A new DIE will be generated with DW_AT_declaration set.
22647
22648 2. Once for func() itself. This is the specification. The
22649 declaration bit below is clear as the context is the CU.
22650
22651 We will use the cached DIE from (1) to create a new DIE with
22652 DW_AT_specification pointing to the declaration in (1).
22653
22654 Late debug via rest_of_handle_final()
22655 -------------------------------------
22656
22657 3. Once generating func() within the namespace. This is also the
22658 declaration, as in (1), but this time we will early exit below
22659 as we have a cached DIE and a declaration needs no additional
22660 annotations (no locations), as the source declaration line
22661 info is enough.
22662
22663 4. Once for func() itself. As in (2), this is the specification,
22664 but this time we will re-use the cached DIE, and just annotate
22665 it with the location information that should now be available.
22666
22667 For something without namespaces, but with abstract instances, we
22668 are also called multiple times:
22669
22670 class Base
22671 {
22672 public:
22673 Base (); // constructor declaration (1)
22674 };
22675
22676 Base::Base () { } // constructor specification (2)
22677
22678 Early debug
22679 -----------
22680
22681 1. Once for the Base() constructor by virtue of it being a
22682 member of the Base class. This is done via
22683 rest_of_type_compilation.
22684
22685 This is a declaration, so a new DIE will be created with
22686 DW_AT_declaration.
22687
22688 2. Once for the Base() constructor definition, but this time
22689 while generating the abstract instance of the base
22690 constructor (__base_ctor) which is being generated via early
22691 debug of reachable functions.
22692
22693 Even though we have a cached version of the declaration (1),
22694 we will create a DW_AT_specification of the declaration DIE
22695 in (1).
22696
22697 3. Once for the __base_ctor itself, but this time, we generate
22698 a DW_AT_abstract_origin version of the DW_AT_specification in
22699 (2).
22700
22701 Late debug via rest_of_handle_final
22702 -----------------------------------
22703
22704 4. One final time for the __base_ctor (which will have a cached
22705 DIE with DW_AT_abstract_origin created in (3)). This time,
22706 we will just annotate the location information now
22707 available.
22708 */
22709 int declaration = (current_function_decl != decl
22710 || class_or_namespace_scope_p (context_die));
22711
22712 /* A declaration that has been previously dumped needs no
22713 additional information. */
22714 if (old_die && declaration)
22715 return;
22716
22717 /* Now that the C++ front end lazily declares artificial member fns, we
22718 might need to retrofit the declaration into its class. */
22719 if (!declaration && !origin && !old_die
22720 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22721 && !class_or_namespace_scope_p (context_die)
22722 && debug_info_level > DINFO_LEVEL_TERSE)
22723 old_die = force_decl_die (decl);
22724
22725 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22726 if (origin != NULL)
22727 {
22728 gcc_assert (!declaration || local_scope_p (context_die));
22729
22730 /* Fixup die_parent for the abstract instance of a nested
22731 inline function. */
22732 if (old_die && old_die->die_parent == NULL)
22733 add_child_die (context_die, old_die);
22734
22735 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22736 {
22737 /* If we have a DW_AT_abstract_origin we have a working
22738 cached version. */
22739 subr_die = old_die;
22740 }
22741 else
22742 {
22743 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22744 add_abstract_origin_attribute (subr_die, origin);
22745 /* This is where the actual code for a cloned function is.
22746 Let's emit the linkage name attribute for it. This helps
22747 debuggers to, e.g., set breakpoints in
22748 constructors/destructors when the user asks "break
22749 K::K". */
22750 add_linkage_name (subr_die, decl);
22751 }
22752 }
22753 /* A cached copy, possibly from early dwarf generation. Reuse as
22754 much as possible. */
22755 else if (old_die)
22756 {
22757 if (!get_AT_flag (old_die, DW_AT_declaration)
22758 /* We can have a normal definition following an inline one in the
22759 case of redefinition of GNU C extern inlines.
22760 It seems reasonable to use AT_specification in this case. */
22761 && !get_AT (old_die, DW_AT_inline))
22762 {
22763 /* Detect and ignore this case, where we are trying to output
22764 something we have already output. */
22765 if (get_AT (old_die, DW_AT_low_pc)
22766 || get_AT (old_die, DW_AT_ranges))
22767 return;
22768
22769 /* If we have no location information, this must be a
22770 partially generated DIE from early dwarf generation.
22771 Fall through and generate it. */
22772 }
22773
22774 /* If the definition comes from the same place as the declaration,
22775 maybe use the old DIE. We always want the DIE for this function
22776 that has the *_pc attributes to be under comp_unit_die so the
22777 debugger can find it. We also need to do this for abstract
22778 instances of inlines, since the spec requires the out-of-line copy
22779 to have the same parent. For local class methods, this doesn't
22780 apply; we just use the old DIE. */
22781 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22782 struct dwarf_file_data * file_index = lookup_filename (s.file);
22783 if (((is_unit_die (old_die->die_parent)
22784 /* This condition fixes the inconsistency/ICE with the
22785 following Fortran test (or some derivative thereof) while
22786 building libgfortran:
22787
22788 module some_m
22789 contains
22790 logical function funky (FLAG)
22791 funky = .true.
22792 end function
22793 end module
22794 */
22795 || (old_die->die_parent
22796 && old_die->die_parent->die_tag == DW_TAG_module)
22797 || context_die == NULL)
22798 && (DECL_ARTIFICIAL (decl)
22799 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22800 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22801 == (unsigned) s.line)
22802 && (!debug_column_info
22803 || s.column == 0
22804 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22805 == (unsigned) s.column)))))
22806 /* With LTO if there's an abstract instance for
22807 the old DIE, this is a concrete instance and
22808 thus re-use the DIE. */
22809 || get_AT (old_die, DW_AT_abstract_origin))
22810 {
22811 subr_die = old_die;
22812
22813 /* Clear out the declaration attribute, but leave the
22814 parameters so they can be augmented with location
22815 information later. Unless this was a declaration, in
22816 which case, wipe out the nameless parameters and recreate
22817 them further down. */
22818 if (remove_AT (subr_die, DW_AT_declaration))
22819 {
22820
22821 remove_AT (subr_die, DW_AT_object_pointer);
22822 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22823 }
22824 }
22825 /* Make a specification pointing to the previously built
22826 declaration. */
22827 else
22828 {
22829 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22830 add_AT_specification (subr_die, old_die);
22831 add_pubname (decl, subr_die);
22832 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22833 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22834 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22835 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22836 if (debug_column_info
22837 && s.column
22838 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22839 != (unsigned) s.column))
22840 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22841
22842 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22843 emit the real type on the definition die. */
22844 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22845 {
22846 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22847 if (die == auto_die || die == decltype_auto_die)
22848 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22849 TYPE_UNQUALIFIED, false, context_die);
22850 }
22851
22852 /* When we process the method declaration, we haven't seen
22853 the out-of-class defaulted definition yet, so we have to
22854 recheck now. */
22855 if ((dwarf_version >= 5 || ! dwarf_strict)
22856 && !get_AT (subr_die, DW_AT_defaulted))
22857 {
22858 int defaulted
22859 = lang_hooks.decls.decl_dwarf_attribute (decl,
22860 DW_AT_defaulted);
22861 if (defaulted != -1)
22862 {
22863 /* Other values must have been handled before. */
22864 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22865 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22866 }
22867 }
22868 }
22869 }
22870 /* Create a fresh DIE for anything else. */
22871 else
22872 {
22873 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22874
22875 if (TREE_PUBLIC (decl))
22876 add_AT_flag (subr_die, DW_AT_external, 1);
22877
22878 add_name_and_src_coords_attributes (subr_die, decl);
22879 add_pubname (decl, subr_die);
22880 if (debug_info_level > DINFO_LEVEL_TERSE)
22881 {
22882 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22883 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22884 TYPE_UNQUALIFIED, false, context_die);
22885 }
22886
22887 add_pure_or_virtual_attribute (subr_die, decl);
22888 if (DECL_ARTIFICIAL (decl))
22889 add_AT_flag (subr_die, DW_AT_artificial, 1);
22890
22891 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22892 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22893
22894 add_alignment_attribute (subr_die, decl);
22895
22896 add_accessibility_attribute (subr_die, decl);
22897 }
22898
22899 /* Unless we have an existing non-declaration DIE, equate the new
22900 DIE. */
22901 if (!old_die || is_declaration_die (old_die))
22902 equate_decl_number_to_die (decl, subr_die);
22903
22904 if (declaration)
22905 {
22906 if (!old_die || !get_AT (old_die, DW_AT_inline))
22907 {
22908 add_AT_flag (subr_die, DW_AT_declaration, 1);
22909
22910 /* If this is an explicit function declaration then generate
22911 a DW_AT_explicit attribute. */
22912 if ((dwarf_version >= 3 || !dwarf_strict)
22913 && lang_hooks.decls.decl_dwarf_attribute (decl,
22914 DW_AT_explicit) == 1)
22915 add_AT_flag (subr_die, DW_AT_explicit, 1);
22916
22917 /* If this is a C++11 deleted special function member then generate
22918 a DW_AT_deleted attribute. */
22919 if ((dwarf_version >= 5 || !dwarf_strict)
22920 && lang_hooks.decls.decl_dwarf_attribute (decl,
22921 DW_AT_deleted) == 1)
22922 add_AT_flag (subr_die, DW_AT_deleted, 1);
22923
22924 /* If this is a C++11 defaulted special function member then
22925 generate a DW_AT_defaulted attribute. */
22926 if (dwarf_version >= 5 || !dwarf_strict)
22927 {
22928 int defaulted
22929 = lang_hooks.decls.decl_dwarf_attribute (decl,
22930 DW_AT_defaulted);
22931 if (defaulted != -1)
22932 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22933 }
22934
22935 /* If this is a C++11 non-static member function with & ref-qualifier
22936 then generate a DW_AT_reference attribute. */
22937 if ((dwarf_version >= 5 || !dwarf_strict)
22938 && lang_hooks.decls.decl_dwarf_attribute (decl,
22939 DW_AT_reference) == 1)
22940 add_AT_flag (subr_die, DW_AT_reference, 1);
22941
22942 /* If this is a C++11 non-static member function with &&
22943 ref-qualifier then generate a DW_AT_reference attribute. */
22944 if ((dwarf_version >= 5 || !dwarf_strict)
22945 && lang_hooks.decls.decl_dwarf_attribute (decl,
22946 DW_AT_rvalue_reference)
22947 == 1)
22948 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22949 }
22950 }
22951 /* For non-DECL_EXTERNALs, if range information is available, fill
22952 the DIE with it. */
22953 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22954 {
22955 HOST_WIDE_INT cfa_fb_offset;
22956
22957 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22958
22959 if (!crtl->has_bb_partition)
22960 {
22961 dw_fde_ref fde = fun->fde;
22962 if (fde->dw_fde_begin)
22963 {
22964 /* We have already generated the labels. */
22965 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22966 fde->dw_fde_end, false);
22967 }
22968 else
22969 {
22970 /* Create start/end labels and add the range. */
22971 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22972 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22973 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22974 current_function_funcdef_no);
22975 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22976 current_function_funcdef_no);
22977 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22978 false);
22979 }
22980
22981 #if VMS_DEBUGGING_INFO
22982 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22983 Section 2.3 Prologue and Epilogue Attributes:
22984 When a breakpoint is set on entry to a function, it is generally
22985 desirable for execution to be suspended, not on the very first
22986 instruction of the function, but rather at a point after the
22987 function's frame has been set up, after any language defined local
22988 declaration processing has been completed, and before execution of
22989 the first statement of the function begins. Debuggers generally
22990 cannot properly determine where this point is. Similarly for a
22991 breakpoint set on exit from a function. The prologue and epilogue
22992 attributes allow a compiler to communicate the location(s) to use. */
22993
22994 {
22995 if (fde->dw_fde_vms_end_prologue)
22996 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22997 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22998
22999 if (fde->dw_fde_vms_begin_epilogue)
23000 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23001 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23002 }
23003 #endif
23004
23005 }
23006 else
23007 {
23008 /* Generate pubnames entries for the split function code ranges. */
23009 dw_fde_ref fde = fun->fde;
23010
23011 if (fde->dw_fde_second_begin)
23012 {
23013 if (dwarf_version >= 3 || !dwarf_strict)
23014 {
23015 /* We should use ranges for non-contiguous code section
23016 addresses. Use the actual code range for the initial
23017 section, since the HOT/COLD labels might precede an
23018 alignment offset. */
23019 bool range_list_added = false;
23020 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23021 fde->dw_fde_end, &range_list_added,
23022 false);
23023 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23024 fde->dw_fde_second_end,
23025 &range_list_added, false);
23026 if (range_list_added)
23027 add_ranges (NULL);
23028 }
23029 else
23030 {
23031 /* There is no real support in DWARF2 for this, so we make
23032 a work-around. First, emit the pub name for the segment
23033 containing the function label. Then make and emit a
23034 simplified subprogram DIE for the second segment with the
23035 name prefixed by __second_sect_of_. We use the same
23036 linkage name for the second DIE so that gdb will find both
23037 sections when given "b foo". */
23038 const char *name = NULL;
23039 tree decl_name = DECL_NAME (decl);
23040 dw_die_ref seg_die;
23041
23042 /* Do the 'primary' section. */
23043 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23044 fde->dw_fde_end, false);
23045
23046 /* Build a minimal DIE for the secondary section. */
23047 seg_die = new_die (DW_TAG_subprogram,
23048 subr_die->die_parent, decl);
23049
23050 if (TREE_PUBLIC (decl))
23051 add_AT_flag (seg_die, DW_AT_external, 1);
23052
23053 if (decl_name != NULL
23054 && IDENTIFIER_POINTER (decl_name) != NULL)
23055 {
23056 name = dwarf2_name (decl, 1);
23057 if (! DECL_ARTIFICIAL (decl))
23058 add_src_coords_attributes (seg_die, decl);
23059
23060 add_linkage_name (seg_die, decl);
23061 }
23062 gcc_assert (name != NULL);
23063 add_pure_or_virtual_attribute (seg_die, decl);
23064 if (DECL_ARTIFICIAL (decl))
23065 add_AT_flag (seg_die, DW_AT_artificial, 1);
23066
23067 name = concat ("__second_sect_of_", name, NULL);
23068 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23069 fde->dw_fde_second_end, false);
23070 add_name_attribute (seg_die, name);
23071 if (want_pubnames ())
23072 add_pubname_string (name, seg_die);
23073 }
23074 }
23075 else
23076 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23077 false);
23078 }
23079
23080 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23081
23082 /* We define the "frame base" as the function's CFA. This is more
23083 convenient for several reasons: (1) It's stable across the prologue
23084 and epilogue, which makes it better than just a frame pointer,
23085 (2) With dwarf3, there exists a one-byte encoding that allows us
23086 to reference the .debug_frame data by proxy, but failing that,
23087 (3) We can at least reuse the code inspection and interpretation
23088 code that determines the CFA position at various points in the
23089 function. */
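	 /* In concrete terms, a sketch of the two shapes emitted below:

	      DWARF 3+ with DWARF2 unwind info:
		DW_AT_frame_base: DW_OP_call_frame_cfa

	      otherwise:
		DW_AT_frame_base: a location expression (or location list)
		built by convert_cfa_to_fb_loc_list from the recorded CFA
		notes.  */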
23090 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23091 {
23092 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23093 add_AT_loc (subr_die, DW_AT_frame_base, op);
23094 }
23095 else
23096 {
23097 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23098 if (list->dw_loc_next)
23099 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23100 else
23101 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23102 }
23103
23104 /* Compute a displacement from the "steady-state frame pointer" to
23105 the CFA. The former is what all stack slots and argument slots
23106 will reference in the rtl; the latter is what we've told the
23107 debugger about. We'll need to adjust all frame_base references
23108 by this displacement. */
23109 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23110
23111 if (fun->static_chain_decl)
23112 {
23113 /* DWARF requires here a location expression that computes the
23114 address of the enclosing subprogram's frame base. The machinery
23115 in tree-nested.c is supposed to store this specific address in the
23116 last field of the FRAME record. */
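	  /* For instance (a sketch using GNU C nested functions; the names
	     OUTER and INNER are purely illustrative):

	       int outer (int n)
	       {
		 int inner (void) { return n; }   // reads N from OUTER's frame
		 return inner ();
	       }

	     INNER's DW_AT_static_link is expected to evaluate to OUTER's
	     frame base, which tree-nested.c stores in the last field of the
	     FRAME record reachable through the static chain.  */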
23117 const tree frame_type
23118 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23119 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23120
23121 tree fb_expr
23122 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23123 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23124 fb_expr, fb_decl, NULL_TREE);
23125
23126 add_AT_location_description (subr_die, DW_AT_static_link,
23127 loc_list_from_tree (fb_expr, 0, NULL));
23128 }
23129
23130 resolve_variable_values ();
23131 }
23132
23133 /* Generate child DIEs for template parameters. */
23134 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23135 gen_generic_params_dies (decl);
23136
23137 /* Now output descriptions of the arguments for this function. This gets
23138 (unnecessarily?) complex because the DECL_ARGUMENTS list
23139 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23140 `...' at the end of the formal parameter list. In order to find out if
23141 there was a trailing ellipsis or not, we must instead look at the type
23142 associated with the FUNCTION_DECL. This will be a node of type
23143 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23144 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23145 an ellipsis at the end. */
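     For example (a sketch, C; the names F, G and H are illustrative):

       int f (int a, ...);   // TYPE_ARG_TYPES: int         (no void terminator)
       int g (int a);        // TYPE_ARG_TYPES: int, void   (prototyped, no ellipsis)
       int h ();             // TYPE_ARG_TYPES: NULL        (unprototyped)

     which is why the code below consults prototype_p/stdarg_p on
     TREE_TYPE (decl) rather than DECL_ARGUMENTS.  */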
23146
23147 /* In the case where we are describing a mere function declaration, all we
23148 need to do here (and all we *can* do here) is to describe the *types* of
23149 its formal parameters. */
23150 if (debug_info_level <= DINFO_LEVEL_TERSE)
23151 ;
23152 else if (declaration)
23153 gen_formal_types_die (decl, subr_die);
23154 else
23155 {
23156 /* Generate DIEs to represent all known formal parameters. */
23157 tree parm = DECL_ARGUMENTS (decl);
23158 tree generic_decl = early_dwarf
23159 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23160 tree generic_decl_parm = generic_decl
23161 ? DECL_ARGUMENTS (generic_decl)
23162 : NULL;
23163
23164 /* Now we want to walk the list of parameters of the function and
23165 emit their relevant DIEs.
23166
23167 We consider the case of DECL being an instance of a generic function
23168 as well as it being a normal function.
23169
23170 If DECL is an instance of a generic function we walk the
23171 parameters of the generic function declaration _and_ the parameters of
23172 DECL itself. This is useful because we want to emit specific DIEs for
23173 function parameter packs and those are declared as part of the
23174 generic function declaration. In that particular case,
23175 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23176 That DIE has children DIEs representing the set of arguments
23177 of the pack. Note that the set of pack arguments can be empty.
23178 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23179 child DIEs.
23180
23181 Otherwise, we just consider the parameters of DECL. */
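      /* For example (a sketch, C++; the name SPREAD is purely illustrative):

	   template <typename... T> void spread (T... args);
	   template void spread<int, long> (int, long);

	 For the instantiation, the pack ARGS is represented by one
	 DW_TAG_GNU_formal_parameter_pack DIE whose children describe the
	 two expanded parameters; for spread<> () the pack DIE would simply
	 have no children.  */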
23182 while (generic_decl_parm || parm)
23183 {
23184 if (generic_decl_parm
23185 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23186 gen_formal_parameter_pack_die (generic_decl_parm,
23187 parm, subr_die,
23188 &parm);
23189 else if (parm)
23190 {
23191 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23192
23193 if (early_dwarf
23194 && parm == DECL_ARGUMENTS (decl)
23195 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23196 && parm_die
23197 && (dwarf_version >= 3 || !dwarf_strict))
23198 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23199
23200 parm = DECL_CHAIN (parm);
23201 }
23202 else if (parm)
23203 parm = DECL_CHAIN (parm);
23204
23205 if (generic_decl_parm)
23206 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23207 }
23208
23209 /* Decide whether we need an unspecified_parameters DIE at the end.
23210 There are two cases to do this for: 1) the ANSI `...' declaration -
23211 this is detectable when the end of the arg list is not a
23212 void_type_node; 2) an unprototyped function declaration (not a
23213 definition). This just means that we have no info about the
23214 parameters at all. */
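      /* For example (a sketch, C; the names F, G and H are illustrative):

	   void f (int a, ...);   // prototyped and stdarg: gets the DIE
	   void g ();             // unprototyped declaration, no body seen:
				  // gets it too
	   void h (void);         // prototyped, fixed arity: does not  */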
23215 if (early_dwarf)
23216 {
23217 if (prototype_p (TREE_TYPE (decl)))
23218 {
23219 /* This is the prototyped case; check for a trailing ellipsis. */
23220 if (stdarg_p (TREE_TYPE (decl)))
23221 gen_unspecified_parameters_die (decl, subr_die);
23222 }
23223 else if (DECL_INITIAL (decl) == NULL_TREE)
23224 gen_unspecified_parameters_die (decl, subr_die);
23225 }
23226 }
23227
23228 if (subr_die != old_die)
23229 /* Add the calling convention attribute if requested. */
23230 add_calling_convention_attribute (subr_die, decl);
23231
23232 /* Output Dwarf info for all of the stuff within the body of the function
23233 (if it has one - it may be just a declaration).
23234
23235 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23236 a function. This BLOCK actually represents the outermost binding contour
23237 for the function, i.e. the contour in which the function's formal
23238 parameters and labels get declared. Curiously, it appears that the front
23239 end doesn't actually put the PARM_DECL nodes for the current function onto
23240 the BLOCK_VARS list for this outer scope; they are instead strung off
23241 of the DECL_ARGUMENTS list for the function.
23242
23243 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23244 the LABEL_DECL nodes for the function however, and we output DWARF info
23245 for those in decls_for_scope. Just within the `outer_scope' there will be
23246 a BLOCK node representing the function's outermost pair of curly braces,
23247 and any blocks used for the base and member initializers of a C++
23248 constructor function. */
23249 tree outer_scope = DECL_INITIAL (decl);
23250 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23251 {
23252 int call_site_note_count = 0;
23253 int tail_call_site_note_count = 0;
23254
23255 /* Emit a DW_TAG_variable DIE for a named return value. */
23256 if (DECL_NAME (DECL_RESULT (decl)))
23257 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23258
23259 /* The first time through decls_for_scope we will generate the
23260 DIEs for the locals. The second time, we fill in the
23261 location info. */
23262 decls_for_scope (outer_scope, subr_die);
23263
23264 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23265 {
23266 struct call_arg_loc_node *ca_loc;
23267 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23268 {
23269 dw_die_ref die = NULL;
23270 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23271 rtx arg, next_arg;
23272
23273 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23274 ? XEXP (ca_loc->call_arg_loc_note, 0)
23275 : NULL_RTX);
23276 arg; arg = next_arg)
23277 {
23278 dw_loc_descr_ref reg, val;
23279 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23280 dw_die_ref cdie, tdie = NULL;
23281
23282 next_arg = XEXP (arg, 1);
23283 if (REG_P (XEXP (XEXP (arg, 0), 0))
23284 && next_arg
23285 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23286 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23287 && REGNO (XEXP (XEXP (arg, 0), 0))
23288 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23289 next_arg = XEXP (next_arg, 1);
23290 if (mode == VOIDmode)
23291 {
23292 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23293 if (mode == VOIDmode)
23294 mode = GET_MODE (XEXP (arg, 0));
23295 }
23296 if (mode == VOIDmode || mode == BLKmode)
23297 continue;
23298 /* Get dynamic information about call target only if we
23299 have no static information: we cannot generate both
23300 DW_AT_call_origin and DW_AT_call_target
23301 attributes. */
23302 if (ca_loc->symbol_ref == NULL_RTX)
23303 {
23304 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23305 {
23306 tloc = XEXP (XEXP (arg, 0), 1);
23307 continue;
23308 }
23309 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23310 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23311 {
23312 tlocc = XEXP (XEXP (arg, 0), 1);
23313 continue;
23314 }
23315 }
23316 reg = NULL;
23317 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23318 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23319 VAR_INIT_STATUS_INITIALIZED);
23320 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23321 {
23322 rtx mem = XEXP (XEXP (arg, 0), 0);
23323 reg = mem_loc_descriptor (XEXP (mem, 0),
23324 get_address_mode (mem),
23325 GET_MODE (mem),
23326 VAR_INIT_STATUS_INITIALIZED);
23327 }
23328 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23329 == DEBUG_PARAMETER_REF)
23330 {
23331 tree tdecl
23332 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23333 tdie = lookup_decl_die (tdecl);
23334 if (tdie == NULL)
23335 continue;
23336 }
23337 else
23338 continue;
23339 if (reg == NULL
23340 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23341 != DEBUG_PARAMETER_REF)
23342 continue;
23343 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23344 VOIDmode,
23345 VAR_INIT_STATUS_INITIALIZED);
23346 if (val == NULL)
23347 continue;
23348 if (die == NULL)
23349 die = gen_call_site_die (decl, subr_die, ca_loc);
23350 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23351 NULL_TREE);
23352 if (reg != NULL)
23353 add_AT_loc (cdie, DW_AT_location, reg);
23354 else if (tdie != NULL)
23355 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23356 tdie);
23357 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23358 if (next_arg != XEXP (arg, 1))
23359 {
23360 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23361 if (mode == VOIDmode)
23362 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23363 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23364 0), 1),
23365 mode, VOIDmode,
23366 VAR_INIT_STATUS_INITIALIZED);
23367 if (val != NULL)
23368 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23369 val);
23370 }
23371 }
23372 if (die == NULL
23373 && (ca_loc->symbol_ref || tloc))
23374 die = gen_call_site_die (decl, subr_die, ca_loc);
23375 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23376 {
23377 dw_loc_descr_ref tval = NULL;
23378
23379 if (tloc != NULL_RTX)
23380 tval = mem_loc_descriptor (tloc,
23381 GET_MODE (tloc) == VOIDmode
23382 ? Pmode : GET_MODE (tloc),
23383 VOIDmode,
23384 VAR_INIT_STATUS_INITIALIZED);
23385 if (tval)
23386 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23387 else if (tlocc != NULL_RTX)
23388 {
23389 tval = mem_loc_descriptor (tlocc,
23390 GET_MODE (tlocc) == VOIDmode
23391 ? Pmode : GET_MODE (tlocc),
23392 VOIDmode,
23393 VAR_INIT_STATUS_INITIALIZED);
23394 if (tval)
23395 add_AT_loc (die,
23396 dwarf_AT (DW_AT_call_target_clobbered),
23397 tval);
23398 }
23399 }
23400 if (die != NULL)
23401 {
23402 call_site_note_count++;
23403 if (ca_loc->tail_call_p)
23404 tail_call_site_note_count++;
23405 }
23406 }
23407 }
23408 call_arg_locations = NULL;
23409 call_arg_loc_last = NULL;
23410 if (tail_call_site_count >= 0
23411 && tail_call_site_count == tail_call_site_note_count
23412 && (!dwarf_strict || dwarf_version >= 5))
23413 {
23414 if (call_site_count >= 0
23415 && call_site_count == call_site_note_count)
23416 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23417 else
23418 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23419 }
23420 call_site_count = -1;
23421 tail_call_site_count = -1;
23422 }
23423
23424 /* Mark used types after we have created DIEs for the function's scopes. */
23425 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23426 }
23427
23428 /* Returns a hash value for X (which really is a die_struct). */
23429
23430 hashval_t
23431 block_die_hasher::hash (die_struct *d)
23432 {
23433 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23434 }
23435
23436 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23437 as decl_id and die_parent of die_struct Y. */
23438
23439 bool
23440 block_die_hasher::equal (die_struct *x, die_struct *y)
23441 {
23442 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23443 }
23444
23445 /* Hold information about markers for inlined entry points. */
23446 struct GTY ((for_user)) inline_entry_data
23447 {
23448 /* The block that's the inlined_function_outer_scope for an inlined
23449 function. */
23450 tree block;
23451
23452 /* The label at the inlined entry point. */
23453 const char *label_pfx;
23454 unsigned int label_num;
23455
23456 /* The view number to be used as the inlined entry point. */
23457 var_loc_view view;
23458 };
23459
23460 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23461 {
23462 typedef tree compare_type;
23463 static inline hashval_t hash (const inline_entry_data *);
23464 static inline bool equal (const inline_entry_data *, const_tree);
23465 };
23466
23467 /* Hash table routines for inline_entry_data. */
23468
23469 inline hashval_t
23470 inline_entry_data_hasher::hash (const inline_entry_data *data)
23471 {
23472 return htab_hash_pointer (data->block);
23473 }
23474
23475 inline bool
23476 inline_entry_data_hasher::equal (const inline_entry_data *data,
23477 const_tree block)
23478 {
23479 return data->block == block;
23480 }
23481
23482 /* Inlined entry points pending DIE creation in this compilation unit. */
23483
23484 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23485
23486
23487 /* Return TRUE if DECL, which may have been previously generated as
23488 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23489 true if decl (or its origin) is either an extern declaration or a
23490 class/namespace scoped declaration.
23491
23492 The declare_in_namespace support causes us to get two DIEs for one
23493 variable, both of which are declarations. We want to avoid
23494 considering one to be a specification, so we must test for
23495 DECLARATION and DW_AT_declaration. */
23496 static inline bool
23497 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23498 {
23499 return (old_die && TREE_STATIC (decl) && !declaration
23500 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23501 }
23502
23503 /* Return true if DECL is a local static. */
23504
23505 static inline bool
23506 local_function_static (tree decl)
23507 {
23508 gcc_assert (VAR_P (decl));
23509 return TREE_STATIC (decl)
23510 && DECL_CONTEXT (decl)
23511 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23512 }
23513
23514 /* Generate a DIE to represent a declared data object.
23515 Either DECL or ORIGIN must be non-null. */
23516
23517 static void
23518 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23519 {
23520 HOST_WIDE_INT off = 0;
23521 tree com_decl;
23522 tree decl_or_origin = decl ? decl : origin;
23523 tree ultimate_origin;
23524 dw_die_ref var_die;
23525 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23526 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23527 || class_or_namespace_scope_p (context_die));
23528 bool specialization_p = false;
23529 bool no_linkage_name = false;
23530
23531 /* While C++ inline static data members have definitions inside of the
23532 class, force the first DIE to be a declaration, then let gen_member_die
23533 reparent it to the class context and call gen_variable_die again
23534 to create the outside of the class DIE for the definition. */
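  /* For example (a sketch, C++17; the names S and X are illustrative):

       struct S { static inline int x = 42; };

     The first call here emits a declaration DIE for S::x in the class;
     gen_member_die then calls gen_variable_die again so that the
     out-of-class definition DIE, which carries the location, references
     that declaration.  */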
23535 if (!declaration
23536 && old_die == NULL
23537 && decl
23538 && DECL_CONTEXT (decl)
23539 && TYPE_P (DECL_CONTEXT (decl))
23540 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23541 {
23542 declaration = true;
23543 if (dwarf_version < 5)
23544 no_linkage_name = true;
23545 }
23546
23547 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23548 if (decl || ultimate_origin)
23549 origin = ultimate_origin;
23550 com_decl = fortran_common (decl_or_origin, &off);
23551
23552 /* A symbol in a common block gets emitted as a child of the common block
23553 DIE, in the form of a data member. */
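  /* For example (a sketch, Fortran; the names BLK, I and J are illustrative):

       integer i, j
       common /blk/ i, j

     yields a DW_TAG_common_block DIE for BLK with DW_TAG_variable children
     for I and J; each child's location is the common block's address plus
     the member's offset (OFF as computed by fortran_common above).  */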
23554 if (com_decl)
23555 {
23556 dw_die_ref com_die;
23557 dw_loc_list_ref loc = NULL;
23558 die_node com_die_arg;
23559
23560 var_die = lookup_decl_die (decl_or_origin);
23561 if (var_die)
23562 {
23563 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23564 {
23565 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23566 if (loc)
23567 {
23568 if (off)
23569 {
23570 /* Optimize the common case. */
23571 if (single_element_loc_list_p (loc)
23572 && loc->expr->dw_loc_opc == DW_OP_addr
23573 && loc->expr->dw_loc_next == NULL
23574 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23575 == SYMBOL_REF)
23576 {
23577 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23578 loc->expr->dw_loc_oprnd1.v.val_addr
23579 = plus_constant (GET_MODE (x), x , off);
23580 }
23581 else
23582 loc_list_plus_const (loc, off);
23583 }
23584 add_AT_location_description (var_die, DW_AT_location, loc);
23585 remove_AT (var_die, DW_AT_declaration);
23586 }
23587 }
23588 return;
23589 }
23590
23591 if (common_block_die_table == NULL)
23592 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23593
23594 com_die_arg.decl_id = DECL_UID (com_decl);
23595 com_die_arg.die_parent = context_die;
23596 com_die = common_block_die_table->find (&com_die_arg);
23597 if (! early_dwarf)
23598 loc = loc_list_from_tree (com_decl, 2, NULL);
23599 if (com_die == NULL)
23600 {
23601 const char *cnam
23602 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23603 die_node **slot;
23604
23605 com_die = new_die (DW_TAG_common_block, context_die, decl);
23606 add_name_and_src_coords_attributes (com_die, com_decl);
23607 if (loc)
23608 {
23609 add_AT_location_description (com_die, DW_AT_location, loc);
23610 /* Avoid sharing the same loc descriptor between
23611 DW_TAG_common_block and DW_TAG_variable. */
23612 loc = loc_list_from_tree (com_decl, 2, NULL);
23613 }
23614 else if (DECL_EXTERNAL (decl_or_origin))
23615 add_AT_flag (com_die, DW_AT_declaration, 1);
23616 if (want_pubnames ())
23617 add_pubname_string (cnam, com_die); /* ??? needed? */
23618 com_die->decl_id = DECL_UID (com_decl);
23619 slot = common_block_die_table->find_slot (com_die, INSERT);
23620 *slot = com_die;
23621 }
23622 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23623 {
23624 add_AT_location_description (com_die, DW_AT_location, loc);
23625 loc = loc_list_from_tree (com_decl, 2, NULL);
23626 remove_AT (com_die, DW_AT_declaration);
23627 }
23628 var_die = new_die (DW_TAG_variable, com_die, decl);
23629 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23630 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23631 decl_quals (decl_or_origin), false,
23632 context_die);
23633 add_alignment_attribute (var_die, decl);
23634 add_AT_flag (var_die, DW_AT_external, 1);
23635 if (loc)
23636 {
23637 if (off)
23638 {
23639 /* Optimize the common case. */
23640 if (single_element_loc_list_p (loc)
23641 && loc->expr->dw_loc_opc == DW_OP_addr
23642 && loc->expr->dw_loc_next == NULL
23643 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23644 {
23645 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23646 loc->expr->dw_loc_oprnd1.v.val_addr
23647 = plus_constant (GET_MODE (x), x, off);
23648 }
23649 else
23650 loc_list_plus_const (loc, off);
23651 }
23652 add_AT_location_description (var_die, DW_AT_location, loc);
23653 }
23654 else if (DECL_EXTERNAL (decl_or_origin))
23655 add_AT_flag (var_die, DW_AT_declaration, 1);
23656 if (decl)
23657 equate_decl_number_to_die (decl, var_die);
23658 return;
23659 }
23660
23661 if (old_die)
23662 {
23663 if (declaration)
23664 {
23665 /* A declaration that has been previously dumped needs no
23666 further annotations, since it doesn't need location info on
23667 the second pass. */
23668 return;
23669 }
23670 else if (decl_will_get_specification_p (old_die, decl, declaration)
23671 && !get_AT (old_die, DW_AT_specification))
23672 {
23673 /* Fall through so we can make a new variable DIE along with a
23674 DW_AT_specification. */
23675 }
23676 else if (origin && old_die->die_parent != context_die)
23677 {
23678 /* If we will be creating an inlined instance, we need a
23679 new DIE that will get annotated with
23680 DW_AT_abstract_origin. */
23681 gcc_assert (!DECL_ABSTRACT_P (decl));
23682 }
23683 else
23684 {
23685 /* If a DIE was dumped early, it still needs location info.
23686 Skip to where we fill the location bits. */
23687 var_die = old_die;
23688
23689 /* ??? In LTRANS we cannot annotate early-created variably
23690 modified type DIEs without copying them and adjusting all
23691 references to them. Thus we dump them again. Also add a
23692 reference to them, but beware of a -g0 compile and -g link,
23693 in which case the reference will already be present. */
23694 tree type = TREE_TYPE (decl_or_origin);
23695 if (in_lto_p
23696 && ! get_AT (var_die, DW_AT_type)
23697 && variably_modified_type_p
23698 (type, decl_function_context (decl_or_origin)))
23699 {
23700 if (decl_by_reference_p (decl_or_origin))
23701 add_type_attribute (var_die, TREE_TYPE (type),
23702 TYPE_UNQUALIFIED, false, context_die);
23703 else
23704 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23705 false, context_die);
23706 }
23707
23708 goto gen_variable_die_location;
23709 }
23710 }
23711
23712 /* For static data members, the declaration in the class is supposed
23713 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23714 also in DWARF2; the specification should still be DW_TAG_variable
23715 referencing the DW_TAG_member DIE. */
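  /* For example (a sketch, C++; the names C and M are illustrative):

       class C { static int m; };
       int C::m;

     With DWARF < 5 the in-class declaration is a DW_TAG_member; the
     out-of-class definition is a DW_TAG_variable whose
     DW_AT_specification refers back to that declaration.  */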
23716 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23717 var_die = new_die (DW_TAG_member, context_die, decl);
23718 else
23719 var_die = new_die (DW_TAG_variable, context_die, decl);
23720
23721 if (origin != NULL)
23722 add_abstract_origin_attribute (var_die, origin);
23723
23724 /* Loop unrolling can create multiple blocks that refer to the same
23725 static variable, so we must test for the DW_AT_declaration flag.
23726
23727 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23728 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23729 sharing them.
23730
23731 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23732 else if (decl_will_get_specification_p (old_die, decl, declaration))
23733 {
23734 /* This is a definition of a C++ class level static. */
23735 add_AT_specification (var_die, old_die);
23736 specialization_p = true;
23737 if (DECL_NAME (decl))
23738 {
23739 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23740 struct dwarf_file_data * file_index = lookup_filename (s.file);
23741
23742 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23743 add_AT_file (var_die, DW_AT_decl_file, file_index);
23744
23745 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23746 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23747
23748 if (debug_column_info
23749 && s.column
23750 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23751 != (unsigned) s.column))
23752 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23753
23754 if (old_die->die_tag == DW_TAG_member)
23755 add_linkage_name (var_die, decl);
23756 }
23757 }
23758 else
23759 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23760
23761 if ((origin == NULL && !specialization_p)
23762 || (origin != NULL
23763 && !DECL_ABSTRACT_P (decl_or_origin)
23764 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23765 decl_function_context
23766 (decl_or_origin))))
23767 {
23768 tree type = TREE_TYPE (decl_or_origin);
23769
23770 if (decl_by_reference_p (decl_or_origin))
23771 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23772 context_die);
23773 else
23774 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23775 context_die);
23776 }
23777
23778 if (origin == NULL && !specialization_p)
23779 {
23780 if (TREE_PUBLIC (decl))
23781 add_AT_flag (var_die, DW_AT_external, 1);
23782
23783 if (DECL_ARTIFICIAL (decl))
23784 add_AT_flag (var_die, DW_AT_artificial, 1);
23785
23786 add_alignment_attribute (var_die, decl);
23787
23788 add_accessibility_attribute (var_die, decl);
23789 }
23790
23791 if (declaration)
23792 add_AT_flag (var_die, DW_AT_declaration, 1);
23793
23794 if (decl && (DECL_ABSTRACT_P (decl)
23795 || !old_die || is_declaration_die (old_die)))
23796 equate_decl_number_to_die (decl, var_die);
23797
23798 gen_variable_die_location:
23799 if (! declaration
23800 && (! DECL_ABSTRACT_P (decl_or_origin)
23801 /* Local static vars are shared between all clones/inlines,
23802 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23803 already set. */
23804 || (VAR_P (decl_or_origin)
23805 && TREE_STATIC (decl_or_origin)
23806 && DECL_RTL_SET_P (decl_or_origin))))
23807 {
23808 if (early_dwarf)
23809 add_pubname (decl_or_origin, var_die);
23810 else
23811 add_location_or_const_value_attribute (var_die, decl_or_origin,
23812 decl == NULL);
23813 }
23814 else
23815 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23816
23817 if ((dwarf_version >= 4 || !dwarf_strict)
23818 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23819 DW_AT_const_expr) == 1
23820 && !get_AT (var_die, DW_AT_const_expr)
23821 && !specialization_p)
23822 add_AT_flag (var_die, DW_AT_const_expr, 1);
23823
23824 if (!dwarf_strict)
23825 {
23826 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23827 DW_AT_inline);
23828 if (inl != -1
23829 && !get_AT (var_die, DW_AT_inline)
23830 && !specialization_p)
23831 add_AT_unsigned (var_die, DW_AT_inline, inl);
23832 }
23833 }
23834
23835 /* Generate a DIE to represent a named constant. */
23836
23837 static void
23838 gen_const_die (tree decl, dw_die_ref context_die)
23839 {
23840 dw_die_ref const_die;
23841 tree type = TREE_TYPE (decl);
23842
23843 const_die = lookup_decl_die (decl);
23844 if (const_die)
23845 return;
23846
23847 const_die = new_die (DW_TAG_constant, context_die, decl);
23848 equate_decl_number_to_die (decl, const_die);
23849 add_name_and_src_coords_attributes (const_die, decl);
23850 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23851 if (TREE_PUBLIC (decl))
23852 add_AT_flag (const_die, DW_AT_external, 1);
23853 if (DECL_ARTIFICIAL (decl))
23854 add_AT_flag (const_die, DW_AT_artificial, 1);
23855 tree_add_const_value_attribute_for_decl (const_die, decl);
23856 }
23857
23858 /* Generate a DIE to represent a label identifier. */
23859
23860 static void
23861 gen_label_die (tree decl, dw_die_ref context_die)
23862 {
23863 tree origin = decl_ultimate_origin (decl);
23864 dw_die_ref lbl_die = lookup_decl_die (decl);
23865 rtx insn;
23866 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23867
23868 if (!lbl_die)
23869 {
23870 lbl_die = new_die (DW_TAG_label, context_die, decl);
23871 equate_decl_number_to_die (decl, lbl_die);
23872
23873 if (origin != NULL)
23874 add_abstract_origin_attribute (lbl_die, origin);
23875 else
23876 add_name_and_src_coords_attributes (lbl_die, decl);
23877 }
23878
23879 if (DECL_ABSTRACT_P (decl))
23880 equate_decl_number_to_die (decl, lbl_die);
23881 else if (! early_dwarf)
23882 {
23883 insn = DECL_RTL_IF_SET (decl);
23884
23885 /* Deleted labels are programmer-specified labels which have been
23886 eliminated because of various optimizations. We still emit them
23887 here so that it is possible to put breakpoints on them. */
23888 if (insn
23889 && (LABEL_P (insn)
23890 || ((NOTE_P (insn)
23891 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23892 {
23893 /* When optimization is enabled (via -O) some parts of the compiler
23894 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23895 represent source-level labels which were explicitly declared by
23896 the user. This really shouldn't be happening though, so catch
23897 it if it ever does happen. */
23898 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23899
23900 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23901 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23902 }
23903 else if (insn
23904 && NOTE_P (insn)
23905 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23906 && CODE_LABEL_NUMBER (insn) != -1)
23907 {
23908 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23909 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23910 }
23911 }
23912 }
23913
23914 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23915 attributes to the DIE for a block STMT, to describe where the inlined
23916 function was called from. This is similar to add_src_coords_attributes. */
23917
23918 static inline void
23919 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23920 {
23921 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23922
23923 if (dwarf_version >= 3 || !dwarf_strict)
23924 {
23925 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23926 add_AT_unsigned (die, DW_AT_call_line, s.line);
23927 if (debug_column_info && s.column)
23928 add_AT_unsigned (die, DW_AT_call_column, s.column);
23929 }
23930 }
23931
23932
23933 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23934 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23935
23936 static inline void
23937 add_high_low_attributes (tree stmt, dw_die_ref die)
23938 {
23939 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23940
23941 if (inline_entry_data **iedp
23942 = !inline_entry_data_table ? NULL
23943 : inline_entry_data_table->find_slot_with_hash (stmt,
23944 htab_hash_pointer (stmt),
23945 NO_INSERT))
23946 {
23947 inline_entry_data *ied = *iedp;
23948 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23949 gcc_assert (debug_inline_points);
23950 gcc_assert (inlined_function_outer_scope_p (stmt));
23951
23952 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23953 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23954
23955 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23956 && !dwarf_strict)
23957 {
23958 if (!output_asm_line_debug_info ())
23959 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23960 else
23961 {
23962 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23963 /* FIXME: this will resolve to a small number. Could we
23964 possibly emit smaller data? Ideally we'd emit a
23965 uleb128, but that would make the size of DIEs
23966 impossible for the compiler to compute, since it's
23967 the assembler that computes the value of the view
23968 label in this case. Ideally, we'd have a single form
23969 encompassing both the address and the view, and
23970 indirecting them through a table might make things
23971 easier, but even that would be more wasteful,
23972 space-wise, than what we have now. */
23973 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23974 }
23975 }
23976
23977 inline_entry_data_table->clear_slot (iedp);
23978 }
23979
23980 if (BLOCK_FRAGMENT_CHAIN (stmt)
23981 && (dwarf_version >= 3 || !dwarf_strict))
23982 {
23983 tree chain, superblock = NULL_TREE;
23984 dw_die_ref pdie;
23985 dw_attr_node *attr = NULL;
23986
23987 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23988 {
23989 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23990 BLOCK_NUMBER (stmt));
23991 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23992 }
23993
23994 /* Optimize duplicate .debug_ranges lists or even tails of
23995 lists. If this BLOCK has the same ranges as its supercontext,
23996 look up the DW_AT_ranges attribute in the supercontext (and
23997 recursively so), verify that the ranges_table contains the
23998 right values and use it instead of adding a new .debug_ranges list. */
23999 for (chain = stmt, pdie = die;
24000 BLOCK_SAME_RANGE (chain);
24001 chain = BLOCK_SUPERCONTEXT (chain))
24002 {
24003 dw_attr_node *new_attr;
24004
24005 pdie = pdie->die_parent;
24006 if (pdie == NULL)
24007 break;
24008 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24009 break;
24010 new_attr = get_AT (pdie, DW_AT_ranges);
24011 if (new_attr == NULL
24012 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24013 break;
24014 attr = new_attr;
24015 superblock = BLOCK_SUPERCONTEXT (chain);
24016 }
24017 if (attr != NULL
24018 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24019 == BLOCK_NUMBER (superblock))
24020 && BLOCK_FRAGMENT_CHAIN (superblock))
24021 {
24022 unsigned long off = attr->dw_attr_val.v.val_offset;
24023 unsigned long supercnt = 0, thiscnt = 0;
24024 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24025 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24026 {
24027 ++supercnt;
24028 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24029 == BLOCK_NUMBER (chain));
24030 }
24031 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24032 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24033 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24034 ++thiscnt;
24035 gcc_assert (supercnt >= thiscnt);
24036 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24037 false);
24038 note_rnglist_head (off + supercnt - thiscnt);
24039 return;
24040 }
24041
24042 unsigned int offset = add_ranges (stmt, true);
24043 add_AT_range_list (die, DW_AT_ranges, offset, false);
24044 note_rnglist_head (offset);
24045
24046 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24047 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24048 do
24049 {
24050 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24051 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24052 chain = BLOCK_FRAGMENT_CHAIN (chain);
24053 }
24054 while (chain);
24055 add_ranges (NULL);
24056 }
24057 else
24058 {
24059 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24060 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24061 BLOCK_NUMBER (stmt));
24062 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24063 BLOCK_NUMBER (stmt));
24064 add_AT_low_high_pc (die, label, label_high, false);
24065 }
24066 }
24067
24068 /* Generate a DIE for a lexical block. */
24069
24070 static void
24071 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24072 {
24073 dw_die_ref old_die = BLOCK_DIE (stmt);
24074 dw_die_ref stmt_die = NULL;
24075 if (!old_die)
24076 {
24077 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24078 BLOCK_DIE (stmt) = stmt_die;
24079 }
24080
24081 if (BLOCK_ABSTRACT (stmt))
24082 {
24083 if (old_die)
24084 {
24085 /* This must have been generated early and it won't even
24086 need location information since it's a DW_AT_inline
24087 function. */
24088 if (flag_checking)
24089 for (dw_die_ref c = context_die; c; c = c->die_parent)
24090 if (c->die_tag == DW_TAG_inlined_subroutine
24091 || c->die_tag == DW_TAG_subprogram)
24092 {
24093 gcc_assert (get_AT (c, DW_AT_inline));
24094 break;
24095 }
24096 return;
24097 }
24098 }
24099 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24100 {
24101 /* If this is an inlined instance, create a new lexical die for
24102 anything below to attach DW_AT_abstract_origin to. */
24103 if (old_die)
24104 {
24105 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24106 BLOCK_DIE (stmt) = stmt_die;
24107 old_die = NULL;
24108 }
24109
24110 tree origin = block_ultimate_origin (stmt);
24111 if (origin != NULL_TREE && origin != stmt)
24112 add_abstract_origin_attribute (stmt_die, origin);
24113 }
24114
24115 if (old_die)
24116 stmt_die = old_die;
24117
24118 /* A non-abstract block whose blocks have already been reordered
24119 should have the instruction range for this block. If so, set the
24120 high/low attributes. */
24121 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24122 {
24123 gcc_assert (stmt_die);
24124 add_high_low_attributes (stmt, stmt_die);
24125 }
24126
24127 decls_for_scope (stmt, stmt_die);
24128 }
24129
24130 /* Generate a DIE for an inlined subprogram. */
24131
24132 static void
24133 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24134 {
24135 tree decl;
24136
24137 /* The instance of the function that is effectively being inlined shall not
24138 be abstract. */
24139 gcc_assert (! BLOCK_ABSTRACT (stmt));
24140
24141 decl = block_ultimate_origin (stmt);
24142
24143 /* Make sure any inlined functions are known to be inlineable. */
24144 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24145 || cgraph_function_possibly_inlined_p (decl));
24146
24147 if (! BLOCK_ABSTRACT (stmt))
24148 {
24149 dw_die_ref subr_die
24150 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24151
24152 if (call_arg_locations || debug_inline_points)
24153 BLOCK_DIE (stmt) = subr_die;
24154 add_abstract_origin_attribute (subr_die, decl);
24155 if (TREE_ASM_WRITTEN (stmt))
24156 add_high_low_attributes (stmt, subr_die);
24157 add_call_src_coords_attributes (stmt, subr_die);
24158
24159 decls_for_scope (stmt, subr_die);
24160 }
24161 }
24162
24163 /* Generate a DIE for a field in a record or structure. CTX is required: see
24164 the comment for VLR_CONTEXT. */
24165
24166 static void
24167 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24168 {
24169 dw_die_ref decl_die;
24170
24171 if (TREE_TYPE (decl) == error_mark_node)
24172 return;
24173
24174 decl_die = new_die (DW_TAG_member, context_die, decl);
24175 add_name_and_src_coords_attributes (decl_die, decl);
24176 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24177 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24178 context_die);
24179
24180 if (DECL_BIT_FIELD_TYPE (decl))
24181 {
24182 add_byte_size_attribute (decl_die, decl);
24183 add_bit_size_attribute (decl_die, decl);
24184 add_bit_offset_attribute (decl_die, decl, ctx);
24185 }
24186
24187 add_alignment_attribute (decl_die, decl);
24188
24189 /* If we have a variant part offset, then we are supposed to process a member
24190 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24191 trees. */
24192 gcc_assert (ctx->variant_part_offset == NULL_TREE
24193 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24194 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24195 add_data_member_location_attribute (decl_die, decl, ctx);
24196
24197 if (DECL_ARTIFICIAL (decl))
24198 add_AT_flag (decl_die, DW_AT_artificial, 1);
24199
24200 add_accessibility_attribute (decl_die, decl);
24201
24202 /* Equate decl number to die, so that we can look up this decl later on. */
24203 equate_decl_number_to_die (decl, decl_die);
24204 }
24205
24206 /* Generate a DIE for a pointer to a member type. TYPE can be an
24207 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24208 pointer to member function. */
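
/* Editor's illustrative note (not part of the original sources): for a
C++ class S, a pointer to data member such as
  int S::*pm;
has an OFFSET_TYPE, while a pointer to member function such as
  int (S::*pmf) (void);
is represented with a RECORD_TYPE, as described above.  Either way the
DIE generated below is a DW_TAG_ptr_to_member_type whose
DW_AT_containing_type refers to the DIE for S.  */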
24209
24210 static void
24211 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24212 {
24213 if (lookup_type_die (type))
24214 return;
24215
24216 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24217 scope_die_for (type, context_die), type);
24218
24219 equate_type_number_to_die (type, ptr_die);
24220 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24221 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24222 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24223 context_die);
24224 add_alignment_attribute (ptr_die, type);
24225
24226 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24227 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24228 {
24229 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24230 add_AT_loc (ptr_die, DW_AT_use_location, op);
24231 }
24232 }
24233
24234 static char *producer_string;
24235
24236 /* Return a heap allocated producer string including command line options
24237 if -grecord-gcc-switches. */
24238
24239 static char *
24240 gen_producer_string (void)
24241 {
24242 size_t j;
24243 auto_vec<const char *> switches;
24244 const char *language_string = lang_hooks.name;
24245 char *producer, *tail;
24246 const char *p;
24247 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24248 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24249
24250 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24251 switch (save_decoded_options[j].opt_index)
24252 {
24253 case OPT_o:
24254 case OPT_d:
24255 case OPT_dumpbase:
24256 case OPT_dumpdir:
24257 case OPT_auxbase:
24258 case OPT_auxbase_strip:
24259 case OPT_quiet:
24260 case OPT_version:
24261 case OPT_v:
24262 case OPT_w:
24263 case OPT_L:
24264 case OPT_D:
24265 case OPT_I:
24266 case OPT_U:
24267 case OPT_SPECIAL_unknown:
24268 case OPT_SPECIAL_ignore:
24269 case OPT_SPECIAL_program_name:
24270 case OPT_SPECIAL_input_file:
24271 case OPT_grecord_gcc_switches:
24272 case OPT__output_pch_:
24273 case OPT_fdiagnostics_show_location_:
24274 case OPT_fdiagnostics_show_option:
24275 case OPT_fdiagnostics_show_caret:
24276 case OPT_fdiagnostics_color_:
24277 case OPT_fverbose_asm:
24278 case OPT____:
24279 case OPT__sysroot_:
24280 case OPT_nostdinc:
24281 case OPT_nostdinc__:
24282 case OPT_fpreprocessed:
24283 case OPT_fltrans_output_list_:
24284 case OPT_fresolution_:
24285 case OPT_fdebug_prefix_map_:
24286 case OPT_fmacro_prefix_map_:
24287 case OPT_ffile_prefix_map_:
24288 case OPT_fcompare_debug:
24289 case OPT_fchecking:
24290 case OPT_fchecking_:
24291 /* Ignore these. */
24292 continue;
24293 default:
24294 if (cl_options[save_decoded_options[j].opt_index].flags
24295 & CL_NO_DWARF_RECORD)
24296 continue;
24297 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24298 == '-');
24299 switch (save_decoded_options[j].canonical_option[0][1])
24300 {
24301 case 'M':
24302 case 'i':
24303 case 'W':
24304 continue;
24305 case 'f':
24306 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24307 "dump", 4) == 0)
24308 continue;
24309 break;
24310 default:
24311 break;
24312 }
24313 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24314 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24315 break;
24316 }
24317
24318 producer = XNEWVEC (char, plen + 1 + len + 1);
24319 tail = producer;
24320 sprintf (tail, "%s %s", language_string, version_string);
24321 tail += plen;
24322
24323 FOR_EACH_VEC_ELT (switches, j, p)
24324 {
24325 len = strlen (p);
24326 *tail = ' ';
24327 memcpy (tail + 1, p, len);
24328 tail += len + 1;
24329 }
24330
24331 *tail = '\0';
24332 return producer;
24333 }
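
/* Editor's illustrative sketch (not part of the original sources): with
-grecord-gcc-switches the producer string built above is the language
name and version followed by the recorded options, e.g. something along
the lines of
  "GNU C11 8.1.0 -mtune=generic -march=x86-64 -g -O2"
with driver and preprocessor options such as -o, -I, -D, -W*, -M* and
the various dump options filtered out.  The exact contents depend on the
command line, the front end and the target.  */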
24334
24335 /* Given a C and/or C++ language/version string return the "highest".
24336 C++ is assumed to be "higher" than C in this case. Used for merging
24337 LTO translation unit languages. */
24338 static const char *
24339 highest_c_language (const char *lang1, const char *lang2)
24340 {
24341 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24342 return "GNU C++17";
24343 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24344 return "GNU C++14";
24345 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24346 return "GNU C++11";
24347 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24348 return "GNU C++98";
24349
24350 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24351 return "GNU C17";
24352 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24353 return "GNU C11";
24354 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24355 return "GNU C99";
24356 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24357 return "GNU C89";
24358
24359 gcc_unreachable ();
24360 }
24361
24362
24363 /* Generate the DIE for the compilation unit. */
24364
24365 static dw_die_ref
24366 gen_compile_unit_die (const char *filename)
24367 {
24368 dw_die_ref die;
24369 const char *language_string = lang_hooks.name;
24370 int language;
24371
24372 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24373
24374 if (filename)
24375 {
24376 add_name_attribute (die, filename);
24377 /* Don't add cwd for <built-in>. */
24378 if (filename[0] != '<')
24379 add_comp_dir_attribute (die);
24380 }
24381
24382 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24383
24384 /* If our producer is LTO try to figure out a common language to use
24385 from the global list of translation units. */
24386 if (strcmp (language_string, "GNU GIMPLE") == 0)
24387 {
24388 unsigned i;
24389 tree t;
24390 const char *common_lang = NULL;
24391
24392 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24393 {
24394 if (!TRANSLATION_UNIT_LANGUAGE (t))
24395 continue;
24396 if (!common_lang)
24397 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24398 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24399 ;
24400 else if (strncmp (common_lang, "GNU C", 5) == 0
24401 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24402 /* Mixing C and C++ is ok, use C++ in that case. */
24403 common_lang = highest_c_language (common_lang,
24404 TRANSLATION_UNIT_LANGUAGE (t));
24405 else
24406 {
24407 /* Fall back to C. */
24408 common_lang = NULL;
24409 break;
24410 }
24411 }
24412
24413 if (common_lang)
24414 language_string = common_lang;
24415 }
24416
24417 language = DW_LANG_C;
24418 if (strncmp (language_string, "GNU C", 5) == 0
24419 && ISDIGIT (language_string[5]))
24420 {
24421 language = DW_LANG_C89;
24422 if (dwarf_version >= 3 || !dwarf_strict)
24423 {
24424 if (strcmp (language_string, "GNU C89") != 0)
24425 language = DW_LANG_C99;
24426
24427 if (dwarf_version >= 5 /* || !dwarf_strict */)
24428 if (strcmp (language_string, "GNU C11") == 0
24429 || strcmp (language_string, "GNU C17") == 0)
24430 language = DW_LANG_C11;
24431 }
24432 }
24433 else if (strncmp (language_string, "GNU C++", 7) == 0)
24434 {
24435 language = DW_LANG_C_plus_plus;
24436 if (dwarf_version >= 5 /* || !dwarf_strict */)
24437 {
24438 if (strcmp (language_string, "GNU C++11") == 0)
24439 language = DW_LANG_C_plus_plus_11;
24440 else if (strcmp (language_string, "GNU C++14") == 0)
24441 language = DW_LANG_C_plus_plus_14;
24442 else if (strcmp (language_string, "GNU C++17") == 0)
24443 /* For now. */
24444 language = DW_LANG_C_plus_plus_14;
24445 }
24446 }
24447 else if (strcmp (language_string, "GNU F77") == 0)
24448 language = DW_LANG_Fortran77;
24449 else if (dwarf_version >= 3 || !dwarf_strict)
24450 {
24451 if (strcmp (language_string, "GNU Ada") == 0)
24452 language = DW_LANG_Ada95;
24453 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24454 {
24455 language = DW_LANG_Fortran95;
24456 if (dwarf_version >= 5 /* || !dwarf_strict */)
24457 {
24458 if (strcmp (language_string, "GNU Fortran2003") == 0)
24459 language = DW_LANG_Fortran03;
24460 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24461 language = DW_LANG_Fortran08;
24462 }
24463 }
24464 else if (strcmp (language_string, "GNU Objective-C") == 0)
24465 language = DW_LANG_ObjC;
24466 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24467 language = DW_LANG_ObjC_plus_plus;
24468 else if (dwarf_version >= 5 || !dwarf_strict)
24469 {
24470 if (strcmp (language_string, "GNU Go") == 0)
24471 language = DW_LANG_Go;
24472 }
24473 }
24474 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24475 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24476 language = DW_LANG_Fortran90;
24477 /* Likewise for Ada. */
24478 else if (strcmp (language_string, "GNU Ada") == 0)
24479 language = DW_LANG_Ada83;
24480
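/* Editor's illustrative note (not part of the original sources): for
example, a "GNU C++14" unit is recorded as DW_LANG_C_plus_plus_14 only
when DWARF 5 is selected and as plain DW_LANG_C_plus_plus otherwise,
while "GNU C11" maps to DW_LANG_C11 for DWARF 5, to DW_LANG_C99 for
DWARF 3/4 (or non-strict DWARF 2), and to DW_LANG_C89 under strict
DWARF 2.  */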
24481 add_AT_unsigned (die, DW_AT_language, language);
24482
24483 switch (language)
24484 {
24485 case DW_LANG_Fortran77:
24486 case DW_LANG_Fortran90:
24487 case DW_LANG_Fortran95:
24488 case DW_LANG_Fortran03:
24489 case DW_LANG_Fortran08:
24490 /* Fortran has case-insensitive identifiers and the front-end
24491 lowercases everything. */
24492 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24493 break;
24494 default:
24495 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24496 break;
24497 }
24498 return die;
24499 }
24500
24501 /* Generate the DIE for a base class. */
24502
24503 static void
24504 gen_inheritance_die (tree binfo, tree access, tree type,
24505 dw_die_ref context_die)
24506 {
24507 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24508 struct vlr_context ctx = { type, NULL };
24509
24510 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24511 context_die);
24512 add_data_member_location_attribute (die, binfo, &ctx);
24513
24514 if (BINFO_VIRTUAL_P (binfo))
24515 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24516
24517 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24518 children, otherwise the default is DW_ACCESS_public. In DWARF2
24519 the default has always been DW_ACCESS_private. */
24520 if (access == access_public_node)
24521 {
24522 if (dwarf_version == 2
24523 || context_die->die_tag == DW_TAG_class_type)
24524 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24525 }
24526 else if (access == access_protected_node)
24527 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24528 else if (dwarf_version > 2
24529 && context_die->die_tag != DW_TAG_class_type)
24530 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24531 }
24532
24533 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24534 structure. */
24535 static bool
24536 is_variant_part (tree decl)
24537 {
24538 return (TREE_CODE (decl) == FIELD_DECL
24539 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24540 }
24541
24542 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24543 return the FIELD_DECL. Return NULL_TREE otherwise. */
24544
24545 static tree
24546 analyze_discr_in_predicate (tree operand, tree struct_type)
24547 {
24548 bool continue_stripping = true;
24549 while (continue_stripping)
24550 switch (TREE_CODE (operand))
24551 {
24552 CASE_CONVERT:
24553 operand = TREE_OPERAND (operand, 0);
24554 break;
24555 default:
24556 continue_stripping = false;
24557 break;
24558 }
24559
24560 /* Match field access to members of struct_type only. */
24561 if (TREE_CODE (operand) == COMPONENT_REF
24562 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24563 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24564 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24565 return TREE_OPERAND (operand, 1);
24566 else
24567 return NULL_TREE;
24568 }
24569
24570 /* Check that SRC is a constant integer that can be represented as a native
24571 integer constant (either signed or unsigned). If so, store it into DEST and
24572 return true. Return false otherwise. */
24573
24574 static bool
24575 get_discr_value (tree src, dw_discr_value *dest)
24576 {
24577 tree discr_type = TREE_TYPE (src);
24578
24579 if (lang_hooks.types.get_debug_type)
24580 {
24581 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24582 if (debug_type != NULL)
24583 discr_type = debug_type;
24584 }
24585
24586 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24587 return false;
24588
24589 /* Signedness can vary between the original type and the debug type. This
24590 can happen for character types in Ada for instance: the character type
24591 used for code generation can be signed, to be compatible with the C one,
24592 but from a debugger point of view, it must be unsigned. */
24593 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24594 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24595
24596 if (is_orig_unsigned != is_debug_unsigned)
24597 src = fold_convert (discr_type, src);
24598
24599 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24600 return false;
24601
24602 dest->pos = is_debug_unsigned;
24603 if (is_debug_unsigned)
24604 dest->v.uval = tree_to_uhwi (src);
24605 else
24606 dest->v.sval = tree_to_shwi (src);
24607
24608 return true;
24609 }
24610
24611 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24612 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24613 store NULL_TREE in DISCR_DECL. Otherwise:
24614
24615 - store the discriminant field in STRUCT_TYPE that controls the variant
24616 part to *DISCR_DECL
24617
24618 - put in *DISCR_LISTS_P an array where for each variant, the item
24619 represents the corresponding matching list of discriminant values.
24620
24621 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24622 the above array.
24623
24624 Note that when the array is allocated (i.e. when the analysis is
24625 successful), it is up to the caller to free the array. */
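
/* Editor's illustrative sketch (not part of the original sources): the
DECL_QUALIFIER predicates this analysis understands look like
  <discr> == 1                                    a single value
  <discr> >= 2 && <discr> <= 5                    a range (GE/GT, LE/LT)
  <discr> == 1 || (<discr> >= 2 && <discr> <= 5)  several possibilities
                                                  chained with ||
  true                                            the default variant
where <discr> is a COMPONENT_REF of a PLACEHOLDER_EXPR for the enclosing
record type, i.e. always the same discriminant field.  Anything else
makes the analysis give up and report no discriminant at all.  */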
24626
24627 static void
24628 analyze_variants_discr (tree variant_part_decl,
24629 tree struct_type,
24630 tree *discr_decl,
24631 dw_discr_list_ref **discr_lists_p,
24632 unsigned *discr_lists_length)
24633 {
24634 tree variant_part_type = TREE_TYPE (variant_part_decl);
24635 tree variant;
24636 dw_discr_list_ref *discr_lists;
24637 unsigned i;
24638
24639 /* Compute how many variants there are in this variant part. */
24640 *discr_lists_length = 0;
24641 for (variant = TYPE_FIELDS (variant_part_type);
24642 variant != NULL_TREE;
24643 variant = DECL_CHAIN (variant))
24644 ++*discr_lists_length;
24645
24646 *discr_decl = NULL_TREE;
24647 *discr_lists_p
24648 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24649 sizeof (**discr_lists_p));
24650 discr_lists = *discr_lists_p;
24651
24652 /* And then analyze all variants to extract discriminant information for all
24653 of them. This analysis is conservative: as soon as we detect something we
24654 do not support, abort everything and pretend we found nothing. */
24655 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24656 variant != NULL_TREE;
24657 variant = DECL_CHAIN (variant), ++i)
24658 {
24659 tree match_expr = DECL_QUALIFIER (variant);
24660
24661 /* Now, try to analyze the predicate and deduce a discriminant for
24662 it. */
24663 if (match_expr == boolean_true_node)
24664 /* Typically happens for the default variant: it matches all cases that
24665 previous variants rejected. Don't output any matching value for
24666 this one. */
24667 continue;
24668
24669 /* The following loop tries to iterate over each discriminant
24670 possibility: single values or ranges. */
24671 while (match_expr != NULL_TREE)
24672 {
24673 tree next_round_match_expr;
24674 tree candidate_discr = NULL_TREE;
24675 dw_discr_list_ref new_node = NULL;
24676
24677 /* Possibilities are matched one after the other by nested
24678 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24679 continue with the rest at next iteration. */
24680 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24681 {
24682 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24683 match_expr = TREE_OPERAND (match_expr, 1);
24684 }
24685 else
24686 next_round_match_expr = NULL_TREE;
24687
24688 if (match_expr == boolean_false_node)
24689 /* This sub-expression matches nothing: just wait for the next
24690 one. */
24691 ;
24692
24693 else if (TREE_CODE (match_expr) == EQ_EXPR)
24694 {
24695 /* We are matching: <discr_field> == <integer_cst>
24696 This sub-expression matches a single value. */
24697 tree integer_cst = TREE_OPERAND (match_expr, 1);
24698
24699 candidate_discr
24700 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24701 struct_type);
24702
24703 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24704 if (!get_discr_value (integer_cst,
24705 &new_node->dw_discr_lower_bound))
24706 goto abort;
24707 new_node->dw_discr_range = false;
24708 }
24709
24710 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24711 {
24712 /* We are matching:
24713 <discr_field> > <integer_cst>
24714 && <discr_field> < <integer_cst>.
24715 This sub-expression matches the range of values between the
24716 two matched integer constants. Note that comparisons can be
24717 inclusive or exclusive. */
24718 tree candidate_discr_1, candidate_discr_2;
24719 tree lower_cst, upper_cst;
24720 bool lower_cst_included, upper_cst_included;
24721 tree lower_op = TREE_OPERAND (match_expr, 0);
24722 tree upper_op = TREE_OPERAND (match_expr, 1);
24723
24724 /* When the comparison is exclusive, the integer constant is not
24725 the discriminant range bound we are looking for: we will have
24726 to increment or decrement it. */
24727 if (TREE_CODE (lower_op) == GE_EXPR)
24728 lower_cst_included = true;
24729 else if (TREE_CODE (lower_op) == GT_EXPR)
24730 lower_cst_included = false;
24731 else
24732 goto abort;
24733
24734 if (TREE_CODE (upper_op) == LE_EXPR)
24735 upper_cst_included = true;
24736 else if (TREE_CODE (upper_op) == LT_EXPR)
24737 upper_cst_included = false;
24738 else
24739 goto abort;
24740
24741 /* Extract the discriminant from the first operand and check it
24742 is consistent with the same analysis in the second
24743 operand. */
24744 candidate_discr_1
24745 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24746 struct_type);
24747 candidate_discr_2
24748 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24749 struct_type);
24750 if (candidate_discr_1 == candidate_discr_2)
24751 candidate_discr = candidate_discr_1;
24752 else
24753 goto abort;
24754
24755 /* Extract bounds from both. */
24756 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24757 lower_cst = TREE_OPERAND (lower_op, 1);
24758 upper_cst = TREE_OPERAND (upper_op, 1);
24759
24760 if (!lower_cst_included)
24761 lower_cst
24762 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24763 build_int_cst (TREE_TYPE (lower_cst), 1));
24764 if (!upper_cst_included)
24765 upper_cst
24766 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24767 build_int_cst (TREE_TYPE (upper_cst), 1));
24768
24769 if (!get_discr_value (lower_cst,
24770 &new_node->dw_discr_lower_bound)
24771 || !get_discr_value (upper_cst,
24772 &new_node->dw_discr_upper_bound))
24773 goto abort;
24774
24775 new_node->dw_discr_range = true;
24776 }
24777
24778 else
24779 /* Unsupported sub-expression: we cannot determine the set of
24780 matching discriminant values. Abort everything. */
24781 goto abort;
24782
24783 /* If the discriminant info is not consistent with what we saw so
24784 far, consider the analysis failed and abort everything. */
24785 if (candidate_discr == NULL_TREE
24786 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24787 goto abort;
24788 else
24789 *discr_decl = candidate_discr;
24790
24791 if (new_node != NULL)
24792 {
24793 new_node->dw_discr_next = discr_lists[i];
24794 discr_lists[i] = new_node;
24795 }
24796 match_expr = next_round_match_expr;
24797 }
24798 }
24799
24800 /* If we reach this point, we could match everything we were interested
24801 in. */
24802 return;
24803
24804 abort:
24805 /* Clean up all data structures and return no result. */
24806 free (*discr_lists_p);
24807 *discr_lists_p = NULL;
24808 *discr_decl = NULL_TREE;
24809 }
24810
24811 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24812 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24813 under CONTEXT_DIE.
24814
24815 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24816 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24817 this type, which are record types, represent the available variants and each
24818 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24819 values are inferred from these attributes.
24820
24821 In trees, the offsets for the fields inside these sub-records are relative
24822 to the variant part itself, whereas the corresponding DIEs should have
24823 offset attributes that are relative to the embedding record base address.
24824 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24825 must be an expression that computes the offset of the variant part to
24826 describe in DWARF. */
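
/* Editor's illustrative sketch (not part of the original sources): the
DW_TAG_variant_part DIE generated below sits under the enclosing
structure's DIE and has roughly this shape for a variant part controlled
by a discriminant member D:

  DW_TAG_structure_type
    DW_TAG_member          (D)
    DW_TAG_variant_part    DW_AT_discr -> DIE of D
      DW_TAG_variant       DW_AT_discr_value 1
        DW_TAG_member ...
      DW_TAG_variant       DW_AT_discr_list  (values and/or ranges)
        DW_TAG_member ...
      DW_TAG_variant       (no attribute: the default variant)
        DW_TAG_member ...  */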
24827
24828 static void
24829 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24830 dw_die_ref context_die)
24831 {
24832 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24833 tree variant_part_offset = vlr_ctx->variant_part_offset;
24834 struct loc_descr_context ctx = {
24835 vlr_ctx->struct_type, /* context_type */
24836 NULL_TREE, /* base_decl */
24837 NULL, /* dpi */
24838 false, /* placeholder_arg */
24839 false /* placeholder_seen */
24840 };
24841
24842 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24843 NULL_TREE if there is no such field. */
24844 tree discr_decl = NULL_TREE;
24845 dw_discr_list_ref *discr_lists;
24846 unsigned discr_lists_length = 0;
24847 unsigned i;
24848
24849 dw_die_ref dwarf_proc_die = NULL;
24850 dw_die_ref variant_part_die
24851 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24852
24853 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24854
24855 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24856 &discr_decl, &discr_lists, &discr_lists_length);
24857
24858 if (discr_decl != NULL_TREE)
24859 {
24860 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24861
24862 if (discr_die)
24863 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24864 else
24865 /* We have no DIE for the discriminant, so just discard all
24866 discriminant information in the output. */
24867 discr_decl = NULL_TREE;
24868 }
24869
24870 /* If the offset for this variant part is more complex than a constant,
24871 create a DWARF procedure for it so that we will not have to generate DWARF
24872 expressions for it for each member. */
24873 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24874 && (dwarf_version >= 3 || !dwarf_strict))
24875 {
24876 const tree dwarf_proc_fndecl
24877 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24878 build_function_type (TREE_TYPE (variant_part_offset),
24879 NULL_TREE));
24880 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24881 const dw_loc_descr_ref dwarf_proc_body
24882 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24883
24884 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24885 dwarf_proc_fndecl, context_die);
24886 if (dwarf_proc_die != NULL)
24887 variant_part_offset = dwarf_proc_call;
24888 }
24889
24890 /* Output DIEs for all variants. */
24891 i = 0;
24892 for (tree variant = TYPE_FIELDS (variant_part_type);
24893 variant != NULL_TREE;
24894 variant = DECL_CHAIN (variant), ++i)
24895 {
24896 tree variant_type = TREE_TYPE (variant);
24897 dw_die_ref variant_die;
24898
24899 /* All variants (i.e. members of a variant part) are supposed to be
24900 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24901 under these records. */
24902 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24903
24904 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24905 equate_decl_number_to_die (variant, variant_die);
24906
24907 /* Output discriminant values this variant matches, if any. */
24908 if (discr_decl == NULL || discr_lists[i] == NULL)
24909 /* If we have no discriminant information at all, or this variant has
24910 no matching values, it is probably the default variant: as the
24911 standard says, don't output any discriminant value/list attribute. */
24912 ;
24913 else if (discr_lists[i]->dw_discr_next == NULL
24914 && !discr_lists[i]->dw_discr_range)
24915 /* If there is only one accepted value, don't bother outputting a
24916 list. */
24917 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24918 else
24919 add_discr_list (variant_die, discr_lists[i]);
24920
24921 for (tree member = TYPE_FIELDS (variant_type);
24922 member != NULL_TREE;
24923 member = DECL_CHAIN (member))
24924 {
24925 struct vlr_context vlr_sub_ctx = {
24926 vlr_ctx->struct_type, /* struct_type */
24927 NULL /* variant_part_offset */
24928 };
24929 if (is_variant_part (member))
24930 {
24931 /* All offsets for fields inside variant parts are relative to
24932 the top-level embedding RECORD_TYPE's base address. On the
24933 other hand, offsets in GCC's types are relative to the
24934 nested-most variant part. So we have to sum offsets each time
24935 we recurse. */
24936
24937 vlr_sub_ctx.variant_part_offset
24938 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24939 variant_part_offset, byte_position (member));
24940 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24941 }
24942 else
24943 {
24944 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24945 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24946 }
24947 }
24948 }
24949
24950 free (discr_lists);
24951 }
24952
24953 /* Generate a DIE for a class member. */
24954
24955 static void
24956 gen_member_die (tree type, dw_die_ref context_die)
24957 {
24958 tree member;
24959 tree binfo = TYPE_BINFO (type);
24960
24961 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24962
24963 /* If this is not an incomplete type, output descriptions of each of its
24964 members. Note that as we output the DIEs necessary to represent the
24965 members of this record or union type, we will also be trying to output
24966 DIEs to represent the *types* of those members. However the `type'
24967 function (above) will specifically avoid generating type DIEs for member
24968 types *within* the list of member DIEs for this (containing) type except
24969 for those types (of members) which are explicitly marked as also being
24970 members of this (containing) type themselves. The g++ front-end can
24971 force any given type to be treated as a member of some other (containing)
24972 type by setting the TYPE_CONTEXT of the given (member) type to point to
24973 the TREE node representing the appropriate (containing) type. */
24974
24975 /* First output info about the base classes. */
24976 if (binfo)
24977 {
24978 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24979 int i;
24980 tree base;
24981
24982 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24983 gen_inheritance_die (base,
24984 (accesses ? (*accesses)[i] : access_public_node),
24985 type,
24986 context_die);
24987 }
24988
24989 /* Now output info about the data members and type members. */
24990 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24991 {
24992 struct vlr_context vlr_ctx = { type, NULL_TREE };
24993 bool static_inline_p
24994 = (TREE_STATIC (member)
24995 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24996 != -1));
24997
24998 /* Ignore clones. */
24999 if (DECL_ABSTRACT_ORIGIN (member))
25000 continue;
25001
25002 /* If we thought we were generating minimal debug info for TYPE
25003 and then changed our minds, some of the member declarations
25004 may have already been defined. Don't define them again, but
25005 do put them in the right order. */
25006
25007 if (dw_die_ref child = lookup_decl_die (member))
25008 {
25009 /* Handle inline static data members, which only have in-class
25010 declarations. */
25011 dw_die_ref ref = NULL;
25012 if (child->die_tag == DW_TAG_variable
25013 && child->die_parent == comp_unit_die ())
25014 {
25015 ref = get_AT_ref (child, DW_AT_specification);
25016 /* For C++17 inline static data members followed by redundant
25017 out of class redeclaration, we might get here with
25018 child being the DIE created for the out of class
25019 redeclaration and with its DW_AT_specification being
25020 the DIE created for in-class definition. We want to
25021 reparent the latter, and don't want to create another
25022 DIE with DW_AT_specification in that case, because
25023 we already have one. */
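/* Editor's illustrative example (not part of the original sources):
     struct S { static constexpr int i = 0; };
     constexpr int S::i;  // redundant out-of-class redeclaration (C++17)
   here CHILD can be the DIE created for the redeclaration and REF the
   DIE created for the in-class definition.  */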
25024 if (ref
25025 && static_inline_p
25026 && ref->die_tag == DW_TAG_variable
25027 && ref->die_parent == comp_unit_die ()
25028 && get_AT (ref, DW_AT_specification) == NULL)
25029 {
25030 child = ref;
25031 ref = NULL;
25032 static_inline_p = false;
25033 }
25034 }
25035
25036 if (child->die_tag == DW_TAG_variable
25037 && child->die_parent == comp_unit_die ()
25038 && ref == NULL)
25039 {
25040 reparent_child (child, context_die);
25041 if (dwarf_version < 5)
25042 child->die_tag = DW_TAG_member;
25043 }
25044 else
25045 splice_child_die (context_die, child);
25046 }
25047
25048 /* Do not generate standard DWARF for variant parts if we are generating
25049 the corresponding GNAT encodings: DIEs generated for both would
25050 conflict in our mappings. */
25051 else if (is_variant_part (member)
25052 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25053 {
25054 vlr_ctx.variant_part_offset = byte_position (member);
25055 gen_variant_part (member, &vlr_ctx, context_die);
25056 }
25057 else
25058 {
25059 vlr_ctx.variant_part_offset = NULL_TREE;
25060 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25061 }
25062
25063 /* For C++ inline static data members emit immediately a DW_TAG_variable
25064 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25065 DW_AT_specification. */
25066 if (static_inline_p)
25067 {
25068 int old_extern = DECL_EXTERNAL (member);
25069 DECL_EXTERNAL (member) = 0;
25070 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25071 DECL_EXTERNAL (member) = old_extern;
25072 }
25073 }
25074 }
25075
25076 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25077 is set, we pretend that the type was never defined, so we only get the
25078 member DIEs needed by later specification DIEs. */
25079
25080 static void
25081 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25082 enum debug_info_usage usage)
25083 {
25084 if (TREE_ASM_WRITTEN (type))
25085 {
25086 /* Fill in the bound of variable-length fields in late dwarf if
25087 still incomplete. */
25088 if (!early_dwarf && variably_modified_type_p (type, NULL))
25089 for (tree member = TYPE_FIELDS (type);
25090 member;
25091 member = DECL_CHAIN (member))
25092 fill_variable_array_bounds (TREE_TYPE (member));
25093 return;
25094 }
25095
25096 dw_die_ref type_die = lookup_type_die (type);
25097 dw_die_ref scope_die = 0;
25098 int nested = 0;
25099 int complete = (TYPE_SIZE (type)
25100 && (! TYPE_STUB_DECL (type)
25101 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25102 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25103 complete = complete && should_emit_struct_debug (type, usage);
25104
25105 if (type_die && ! complete)
25106 return;
25107
25108 if (TYPE_CONTEXT (type) != NULL_TREE
25109 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25110 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25111 nested = 1;
25112
25113 scope_die = scope_die_for (type, context_die);
25114
25115 /* Generate child DIEs for template parameters. */
25116 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25117 schedule_generic_params_dies_gen (type);
25118
25119 if (! type_die || (nested && is_cu_die (scope_die)))
25120 /* First occurrence of type or toplevel definition of nested class. */
25121 {
25122 dw_die_ref old_die = type_die;
25123
25124 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25125 ? record_type_tag (type) : DW_TAG_union_type,
25126 scope_die, type);
25127 equate_type_number_to_die (type, type_die);
25128 if (old_die)
25129 add_AT_specification (type_die, old_die);
25130 else
25131 add_name_attribute (type_die, type_tag (type));
25132 }
25133 else
25134 remove_AT (type_die, DW_AT_declaration);
25135
25136 /* If this type has been completed, then give it a byte_size attribute and
25137 then give a list of members. */
25138 if (complete && !ns_decl)
25139 {
25140 /* Prevent infinite recursion in cases where the type of some member of
25141 this type is expressed in terms of this type itself. */
25142 TREE_ASM_WRITTEN (type) = 1;
25143 add_byte_size_attribute (type_die, type);
25144 add_alignment_attribute (type_die, type);
25145 if (TYPE_STUB_DECL (type) != NULL_TREE)
25146 {
25147 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25148 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25149 }
25150
25151 /* If the first reference to this type was as the return type of an
25152 inline function, then it may not have a parent. Fix this now. */
25153 if (type_die->die_parent == NULL)
25154 add_child_die (scope_die, type_die);
25155
25156 push_decl_scope (type);
25157 gen_member_die (type, type_die);
25158 pop_decl_scope ();
25159
25160 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25161 if (TYPE_ARTIFICIAL (type))
25162 add_AT_flag (type_die, DW_AT_artificial, 1);
25163
25164 /* GNU extension: Record what type our vtable lives in. */
25165 if (TYPE_VFIELD (type))
25166 {
25167 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25168
25169 gen_type_die (vtype, context_die);
25170 add_AT_die_ref (type_die, DW_AT_containing_type,
25171 lookup_type_die (vtype));
25172 }
25173 }
25174 else
25175 {
25176 add_AT_flag (type_die, DW_AT_declaration, 1);
25177
25178 /* We don't need to do this for function-local types. */
25179 if (TYPE_STUB_DECL (type)
25180 && ! decl_function_context (TYPE_STUB_DECL (type)))
25181 vec_safe_push (incomplete_types, type);
25182 }
25183
25184 if (get_AT (type_die, DW_AT_name))
25185 add_pubtype (type, type_die);
25186 }
25187
25188 /* Generate a DIE for a subroutine _type_. */
25189
25190 static void
25191 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25192 {
25193 tree return_type = TREE_TYPE (type);
25194 dw_die_ref subr_die
25195 = new_die (DW_TAG_subroutine_type,
25196 scope_die_for (type, context_die), type);
25197
25198 equate_type_number_to_die (type, subr_die);
25199 add_prototyped_attribute (subr_die, type);
25200 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25201 context_die);
25202 add_alignment_attribute (subr_die, type);
25203 gen_formal_types_die (type, subr_die);
25204
25205 if (get_AT (subr_die, DW_AT_name))
25206 add_pubtype (type, subr_die);
25207 if ((dwarf_version >= 5 || !dwarf_strict)
25208 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25209 add_AT_flag (subr_die, DW_AT_reference, 1);
25210 if ((dwarf_version >= 5 || !dwarf_strict)
25211 && lang_hooks.types.type_dwarf_attribute (type,
25212 DW_AT_rvalue_reference) != -1)
25213 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25214 }
25215
25216 /* Generate a DIE for a type definition. */
25217
25218 static void
25219 gen_typedef_die (tree decl, dw_die_ref context_die)
25220 {
25221 dw_die_ref type_die;
25222 tree type;
25223
25224 if (TREE_ASM_WRITTEN (decl))
25225 {
25226 if (DECL_ORIGINAL_TYPE (decl))
25227 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25228 return;
25229 }
25230
25231 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25232 checks in process_scope_var and modified_type_die), this should be called
25233 only for original types. */
25234 gcc_assert (decl_ultimate_origin (decl) == NULL
25235 || decl_ultimate_origin (decl) == decl);
25236
25237 TREE_ASM_WRITTEN (decl) = 1;
25238 type_die = new_die (DW_TAG_typedef, context_die, decl);
25239
25240 add_name_and_src_coords_attributes (type_die, decl);
25241 if (DECL_ORIGINAL_TYPE (decl))
25242 {
25243 type = DECL_ORIGINAL_TYPE (decl);
25244 if (type == error_mark_node)
25245 return;
25246
25247 gcc_assert (type != TREE_TYPE (decl));
25248 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25249 }
25250 else
25251 {
25252 type = TREE_TYPE (decl);
25253 if (type == error_mark_node)
25254 return;
25255
25256 if (is_naming_typedef_decl (TYPE_NAME (type)))
25257 {
25258 /* Here, we are in the case of decl being a typedef naming
25259 an anonymous type, e.g.:
25260 typedef struct {...} foo;
25261 In that case TREE_TYPE (decl) is not a typedef variant
25262 type and TYPE_NAME of the anonymous type is set to the
25263 TYPE_DECL of the typedef. This construct is emitted by
25264 the C++ FE.
25265
25266 TYPE is the anonymous struct named by the typedef
25267 DECL. As we need the DW_AT_type attribute of the
25268 DW_TAG_typedef to point to the DIE of TYPE, let's
25269 generate that DIE right away. add_type_attribute
25270 called below will then pick (via lookup_type_die) that
25271 anonymous struct DIE. */
25272 if (!TREE_ASM_WRITTEN (type))
25273 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25274
25275 /* This is a GNU Extension. We are adding a
25276 DW_AT_linkage_name attribute to the DIE of the
25277 anonymous struct TYPE. The value of that attribute
25278 is the name of the typedef decl naming the anonymous
25279 struct. This greatly eases the work of consumers of
25280 this debug info. */
25281 add_linkage_name_raw (lookup_type_die (type), decl);
25282 }
25283 }
25284
25285 add_type_attribute (type_die, type, decl_quals (decl), false,
25286 context_die);
25287
25288 if (is_naming_typedef_decl (decl))
25289 /* We want that all subsequent calls to lookup_type_die with
25290 TYPE in argument yield the DW_TAG_typedef we have just
25291 created. */
25292 equate_type_number_to_die (type, type_die);
25293
25294 add_alignment_attribute (type_die, TREE_TYPE (decl));
25295
25296 add_accessibility_attribute (type_die, decl);
25297
25298 if (DECL_ABSTRACT_P (decl))
25299 equate_decl_number_to_die (decl, type_die);
25300
25301 if (get_AT (type_die, DW_AT_name))
25302 add_pubtype (decl, type_die);
25303 }
25304
25305 /* Generate a DIE for a struct, class, enum or union type. */
25306
25307 static void
25308 gen_tagged_type_die (tree type,
25309 dw_die_ref context_die,
25310 enum debug_info_usage usage)
25311 {
25312 int need_pop;
25313
25314 if (type == NULL_TREE
25315 || !is_tagged_type (type))
25316 return;
25317
25318 if (TREE_ASM_WRITTEN (type))
25319 need_pop = 0;
25320 /* If this is a nested type whose containing class hasn't been written
25321 out yet, writing it out will cover this one, too. This does not apply
25322 to instantiations of member class templates; they need to be added to
25323 the containing class as they are generated. FIXME: This hurts the
25324 idea of combining type decls from multiple TUs, since we can't predict
25325 what set of template instantiations we'll get. */
25326 else if (TYPE_CONTEXT (type)
25327 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25328 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25329 {
25330 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25331
25332 if (TREE_ASM_WRITTEN (type))
25333 return;
25334
25335 /* If that failed, attach ourselves to the stub. */
25336 push_decl_scope (TYPE_CONTEXT (type));
25337 context_die = lookup_type_die (TYPE_CONTEXT (type));
25338 need_pop = 1;
25339 }
25340 else if (TYPE_CONTEXT (type) != NULL_TREE
25341 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25342 {
25343 /* If this type is local to a function that hasn't been written
25344 out yet, use a NULL context for now; it will be fixed up in
25345 decls_for_scope. */
25346 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25347 /* A declaration DIE doesn't count; nested types need to go in the
25348 specification. */
25349 if (context_die && is_declaration_die (context_die))
25350 context_die = NULL;
25351 need_pop = 0;
25352 }
25353 else
25354 {
25355 context_die = declare_in_namespace (type, context_die);
25356 need_pop = 0;
25357 }
25358
25359 if (TREE_CODE (type) == ENUMERAL_TYPE)
25360 {
25361 /* This might have been written out by the call to
25362 declare_in_namespace. */
25363 if (!TREE_ASM_WRITTEN (type))
25364 gen_enumeration_type_die (type, context_die);
25365 }
25366 else
25367 gen_struct_or_union_type_die (type, context_die, usage);
25368
25369 if (need_pop)
25370 pop_decl_scope ();
25371
25372 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25373 it up if it is ever completed. gen_*_type_die will set it for us
25374 when appropriate. */
25375 }
25376
25377 /* Generate a type description DIE. */
25378
25379 static void
25380 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25381 enum debug_info_usage usage)
25382 {
25383 struct array_descr_info info;
25384
25385 if (type == NULL_TREE || type == error_mark_node)
25386 return;
25387
25388 if (flag_checking && type)
25389 verify_type (type);
25390
25391 if (TYPE_NAME (type) != NULL_TREE
25392 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25393 && is_redundant_typedef (TYPE_NAME (type))
25394 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25395 /* The DECL of this type is a typedef we don't want to emit debug
25396 info for, but we want debug info for its underlying typedef.
25397 This can happen, e.g., for the injected-class-name of a C++
25398 type. */
25399 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25400
25401 /* If TYPE is a typedef type variant, let's generate debug info
25402 for the parent typedef which TYPE is a type of. */
25403 if (typedef_variant_p (type))
25404 {
25405 if (TREE_ASM_WRITTEN (type))
25406 return;
25407
25408 tree name = TYPE_NAME (type);
25409 tree origin = decl_ultimate_origin (name);
25410 if (origin != NULL && origin != name)
25411 {
25412 gen_decl_die (origin, NULL, NULL, context_die);
25413 return;
25414 }
25415
25416 /* Prevent broken recursion; we can't hand off to the same type. */
25417 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25418
25419 /* Give typedefs the right scope. */
25420 context_die = scope_die_for (type, context_die);
25421
25422 TREE_ASM_WRITTEN (type) = 1;
25423
25424 gen_decl_die (name, NULL, NULL, context_die);
25425 return;
25426 }
25427
25428 /* If type is an anonymous tagged type named by a typedef, let's
25429 generate debug info for the typedef. */
25430 if (is_naming_typedef_decl (TYPE_NAME (type)))
25431 {
25432 /* Use the DIE of the containing namespace as the parent DIE of
25433 the type description DIE we want to generate. */
25434 if (DECL_CONTEXT (TYPE_NAME (type))
25435 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25436 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25437
25438 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25439 return;
25440 }
25441
25442 if (lang_hooks.types.get_debug_type)
25443 {
25444 tree debug_type = lang_hooks.types.get_debug_type (type);
25445
25446 if (debug_type != NULL_TREE && debug_type != type)
25447 {
25448 gen_type_die_with_usage (debug_type, context_die, usage);
25449 return;
25450 }
25451 }
25452
25453 /* We are going to output a DIE to represent the unqualified version
25454 of this type (i.e. without any const or volatile qualifiers) so
25455 get the main variant (i.e. the unqualified version) of this type
25456 now. (Vectors and arrays are special because the debugging info is in the
25457 cloned type itself. Similarly function/method types can contain extra
25458 ref-qualification). */
25459 if (TREE_CODE (type) == FUNCTION_TYPE
25460 || TREE_CODE (type) == METHOD_TYPE)
25461 {
25462 /* For function/method types, can't use type_main_variant here,
25463 because that can have different ref-qualifiers for C++,
25464 but try to canonicalize. */
25465 tree main = TYPE_MAIN_VARIANT (type);
25466 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25467 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25468 && check_base_type (t, main)
25469 && check_lang_type (t, type))
25470 {
25471 type = t;
25472 break;
25473 }
25474 }
25475 else if (TREE_CODE (type) != VECTOR_TYPE
25476 && TREE_CODE (type) != ARRAY_TYPE)
25477 type = type_main_variant (type);
25478
25479 /* If this is an array type with hidden descriptor, handle it first. */
25480 if (!TREE_ASM_WRITTEN (type)
25481 && lang_hooks.types.get_array_descr_info)
25482 {
25483 memset (&info, 0, sizeof (info));
25484 if (lang_hooks.types.get_array_descr_info (type, &info))
25485 {
25486 /* Fortran sometimes emits array types with no dimension. */
25487 gcc_assert (info.ndimensions >= 0
25488 && (info.ndimensions
25489 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25490 gen_descr_array_type_die (type, &info, context_die);
25491 TREE_ASM_WRITTEN (type) = 1;
25492 return;
25493 }
25494 }
25495
25496 if (TREE_ASM_WRITTEN (type))
25497 {
25498 /* Variable-length types may be incomplete even if
25499 TREE_ASM_WRITTEN. For such types, fall through to
25500 gen_array_type_die() and possibly fill in
25501 DW_AT_{upper,lower}_bound attributes. */
25502 if ((TREE_CODE (type) != ARRAY_TYPE
25503 && TREE_CODE (type) != RECORD_TYPE
25504 && TREE_CODE (type) != UNION_TYPE
25505 && TREE_CODE (type) != QUAL_UNION_TYPE)
25506 || !variably_modified_type_p (type, NULL))
25507 return;
25508 }
25509
25510 switch (TREE_CODE (type))
25511 {
25512 case ERROR_MARK:
25513 break;
25514
25515 case POINTER_TYPE:
25516 case REFERENCE_TYPE:
25517 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25518 ensures that the gen_type_die recursion will terminate even if the
25519 type is recursive. Recursive types are possible in Ada. */
25520 /* ??? We could perhaps do this for all types before the switch
25521 statement. */
25522 TREE_ASM_WRITTEN (type) = 1;
25523
25524 /* For these types, all that is required is that we output a DIE (or a
25525 set of DIEs) to represent the "basis" type. */
25526 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25527 DINFO_USAGE_IND_USE);
25528 break;
25529
25530 case OFFSET_TYPE:
25531 /* This code is used for C++ pointer-to-data-member types.
25532 Output a description of the relevant class type. */
25533 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25534 DINFO_USAGE_IND_USE);
25535
25536 /* Output a description of the type of the object pointed to. */
25537 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25538 DINFO_USAGE_IND_USE);
25539
25540 /* Now output a DIE to represent this pointer-to-data-member type
25541 itself. */
25542 gen_ptr_to_mbr_type_die (type, context_die);
25543 break;
25544
25545 case FUNCTION_TYPE:
25546 /* Force out return type (in case it wasn't forced out already). */
25547 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25548 DINFO_USAGE_DIR_USE);
25549 gen_subroutine_type_die (type, context_die);
25550 break;
25551
25552 case METHOD_TYPE:
25553 /* Force out return type (in case it wasn't forced out already). */
25554 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25555 DINFO_USAGE_DIR_USE);
25556 gen_subroutine_type_die (type, context_die);
25557 break;
25558
25559 case ARRAY_TYPE:
25560 case VECTOR_TYPE:
25561 gen_array_type_die (type, context_die);
25562 break;
25563
25564 case ENUMERAL_TYPE:
25565 case RECORD_TYPE:
25566 case UNION_TYPE:
25567 case QUAL_UNION_TYPE:
25568 gen_tagged_type_die (type, context_die, usage);
25569 return;
25570
25571 case VOID_TYPE:
25572 case INTEGER_TYPE:
25573 case REAL_TYPE:
25574 case FIXED_POINT_TYPE:
25575 case COMPLEX_TYPE:
25576 case BOOLEAN_TYPE:
25577 /* No DIEs needed for fundamental types. */
25578 break;
25579
25580 case NULLPTR_TYPE:
25581 case LANG_TYPE:
25582 /* Just use DW_TAG_unspecified_type. */
25583 {
25584 dw_die_ref type_die = lookup_type_die (type);
25585 if (type_die == NULL)
25586 {
25587 tree name = TYPE_IDENTIFIER (type);
25588 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25589 type);
25590 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25591 equate_type_number_to_die (type, type_die);
25592 }
25593 }
25594 break;
25595
25596 default:
25597 if (is_cxx_auto (type))
25598 {
25599 tree name = TYPE_IDENTIFIER (type);
25600 dw_die_ref *die = (name == get_identifier ("auto")
25601 ? &auto_die : &decltype_auto_die);
25602 if (!*die)
25603 {
25604 *die = new_die (DW_TAG_unspecified_type,
25605 comp_unit_die (), NULL_TREE);
25606 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25607 }
25608 equate_type_number_to_die (type, *die);
25609 break;
25610 }
25611 gcc_unreachable ();
25612 }
25613
25614 TREE_ASM_WRITTEN (type) = 1;
25615 }
25616
25617 static void
25618 gen_type_die (tree type, dw_die_ref context_die)
25619 {
25620 if (type != error_mark_node)
25621 {
25622 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25623 if (flag_checking)
25624 {
25625 dw_die_ref die = lookup_type_die (type);
25626 if (die)
25627 check_die (die);
25628 }
25629 }
25630 }
25631
25632 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25633 things which are local to the given block. */
25634
25635 static void
25636 gen_block_die (tree stmt, dw_die_ref context_die)
25637 {
25638 int must_output_die = 0;
25639 bool inlined_func;
25640
25641 /* Ignore blocks that are NULL. */
25642 if (stmt == NULL_TREE)
25643 return;
25644
25645 inlined_func = inlined_function_outer_scope_p (stmt);
25646
25647 /* If the block is one fragment of a non-contiguous block, do not
25648 process the variables, since they will have been done by the
25649 origin block. Do process subblocks. */
25650 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25651 {
25652 tree sub;
25653
25654 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25655 gen_block_die (sub, context_die);
25656
25657 return;
25658 }
25659
25660 /* Determine if we need to output any Dwarf DIEs at all to represent this
25661 block. */
25662 if (inlined_func)
25663 /* The outer scopes for inlinings *must* always be represented. We
25664 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25665 must_output_die = 1;
25666 else
25667 {
25668 /* Determine if this block directly contains any "significant"
25669 local declarations which we will need to output DIEs for. */
25670 if (debug_info_level > DINFO_LEVEL_TERSE)
25671 /* We are not in terse mode so *any* local declaration counts
25672 as being a "significant" one. */
25673 must_output_die = ((BLOCK_VARS (stmt) != NULL
25674 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25675 && (TREE_USED (stmt)
25676 || TREE_ASM_WRITTEN (stmt)
25677 || BLOCK_ABSTRACT (stmt)));
25678 else if ((TREE_USED (stmt)
25679 || TREE_ASM_WRITTEN (stmt)
25680 || BLOCK_ABSTRACT (stmt))
25681 && !dwarf2out_ignore_block (stmt))
25682 must_output_die = 1;
25683 }
25684
25685 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25686 DIE for any block which contains no significant local declarations at
25687 all. Rather, in such cases we just call `decls_for_scope' so that any
25688 needed Dwarf info for any sub-blocks will get properly generated. Note
25689 that in terse mode, our definition of what constitutes a "significant"
25690 local declaration gets restricted to include only inlined function
25691 instances and local (nested) function definitions. */
25692 if (must_output_die)
25693 {
25694 if (inlined_func)
25695 {
25696 /* If STMT block is abstract, that means we have been called
25697 indirectly from dwarf2out_abstract_function.
25698 That function rightfully marks the descendant blocks (of
25699 the abstract function it is dealing with) as being abstract,
25700 precisely to prevent us from emitting any
25701 DW_TAG_inlined_subroutine DIE as a descendant
25702 of an abstract function instance. So in that case, we should
25703 not call gen_inlined_subroutine_die.
25704
25705 Later though, when cgraph asks dwarf2out to emit info
25706 for the concrete instance of the function decl into which
25707 the concrete instance of STMT got inlined, that will lead
25708 to the generation of a DW_TAG_inlined_subroutine DIE. */
25709 if (! BLOCK_ABSTRACT (stmt))
25710 gen_inlined_subroutine_die (stmt, context_die);
25711 }
25712 else
25713 gen_lexical_block_die (stmt, context_die);
25714 }
25715 else
25716 decls_for_scope (stmt, context_die);
25717 }
25718
25719 /* Process variable DECL (or variable with origin ORIGIN) within
25720 block STMT and add it to CONTEXT_DIE. */
25721 static void
25722 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25723 {
25724 dw_die_ref die;
25725 tree decl_or_origin = decl ? decl : origin;
25726
25727 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25728 die = lookup_decl_die (decl_or_origin);
25729 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25730 {
25731 if (TYPE_DECL_IS_STUB (decl_or_origin))
25732 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25733 else
25734 die = lookup_decl_die (decl_or_origin);
25735 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25736 if (! die && ! early_dwarf)
25737 return;
25738 }
25739 else
25740 die = NULL;
25741
25742 /* Avoid creating DIEs for local typedefs and concrete static variables that
25743 will only be pruned later. */
25744 if ((origin || decl_ultimate_origin (decl))
25745 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25746 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25747 {
25748 origin = decl_ultimate_origin (decl_or_origin);
25749 if (decl && VAR_P (decl) && die != NULL)
25750 {
25751 die = lookup_decl_die (origin);
25752 if (die != NULL)
25753 equate_decl_number_to_die (decl, die);
25754 }
25755 return;
25756 }
25757
25758 if (die != NULL && die->die_parent == NULL)
25759 add_child_die (context_die, die);
25760 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25761 {
25762 if (early_dwarf)
25763 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25764 stmt, context_die);
25765 }
25766 else
25767 {
25768 if (decl && DECL_P (decl))
25769 {
25770 die = lookup_decl_die (decl);
25771
25772 /* Early created DIEs do not have a parent as the decls refer
25773 to the function as DECL_CONTEXT rather than the BLOCK. */
25774 if (die && die->die_parent == NULL)
25775 {
25776 gcc_assert (in_lto_p);
25777 add_child_die (context_die, die);
25778 }
25779 }
25780
25781 gen_decl_die (decl, origin, NULL, context_die);
25782 }
25783 }
25784
25785 /* Generate all of the decls declared within a given scope and (recursively)
25786 all of its sub-blocks. */
25787
25788 static void
25789 decls_for_scope (tree stmt, dw_die_ref context_die)
25790 {
25791 tree decl;
25792 unsigned int i;
25793 tree subblocks;
25794
25795 /* Ignore NULL blocks. */
25796 if (stmt == NULL_TREE)
25797 return;
25798
25799 /* Output the DIEs to represent all of the data objects and typedefs
25800 declared directly within this block but not within any nested
25801 sub-blocks. Also, nested function and tag DIEs have been
25802 generated with a parent of NULL; fix that up now. We don't
25803 have to do this if we're at -g1. */
25804 if (debug_info_level > DINFO_LEVEL_TERSE)
25805 {
25806 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25807 process_scope_var (stmt, decl, NULL_TREE, context_die);
25808 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25809 origin - avoid doing this twice as we have no good way to see
25810 if we've done it once already. */
25811 if (! early_dwarf)
25812 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25813 {
25814 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25815 if (decl == current_function_decl)
25816 /* Ignore declarations of the current function: although they
25817 are declarations, gen_subprogram_die would treat them
25818 as definitions again because they are equal to
25819 current_function_decl, and endlessly recurse. */;
25820 else if (TREE_CODE (decl) == FUNCTION_DECL)
25821 process_scope_var (stmt, decl, NULL_TREE, context_die);
25822 else
25823 process_scope_var (stmt, NULL_TREE, decl, context_die);
25824 }
25825 }
25826
25827 /* Even if we're at -g1, we need to process the subblocks in order to get
25828 inlined call information. */
25829
25830 /* Output the DIEs to represent all sub-blocks (and the items declared
25831 therein) of this block. */
25832 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25833 subblocks != NULL;
25834 subblocks = BLOCK_CHAIN (subblocks))
25835 gen_block_die (subblocks, context_die);
25836 }
25837
25838 /* Is this a typedef we can avoid emitting? */
25839
25840 bool
25841 is_redundant_typedef (const_tree decl)
25842 {
25843 if (TYPE_DECL_IS_STUB (decl))
25844 return true;
25845
25846 if (DECL_ARTIFICIAL (decl)
25847 && DECL_CONTEXT (decl)
25848 && is_tagged_type (DECL_CONTEXT (decl))
25849 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25850 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25851 /* Also ignore the artificial member typedef for the class name. */
25852 return true;
25853
25854 return false;
25855 }
25856
25857 /* Return TRUE if DECL is a typedef that names a type for linkage
25858 purposes. This kind of typedef is produced by the C++ FE for
25859 constructs like:
25860
25861 typedef struct {...} foo;
25862
25863 In that case, there is no typedef variant type produced for foo.
25864 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25865 struct type. */
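/* A rough illustration of the distinction (identifiers here are arbitrary):

     typedef struct { int i; } foo;         // foo names the anonymous struct
     typedef struct bar { int i; } alias;   // ordinary typedef; the struct
                                            // is already named by its tag

   Only the first TYPE_DECL is a naming typedef in the sense checked below.  */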
25866
25867 static bool
25868 is_naming_typedef_decl (const_tree decl)
25869 {
25870 if (decl == NULL_TREE
25871 || TREE_CODE (decl) != TYPE_DECL
25872 || DECL_NAMELESS (decl)
25873 || !is_tagged_type (TREE_TYPE (decl))
25874 || DECL_IS_BUILTIN (decl)
25875 || is_redundant_typedef (decl)
25876 /* It looks like Ada produces TYPE_DECLs that are very similar
25877 to C++ naming typedefs but that have different
25878 semantics. Let's be specific to C++ for now. */
25879 || !is_cxx (decl))
25880 return FALSE;
25881
25882 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25883 && TYPE_NAME (TREE_TYPE (decl)) == decl
25884 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25885 != TYPE_NAME (TREE_TYPE (decl))));
25886 }
25887
25888 /* Looks up the DIE for a context. */
25889
25890 static inline dw_die_ref
25891 lookup_context_die (tree context)
25892 {
25893 if (context)
25894 {
25895 /* Find die that represents this context. */
25896 if (TYPE_P (context))
25897 {
25898 context = TYPE_MAIN_VARIANT (context);
25899 dw_die_ref ctx = lookup_type_die (context);
25900 if (!ctx)
25901 return NULL;
25902 return strip_naming_typedef (context, ctx);
25903 }
25904 else
25905 return lookup_decl_die (context);
25906 }
25907 return comp_unit_die ();
25908 }
25909
25910 /* Returns the DIE for a context. */
25911
25912 static inline dw_die_ref
25913 get_context_die (tree context)
25914 {
25915 if (context)
25916 {
25917 /* Find die that represents this context. */
25918 if (TYPE_P (context))
25919 {
25920 context = TYPE_MAIN_VARIANT (context);
25921 return strip_naming_typedef (context, force_type_die (context));
25922 }
25923 else
25924 return force_decl_die (context);
25925 }
25926 return comp_unit_die ();
25927 }
25928
25929 /* Returns the DIE for decl. A DIE will always be returned. */
25930
25931 static dw_die_ref
25932 force_decl_die (tree decl)
25933 {
25934 dw_die_ref decl_die;
25935 unsigned saved_external_flag;
25936 tree save_fn = NULL_TREE;
25937 decl_die = lookup_decl_die (decl);
25938 if (!decl_die)
25939 {
25940 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25941
25942 decl_die = lookup_decl_die (decl);
25943 if (decl_die)
25944 return decl_die;
25945
25946 switch (TREE_CODE (decl))
25947 {
25948 case FUNCTION_DECL:
25949 /* Clear current_function_decl, so that gen_subprogram_die thinks
25950 that this is a declaration. At this point, we just want to force
25951 a declaration DIE. */
25952 save_fn = current_function_decl;
25953 current_function_decl = NULL_TREE;
25954 gen_subprogram_die (decl, context_die);
25955 current_function_decl = save_fn;
25956 break;
25957
25958 case VAR_DECL:
25959 /* Set the external flag to force a declaration DIE. Restore it after
25960 the gen_decl_die call. */
25961 saved_external_flag = DECL_EXTERNAL (decl);
25962 DECL_EXTERNAL (decl) = 1;
25963 gen_decl_die (decl, NULL, NULL, context_die);
25964 DECL_EXTERNAL (decl) = saved_external_flag;
25965 break;
25966
25967 case NAMESPACE_DECL:
25968 if (dwarf_version >= 3 || !dwarf_strict)
25969 dwarf2out_decl (decl);
25970 else
25971 /* DWARF2 has neither DW_TAG_module nor DW_TAG_namespace. */
25972 decl_die = comp_unit_die ();
25973 break;
25974
25975 case TRANSLATION_UNIT_DECL:
25976 decl_die = comp_unit_die ();
25977 break;
25978
25979 default:
25980 gcc_unreachable ();
25981 }
25982
25983 /* We should be able to find the DIE now. */
25984 if (!decl_die)
25985 decl_die = lookup_decl_die (decl);
25986 gcc_assert (decl_die);
25987 }
25988
25989 return decl_die;
25990 }
25991
25992 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25993 always returned. */
25994
25995 static dw_die_ref
25996 force_type_die (tree type)
25997 {
25998 dw_die_ref type_die;
25999
26000 type_die = lookup_type_die (type);
26001 if (!type_die)
26002 {
26003 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26004
26005 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26006 false, context_die);
26007 gcc_assert (type_die);
26008 }
26009 return type_die;
26010 }
26011
26012 /* Force out any required namespaces to be able to output DECL,
26013 and return the new context_die for it, if it's changed. */
26014
26015 static dw_die_ref
26016 setup_namespace_context (tree thing, dw_die_ref context_die)
26017 {
26018 tree context = (DECL_P (thing)
26019 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26020 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26021 /* Force out the namespace. */
26022 context_die = force_decl_die (context);
26023
26024 return context_die;
26025 }
26026
26027 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26028 type) within its namespace, if appropriate.
26029
26030 For compatibility with older debuggers, namespace DIEs only contain
26031 declarations; all definitions are emitted at CU scope, with
26032 DW_AT_specification pointing to the declaration (like with class
26033 members). */
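/* Schematically, for "namespace N { int i; }" the intent is output roughly of
   the form

     DW_TAG_namespace "N"
       DW_TAG_variable "i"   (with DW_AT_declaration)
     DW_TAG_variable         (with DW_AT_specification -> the declaration)

   where the defining DIE is placed at CU scope as described above.  */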
26034
26035 static dw_die_ref
26036 declare_in_namespace (tree thing, dw_die_ref context_die)
26037 {
26038 dw_die_ref ns_context;
26039
26040 if (debug_info_level <= DINFO_LEVEL_TERSE)
26041 return context_die;
26042
26043 /* External declarations in the local scope only need to be emitted
26044 once, not once in the namespace and once in the scope.
26045
26046 This avoids declaring the `extern' below in the
26047 namespace DIE as well as in the innermost scope:
26048
26049 namespace S
26050 {
26051 int i=5;
26052 int foo()
26053 {
26054 int i=8;
26055 extern int i;
26056 return i;
26057 }
26058 }
26059 */
26060 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26061 return context_die;
26062
26063 /* If this decl is from an inlined function, then don't try to emit it in its
26064 namespace, as we will get confused. It would have already been emitted
26065 when the abstract instance of the inline function was emitted anyway. */
26066 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26067 return context_die;
26068
26069 ns_context = setup_namespace_context (thing, context_die);
26070
26071 if (ns_context != context_die)
26072 {
26073 if (is_fortran ())
26074 return ns_context;
26075 if (DECL_P (thing))
26076 gen_decl_die (thing, NULL, NULL, ns_context);
26077 else
26078 gen_type_die (thing, ns_context);
26079 }
26080 return context_die;
26081 }
26082
26083 /* Generate a DIE for a namespace or namespace alias. */
26084
26085 static void
26086 gen_namespace_die (tree decl, dw_die_ref context_die)
26087 {
26088 dw_die_ref namespace_die;
26089
26090 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26091 they are an alias of. */
26092 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26093 {
26094 /* Output a real namespace or module. */
26095 context_die = setup_namespace_context (decl, comp_unit_die ());
26096 namespace_die = new_die (is_fortran ()
26097 ? DW_TAG_module : DW_TAG_namespace,
26098 context_die, decl);
26099 /* For Fortran modules defined in a different CU, don't add source coordinates. */
26100 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26101 {
26102 const char *name = dwarf2_name (decl, 0);
26103 if (name)
26104 add_name_attribute (namespace_die, name);
26105 }
26106 else
26107 add_name_and_src_coords_attributes (namespace_die, decl);
26108 if (DECL_EXTERNAL (decl))
26109 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26110 equate_decl_number_to_die (decl, namespace_die);
26111 }
26112 else
26113 {
26114 /* Output a namespace alias. */
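/* E.g. for "namespace A = B;" DECL is A and its DECL_ABSTRACT_ORIGIN is
   the namespace B being aliased, per the comment at the top of this
   function.  */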
26115
26116 /* Force out the namespace we are an alias of, if necessary. */
26117 dw_die_ref origin_die
26118 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26119
26120 if (DECL_FILE_SCOPE_P (decl)
26121 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26122 context_die = setup_namespace_context (decl, comp_unit_die ());
26123 /* Now create the namespace alias DIE. */
26124 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26125 add_name_and_src_coords_attributes (namespace_die, decl);
26126 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26127 equate_decl_number_to_die (decl, namespace_die);
26128 }
26129 if ((dwarf_version >= 5 || !dwarf_strict)
26130 && lang_hooks.decls.decl_dwarf_attribute (decl,
26131 DW_AT_export_symbols) == 1)
26132 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26133
26134 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26135 if (want_pubnames ())
26136 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26137 }
26138
26139 /* Generate Dwarf debug information for a decl described by DECL.
26140 The return value is currently only meaningful for PARM_DECLs;
26141 for all other decls it returns NULL.
26142
26143 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26144 It can be NULL otherwise. */
26145
26146 static dw_die_ref
26147 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26148 dw_die_ref context_die)
26149 {
26150 tree decl_or_origin = decl ? decl : origin;
26151 tree class_origin = NULL, ultimate_origin;
26152
26153 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26154 return NULL;
26155
26156 switch (TREE_CODE (decl_or_origin))
26157 {
26158 case ERROR_MARK:
26159 break;
26160
26161 case CONST_DECL:
26162 if (!is_fortran () && !is_ada ())
26163 {
26164 /* The individual enumerators of an enum type get output when we output
26165 the Dwarf representation of the relevant enum type itself. */
26166 break;
26167 }
26168
26169 /* Emit its type. */
26170 gen_type_die (TREE_TYPE (decl), context_die);
26171
26172 /* And its containing namespace. */
26173 context_die = declare_in_namespace (decl, context_die);
26174
26175 gen_const_die (decl, context_die);
26176 break;
26177
26178 case FUNCTION_DECL:
26179 #if 0
26180 /* FIXME */
26181 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26182 on local redeclarations of global functions. That seems broken. */
26183 if (current_function_decl != decl)
26184 /* This is only a declaration. */;
26185 #endif
26186
26187 /* We should have abstract copies already and should not generate
26188 stray type DIEs in late LTO dumping. */
26189 if (! early_dwarf)
26190 ;
26191
26192 /* If we're emitting a clone, emit info for the abstract instance. */
26193 else if (origin || DECL_ORIGIN (decl) != decl)
26194 dwarf2out_abstract_function (origin
26195 ? DECL_ORIGIN (origin)
26196 : DECL_ABSTRACT_ORIGIN (decl));
26197
26198 /* If we're emitting a possibly inlined function emit it as
26199 abstract instance. */
26200 else if (cgraph_function_possibly_inlined_p (decl)
26201 && ! DECL_ABSTRACT_P (decl)
26202 && ! class_or_namespace_scope_p (context_die)
26203 /* dwarf2out_abstract_function won't emit a die if this is just
26204 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26205 that case, because that works only if we have a die. */
26206 && DECL_INITIAL (decl) != NULL_TREE)
26207 dwarf2out_abstract_function (decl);
26208
26209 /* Otherwise we're emitting the primary DIE for this decl. */
26210 else if (debug_info_level > DINFO_LEVEL_TERSE)
26211 {
26212 /* Before we describe the FUNCTION_DECL itself, make sure that we
26213 have its containing type. */
26214 if (!origin)
26215 origin = decl_class_context (decl);
26216 if (origin != NULL_TREE)
26217 gen_type_die (origin, context_die);
26218
26219 /* And its return type. */
26220 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26221
26222 /* And its virtual context. */
26223 if (DECL_VINDEX (decl) != NULL_TREE)
26224 gen_type_die (DECL_CONTEXT (decl), context_die);
26225
26226 /* Make sure we have a member DIE for decl. */
26227 if (origin != NULL_TREE)
26228 gen_type_die_for_member (origin, decl, context_die);
26229
26230 /* And its containing namespace. */
26231 context_die = declare_in_namespace (decl, context_die);
26232 }
26233
26234 /* Now output a DIE to represent the function itself. */
26235 if (decl)
26236 gen_subprogram_die (decl, context_die);
26237 break;
26238
26239 case TYPE_DECL:
26240 /* If we are in terse mode, don't generate any DIEs to represent any
26241 actual typedefs. */
26242 if (debug_info_level <= DINFO_LEVEL_TERSE)
26243 break;
26244
26245 /* In the special case of a TYPE_DECL node representing the declaration
26246 of some type tag, if the given TYPE_DECL is marked as having been
26247 instantiated from some other (original) TYPE_DECL node (e.g. one which
26248 was generated within the original definition of an inline function) we
26249 used to generate a special (abbreviated) DW_TAG_structure_type,
26250 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26251 should actually be referencing those DIEs, as variable DIEs with that
26252 type would already be emitted in the abstract origin, so it was always
26253 removed during unused type pruning. Don't add anything in this
26254 case. */
26255 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26256 break;
26257
26258 if (is_redundant_typedef (decl))
26259 gen_type_die (TREE_TYPE (decl), context_die);
26260 else
26261 /* Output a DIE to represent the typedef itself. */
26262 gen_typedef_die (decl, context_die);
26263 break;
26264
26265 case LABEL_DECL:
26266 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26267 gen_label_die (decl, context_die);
26268 break;
26269
26270 case VAR_DECL:
26271 case RESULT_DECL:
26272 /* If we are in terse mode, don't generate any DIEs to represent any
26273 variable declarations or definitions. */
26274 if (debug_info_level <= DINFO_LEVEL_TERSE)
26275 break;
26276
26277 /* Avoid generating stray type DIEs during late dwarf dumping.
26278 All types have been dumped early. */
26279 if (early_dwarf
26280 /* ??? But in LTRANS we cannot annotate early created variably
26281 modified type DIEs without copying them and adjusting all
26282 references to them. Dump them again as happens for inlining
26283 which copies both the decl and the types. */
26284 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26285 in VLA bound information for example. */
26286 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26287 current_function_decl)))
26288 {
26289 /* Output any DIEs that are needed to specify the type of this data
26290 object. */
26291 if (decl_by_reference_p (decl_or_origin))
26292 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26293 else
26294 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26295 }
26296
26297 if (early_dwarf)
26298 {
26299 /* And its containing type. */
26300 class_origin = decl_class_context (decl_or_origin);
26301 if (class_origin != NULL_TREE)
26302 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26303
26304 /* And its containing namespace. */
26305 context_die = declare_in_namespace (decl_or_origin, context_die);
26306 }
26307
26308 /* Now output the DIE to represent the data object itself. This gets
26309 complicated because of the possibility that the VAR_DECL really
26310 represents an inlined instance of a formal parameter for an inline
26311 function. */
26312 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26313 if (ultimate_origin != NULL_TREE
26314 && TREE_CODE (ultimate_origin) == PARM_DECL)
26315 gen_formal_parameter_die (decl, origin,
26316 true /* Emit name attribute. */,
26317 context_die);
26318 else
26319 gen_variable_die (decl, origin, context_die);
26320 break;
26321
26322 case FIELD_DECL:
26323 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26324 /* Ignore the nameless fields that are used to skip bits, but handle C++
26325 anonymous unions and structs. */
26326 if (DECL_NAME (decl) != NULL_TREE
26327 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26328 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26329 {
26330 gen_type_die (member_declared_type (decl), context_die);
26331 gen_field_die (decl, ctx, context_die);
26332 }
26333 break;
26334
26335 case PARM_DECL:
26336 /* Avoid generating stray type DIEs during late dwarf dumping.
26337 All types have been dumped early. */
26338 if (early_dwarf
26339 /* ??? But in LTRANS we cannot annotate early created variably
26340 modified type DIEs without copying them and adjusting all
26341 references to them. Dump them again as happens for inlining
26342 which copies both the decl and the types. */
26343 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26344 in VLA bound information for example. */
26345 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26346 current_function_decl)))
26347 {
26348 if (DECL_BY_REFERENCE (decl_or_origin))
26349 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26350 else
26351 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26352 }
26353 return gen_formal_parameter_die (decl, origin,
26354 true /* Emit name attribute. */,
26355 context_die);
26356
26357 case NAMESPACE_DECL:
26358 if (dwarf_version >= 3 || !dwarf_strict)
26359 gen_namespace_die (decl, context_die);
26360 break;
26361
26362 case IMPORTED_DECL:
26363 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26364 DECL_CONTEXT (decl), context_die);
26365 break;
26366
26367 case NAMELIST_DECL:
26368 gen_namelist_decl (DECL_NAME (decl), context_die,
26369 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26370 break;
26371
26372 default:
26373 /* Probably some frontend-internal decl. Assume we don't care. */
26374 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26375 break;
26376 }
26377
26378 return NULL;
26379 }
26380 \f
26381 /* Output initial debug information for global DECL. Called at the
26382 end of the parsing process.
26383
26384 This is the initial debug generation process. As such, the DIEs
26385 generated may be incomplete. A later debug generation pass
26386 (dwarf2out_late_global_decl) will augment the information generated
26387 in this pass (e.g., with complete location info). */
26388
26389 static void
26390 dwarf2out_early_global_decl (tree decl)
26391 {
26392 set_early_dwarf s;
26393
26394 /* gen_decl_die() will set DECL_ABSTRACT because
26395 cgraph_function_possibly_inlined_p() returns true. This in
26396 turn will cause DW_AT_inline attributes to be set.
26397
26398 This happens because at early dwarf generation, there is no
26399 cgraph information, causing cgraph_function_possibly_inlined_p()
26400 to return true. Trick cgraph_function_possibly_inlined_p()
26401 while we generate dwarf early. */
26402 bool save = symtab->global_info_ready;
26403 symtab->global_info_ready = true;
26404
26405 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26406 other DECLs and they can point to template types or other things
26407 that dwarf2out can't handle when done via dwarf2out_decl. */
26408 if (TREE_CODE (decl) != TYPE_DECL
26409 && TREE_CODE (decl) != PARM_DECL)
26410 {
26411 if (TREE_CODE (decl) == FUNCTION_DECL)
26412 {
26413 tree save_fndecl = current_function_decl;
26414
26415 /* For nested functions, make sure we have DIEs for the parents first
26416 so that all nested DIEs are generated at the proper scope in the
26417 first shot. */
26418 tree context = decl_function_context (decl);
26419 if (context != NULL)
26420 {
26421 dw_die_ref context_die = lookup_decl_die (context);
26422 current_function_decl = context;
26423
26424 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26425 enough so that it lands in its own context. This avoids type
26426 pruning issues later on. */
26427 if (context_die == NULL || is_declaration_die (context_die))
26428 dwarf2out_decl (context);
26429 }
26430
26431 /* Emit the abstract origin of a function first. This happens
26432 with C++ constructor clones, for example, and keeps
26433 dwarf2out_abstract_function happy, which requires the early
26434 DIE of the abstract instance to be present. */
26435 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26436 dw_die_ref origin_die;
26437 if (origin != NULL
26438 /* Do not emit the DIE multiple times but make sure to
26439 process it fully here in case we just saw a declaration. */
26440 && ((origin_die = lookup_decl_die (origin)) == NULL
26441 || is_declaration_die (origin_die)))
26442 {
26443 current_function_decl = origin;
26444 dwarf2out_decl (origin);
26445 }
26446
26447 /* Emit the DIE for decl but avoid doing that multiple times. */
26448 dw_die_ref old_die;
26449 if ((old_die = lookup_decl_die (decl)) == NULL
26450 || is_declaration_die (old_die))
26451 {
26452 current_function_decl = decl;
26453 dwarf2out_decl (decl);
26454 }
26455
26456 current_function_decl = save_fndecl;
26457 }
26458 else
26459 dwarf2out_decl (decl);
26460 }
26461 symtab->global_info_ready = save;
26462 }
26463
26464 /* Return whether EXPR is an expression with the following pattern:
26465 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
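/* One way such a tree can arise is from a DECL_VALUE_EXPR that
   dereferences a constant address, roughly

     *(volatile int *) 0x40021000

   where the cast corresponds to the NOP_EXPR and the address literal
   to the INTEGER_CST.  */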
26466
26467 static bool
26468 is_trivial_indirect_ref (tree expr)
26469 {
26470 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26471 return false;
26472
26473 tree nop = TREE_OPERAND (expr, 0);
26474 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26475 return false;
26476
26477 tree int_cst = TREE_OPERAND (nop, 0);
26478 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26479 }
26480
26481 /* Output debug information for global decl DECL. Called from
26482 toplev.c after compilation proper has finished. */
26483
26484 static void
26485 dwarf2out_late_global_decl (tree decl)
26486 {
26487 /* Fill-in any location information we were unable to determine
26488 on the first pass. */
26489 if (VAR_P (decl))
26490 {
26491 dw_die_ref die = lookup_decl_die (decl);
26492
26493 /* We may have to generate early debug late for LTO in case debug
26494 was not enabled at compile-time or the target doesn't support
26495 the LTO early debug scheme. */
26496 if (! die && in_lto_p)
26497 {
26498 dwarf2out_decl (decl);
26499 die = lookup_decl_die (decl);
26500 }
26501
26502 if (die)
26503 {
26504 /* We get called via the symtab code invoking late_global_decl
26505 for symbols that are optimized out.
26506
26507 Do not add locations for those, except if they have a
26508 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26509 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26510 INDIRECT_REF expression, as this could generate relocations to
26511 text symbols in LTO object files, which is invalid. */
26512 varpool_node *node = varpool_node::get (decl);
26513 if ((! node || ! node->definition)
26514 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26515 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26516 tree_add_const_value_attribute_for_decl (die, decl);
26517 else
26518 add_location_or_const_value_attribute (die, decl, false);
26519 }
26520 }
26521 }
26522
26523 /* Output debug information for type decl DECL. Called from toplev.c
26524 and from language front ends (to record built-in types). */
26525 static void
26526 dwarf2out_type_decl (tree decl, int local)
26527 {
26528 if (!local)
26529 {
26530 set_early_dwarf s;
26531 dwarf2out_decl (decl);
26532 }
26533 }
26534
26535 /* Output debug information for imported module or decl DECL.
26536 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26537 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26538 that DECL belongs to.
26539 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26540 static void
26541 dwarf2out_imported_module_or_decl_1 (tree decl,
26542 tree name,
26543 tree lexical_block,
26544 dw_die_ref lexical_block_die)
26545 {
26546 expanded_location xloc;
26547 dw_die_ref imported_die = NULL;
26548 dw_die_ref at_import_die;
26549
26550 if (TREE_CODE (decl) == IMPORTED_DECL)
26551 {
26552 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26553 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26554 gcc_assert (decl);
26555 }
26556 else
26557 xloc = expand_location (input_location);
26558
26559 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26560 {
26561 at_import_die = force_type_die (TREE_TYPE (decl));
26562 /* For namespace N { typedef void T; } using N::T; base_type_die
26563 returns NULL, but DW_TAG_imported_declaration requires
26564 the DW_AT_import attribute. Force creation of a DW_TAG_typedef. */
26565 if (!at_import_die)
26566 {
26567 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26568 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26569 at_import_die = lookup_type_die (TREE_TYPE (decl));
26570 gcc_assert (at_import_die);
26571 }
26572 }
26573 else
26574 {
26575 at_import_die = lookup_decl_die (decl);
26576 if (!at_import_die)
26577 {
26578 /* If we're trying to avoid duplicate debug info, we may not have
26579 emitted the member decl for this field. Emit it now. */
26580 if (TREE_CODE (decl) == FIELD_DECL)
26581 {
26582 tree type = DECL_CONTEXT (decl);
26583
26584 if (TYPE_CONTEXT (type)
26585 && TYPE_P (TYPE_CONTEXT (type))
26586 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26587 DINFO_USAGE_DIR_USE))
26588 return;
26589 gen_type_die_for_member (type, decl,
26590 get_context_die (TYPE_CONTEXT (type)));
26591 }
26592 if (TREE_CODE (decl) == NAMELIST_DECL)
26593 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26594 get_context_die (DECL_CONTEXT (decl)),
26595 NULL_TREE);
26596 else
26597 at_import_die = force_decl_die (decl);
26598 }
26599 }
26600
26601 if (TREE_CODE (decl) == NAMESPACE_DECL)
26602 {
26603 if (dwarf_version >= 3 || !dwarf_strict)
26604 imported_die = new_die (DW_TAG_imported_module,
26605 lexical_block_die,
26606 lexical_block);
26607 else
26608 return;
26609 }
26610 else
26611 imported_die = new_die (DW_TAG_imported_declaration,
26612 lexical_block_die,
26613 lexical_block);
26614
26615 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26616 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26617 if (debug_column_info && xloc.column)
26618 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26619 if (name)
26620 add_AT_string (imported_die, DW_AT_name,
26621 IDENTIFIER_POINTER (name));
26622 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26623 }
26624
26625 /* Output debug information for imported module or decl DECL.
26626 NAME is the non-NULL name in the context if the decl has been renamed.
26627 CHILD is true if DECL is one of the decls renamed as part of
26628 importing a whole module.
26629 IMPLICIT is set if this hook is called for an implicit import
26630 such as inline namespace. */
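/* E.g. the members of "inline namespace v1 { ... }" become visible in the
   enclosing namespace without an explicit using-directive; that implicit
   import is what IMPLICIT describes here.  */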
26631
26632 static void
26633 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26634 bool child, bool implicit)
26635 {
26636 /* dw_die_ref at_import_die; */
26637 dw_die_ref scope_die;
26638
26639 if (debug_info_level <= DINFO_LEVEL_TERSE)
26640 return;
26641
26642 gcc_assert (decl);
26643
26644 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26645 should be enough; for DWARF4 and older, even if we emit
26646 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26647 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26648 if (implicit
26649 && dwarf_version >= 5
26650 && lang_hooks.decls.decl_dwarf_attribute (decl,
26651 DW_AT_export_symbols) == 1)
26652 return;
26653
26654 set_early_dwarf s;
26655
26656 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26657 two DIEs: the decl DIE for the reference and the scope DIE. First, get
26658 the DIE for the decl itself. */
26659
26660 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26661 module or decl. If no DIE is found for non-globals, force a new DIE. */
26662 if (context
26663 && TYPE_P (context)
26664 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26665 return;
26666
26667 scope_die = get_context_die (context);
26668
26669 if (child)
26670 {
26671 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26672 there is nothing we can do here. */
26673 if (dwarf_version < 3 && dwarf_strict)
26674 return;
26675
26676 gcc_assert (scope_die->die_child);
26677 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26678 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26679 scope_die = scope_die->die_child;
26680 }
26681
26682 /* OK, now we have DIEs for decl as well as scope. Emit the imported DIE. */
26683 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26684 }
26685
26686 /* Output debug information for namelists. */
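/* E.g. a Fortran "NAMELIST /NML/ A, B" is expected to yield a
   DW_TAG_namelist DIE with one DW_TAG_namelist_item child per member,
   each pointing at the member's DIE via DW_AT_namelist_items.  */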
26687
26688 static dw_die_ref
26689 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26690 {
26691 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26692 tree value;
26693 unsigned i;
26694
26695 if (debug_info_level <= DINFO_LEVEL_TERSE)
26696 return NULL;
26697
26698 gcc_assert (scope_die != NULL);
26699 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26700 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26701
26702 /* If there are no item_decls, we have a nondefining namelist, e.g.
26703 with USE association; hence, set DW_AT_declaration. */
26704 if (item_decls == NULL_TREE)
26705 {
26706 add_AT_flag (nml_die, DW_AT_declaration, 1);
26707 return nml_die;
26708 }
26709
26710 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26711 {
26712 nml_item_ref_die = lookup_decl_die (value);
26713 if (!nml_item_ref_die)
26714 nml_item_ref_die = force_decl_die (value);
26715
26716 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26717 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26718 }
26719 return nml_die;
26720 }
26721
26722
26723 /* Write the debugging output for DECL. */
26724
26725 static void
26726 dwarf2out_decl (tree decl)
26727 {
26728 dw_die_ref context_die = comp_unit_die ();
26729
26730 switch (TREE_CODE (decl))
26731 {
26732 case ERROR_MARK:
26733 return;
26734
26735 case FUNCTION_DECL:
26736 /* If we're a nested function, initially use a parent of NULL; if we're
26737 a plain function, this will be fixed up in decls_for_scope. If
26738 we're a method, it will be ignored, since we already have a DIE. */
26739 if (decl_function_context (decl)
26740 /* But if we're in terse mode, we don't care about scope. */
26741 && debug_info_level > DINFO_LEVEL_TERSE)
26742 context_die = NULL;
26743 break;
26744
26745 case VAR_DECL:
26746 /* For local statics lookup proper context die. */
26747 if (local_function_static (decl))
26748 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26749
26750 /* If we are in terse mode, don't generate any DIEs to represent any
26751 variable declarations or definitions. */
26752 if (debug_info_level <= DINFO_LEVEL_TERSE)
26753 return;
26754 break;
26755
26756 case CONST_DECL:
26757 if (debug_info_level <= DINFO_LEVEL_TERSE)
26758 return;
26759 if (!is_fortran () && !is_ada ())
26760 return;
26761 if (TREE_STATIC (decl) && decl_function_context (decl))
26762 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26763 break;
26764
26765 case NAMESPACE_DECL:
26766 case IMPORTED_DECL:
26767 if (debug_info_level <= DINFO_LEVEL_TERSE)
26768 return;
26769 if (lookup_decl_die (decl) != NULL)
26770 return;
26771 break;
26772
26773 case TYPE_DECL:
26774 /* Don't emit stubs for types unless they are needed by other DIEs. */
26775 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26776 return;
26777
26778 /* Don't bother trying to generate any DIEs to represent any of the
26779 normal built-in types for the language we are compiling. */
26780 if (DECL_IS_BUILTIN (decl))
26781 return;
26782
26783 /* If we are in terse mode, don't generate any DIEs for types. */
26784 if (debug_info_level <= DINFO_LEVEL_TERSE)
26785 return;
26786
26787 /* If we're a function-scope tag, initially use a parent of NULL;
26788 this will be fixed up in decls_for_scope. */
26789 if (decl_function_context (decl))
26790 context_die = NULL;
26791
26792 break;
26793
26794 case NAMELIST_DECL:
26795 break;
26796
26797 default:
26798 return;
26799 }
26800
26801 gen_decl_die (decl, NULL, NULL, context_die);
26802
26803 if (flag_checking)
26804 {
26805 dw_die_ref die = lookup_decl_die (decl);
26806 if (die)
26807 check_die (die);
26808 }
26809 }
26810
26811 /* Write the debugging output for DECL. */
26812
26813 static void
26814 dwarf2out_function_decl (tree decl)
26815 {
26816 dwarf2out_decl (decl);
26817 call_arg_locations = NULL;
26818 call_arg_loc_last = NULL;
26819 call_site_count = -1;
26820 tail_call_site_count = -1;
26821 decl_loc_table->empty ();
26822 cached_dw_loc_list_table->empty ();
26823 }
26824
26825 /* Output a marker (i.e. a label) for the beginning of the generated code for
26826 a lexical block. */
26827
26828 static void
26829 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26830 unsigned int blocknum)
26831 {
26832 switch_to_section (current_function_section ());
26833 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26834 }
26835
26836 /* Output a marker (i.e. a label) for the end of the generated code for a
26837 lexical block. */
26838
26839 static void
26840 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26841 {
26842 switch_to_section (current_function_section ());
26843 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26844 }
26845
26846 /* Returns nonzero if it is appropriate not to emit any debugging
26847 information for BLOCK, because it doesn't contain any instructions.
26848
26849 Don't allow this for blocks with nested functions or local classes
26850 as we would end up with orphans, and in the presence of scheduling
26851 we may end up calling them anyway. */
26852
26853 static bool
26854 dwarf2out_ignore_block (const_tree block)
26855 {
26856 tree decl;
26857 unsigned int i;
26858
26859 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26860 if (TREE_CODE (decl) == FUNCTION_DECL
26861 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26862 return 0;
26863 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26864 {
26865 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26866 if (TREE_CODE (decl) == FUNCTION_DECL
26867 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26868 return 0;
26869 }
26870
26871 return 1;
26872 }
26873
26874 /* Hash table routines for file_hash. */
26875
26876 bool
26877 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26878 {
26879 return filename_cmp (p1->filename, p2) == 0;
26880 }
26881
26882 hashval_t
26883 dwarf_file_hasher::hash (dwarf_file_data *p)
26884 {
26885 return htab_hash_string (p->filename);
26886 }
26887
26888 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26889 dwarf2out.c) and return its "index". The index of each (known) filename is
26890 just a unique number which is associated with only that one filename. We
26891 need such numbers for the sake of generating labels (in the .debug_sfnames
26892 section) and references to those file numbers (in the .debug_srcinfo
26893 and .debug_macinfo sections). If the filename given as an argument is not
26894 found in our current list, add it to the list and assign it the next
26895 available unique index number. */
26896
26897 static struct dwarf_file_data *
26898 lookup_filename (const char *file_name)
26899 {
26900 struct dwarf_file_data * created;
26901
26902 if (!file_name)
26903 return NULL;
26904
26905 dwarf_file_data **slot
26906 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26907 INSERT);
26908 if (*slot)
26909 return *slot;
26910
26911 created = ggc_alloc<dwarf_file_data> ();
26912 created->filename = file_name;
26913 created->emitted_number = 0;
26914 *slot = created;
26915 return created;
26916 }
26917
26918 /* If the assembler will construct the file table, then translate the compiler
26919 internal file table number into the assembler file table number, and emit
26920 a .file directive if we haven't already emitted one yet. The file table
26921 numbers are different because we prune debug info for unused variables and
26922 types, which may include filenames. */
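/* For example, the first filename routed through this path would produce
   assembler output along the lines of

     .file 1 "foo.c"

   with the number reflecting emission order rather than the internal
   file-table order.  */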
26923
26924 static int
26925 maybe_emit_file (struct dwarf_file_data * fd)
26926 {
26927 if (! fd->emitted_number)
26928 {
26929 if (last_emitted_file)
26930 fd->emitted_number = last_emitted_file->emitted_number + 1;
26931 else
26932 fd->emitted_number = 1;
26933 last_emitted_file = fd;
26934
26935 if (output_asm_line_debug_info ())
26936 {
26937 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26938 output_quoted_string (asm_out_file,
26939 remap_debug_filename (fd->filename));
26940 fputc ('\n', asm_out_file);
26941 }
26942 }
26943
26944 return fd->emitted_number;
26945 }
26946
26947 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26948 That generation should happen after function debug info has been
26949 generated. The value of the attribute is the constant value of ARG. */
26950
26951 static void
26952 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26953 {
26954 die_arg_entry entry;
26955
26956 if (!die || !arg)
26957 return;
26958
26959 gcc_assert (early_dwarf);
26960
26961 if (!tmpl_value_parm_die_table)
26962 vec_alloc (tmpl_value_parm_die_table, 32);
26963
26964 entry.die = die;
26965 entry.arg = arg;
26966 vec_safe_push (tmpl_value_parm_die_table, entry);
26967 }
26968
26969 /* Return TRUE if T is an instance of generic type, FALSE
26970 otherwise. */
26971
26972 static bool
26973 generic_type_p (tree t)
26974 {
26975 if (t == NULL_TREE || !TYPE_P (t))
26976 return false;
26977 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26978 }
26979
26980 /* Schedule the generation of the generic parameter dies for the
26981 instance of generic type T. The proper generation itself is later
26982 done by gen_scheduled_generic_parms_dies. */
26983
26984 static void
26985 schedule_generic_params_dies_gen (tree t)
26986 {
26987 if (!generic_type_p (t))
26988 return;
26989
26990 gcc_assert (early_dwarf);
26991
26992 if (!generic_type_instances)
26993 vec_alloc (generic_type_instances, 256);
26994
26995 vec_safe_push (generic_type_instances, t);
26996 }
26997
26998 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26999 by append_entry_to_tmpl_value_parm_die_table. This function must
27000 be called after function DIEs have been generated. */
27001
27002 static void
27003 gen_remaining_tmpl_value_param_die_attribute (void)
27004 {
27005 if (tmpl_value_parm_die_table)
27006 {
27007 unsigned i, j;
27008 die_arg_entry *e;
27009
27010 /* We do this in two phases - first get the cases we can
27011 handle during early-finish, preserving those we cannot
27012 (containing symbolic constants where we don't yet know
27013 whether we are going to output the referenced symbols).
27014 For those we try again at late-finish. */
27015 j = 0;
27016 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27017 {
27018 if (!e->die->removed
27019 && !tree_add_const_value_attribute (e->die, e->arg))
27020 {
27021 dw_loc_descr_ref loc = NULL;
27022 if (! early_dwarf
27023 && (dwarf_version >= 5 || !dwarf_strict))
27024 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27025 if (loc)
27026 add_AT_loc (e->die, DW_AT_location, loc);
27027 else
27028 (*tmpl_value_parm_die_table)[j++] = *e;
27029 }
27030 }
27031 tmpl_value_parm_die_table->truncate (j);
27032 }
27033 }
27034
27035 /* Generate generic parameters DIEs for instances of generic types
27036 that have been previously scheduled by
27037 schedule_generic_params_dies_gen. This function must be called
27038 after all the types of the CU have been laid out. */
27039
27040 static void
27041 gen_scheduled_generic_parms_dies (void)
27042 {
27043 unsigned i;
27044 tree t;
27045
27046 if (!generic_type_instances)
27047 return;
27048
27049 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27050 if (COMPLETE_TYPE_P (t))
27051 gen_generic_params_dies (t);
27052
27053 generic_type_instances = NULL;
27054 }
27055
27056
27057 /* Replace DW_AT_name for the decl with name. */
27058
27059 static void
27060 dwarf2out_set_name (tree decl, tree name)
27061 {
27062 dw_die_ref die;
27063 dw_attr_node *attr;
27064 const char *dname;
27065
27066 die = TYPE_SYMTAB_DIE (decl);
27067 if (!die)
27068 return;
27069
27070 dname = dwarf2_name (name, 0);
27071 if (!dname)
27072 return;
27073
27074 attr = get_AT (die, DW_AT_name);
27075 if (attr)
27076 {
27077 struct indirect_string_node *node;
27078
27079 node = find_AT_string (dname);
27080 /* Replace the string. */
27081 attr->dw_attr_val.v.val_str = node;
27082 }
27083
27084 else
27085 add_name_attribute (die, dname);
27086 }
27087
27088 /* True if before or during processing of the first function being emitted. */
27089 static bool in_first_function_p = true;
27090 /* True if the loc_note seen during a dwarf2out_var_location call might
27091 still be before the first real instruction, at an address equal to .Ltext0. */
27092 static bool maybe_at_text_label_p = true;
27093 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27094 static unsigned int first_loclabel_num_not_at_text_label;
27095
27096 /* Look ahead for a real insn, or for a begin stmt marker. */
27097
27098 static rtx_insn *
27099 dwarf2out_next_real_insn (rtx_insn *loc_note)
27100 {
27101 rtx_insn *next_real = NEXT_INSN (loc_note);
27102
27103 while (next_real)
27104 if (INSN_P (next_real))
27105 break;
27106 else
27107 next_real = NEXT_INSN (next_real);
27108
27109 return next_real;
27110 }
27111
27112 /* Called by the final INSN scan whenever we see a var location. We
27113 use it to drop labels in the right places, and throw the location in
27114 our lookup table. */
27115
27116 static void
27117 dwarf2out_var_location (rtx_insn *loc_note)
27118 {
27119 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27120 struct var_loc_node *newloc;
27121 rtx_insn *next_real, *next_note;
27122 rtx_insn *call_insn = NULL;
27123 static const char *last_label;
27124 static const char *last_postcall_label;
27125 static bool last_in_cold_section_p;
27126 static rtx_insn *expected_next_loc_note;
27127 tree decl;
27128 bool var_loc_p;
27129 var_loc_view view = 0;
27130
27131 if (!NOTE_P (loc_note))
27132 {
27133 if (CALL_P (loc_note))
27134 {
27135 maybe_reset_location_view (loc_note, cur_line_info_table);
27136 call_site_count++;
27137 if (SIBLING_CALL_P (loc_note))
27138 tail_call_site_count++;
27139 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27140 {
27141 call_insn = loc_note;
27142 loc_note = NULL;
27143 var_loc_p = false;
27144
27145 next_real = dwarf2out_next_real_insn (call_insn);
27146 next_note = NULL;
27147 cached_next_real_insn = NULL;
27148 goto create_label;
27149 }
27150 if (optimize == 0 && !flag_var_tracking)
27151 {
27152 /* When the var-tracking pass is not running, there is no note
27153 for indirect calls whose target is compile-time known. In this
27154 case, process such calls specifically so that we generate call
27155 sites for them anyway. */
27156 rtx x = PATTERN (loc_note);
27157 if (GET_CODE (x) == PARALLEL)
27158 x = XVECEXP (x, 0, 0);
27159 if (GET_CODE (x) == SET)
27160 x = SET_SRC (x);
27161 if (GET_CODE (x) == CALL)
27162 x = XEXP (x, 0);
27163 if (!MEM_P (x)
27164 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27165 || !SYMBOL_REF_DECL (XEXP (x, 0))
27166 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27167 != FUNCTION_DECL))
27168 {
27169 call_insn = loc_note;
27170 loc_note = NULL;
27171 var_loc_p = false;
27172
27173 next_real = dwarf2out_next_real_insn (call_insn);
27174 next_note = NULL;
27175 cached_next_real_insn = NULL;
27176 goto create_label;
27177 }
27178 }
27179 }
27180 else if (!debug_variable_location_views)
27181 gcc_unreachable ();
27182 else
27183 maybe_reset_location_view (loc_note, cur_line_info_table);
27184
27185 return;
27186 }
27187
27188 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27189 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27190 return;
27191
27192 /* Optimize processing a large consecutive sequence of location
27193 notes so we don't spend too much time in next_real_insn. If the
27194 next insn is another location note, remember the next_real_insn
27195 calculation for next time. */
27196 next_real = cached_next_real_insn;
27197 if (next_real)
27198 {
27199 if (expected_next_loc_note != loc_note)
27200 next_real = NULL;
27201 }
27202
27203 next_note = NEXT_INSN (loc_note);
27204 if (! next_note
27205 || next_note->deleted ()
27206 || ! NOTE_P (next_note)
27207 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27208 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27209 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27210 next_note = NULL;
27211
27212 if (! next_real)
27213 next_real = dwarf2out_next_real_insn (loc_note);
27214
27215 if (next_note)
27216 {
27217 expected_next_loc_note = next_note;
27218 cached_next_real_insn = next_real;
27219 }
27220 else
27221 cached_next_real_insn = NULL;
27222
27223 /* If there are no instructions which would be affected by this note,
27224 don't do anything. */
27225 if (var_loc_p
27226 && next_real == NULL_RTX
27227 && !NOTE_DURING_CALL_P (loc_note))
27228 return;
27229
27230 create_label:
27231
27232 if (next_real == NULL_RTX)
27233 next_real = get_last_insn ();
27234
27235 /* If there were any real insns between the note we processed last time
27236 and this note (or if it is the first note), clear
27237 last_{,postcall_}label so that they are not reused this time. */
27238 if (last_var_location_insn == NULL_RTX
27239 || last_var_location_insn != next_real
27240 || last_in_cold_section_p != in_cold_section_p)
27241 {
27242 last_label = NULL;
27243 last_postcall_label = NULL;
27244 }
27245
27246 if (var_loc_p)
27247 {
27248 const char *label
27249 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27250 view = cur_line_info_table->view;
27251 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27252 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27253 if (newloc == NULL)
27254 return;
27255 }
27256 else
27257 {
27258 decl = NULL_TREE;
27259 newloc = NULL;
27260 }
27261
27262 /* If there were no real insns between the note we processed last time
27263 and this note, use the label we emitted last time. Otherwise
27264 create a new label and emit it. */
27265 if (last_label == NULL)
27266 {
27267 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27268 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27269 loclabel_num++;
27270 last_label = ggc_strdup (loclabel);
27271 /* See if loclabel might be equal to .Ltext0. If yes,
27272 bump first_loclabel_num_not_at_text_label. */
27273 if (!have_multiple_function_sections
27274 && in_first_function_p
27275 && maybe_at_text_label_p)
27276 {
27277 static rtx_insn *last_start;
27278 rtx_insn *insn;
27279 for (insn = loc_note; insn; insn = previous_insn (insn))
27280 if (insn == last_start)
27281 break;
27282 else if (!NONDEBUG_INSN_P (insn))
27283 continue;
27284 else
27285 {
27286 rtx body = PATTERN (insn);
27287 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27288 continue;
27289 /* Inline asm could occupy zero bytes. */
27290 else if (GET_CODE (body) == ASM_INPUT
27291 || asm_noperands (body) >= 0)
27292 continue;
27293 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27294 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27295 continue;
27296 #endif
27297 else
27298 {
27299 /* Assume insn has non-zero length. */
27300 maybe_at_text_label_p = false;
27301 break;
27302 }
27303 }
27304 if (maybe_at_text_label_p)
27305 {
27306 last_start = loc_note;
27307 first_loclabel_num_not_at_text_label = loclabel_num;
27308 }
27309 }
27310 }
27311
27312 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27313 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27314
27315 if (!var_loc_p)
27316 {
27317 struct call_arg_loc_node *ca_loc
27318 = ggc_cleared_alloc<call_arg_loc_node> ();
27319 rtx_insn *prev = call_insn;
27320
27321 ca_loc->call_arg_loc_note
27322 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27323 ca_loc->next = NULL;
27324 ca_loc->label = last_label;
27325 gcc_assert (prev
27326 && (CALL_P (prev)
27327 || (NONJUMP_INSN_P (prev)
27328 && GET_CODE (PATTERN (prev)) == SEQUENCE
27329 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27330 if (!CALL_P (prev))
27331 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27332 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27333
27334 /* Look for a SYMBOL_REF in the "prev" instruction. */
27335 rtx x = get_call_rtx_from (PATTERN (prev));
27336 if (x)
27337 {
27338 /* Try to get the call symbol, if any. */
27339 if (MEM_P (XEXP (x, 0)))
27340 x = XEXP (x, 0);
27341 /* First, look for a memory access to a symbol_ref. */
27342 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27343 && SYMBOL_REF_DECL (XEXP (x, 0))
27344 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27345 ca_loc->symbol_ref = XEXP (x, 0);
27346 /* Otherwise, look at a compile-time known user-level function
27347 declaration. */
27348 else if (MEM_P (x)
27349 && MEM_EXPR (x)
27350 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27351 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27352 }
27353
27354 ca_loc->block = insn_scope (prev);
27355 if (call_arg_locations)
27356 call_arg_loc_last->next = ca_loc;
27357 else
27358 call_arg_locations = ca_loc;
27359 call_arg_loc_last = ca_loc;
27360 }
27361 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27362 {
27363 newloc->label = last_label;
27364 newloc->view = view;
27365 }
27366 else
27367 {
27368 if (!last_postcall_label)
27369 {
27370 sprintf (loclabel, "%s-1", last_label);
27371 last_postcall_label = ggc_strdup (loclabel);
27372 }
27373 newloc->label = last_postcall_label;
27374 /* ??? This view is at last_label, not last_label-1, but we
27375 could only assume view at last_label-1 is zero if we could
27376 assume calls always have length greater than one. This is
27377 probably true in general, though there might be a rare
27378 exception to this rule, e.g. if a call insn is optimized out
27379 by target magic. Then, even the -1 in the label will be
27380 wrong, which might invalidate the range. Anyway, using view,
27381 though technically possibly incorrect, will work as far as
27382 ranges go: since L-1 is in the middle of the call insn,
27383 (L-1).0 and (L-1).V shouldn't make any difference, and having
27384 the loclist entry refer to the .loc entry might be useful, so
27385 leave it like this. */
27386 newloc->view = view;
27387 }
27388
27389 if (var_loc_p && flag_debug_asm)
27390 {
27391 const char *name, *sep, *patstr;
27392 if (decl && DECL_NAME (decl))
27393 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27394 else
27395 name = "";
27396 if (NOTE_VAR_LOCATION_LOC (loc_note))
27397 {
27398 sep = " => ";
27399 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27400 }
27401 else
27402 {
27403 sep = " ";
27404 patstr = "RESET";
27405 }
27406 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27407 name, sep, patstr);
27408 }
27409
27410 last_var_location_insn = next_real;
27411 last_in_cold_section_p = in_cold_section_p;
27412 }
27413
27414 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27415 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27416 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27417 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27418 BLOCK_FRAGMENT_ORIGIN links. */
27419 static bool
27420 block_within_block_p (tree block, tree outer, bool bothways)
27421 {
27422 if (block == outer)
27423 return true;
27424
27425 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27426 for (tree context = BLOCK_SUPERCONTEXT (block);
27427 context != outer;
27428 context = BLOCK_SUPERCONTEXT (context))
27429 if (!context || TREE_CODE (context) != BLOCK)
27430 return false;
27431
27432 if (!bothways)
27433 return true;
27434
27435 /* Now check that each block is actually referenced by its
27436 parent. */
27437 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27438 context = BLOCK_SUPERCONTEXT (context))
27439 {
27440 if (BLOCK_FRAGMENT_ORIGIN (context))
27441 {
27442 gcc_assert (!BLOCK_SUBBLOCKS (context));
27443 context = BLOCK_FRAGMENT_ORIGIN (context);
27444 }
27445 for (tree sub = BLOCK_SUBBLOCKS (context);
27446 sub != block;
27447 sub = BLOCK_CHAIN (sub))
27448 if (!sub)
27449 return false;
27450 if (context == outer)
27451 return true;
27452 else
27453 block = context;
27454 }
27455 }
27456
27457 /* Called during final while assembling the marker of the entry point
27458 for an inlined function. */
27459
27460 static void
27461 dwarf2out_inline_entry (tree block)
27462 {
27463 gcc_assert (debug_inline_points);
27464
27465 /* If we can't represent it, don't bother. */
27466 if (!(dwarf_version >= 3 || !dwarf_strict))
27467 return;
27468
27469 gcc_assert (DECL_P (block_ultimate_origin (block)));
27470
27471 /* Sanity check the block tree. This would catch a case in which
27472 BLOCK got removed from the tree reachable from the outermost
27473 lexical block, but got retained in markers. It would still link
27474 back to its parents, but some ancestor would be missing a link
27475 down the path to the sub BLOCK. If the block got removed, its
27476 BLOCK_NUMBER will not be a usable value. */
27477 if (flag_checking)
27478 gcc_assert (block_within_block_p (block,
27479 DECL_INITIAL (current_function_decl),
27480 true));
27481
27482 gcc_assert (inlined_function_outer_scope_p (block));
27483 gcc_assert (!BLOCK_DIE (block));
27484
27485 if (BLOCK_FRAGMENT_ORIGIN (block))
27486 block = BLOCK_FRAGMENT_ORIGIN (block);
27487 /* Can the entry point ever not be at the beginning of an
27488 unfragmented lexical block? */
27489 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27490 || (cur_line_info_table
27491 && !ZERO_VIEW_P (cur_line_info_table->view))))
27492 return;
27493
27494 if (!inline_entry_data_table)
27495 inline_entry_data_table
27496 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27497
27498
27499 inline_entry_data **iedp
27500 = inline_entry_data_table->find_slot_with_hash (block,
27501 htab_hash_pointer (block),
27502 INSERT);
27503 if (*iedp)
27504 /* ??? Ideally, we'd record all entry points for the same inlined
27505 function (some may have been duplicated by e.g. unrolling), but
27506 we have no way to represent that ATM. */
27507 return;
27508
27509 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27510 ied->block = block;
27511 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27512 ied->label_num = BLOCK_NUMBER (block);
27513 if (cur_line_info_table)
27514 ied->view = cur_line_info_table->view;
27515
27516 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27517
27518 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27519 BLOCK_NUMBER (block));
27520 ASM_OUTPUT_LABEL (asm_out_file, label);
27521 }
27522
27523 /* Called from finalize_size_functions for size functions so that their body
27524 can be encoded in the debug info to describe the layout of variable-length
27525 structures. */
27526
27527 static void
27528 dwarf2out_size_function (tree decl)
27529 {
27530 function_to_dwarf_procedure (decl);
27531 }
27532
27533 /* Note in one location list that the text section has changed. */
27534
27535 int
27536 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27537 {
27538 var_loc_list *list = *slot;
27539 if (list->first)
27540 list->last_before_switch
27541 = list->last->next ? list->last->next : list->last;
27542 return 1;
27543 }
27544
27545 /* Note in all location lists that the text section has changed. */
27546
27547 static void
27548 var_location_switch_text_section (void)
27549 {
27550 if (decl_loc_table == NULL)
27551 return;
27552
27553 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27554 }
27555
27556 /* Create a new line number table. */
27557
27558 static dw_line_info_table *
27559 new_line_info_table (void)
27560 {
27561 dw_line_info_table *table;
27562
27563 table = ggc_cleared_alloc<dw_line_info_table> ();
27564 table->file_num = 1;
27565 table->line_num = 1;
27566 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
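  /* Start the new table with a forced view reset, so its first line
     entry begins a fresh view sequence (emitted as "view -0" when the
     assembler is used to compute view numbers).  */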
27567 FORCE_RESET_NEXT_VIEW (table->view);
27568 table->symviews_since_reset = 0;
27569
27570 return table;
27571 }
27572
27573 /* Look up the "current" table into which we emit line info, so
27574 that we don't have to do it for every source line. */
27575
27576 static void
27577 set_cur_line_info_table (section *sec)
27578 {
27579 dw_line_info_table *table;
27580
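  /* The hot .text and the cold text section each get a dedicated table;
     any other function section gets a fresh table that is pushed onto
     separate_line_info below.  */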
27581 if (sec == text_section)
27582 table = text_section_line_info;
27583 else if (sec == cold_text_section)
27584 {
27585 table = cold_text_section_line_info;
27586 if (!table)
27587 {
27588 cold_text_section_line_info = table = new_line_info_table ();
27589 table->end_label = cold_end_label;
27590 }
27591 }
27592 else
27593 {
27594 const char *end_label;
27595
27596 if (crtl->has_bb_partition)
27597 {
27598 if (in_cold_section_p)
27599 end_label = crtl->subsections.cold_section_end_label;
27600 else
27601 end_label = crtl->subsections.hot_section_end_label;
27602 }
27603 else
27604 {
27605 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27606 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27607 current_function_funcdef_no);
27608 end_label = ggc_strdup (label);
27609 }
27610
27611 table = new_line_info_table ();
27612 table->end_label = end_label;
27613
27614 vec_safe_push (separate_line_info, table);
27615 }
27616
27617 if (output_asm_line_debug_info ())
27618 table->is_stmt = (cur_line_info_table
27619 ? cur_line_info_table->is_stmt
27620 : DWARF_LINE_DEFAULT_IS_STMT_START);
27621 cur_line_info_table = table;
27622 }
27623
27624
27625 /* We need to reset the locations at the beginning of each
27626 function. We can't do this in the end_function hook, because the
27627 declarations that use the locations won't have been output when
27628 that hook is called. Also compute have_multiple_function_sections here. */
27629
27630 static void
27631 dwarf2out_begin_function (tree fun)
27632 {
27633 section *sec = function_section (fun);
27634
27635 if (sec != text_section)
27636 have_multiple_function_sections = true;
27637
27638 if (crtl->has_bb_partition && !cold_text_section)
27639 {
27640 gcc_assert (current_function_decl == fun);
27641 cold_text_section = unlikely_text_section ();
27642 switch_to_section (cold_text_section);
27643 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27644 switch_to_section (sec);
27645 }
27646
27647 dwarf2out_note_section_used ();
27648 call_site_count = 0;
27649 tail_call_site_count = 0;
27650
27651 set_cur_line_info_table (sec);
27652 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27653 }
27654
27655 /* Helper function of dwarf2out_end_function, called only after emitting
27656 the very first function into assembly. Check if some .debug_loc range
27657 might end with a .LVL* label that could be equal to .Ltext0.
27658 In that case we must force using absolute addresses in .debug_loc ranges,
27659 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27660 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27661 list terminator.
27662 Set have_multiple_function_sections to true in that case and
27663 terminate htab traversal. */
27664
27665 int
27666 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27667 {
27668 var_loc_list *entry = *slot;
27669 struct var_loc_node *node;
27670
27671 node = entry->first;
27672 if (node && node->next && node->next->label)
27673 {
27674 unsigned int i;
27675 const char *label = node->next->label;
27676 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27677
27678 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27679 {
27680 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27681 if (strcmp (label, loclabel) == 0)
27682 {
27683 have_multiple_function_sections = true;
27684 return 0;
27685 }
27686 }
27687 }
27688 return 1;
27689 }
27690
27691 /* Hook called after emitting a function into assembly.
27692 This does something only for the very first function emitted. */
27693
27694 static void
27695 dwarf2out_end_function (unsigned int)
27696 {
27697 if (in_first_function_p
27698 && !have_multiple_function_sections
27699 && first_loclabel_num_not_at_text_label
27700 && decl_loc_table)
27701 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27702 in_first_function_p = false;
27703 maybe_at_text_label_p = false;
27704 }
27705
27706 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27707 front-ends register a translation unit even before dwarf2out_init is
27708 called. */
27709 static tree main_translation_unit = NULL_TREE;
27710
27711 /* Hook called by front-ends after they have built their main translation
27712    unit.  Associate comp_unit_die with UNIT. */
27713
27714 static void
27715 dwarf2out_register_main_translation_unit (tree unit)
27716 {
27717 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27718 && main_translation_unit == NULL_TREE);
27719 main_translation_unit = unit;
27720 /* If dwarf2out_init has not been called yet, it will perform the association
27721 itself looking at main_translation_unit. */
27722 if (decl_die_table != NULL)
27723 equate_decl_number_to_die (unit, comp_unit_die ());
27724 }
27725
27726 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27727
27728 static void
27729 push_dw_line_info_entry (dw_line_info_table *table,
27730 enum dw_line_info_opcode opcode, unsigned int val)
27731 {
27732 dw_line_info_entry e;
27733 e.opcode = opcode;
27734 e.val = val;
27735 vec_safe_push (table->entries, e);
27736 }
27737
27738 /* Output a label to mark the beginning of a source code line entry
27739 and record information relating to this source line, in
27740 'line_info_table' for later output of the .debug_line section. */
27741 /* ??? The discriminator parameter ought to be unsigned. */
27742
27743 static void
27744 dwarf2out_source_line (unsigned int line, unsigned int column,
27745 const char *filename,
27746 int discriminator, bool is_stmt)
27747 {
27748 unsigned int file_num;
27749 dw_line_info_table *table;
27750 static var_loc_view lvugid;
27751
27752 if (debug_info_level < DINFO_LEVEL_TERSE)
27753 return;
27754
27755 table = cur_line_info_table;
27756
27757 if (line == 0)
27758 {
27759 if (debug_variable_location_views
27760 && output_asm_line_debug_info ()
27761 && table && !RESETTING_VIEW_P (table->view))
27762 {
27763 /* If we're using the assembler to compute view numbers, we
27764 can't issue a .loc directive for line zero, so we can't
27765 get a view number at this point. We might attempt to
27766 compute it from the previous view, or equate it to a
27767 subsequent view (though it might not be there!), but
27768 since we're omitting the line number entry, we might as
27769 well omit the view number as well. That means pretending
27770 it's a view number zero, which might very well turn out
27771 to be correct. ??? Extend the assembler so that the
27772 compiler could emit e.g. ".locview .LVU#", to output a
27773 view without changing line number information. We'd then
27774 have to count it in symviews_since_reset; when it's omitted,
27775 it doesn't count. */
27776 if (!zero_view_p)
27777 zero_view_p = BITMAP_GGC_ALLOC ();
27778 bitmap_set_bit (zero_view_p, table->view);
27779 if (flag_debug_asm)
27780 {
27781 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27782 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27783 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27784 ASM_COMMENT_START);
27785 assemble_name (asm_out_file, label);
27786 putc ('\n', asm_out_file);
27787 }
27788 table->view = ++lvugid;
27789 }
27790 return;
27791 }
27792
27793 /* The discriminator column was added in dwarf4. Simplify the below
27794 by simply removing it if we're not supposed to output it. */
27795 if (dwarf_version < 4 && dwarf_strict)
27796 discriminator = 0;
27797
27798 if (!debug_column_info)
27799 column = 0;
27800
27801 file_num = maybe_emit_file (lookup_filename (filename));
27802
27803 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27804 the debugger has used the second (possibly duplicate) line number
27805 at the beginning of the function to mark the end of the prologue.
27806 We could eliminate any other duplicates within the function. For
27807 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27808 that second line number entry. */
27809 /* Recall that this end-of-prologue indication is *not* the same thing
27810 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27811 to which the hook corresponds, follows the last insn that was
27812 emitted by gen_prologue. What we need is to precede the first insn
27813 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27814 insn that corresponds to something the user wrote. These may be
27815 very different locations once scheduling is enabled. */
27816
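  /* The "0 &&" keeps this duplicate-entry elision disabled for now; see
     the ??? comment above.  */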
27817 if (0 && file_num == table->file_num
27818 && line == table->line_num
27819 && column == table->column_num
27820 && discriminator == table->discrim_num
27821 && is_stmt == table->is_stmt)
27822 return;
27823
27824 switch_to_section (current_function_section ());
27825
27826 /* If requested, emit something human-readable. */
27827 if (flag_debug_asm)
27828 {
27829 if (debug_column_info)
27830 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27831 filename, line, column);
27832 else
27833 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27834 filename, line);
27835 }
27836
27837 if (output_asm_line_debug_info ())
27838 {
27839 /* Emit the .loc directive understood by GNU as. */
27840 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27841 file_num, line, is_stmt, discriminator */
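      /* Purely illustrative example: file 1, line 42, column 7 would
         produce ".loc 1 42 7", with is_stmt, discriminator and view
         operands appended below only when applicable.  */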
27842 fputs ("\t.loc ", asm_out_file);
27843 fprint_ul (asm_out_file, file_num);
27844 putc (' ', asm_out_file);
27845 fprint_ul (asm_out_file, line);
27846 putc (' ', asm_out_file);
27847 fprint_ul (asm_out_file, column);
27848
27849 if (is_stmt != table->is_stmt)
27850 {
27851 fputs (" is_stmt ", asm_out_file);
27852 putc (is_stmt ? '1' : '0', asm_out_file);
27853 }
27854 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27855 {
27856 gcc_assert (discriminator > 0);
27857 fputs (" discriminator ", asm_out_file);
27858 fprint_ul (asm_out_file, (unsigned long) discriminator);
27859 }
27860 if (debug_variable_location_views)
27861 {
27862 if (!RESETTING_VIEW_P (table->view))
27863 {
27864 table->symviews_since_reset++;
27865 if (table->symviews_since_reset > symview_upper_bound)
27866 symview_upper_bound = table->symviews_since_reset;
27867 /* When we're using the assembler to compute view
27868 numbers, we output symbolic labels after "view" in
27869 .loc directives, and the assembler will set them for
27870 us, so that we can refer to the view numbers in
27871 location lists. The only exceptions are when we know
27872 a view will be zero: "-0" is a forced reset, used
27873 e.g. in the beginning of functions, whereas "0" tells
27874 the assembler to check that there was a PC change
27875 since the previous view, in a way that implicitly
27876 resets the next view. */
27877 fputs (" view ", asm_out_file);
27878 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27879 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27880 assemble_name (asm_out_file, label);
27881 table->view = ++lvugid;
27882 }
27883 else
27884 {
27885 table->symviews_since_reset = 0;
27886 if (FORCE_RESETTING_VIEW_P (table->view))
27887 fputs (" view -0", asm_out_file);
27888 else
27889 fputs (" view 0", asm_out_file);
27890 /* Mark the present view as a zero view. Earlier debug
27891 binds may have already added its id to loclists to be
27892 emitted later, so we can't reuse the id for something
27893 else. However, it's good to know whether a view is
27894 known to be zero, because then we may be able to
27895 optimize out locviews that are all zeros, so take
27896 note of it in zero_view_p. */
27897 if (!zero_view_p)
27898 zero_view_p = BITMAP_GGC_ALLOC ();
27899 bitmap_set_bit (zero_view_p, lvugid);
27900 table->view = ++lvugid;
27901 }
27902 }
27903 putc ('\n', asm_out_file);
27904 }
27905 else
27906 {
27907 unsigned int label_num = ++line_info_label_num;
27908
27909 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27910
27911 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27912 push_dw_line_info_entry (table, LI_adv_address, label_num);
27913 else
27914 push_dw_line_info_entry (table, LI_set_address, label_num);
27915 if (debug_variable_location_views)
27916 {
27917 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27918 if (resetting)
27919 table->view = 0;
27920
27921 if (flag_debug_asm)
27922 fprintf (asm_out_file, "\t%s view %s%d\n",
27923 ASM_COMMENT_START,
27924 resetting ? "-" : "",
27925 table->view);
27926
27927 table->view++;
27928 }
27929 if (file_num != table->file_num)
27930 push_dw_line_info_entry (table, LI_set_file, file_num);
27931 if (discriminator != table->discrim_num)
27932 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27933 if (is_stmt != table->is_stmt)
27934 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27935 push_dw_line_info_entry (table, LI_set_line, line);
27936 if (debug_column_info)
27937 push_dw_line_info_entry (table, LI_set_column, column);
27938 }
27939
27940 table->file_num = file_num;
27941 table->line_num = line;
27942 table->column_num = column;
27943 table->discrim_num = discriminator;
27944 table->is_stmt = is_stmt;
27945 table->in_use = true;
27946 }
27947
27948 /* Record the beginning of a new source file. */
27949
27950 static void
27951 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27952 {
27953 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27954 {
27955 macinfo_entry e;
27956 e.code = DW_MACINFO_start_file;
27957 e.lineno = lineno;
27958 e.info = ggc_strdup (filename);
27959 vec_safe_push (macinfo_table, e);
27960 }
27961 }
27962
27963 /* Record the end of a source file. */
27964
27965 static void
27966 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27967 {
27968 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27969 {
27970 macinfo_entry e;
27971 e.code = DW_MACINFO_end_file;
27972 e.lineno = lineno;
27973 e.info = NULL;
27974 vec_safe_push (macinfo_table, e);
27975 }
27976 }
27977
27978 /* Called from debug_define in toplev.c.  The `buffer' parameter contains
27979    the tail part of the directive line, i.e. the part past the initial
27980    whitespace, '#', whitespace, directive-name and whitespace. */
27981
27982 static void
27983 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27984 const char *buffer ATTRIBUTE_UNUSED)
27985 {
27986 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27987 {
27988 macinfo_entry e;
27989 /* Insert a dummy first entry to be able to optimize the whole
27990 predefined macro block using DW_MACRO_import. */
27991 if (macinfo_table->is_empty () && lineno <= 1)
27992 {
27993 e.code = 0;
27994 e.lineno = 0;
27995 e.info = NULL;
27996 vec_safe_push (macinfo_table, e);
27997 }
27998 e.code = DW_MACINFO_define;
27999 e.lineno = lineno;
28000 e.info = ggc_strdup (buffer);
28001 vec_safe_push (macinfo_table, e);
28002 }
28003 }
28004
28005 /* Called from debug_undef in toplev.c.  The `buffer' parameter contains
28006    the tail part of the directive line, i.e. the part past the initial
28007    whitespace, '#', whitespace, directive-name and whitespace. */
28008
28009 static void
28010 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28011 const char *buffer ATTRIBUTE_UNUSED)
28012 {
28013 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28014 {
28015 macinfo_entry e;
28016 /* Insert a dummy first entry to be able to optimize the whole
28017 predefined macro block using DW_MACRO_import. */
28018 if (macinfo_table->is_empty () && lineno <= 1)
28019 {
28020 e.code = 0;
28021 e.lineno = 0;
28022 e.info = NULL;
28023 vec_safe_push (macinfo_table, e);
28024 }
28025 e.code = DW_MACINFO_undef;
28026 e.lineno = lineno;
28027 e.info = ggc_strdup (buffer);
28028 vec_safe_push (macinfo_table, e);
28029 }
28030 }
28031
28032 /* Helpers to manipulate the hash table of macinfo entries. */
28033
28034 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28035 {
28036 static inline hashval_t hash (const macinfo_entry *);
28037 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28038 };
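/* Only the INFO string is hashed and compared; for the DW_MACRO_import
   entries deduplicated with this table, that string is the comdat group
   name, which already encodes the starting line number and an md5 sum
   of the grouped ops.  */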
28039
28040 inline hashval_t
28041 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28042 {
28043 return htab_hash_string (entry->info);
28044 }
28045
28046 inline bool
28047 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28048 const macinfo_entry *entry2)
28049 {
28050 return !strcmp (entry1->info, entry2->info);
28051 }
28052
28053 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28054
28055 /* Output a single .debug_macinfo entry. */
28056
28057 static void
28058 output_macinfo_op (macinfo_entry *ref)
28059 {
28060 int file_num;
28061 size_t len;
28062 struct indirect_string_node *node;
28063 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28064 struct dwarf_file_data *fd;
28065
28066 switch (ref->code)
28067 {
28068 case DW_MACINFO_start_file:
28069 fd = lookup_filename (ref->info);
28070 file_num = maybe_emit_file (fd);
28071 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28072 dw2_asm_output_data_uleb128 (ref->lineno,
28073 "Included from line number %lu",
28074 (unsigned long) ref->lineno);
28075 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28076 break;
28077 case DW_MACINFO_end_file:
28078 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28079 break;
28080 case DW_MACINFO_define:
28081 case DW_MACINFO_undef:
28082 len = strlen (ref->info) + 1;
28083 if (!dwarf_strict
28084 && len > DWARF_OFFSET_SIZE
28085 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28086 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28087 {
28088 ref->code = ref->code == DW_MACINFO_define
28089 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28090 output_macinfo_op (ref);
28091 return;
28092 }
28093 dw2_asm_output_data (1, ref->code,
28094 ref->code == DW_MACINFO_define
28095 ? "Define macro" : "Undefine macro");
28096 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28097 (unsigned long) ref->lineno);
28098 dw2_asm_output_nstring (ref->info, -1, "The macro");
28099 break;
28100 case DW_MACRO_define_strp:
28101 case DW_MACRO_undef_strp:
28102 node = find_AT_string (ref->info);
28103 gcc_assert (node
28104 && (node->form == DW_FORM_strp
28105 || node->form == dwarf_form (DW_FORM_strx)));
28106 dw2_asm_output_data (1, ref->code,
28107 ref->code == DW_MACRO_define_strp
28108 ? "Define macro strp"
28109 : "Undefine macro strp");
28110 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28111 (unsigned long) ref->lineno);
28112 if (node->form == DW_FORM_strp)
28113 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28114 debug_str_section, "The macro: \"%s\"",
28115 ref->info);
28116 else
28117 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28118 ref->info);
28119 break;
28120 case DW_MACRO_import:
28121 dw2_asm_output_data (1, ref->code, "Import");
28122 ASM_GENERATE_INTERNAL_LABEL (label,
28123 DEBUG_MACRO_SECTION_LABEL,
28124 ref->lineno + macinfo_label_base);
28125 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28126 break;
28127 default:
28128 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28129 ASM_COMMENT_START, (unsigned long) ref->code);
28130 break;
28131 }
28132 }
28133
28134 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28135    the .debug_macinfo sections of other compilation units.  IDX is the
28136    first index of a define/undef.  If the range can be shared, emit a
28137    DW_MACRO_import entry referencing a comdat .debug_macinfo section and
28138    return the number of ops that should be emitted in that section.
28139    If the define/undef entry should be emitted normally, return 0. */
28140
28141 static unsigned
28142 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28143 macinfo_hash_type **macinfo_htab)
28144 {
28145 macinfo_entry *first, *second, *cur, *inc;
28146 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28147 unsigned char checksum[16];
28148 struct md5_ctx ctx;
28149 char *grp_name, *tail;
28150 const char *base;
28151 unsigned int i, count, encoded_filename_len, linebuf_len;
28152 macinfo_entry **slot;
28153
28154 first = &(*macinfo_table)[idx];
28155 second = &(*macinfo_table)[idx + 1];
28156
28157 /* Optimize only if there are at least two consecutive define/undef ops,
28158 and either all of them are before first DW_MACINFO_start_file
28159 with lineno {0,1} (i.e. predefined macro block), or all of them are
28160 in some included header file. */
28161 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28162 return 0;
28163 if (vec_safe_is_empty (files))
28164 {
28165 if (first->lineno > 1 || second->lineno > 1)
28166 return 0;
28167 }
28168 else if (first->lineno == 0)
28169 return 0;
28170
28171 /* Find the last define/undef entry that can be grouped together
28172 with first and at the same time compute md5 checksum of their
28173 codes, linenumbers and strings. */
28174 md5_init_ctx (&ctx);
28175 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28176 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28177 break;
28178 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28179 break;
28180 else
28181 {
28182 unsigned char code = cur->code;
28183 md5_process_bytes (&code, 1, &ctx);
28184 checksum_uleb128 (cur->lineno, &ctx);
28185 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28186 }
28187 md5_finish_ctx (&ctx, checksum);
28188 count = i - idx;
28189
28190 /* From the containing include filename (if any) pick up just
28191 usable characters from its basename. */
28192 if (vec_safe_is_empty (files))
28193 base = "";
28194 else
28195 base = lbasename (files->last ().info);
28196 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28197 if (ISIDNUM (base[i]) || base[i] == '.')
28198 encoded_filename_len++;
28199 /* Count . at the end. */
28200 if (encoded_filename_len)
28201 encoded_filename_len++;
28202
28203 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28204 linebuf_len = strlen (linebuf);
28205
28206 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
28207 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28208 + 16 * 2 + 1);
28209 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28210 tail = grp_name + 4;
28211 if (encoded_filename_len)
28212 {
28213 for (i = 0; base[i]; i++)
28214 if (ISIDNUM (base[i]) || base[i] == '.')
28215 *tail++ = base[i];
28216 *tail++ = '.';
28217 }
28218 memcpy (tail, linebuf, linebuf_len);
28219 tail += linebuf_len;
28220 *tail++ = '.';
28221 for (i = 0; i < 16; i++)
28222 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
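  /* Illustrative example: for a group of defines/undefs included from
     "foo.h" starting at line 10, with 4-byte DWARF offsets, GRP_NAME
     would look like "wm4.foo.h.10." followed by 32 hex md5 digits.  */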
28223
28224 /* Construct a macinfo_entry for DW_MACRO_import
28225 in the empty vector entry before the first define/undef. */
28226 inc = &(*macinfo_table)[idx - 1];
28227 inc->code = DW_MACRO_import;
28228 inc->lineno = 0;
28229 inc->info = ggc_strdup (grp_name);
28230 if (!*macinfo_htab)
28231 *macinfo_htab = new macinfo_hash_type (10);
28232 /* Avoid emitting duplicates. */
28233 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28234 if (*slot != NULL)
28235 {
28236 inc->code = 0;
28237 inc->info = NULL;
28238 /* If such an entry has been used before, just emit
28239 a DW_MACRO_import op. */
28240 inc = *slot;
28241 output_macinfo_op (inc);
28242 /* And clear all macinfo_entry in the range to avoid emitting them
28243 in the second pass. */
28244 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28245 {
28246 cur->code = 0;
28247 cur->info = NULL;
28248 }
28249 }
28250 else
28251 {
28252 *slot = inc;
28253 inc->lineno = (*macinfo_htab)->elements ();
28254 output_macinfo_op (inc);
28255 }
28256 return count;
28257 }
28258
28259 /* Save any strings needed by the macinfo table in the debug str
28260 table. All strings must be collected into the table by the time
28261 index_string is called. */
28262
28263 static void
28264 save_macinfo_strings (void)
28265 {
28266 unsigned len;
28267 unsigned i;
28268 macinfo_entry *ref;
28269
28270 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28271 {
28272 switch (ref->code)
28273 {
28274 /* Match the logic in output_macinfo_op to decide on
28275 indirect strings. */
28276 case DW_MACINFO_define:
28277 case DW_MACINFO_undef:
28278 len = strlen (ref->info) + 1;
28279 if (!dwarf_strict
28280 && len > DWARF_OFFSET_SIZE
28281 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28282 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28283 set_indirect_string (find_AT_string (ref->info));
28284 break;
28285 case DW_MACRO_define_strp:
28286 case DW_MACRO_undef_strp:
28287 set_indirect_string (find_AT_string (ref->info));
28288 break;
28289 default:
28290 break;
28291 }
28292 }
28293 }
28294
28295 /* Output macinfo section(s). */
28296
28297 static void
28298 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28299 {
28300 unsigned i;
28301 unsigned long length = vec_safe_length (macinfo_table);
28302 macinfo_entry *ref;
28303 vec<macinfo_entry, va_gc> *files = NULL;
28304 macinfo_hash_type *macinfo_htab = NULL;
28305 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28306
28307 if (! length)
28308 return;
28309
28310 /* output_macinfo* uses these interchangeably. */
28311 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28312 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28313 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28314 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28315
28316 /* AIX Assembler inserts the length, so adjust the reference to match the
28317 offset expected by debuggers. */
28318 strcpy (dl_section_ref, debug_line_label);
28319 if (XCOFF_DEBUGGING_INFO)
28320 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28321
28322 /* For .debug_macro emit the section header. */
28323 if (!dwarf_strict || dwarf_version >= 5)
28324 {
28325 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28326 "DWARF macro version number");
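      /* Flag bits: bit 0 selects 64-bit section offsets, bit 1 says a
         debug_line offset (lineptr) follows; hence 3 or 2 below.  */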
28327 if (DWARF_OFFSET_SIZE == 8)
28328 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28329 else
28330 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28331 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28332 debug_line_section, NULL);
28333 }
28334
28335   /* This first loop emits the primary .debug_macinfo section, clearing
28336      each macinfo_entry after its op has been emitted.
28337      If a longer range of define/undef ops can be optimized using
28338      DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28339      the vector entry before the first define/undef in the range, while
28340      the define/undef ops themselves are kept but not emitted here.  */
28341 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28342 {
28343 switch (ref->code)
28344 {
28345 case DW_MACINFO_start_file:
28346 vec_safe_push (files, *ref);
28347 break;
28348 case DW_MACINFO_end_file:
28349 if (!vec_safe_is_empty (files))
28350 files->pop ();
28351 break;
28352 case DW_MACINFO_define:
28353 case DW_MACINFO_undef:
28354 if ((!dwarf_strict || dwarf_version >= 5)
28355 && HAVE_COMDAT_GROUP
28356 && vec_safe_length (files) != 1
28357 && i > 0
28358 && i + 1 < length
28359 && (*macinfo_table)[i - 1].code == 0)
28360 {
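	    /* The preceding entry has code 0 (either the dummy entry or an
	       already cleared op), so optimize_macinfo_range may overwrite
	       it with a DW_MACRO_import referencing a comdat section.  */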
28361 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28362 if (count)
28363 {
28364 i += count - 1;
28365 continue;
28366 }
28367 }
28368 break;
28369 case 0:
28370 /* A dummy entry may be inserted at the beginning to be able
28371 to optimize the whole block of predefined macros. */
28372 if (i == 0)
28373 continue;
28374 default:
28375 break;
28376 }
28377 output_macinfo_op (ref);
28378 ref->info = NULL;
28379 ref->code = 0;
28380 }
28381
28382 if (!macinfo_htab)
28383 return;
28384
28385 /* Save the number of transparent includes so we can adjust the
28386 label number for the fat LTO object DWARF. */
28387 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28388
28389 delete macinfo_htab;
28390 macinfo_htab = NULL;
28391
28392 /* If any DW_MACRO_import were used, on those DW_MACRO_import entries
28393 terminate the current chain and switch to a new comdat .debug_macinfo
28394 section and emit the define/undef entries within it. */
28395 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28396 switch (ref->code)
28397 {
28398 case 0:
28399 continue;
28400 case DW_MACRO_import:
28401 {
28402 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28403 tree comdat_key = get_identifier (ref->info);
28404 /* Terminate the previous .debug_macinfo section. */
28405 dw2_asm_output_data (1, 0, "End compilation unit");
28406 targetm.asm_out.named_section (debug_macinfo_section_name,
28407 SECTION_DEBUG
28408 | SECTION_LINKONCE
28409 | (early_lto_debug
28410 ? SECTION_EXCLUDE : 0),
28411 comdat_key);
28412 ASM_GENERATE_INTERNAL_LABEL (label,
28413 DEBUG_MACRO_SECTION_LABEL,
28414 ref->lineno + macinfo_label_base);
28415 ASM_OUTPUT_LABEL (asm_out_file, label);
28416 ref->code = 0;
28417 ref->info = NULL;
28418 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28419 "DWARF macro version number");
28420 if (DWARF_OFFSET_SIZE == 8)
28421 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28422 else
28423 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28424 }
28425 break;
28426 case DW_MACINFO_define:
28427 case DW_MACINFO_undef:
28428 output_macinfo_op (ref);
28429 ref->code = 0;
28430 ref->info = NULL;
28431 break;
28432 default:
28433 gcc_unreachable ();
28434 }
28435
28436 macinfo_label_base += macinfo_label_base_adj;
28437 }
28438
28439 /* Initialize the various sections and labels for dwarf output, either for
28440    early LTO debug info or for normal debug info depending on EARLY_LTO_DEBUG.
28441    Returns the generation (zero-based number of times the function was called). */
28442
28443 static unsigned
28444 init_sections_and_labels (bool early_lto_debug)
28445 {
28446 /* As we may get called multiple times have a generation count for
28447 labels. */
28448 static unsigned generation = 0;
28449
28450 if (early_lto_debug)
28451 {
28452 if (!dwarf_split_debug_info)
28453 {
28454 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28455 SECTION_DEBUG | SECTION_EXCLUDE,
28456 NULL);
28457 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28458 SECTION_DEBUG | SECTION_EXCLUDE,
28459 NULL);
28460 debug_macinfo_section_name
28461 = ((dwarf_strict && dwarf_version < 5)
28462 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28463 debug_macinfo_section = get_section (debug_macinfo_section_name,
28464 SECTION_DEBUG
28465 | SECTION_EXCLUDE, NULL);
28466 }
28467 else
28468 {
28469 /* ??? Which of the following do we need early? */
28470 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28471 SECTION_DEBUG | SECTION_EXCLUDE,
28472 NULL);
28473 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28474 SECTION_DEBUG | SECTION_EXCLUDE,
28475 NULL);
28476 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28477 SECTION_DEBUG
28478 | SECTION_EXCLUDE, NULL);
28479 debug_skeleton_abbrev_section
28480 = get_section (DEBUG_LTO_ABBREV_SECTION,
28481 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28482 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28483 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28484 generation);
28485
28486 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28487 stay in the main .o, but the skeleton_line goes into the split
28488 off dwo. */
28489 debug_skeleton_line_section
28490 = get_section (DEBUG_LTO_LINE_SECTION,
28491 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28492 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28493 DEBUG_SKELETON_LINE_SECTION_LABEL,
28494 generation);
28495 debug_str_offsets_section
28496 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28497 SECTION_DEBUG | SECTION_EXCLUDE,
28498 NULL);
28499 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28500 DEBUG_SKELETON_INFO_SECTION_LABEL,
28501 generation);
28502 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28503 DEBUG_STR_DWO_SECTION_FLAGS,
28504 NULL);
28505 debug_macinfo_section_name
28506 = ((dwarf_strict && dwarf_version < 5)
28507 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28508 debug_macinfo_section = get_section (debug_macinfo_section_name,
28509 SECTION_DEBUG | SECTION_EXCLUDE,
28510 NULL);
28511 }
28512 /* For macro info and the file table we have to refer to a
28513 debug_line section. */
28514 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28515 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28516 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28517 DEBUG_LINE_SECTION_LABEL, generation);
28518
28519 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28520 DEBUG_STR_SECTION_FLAGS
28521 | SECTION_EXCLUDE, NULL);
28522 if (!dwarf_split_debug_info && !dwarf2out_as_loc_support)
28523 debug_line_str_section
28524 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28525 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28526 }
28527 else
28528 {
28529 if (!dwarf_split_debug_info)
28530 {
28531 debug_info_section = get_section (DEBUG_INFO_SECTION,
28532 SECTION_DEBUG, NULL);
28533 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28534 SECTION_DEBUG, NULL);
28535 debug_loc_section = get_section (dwarf_version >= 5
28536 ? DEBUG_LOCLISTS_SECTION
28537 : DEBUG_LOC_SECTION,
28538 SECTION_DEBUG, NULL);
28539 debug_macinfo_section_name
28540 = ((dwarf_strict && dwarf_version < 5)
28541 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28542 debug_macinfo_section = get_section (debug_macinfo_section_name,
28543 SECTION_DEBUG, NULL);
28544 }
28545 else
28546 {
28547 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28548 SECTION_DEBUG | SECTION_EXCLUDE,
28549 NULL);
28550 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28551 SECTION_DEBUG | SECTION_EXCLUDE,
28552 NULL);
28553 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28554 SECTION_DEBUG, NULL);
28555 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28556 SECTION_DEBUG, NULL);
28557 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28558 SECTION_DEBUG, NULL);
28559 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28560 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28561 generation);
28562
28563 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28564 stay in the main .o, but the skeleton_line goes into the
28565 split off dwo. */
28566 debug_skeleton_line_section
28567 = get_section (DEBUG_DWO_LINE_SECTION,
28568 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28569 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28570 DEBUG_SKELETON_LINE_SECTION_LABEL,
28571 generation);
28572 debug_str_offsets_section
28573 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28574 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28575 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28576 DEBUG_SKELETON_INFO_SECTION_LABEL,
28577 generation);
28578 debug_loc_section = get_section (dwarf_version >= 5
28579 ? DEBUG_DWO_LOCLISTS_SECTION
28580 : DEBUG_DWO_LOC_SECTION,
28581 SECTION_DEBUG | SECTION_EXCLUDE,
28582 NULL);
28583 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28584 DEBUG_STR_DWO_SECTION_FLAGS,
28585 NULL);
28586 debug_macinfo_section_name
28587 = ((dwarf_strict && dwarf_version < 5)
28588 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28589 debug_macinfo_section = get_section (debug_macinfo_section_name,
28590 SECTION_DEBUG | SECTION_EXCLUDE,
28591 NULL);
28592 }
28593 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28594 SECTION_DEBUG, NULL);
28595 debug_line_section = get_section (DEBUG_LINE_SECTION,
28596 SECTION_DEBUG, NULL);
28597 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28598 SECTION_DEBUG, NULL);
28599 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28600 SECTION_DEBUG, NULL);
28601 debug_str_section = get_section (DEBUG_STR_SECTION,
28602 DEBUG_STR_SECTION_FLAGS, NULL);
28603 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28604 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28605 DEBUG_STR_SECTION_FLAGS, NULL);
28606
28607 debug_ranges_section = get_section (dwarf_version >= 5
28608 ? DEBUG_RNGLISTS_SECTION
28609 : DEBUG_RANGES_SECTION,
28610 SECTION_DEBUG, NULL);
28611 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28612 SECTION_DEBUG, NULL);
28613 }
28614
28615 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28616 DEBUG_ABBREV_SECTION_LABEL, generation);
28617 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28618 DEBUG_INFO_SECTION_LABEL, generation);
28619 info_section_emitted = false;
28620 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28621 DEBUG_LINE_SECTION_LABEL, generation);
28622 /* There are up to 4 unique ranges labels per generation.
28623 See also output_rnglists. */
28624 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28625 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28626 if (dwarf_version >= 5 && dwarf_split_debug_info)
28627 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28628 DEBUG_RANGES_SECTION_LABEL,
28629 1 + generation * 4);
28630 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28631 DEBUG_ADDR_SECTION_LABEL, generation);
28632 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28633 (dwarf_strict && dwarf_version < 5)
28634 ? DEBUG_MACINFO_SECTION_LABEL
28635 : DEBUG_MACRO_SECTION_LABEL, generation);
28636 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28637 generation);
28638
28639 ++generation;
28640 return generation - 1;
28641 }
28642
28643 /* Set up for Dwarf output at the start of compilation. */
28644
28645 static void
28646 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28647 {
28648 /* Allocate the file_table. */
28649 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28650
28651 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28652 /* Allocate the decl_die_table. */
28653 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28654
28655 /* Allocate the decl_loc_table. */
28656 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28657
28658 /* Allocate the cached_dw_loc_list_table. */
28659 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28660
28661 /* Allocate the initial hunk of the decl_scope_table. */
28662 vec_alloc (decl_scope_table, 256);
28663
28664 /* Allocate the initial hunk of the abbrev_die_table. */
28665 vec_alloc (abbrev_die_table, 256);
28666 /* Zero-th entry is allocated, but unused. */
28667 abbrev_die_table->quick_push (NULL);
28668
28669 /* Allocate the dwarf_proc_stack_usage_map. */
28670 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28671
28672 /* Allocate the pubtypes and pubnames vectors. */
28673 vec_alloc (pubname_table, 32);
28674 vec_alloc (pubtype_table, 32);
28675
28676 vec_alloc (incomplete_types, 64);
28677
28678 vec_alloc (used_rtx_array, 32);
28679
28680 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28681 vec_alloc (macinfo_table, 64);
28682 #endif
28683
28684 /* If front-ends already registered a main translation unit but we were not
28685 ready to perform the association, do this now. */
28686 if (main_translation_unit != NULL_TREE)
28687 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28688 }
28689
28690 /* Called before compile () starts outputting functions, variables
28691 and toplevel asms into assembly. */
28692
28693 static void
28694 dwarf2out_assembly_start (void)
28695 {
28696 if (text_section_line_info)
28697 return;
28698
28699 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28700 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28701 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28702 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28703 COLD_TEXT_SECTION_LABEL, 0);
28704 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28705
28706 switch_to_section (text_section);
28707 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28708 #endif
28709
28710 /* Make sure the line number table for .text always exists. */
28711 text_section_line_info = new_line_info_table ();
28712 text_section_line_info->end_label = text_end_label;
28713
28714 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28715 cur_line_info_table = text_section_line_info;
28716 #endif
28717
28718 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28719 && dwarf2out_do_cfi_asm ()
28720 && !dwarf2out_do_eh_frame ())
28721 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28722 }
28723
28724 /* A helper function for dwarf2out_finish called through
28725 htab_traverse. Assign a string its index. All strings must be
28726 collected into the table by the time index_string is called,
28727 because the indexing code relies on htab_traverse to traverse nodes
28728 in the same order for each run. */
28729
28730 int
28731 index_string (indirect_string_node **h, unsigned int *index)
28732 {
28733 indirect_string_node *node = *h;
28734
28735 find_string_form (node);
28736 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28737 {
28738 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28739 node->index = *index;
28740 *index += 1;
28741 }
28742 return 1;
28743 }
28744
28745 /* A helper function for output_indirect_strings called through
28746 htab_traverse. Output the offset to a string and update the
28747 current offset. */
28748
28749 int
28750 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28751 {
28752 indirect_string_node *node = *h;
28753
28754 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28755 {
28756 /* Assert that this node has been assigned an index. */
28757 gcc_assert (node->index != NO_INDEX_ASSIGNED
28758 && node->index != NOT_INDEXED);
28759 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28760 "indexed string 0x%x: %s", node->index, node->str);
28761 *offset += strlen (node->str) + 1;
28762 }
28763 return 1;
28764 }
28765
28766 /* A helper function for dwarf2out_finish called through
28767 htab_traverse. Output the indexed string. */
28768
28769 int
28770 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28771 {
28772 struct indirect_string_node *node = *h;
28773
28774 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28775 {
28776 /* Assert that the strings are output in the same order as their
28777 indexes were assigned. */
28778 gcc_assert (*cur_idx == node->index);
28779 assemble_string (node->str, strlen (node->str) + 1);
28780 *cur_idx += 1;
28781 }
28782 return 1;
28783 }
28784
28785 /* A helper function for output_indirect_strings. Counts the number
28786    of indexed string offsets.  Must match the logic of the functions
28787 output_index_string[_offsets] above. */
28788 int
28789 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28790 {
28791 struct indirect_string_node *node = *h;
28792
28793 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28794 *last_idx += 1;
28795 return 1;
28796 }
28797
28798 /* A helper function for dwarf2out_finish called through
28799 htab_traverse. Emit one queued .debug_str string. */
28800
28801 int
28802 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28803 {
28804 struct indirect_string_node *node = *h;
28805
28806 node->form = find_string_form (node);
28807 if (node->form == form && node->refcount > 0)
28808 {
28809 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28810 assemble_string (node->str, strlen (node->str) + 1);
28811 }
28812
28813 return 1;
28814 }
28815
28816 /* Output the indexed string table. */
28817
28818 static void
28819 output_indirect_strings (void)
28820 {
28821 switch_to_section (debug_str_section);
28822 if (!dwarf_split_debug_info)
28823 debug_str_hash->traverse<enum dwarf_form,
28824 output_indirect_string> (DW_FORM_strp);
28825 else
28826 {
28827 unsigned int offset = 0;
28828 unsigned int cur_idx = 0;
28829
28830 if (skeleton_debug_str_hash)
28831 skeleton_debug_str_hash->traverse<enum dwarf_form,
28832 output_indirect_string> (DW_FORM_strp);
28833
28834 switch_to_section (debug_str_offsets_section);
28835 	 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28836 	    header.  Note that we don't need to generate a label for the
28837 	    actual index table following the header here, because this is
28838 	    for the split dwarf case only.  In a .dwo file there is only
28839 	    one string offsets table (and one debug info section).  But
28840 	    if we were to start using string offset tables for the main (or
28841 	    skeleton) unit, we would have to add a DW_AT_str_offsets_base
28842 	    pointing to the actual index after the header.  Split dwarf
28843 	    units will never have a string offsets base attribute.  When
28844 	    a split unit is moved into a .dwp file, the string offsets can
28845 	    be found through the .debug_cu_index section table. */
28846 if (dwarf_version >= 5)
28847 {
28848 unsigned int last_idx = 0;
28849 unsigned long str_offsets_length;
28850
28851 debug_str_hash->traverse_noresize
28852 <unsigned int *, count_index_strings> (&last_idx);
28853 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
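	      /* The extra 4 bytes cover the 2-byte version and the 2-byte
		 padding emitted just below, which the unit length field
		 must include.  */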
28854 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28855 dw2_asm_output_data (4, 0xffffffff,
28856 "Escape value for 64-bit DWARF extension");
28857 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28858 "Length of string offsets unit");
28859 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28860 dw2_asm_output_data (2, 0, "Header zero padding");
28861 }
28862 debug_str_hash->traverse_noresize
28863 <unsigned int *, output_index_string_offset> (&offset);
28864 switch_to_section (debug_str_dwo_section);
28865 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28866 (&cur_idx);
28867 }
28868 }
28869
28870 /* Callback for htab_traverse to assign an index to an entry in the
28871 table, and to write that entry to the .debug_addr section. */
28872
28873 int
28874 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28875 {
28876 addr_table_entry *entry = *slot;
28877
28878 if (entry->refcount == 0)
28879 {
28880 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28881 || entry->index == NOT_INDEXED);
28882 return 1;
28883 }
28884
28885 gcc_assert (entry->index == *cur_index);
28886 (*cur_index)++;
28887
28888 switch (entry->kind)
28889 {
28890 case ate_kind_rtx:
28891 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28892 "0x%x", entry->index);
28893 break;
28894 case ate_kind_rtx_dtprel:
28895 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28896 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28897 DWARF2_ADDR_SIZE,
28898 entry->addr.rtl);
28899 fputc ('\n', asm_out_file);
28900 break;
28901 case ate_kind_label:
28902 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28903 "0x%x", entry->index);
28904 break;
28905 default:
28906 gcc_unreachable ();
28907 }
28908 return 1;
28909 }
28910
28911 /* A helper function for dwarf2out_finish. Counts the number
28912    of indexed addresses.  Must match the logic of the function
28913 output_addr_table_entry above. */
28914 int
28915 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28916 {
28917 addr_table_entry *entry = *slot;
28918
28919 if (entry->refcount > 0)
28920 *last_idx += 1;
28921 return 1;
28922 }
28923
28924 /* Produce the .debug_addr section. */
28925
28926 static void
28927 output_addr_table (void)
28928 {
28929 unsigned int index = 0;
28930 if (addr_index_table == NULL || addr_index_table->size () == 0)
28931 return;
28932
28933 switch_to_section (debug_addr_section);
28934 addr_index_table
28935 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28936 }
28937
28938 #if ENABLE_ASSERT_CHECKING
28939 /* Verify that all marks are clear. */
28940
28941 static void
28942 verify_marks_clear (dw_die_ref die)
28943 {
28944 dw_die_ref c;
28945
28946 gcc_assert (! die->die_mark);
28947 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28948 }
28949 #endif /* ENABLE_ASSERT_CHECKING */
28950
28951 /* Clear the marks for a die and its children.
28952 Be cool if the mark isn't set. */
28953
28954 static void
28955 prune_unmark_dies (dw_die_ref die)
28956 {
28957 dw_die_ref c;
28958
28959 if (die->die_mark)
28960 die->die_mark = 0;
28961 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28962 }
28963
28964 /* Given LOC that is referenced by a DIE we're marking as used, find all
28965 referenced DWARF procedures it references and mark them as used. */
28966
28967 static void
28968 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28969 {
28970 for (; loc != NULL; loc = loc->dw_loc_next)
28971 switch (loc->dw_loc_opc)
28972 {
28973 case DW_OP_implicit_pointer:
28974 case DW_OP_convert:
28975 case DW_OP_reinterpret:
28976 case DW_OP_GNU_implicit_pointer:
28977 case DW_OP_GNU_convert:
28978 case DW_OP_GNU_reinterpret:
28979 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28980 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28981 break;
28982 case DW_OP_GNU_variable_value:
28983 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28984 {
28985 dw_die_ref ref
28986 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28987 if (ref == NULL)
28988 break;
28989 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28990 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28991 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28992 }
28993 /* FALLTHRU */
28994 case DW_OP_call2:
28995 case DW_OP_call4:
28996 case DW_OP_call_ref:
28997 case DW_OP_const_type:
28998 case DW_OP_GNU_const_type:
28999 case DW_OP_GNU_parameter_ref:
29000 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29001 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29002 break;
29003 case DW_OP_regval_type:
29004 case DW_OP_deref_type:
29005 case DW_OP_GNU_regval_type:
29006 case DW_OP_GNU_deref_type:
29007 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29008 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29009 break;
29010 case DW_OP_entry_value:
29011 case DW_OP_GNU_entry_value:
29012 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29013 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29014 break;
29015 default:
29016 break;
29017 }
29018 }
29019
29020 /* Given DIE that we're marking as used, find any other dies
29021 it references as attributes and mark them as used. */
29022
29023 static void
29024 prune_unused_types_walk_attribs (dw_die_ref die)
29025 {
29026 dw_attr_node *a;
29027 unsigned ix;
29028
29029 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29030 {
29031 switch (AT_class (a))
29032 {
29033 /* Make sure DWARF procedures referenced by location descriptions will
29034 get emitted. */
29035 case dw_val_class_loc:
29036 prune_unused_types_walk_loc_descr (AT_loc (a));
29037 break;
29038 case dw_val_class_loc_list:
29039 for (dw_loc_list_ref list = AT_loc_list (a);
29040 list != NULL;
29041 list = list->dw_loc_next)
29042 prune_unused_types_walk_loc_descr (list->expr);
29043 break;
29044
29045 case dw_val_class_view_list:
29046 /* This points to a loc_list in another attribute, so it's
29047 already covered. */
29048 break;
29049
29050 case dw_val_class_die_ref:
29051 /* A reference to another DIE.
29052 Make sure that it will get emitted.
29053 If it was broken out into a comdat group, don't follow it. */
29054 if (! AT_ref (a)->comdat_type_p
29055 || a->dw_attr == DW_AT_specification)
29056 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29057 break;
29058
29059 case dw_val_class_str:
29060 /* Set the string's refcount to 0 so that prune_unused_types_mark
29061 accounts properly for it. */
29062 a->dw_attr_val.v.val_str->refcount = 0;
29063 break;
29064
29065 default:
29066 break;
29067 }
29068 }
29069 }
29070
29071 /* Mark the children DIEs of DIE describing its generic parms and args. */
29072
29073 static void
29074 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29075 {
29076 dw_die_ref c;
29077
29078 if (die == NULL || die->die_child == NULL)
29079 return;
29080 c = die->die_child;
29081 do
29082 {
29083 if (is_template_parameter (c))
29084 prune_unused_types_mark (c, 1);
29085 c = c->die_sib;
29086 } while (c && c != die->die_child);
29087 }
29088
29089 /* Mark DIE as being used. If DOKIDS is true, then walk down
29090 to DIE's children. */
29091
29092 static void
29093 prune_unused_types_mark (dw_die_ref die, int dokids)
29094 {
29095 dw_die_ref c;
29096
29097 if (die->die_mark == 0)
29098 {
29099 /* We haven't done this node yet. Mark it as used. */
29100 die->die_mark = 1;
29101 /* If this is the DIE of a generic type instantiation,
29102 mark the children DIEs that describe its generic parms and
29103 args. */
29104 prune_unused_types_mark_generic_parms_dies (die);
29105
29106 /* We also have to mark its parents as used.
29107 (But we don't want to mark our parent's kids due to this,
29108 unless it is a class.) */
29109 if (die->die_parent)
29110 prune_unused_types_mark (die->die_parent,
29111 class_scope_p (die->die_parent));
29112
29113 /* Mark any referenced nodes. */
29114 prune_unused_types_walk_attribs (die);
29115
29116 /* If this node is a specification,
29117 also mark the definition, if it exists. */
29118 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29119 prune_unused_types_mark (die->die_definition, 1);
29120 }
29121
29122 if (dokids && die->die_mark != 2)
29123 {
29124 /* We need to walk the children, but haven't done so yet.
29125 Remember that we've walked the kids. */
29126 die->die_mark = 2;
29127
29128 /* If this is an array type, we need to make sure our
29129 kids get marked, even if they're types. If we're
29130 breaking out types into comdat sections, do this
29131 for all type definitions. */
29132 if (die->die_tag == DW_TAG_array_type
29133 || (use_debug_types
29134 && is_type_die (die) && ! is_declaration_die (die)))
29135 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29136 else
29137 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29138 }
29139 }
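
/* A note on the die_mark values used by the prune_unused_types_* routines:
   0 means the DIE has not been marked as used, 1 means it has been marked
   as used but its children have not necessarily been walked yet, and 2
   means both the DIE and its children have been processed.  The marks are
   reset to 0 by prune_unmark_dies once the whole pass is finished.  */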
29140
29141 /* For local classes, check whether any static member functions were
29142    emitted and, if so, mark them. */
29143
29144 static void
29145 prune_unused_types_walk_local_classes (dw_die_ref die)
29146 {
29147 dw_die_ref c;
29148
29149 if (die->die_mark == 2)
29150 return;
29151
29152 switch (die->die_tag)
29153 {
29154 case DW_TAG_structure_type:
29155 case DW_TAG_union_type:
29156 case DW_TAG_class_type:
29157 break;
29158
29159 case DW_TAG_subprogram:
29160 if (!get_AT_flag (die, DW_AT_declaration)
29161 || die->die_definition != NULL)
29162 prune_unused_types_mark (die, 1);
29163 return;
29164
29165 default:
29166 return;
29167 }
29168
29169 /* Mark children. */
29170 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29171 }
29172
29173 /* Walk the tree DIE and mark types that we actually use. */
29174
29175 static void
29176 prune_unused_types_walk (dw_die_ref die)
29177 {
29178 dw_die_ref c;
29179
29180 /* Don't do anything if this node is already marked and
29181 children have been marked as well. */
29182 if (die->die_mark == 2)
29183 return;
29184
29185 switch (die->die_tag)
29186 {
29187 case DW_TAG_structure_type:
29188 case DW_TAG_union_type:
29189 case DW_TAG_class_type:
29190 if (die->die_perennial_p)
29191 break;
29192
29193 for (c = die->die_parent; c; c = c->die_parent)
29194 if (c->die_tag == DW_TAG_subprogram)
29195 break;
29196
29197 /* Finding used static member functions inside of classes
29198 is needed just for local classes, because for other classes
29199 static member function DIEs with DW_AT_specification
29200 are emitted outside of the DW_TAG_*_type. If we ever change
29201 it, we'd need to call this even for non-local classes. */
29202 if (c)
29203 prune_unused_types_walk_local_classes (die);
29204
29205 /* It's a type node --- don't mark it. */
29206 return;
29207
29208 case DW_TAG_const_type:
29209 case DW_TAG_packed_type:
29210 case DW_TAG_pointer_type:
29211 case DW_TAG_reference_type:
29212 case DW_TAG_rvalue_reference_type:
29213 case DW_TAG_volatile_type:
29214 case DW_TAG_typedef:
29215 case DW_TAG_array_type:
29216 case DW_TAG_interface_type:
29217 case DW_TAG_friend:
29218 case DW_TAG_enumeration_type:
29219 case DW_TAG_subroutine_type:
29220 case DW_TAG_string_type:
29221 case DW_TAG_set_type:
29222 case DW_TAG_subrange_type:
29223 case DW_TAG_ptr_to_member_type:
29224 case DW_TAG_file_type:
29225 /* Type nodes are useful only when other DIEs reference them --- don't
29226 mark them. */
29227 /* FALLTHROUGH */
29228
29229 case DW_TAG_dwarf_procedure:
29230 /* Likewise for DWARF procedures. */
29231
29232 if (die->die_perennial_p)
29233 break;
29234
29235 return;
29236
29237 default:
29238 /* Mark everything else. */
29239 break;
29240 }
29241
29242 if (die->die_mark == 0)
29243 {
29244 die->die_mark = 1;
29245
29246 /* Now, mark any dies referenced from here. */
29247 prune_unused_types_walk_attribs (die);
29248 }
29249
29250 die->die_mark = 2;
29251
29252 /* Mark children. */
29253 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29254 }
29255
29256 /* Increment the string counts on strings referred to from DIE's
29257 attributes. */
29258
29259 static void
29260 prune_unused_types_update_strings (dw_die_ref die)
29261 {
29262 dw_attr_node *a;
29263 unsigned ix;
29264
29265 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29266 if (AT_class (a) == dw_val_class_str)
29267 {
29268 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29269 s->refcount++;
29270 /* Avoid unnecessarily putting strings that are used less than
29271 twice in the hash table. */
29272 if (s->refcount
29273 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29274 {
29275 indirect_string_node **slot
29276 = debug_str_hash->find_slot_with_hash (s->str,
29277 htab_hash_string (s->str),
29278 INSERT);
29279 gcc_assert (*slot == NULL);
29280 *slot = s;
29281 }
29282 }
29283 }
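
/* The threshold above mirrors how strings are emitted: when the debug
   string section supports SECTION_MERGE, a (sufficiently long) string is
   emitted indirectly through .debug_str even if referenced only once,
   since the linker can merge duplicates across objects, so it is entered
   into debug_str_hash on its first use; otherwise only strings referenced
   at least twice are worth the indirection and the table entry.  */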
29284
29285 /* Mark DIE and its children as removed. */
29286
29287 static void
29288 mark_removed (dw_die_ref die)
29289 {
29290 dw_die_ref c;
29291 die->removed = true;
29292 FOR_EACH_CHILD (die, c, mark_removed (c));
29293 }
29294
29295 /* Remove from the tree DIE any dies that aren't marked. */
29296
29297 static void
29298 prune_unused_types_prune (dw_die_ref die)
29299 {
29300 dw_die_ref c;
29301
29302 gcc_assert (die->die_mark);
29303 prune_unused_types_update_strings (die);
29304
29305 if (! die->die_child)
29306 return;
29307
29308 c = die->die_child;
29309 do {
29310 dw_die_ref prev = c, next;
29311 for (c = c->die_sib; ! c->die_mark; c = next)
29312 if (c == die->die_child)
29313 {
29314 /* No marked children between 'prev' and the end of the list. */
29315 if (prev == c)
29316 /* No marked children at all. */
29317 die->die_child = NULL;
29318 else
29319 {
29320 prev->die_sib = c->die_sib;
29321 die->die_child = prev;
29322 }
29323 c->die_sib = NULL;
29324 mark_removed (c);
29325 return;
29326 }
29327 else
29328 {
29329 next = c->die_sib;
29330 c->die_sib = NULL;
29331 mark_removed (c);
29332 }
29333
29334 if (c != prev->die_sib)
29335 prev->die_sib = c;
29336 prune_unused_types_prune (c);
29337 } while (c != die->die_child);
29338 }
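
/* The pruning loop above relies on the child-list representation used in
   this file: DIE->die_child points to the last child and the children's
   die_sib links form a ring, so die_child->die_sib is the first child.
   Unmarked children are spliced out of that ring and flagged through
   mark_removed; if no marked child remains, die_child is cleared.  */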
29339
29340 /* Remove dies representing declarations that we never use. */
29341
29342 static void
29343 prune_unused_types (void)
29344 {
29345 unsigned int i;
29346 limbo_die_node *node;
29347 comdat_type_node *ctnode;
29348 pubname_entry *pub;
29349 dw_die_ref base_type;
29350
29351 #if ENABLE_ASSERT_CHECKING
29352 /* All the marks should already be clear. */
29353 verify_marks_clear (comp_unit_die ());
29354 for (node = limbo_die_list; node; node = node->next)
29355 verify_marks_clear (node->die);
29356 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29357 verify_marks_clear (ctnode->root_die);
29358 #endif /* ENABLE_ASSERT_CHECKING */
29359
29360 /* Mark types that are used in global variables. */
29361 premark_types_used_by_global_vars ();
29362
29363 /* Set the mark on nodes that are actually used. */
29364 prune_unused_types_walk (comp_unit_die ());
29365 for (node = limbo_die_list; node; node = node->next)
29366 prune_unused_types_walk (node->die);
29367 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29368 {
29369 prune_unused_types_walk (ctnode->root_die);
29370 prune_unused_types_mark (ctnode->type_die, 1);
29371 }
29372
29373 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29374 are unusual in that they are pubnames that are the children of pubtypes.
29375 They should only be marked via their parent DW_TAG_enumeration_type die,
29376 not as roots in themselves. */
29377 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29378 if (pub->die->die_tag != DW_TAG_enumerator)
29379 prune_unused_types_mark (pub->die, 1);
29380 for (i = 0; base_types.iterate (i, &base_type); i++)
29381 prune_unused_types_mark (base_type, 1);
29382
29383 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29384 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29385 callees). */
29386 cgraph_node *cnode;
29387 FOR_EACH_FUNCTION (cnode)
29388 if (cnode->referred_to_p (false))
29389 {
29390 dw_die_ref die = lookup_decl_die (cnode->decl);
29391 if (die == NULL || die->die_mark)
29392 continue;
29393 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29394 if (e->caller != cnode
29395 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29396 {
29397 prune_unused_types_mark (die, 1);
29398 break;
29399 }
29400 }
29401
29402 if (debug_str_hash)
29403 debug_str_hash->empty ();
29404 if (skeleton_debug_str_hash)
29405 skeleton_debug_str_hash->empty ();
29406 prune_unused_types_prune (comp_unit_die ());
29407 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29408 {
29409 node = *pnode;
29410 if (!node->die->die_mark)
29411 *pnode = node->next;
29412 else
29413 {
29414 prune_unused_types_prune (node->die);
29415 pnode = &node->next;
29416 }
29417 }
29418 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29419 prune_unused_types_prune (ctnode->root_die);
29420
29421 /* Leave the marks clear. */
29422 prune_unmark_dies (comp_unit_die ());
29423 for (node = limbo_die_list; node; node = node->next)
29424 prune_unmark_dies (node->die);
29425 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29426 prune_unmark_dies (ctnode->root_die);
29427 }
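
/* In short, prune_unused_types works in three phases: mark everything
   reachable (types used by global variables, the trees rooted at the
   compilation unit, limbo and comdat type roots, pubnames other than
   enumerators, the collected base types, and subprograms that may be
   referenced by DW_TAG_call_site origins under variable-tracking
   assignments), then prune the unmarked DIEs from every tree while
   re-counting string references, and finally clear the marks again so
   that later passes start from a clean state.  */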
29428
29429 /* Helpers to manipulate hash table of comdat type units. */
29430
29431 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29432 {
29433 static inline hashval_t hash (const comdat_type_node *);
29434 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29435 };
29436
29437 inline hashval_t
29438 comdat_type_hasher::hash (const comdat_type_node *type_node)
29439 {
29440 hashval_t h;
29441 memcpy (&h, type_node->signature, sizeof (h));
29442 return h;
29443 }
29444
29445 inline bool
29446 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29447 const comdat_type_node *type_node_2)
29448 {
29449 return (! memcmp (type_node_1->signature, type_node_2->signature,
29450 DWARF_TYPE_SIGNATURE_SIZE));
29451 }
29452
29453 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29454    to the location where it would have been added had we known its
29455    DECL_ASSEMBLER_NAME when the other attributes were added. This will
29456    probably improve the compactness of the debug info by removing
29457    equivalent abbrevs, and hide any differences caused by deferring the
29458    computation of the assembler name, triggered e.g. by PCH. */
29459
29460 static inline void
29461 move_linkage_attr (dw_die_ref die)
29462 {
29463 unsigned ix = vec_safe_length (die->die_attr);
29464 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29465
29466 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29467 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29468
29469 while (--ix > 0)
29470 {
29471 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29472
29473 if (prev->dw_attr == DW_AT_decl_line
29474 || prev->dw_attr == DW_AT_decl_column
29475 || prev->dw_attr == DW_AT_name)
29476 break;
29477 }
29478
29479 if (ix != vec_safe_length (die->die_attr) - 1)
29480 {
29481 die->die_attr->pop ();
29482 die->die_attr->quick_insert (ix, linkage);
29483 }
29484 }
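
/* For instance, if a DIE's attributes were pushed in the (hypothetical)
   order { DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_type,
   DW_AT_linkage_name }, the backwards scan above stops at DW_AT_decl_line
   and the just-pushed linkage name is re-inserted right after it, giving
   { DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name,
   DW_AT_type }.  */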
29485
29486 /* Helper function for resolve_addr; mark DW_TAG_base_type nodes
29487 referenced from typed stack ops and count how often they are used. */
29488
29489 static void
29490 mark_base_types (dw_loc_descr_ref loc)
29491 {
29492 dw_die_ref base_type = NULL;
29493
29494 for (; loc; loc = loc->dw_loc_next)
29495 {
29496 switch (loc->dw_loc_opc)
29497 {
29498 case DW_OP_regval_type:
29499 case DW_OP_deref_type:
29500 case DW_OP_GNU_regval_type:
29501 case DW_OP_GNU_deref_type:
29502 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29503 break;
29504 case DW_OP_convert:
29505 case DW_OP_reinterpret:
29506 case DW_OP_GNU_convert:
29507 case DW_OP_GNU_reinterpret:
29508 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29509 continue;
29510 /* FALLTHRU */
29511 case DW_OP_const_type:
29512 case DW_OP_GNU_const_type:
29513 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29514 break;
29515 case DW_OP_entry_value:
29516 case DW_OP_GNU_entry_value:
29517 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29518 continue;
29519 default:
29520 continue;
29521 }
29522 gcc_assert (base_type->die_parent == comp_unit_die ());
29523 if (base_type->die_mark)
29524 base_type->die_mark++;
29525 else
29526 {
29527 base_types.safe_push (base_type);
29528 base_type->die_mark = 1;
29529 }
29530 }
29531 }
29532
29533 /* Comparison function for sorting marked base types. */
29534
29535 static int
29536 base_type_cmp (const void *x, const void *y)
29537 {
29538 dw_die_ref dx = *(const dw_die_ref *) x;
29539 dw_die_ref dy = *(const dw_die_ref *) y;
29540 unsigned int byte_size1, byte_size2;
29541 unsigned int encoding1, encoding2;
29542 unsigned int align1, align2;
29543 if (dx->die_mark > dy->die_mark)
29544 return -1;
29545 if (dx->die_mark < dy->die_mark)
29546 return 1;
29547 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29548 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29549 if (byte_size1 < byte_size2)
29550 return 1;
29551 if (byte_size1 > byte_size2)
29552 return -1;
29553 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29554 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29555 if (encoding1 < encoding2)
29556 return 1;
29557 if (encoding1 > encoding2)
29558 return -1;
29559 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29560 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29561 if (align1 < align2)
29562 return 1;
29563 if (align1 > align2)
29564 return -1;
29565 return 0;
29566 }
29567
29568 /* Move base types marked by mark_base_types as early as possible
29569 in the CU, sorted by decreasing usage count both to make the
29570 uleb128 references as small as possible and to make sure they
29571 will have die_offset already computed by calc_die_sizes when
29572    the sizes of typed stack loc ops are computed. */
29573
29574 static void
29575 move_marked_base_types (void)
29576 {
29577 unsigned int i;
29578 dw_die_ref base_type, die, c;
29579
29580 if (base_types.is_empty ())
29581 return;
29582
29583 /* Sort by decreasing usage count, they will be added again in that
29584 order later on. */
29585 base_types.qsort (base_type_cmp);
29586 die = comp_unit_die ();
29587 c = die->die_child;
29588 do
29589 {
29590 dw_die_ref prev = c;
29591 c = c->die_sib;
29592 while (c->die_mark)
29593 {
29594 remove_child_with_prev (c, prev);
29595 /* As base types got marked, there must be at least
29596 one node other than DW_TAG_base_type. */
29597 gcc_assert (die->die_child != NULL);
29598 c = prev->die_sib;
29599 }
29600 }
29601 while (c != die->die_child);
29602 gcc_assert (die->die_child);
29603 c = die->die_child;
29604 for (i = 0; base_types.iterate (i, &base_type); i++)
29605 {
29606 base_type->die_mark = 0;
29607 base_type->die_sib = c->die_sib;
29608 c->die_sib = base_type;
29609 c = base_type;
29610 }
29611 }
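
/* Why the ordering matters: typed stack operations such as DW_OP_convert
   refer to their base type DIE through a ULEB128-encoded offset within the
   CU, so DIEs placed earlier (and therefore given smaller die_offset
   values) need fewer bytes per reference.  The helper below is an
   illustrative sketch only -- it is not used by GCC, which has its own
   size_of_uleb128 -- showing how the encoded size grows with the offset.  */
#if 0
/* Number of bytes the unsigned LEB128 encoding of VALUE occupies:
   7 payload bits per byte, least significant group first.  */
static unsigned int
example_uleb128_size (unsigned long long value)
{
  unsigned int size = 1;
  while (value >= 0x80)
    {
      value >>= 7;
      size++;
    }
  return size;
}
/* example_uleb128_size (0x7f) == 1, example_uleb128_size (0x80) == 2,
   example_uleb128_size (0x4000) == 3, and so on.  */
#endif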
29612
29613 /* Helper function for resolve_addr; attempt to resolve
29614    one CONST_STRING and return true if successful. Similarly verify that
29615 SYMBOL_REFs refer to variables emitted in the current CU. */
29616
29617 static bool
29618 resolve_one_addr (rtx *addr)
29619 {
29620 rtx rtl = *addr;
29621
29622 if (GET_CODE (rtl) == CONST_STRING)
29623 {
29624 size_t len = strlen (XSTR (rtl, 0)) + 1;
29625 tree t = build_string (len, XSTR (rtl, 0));
29626 tree tlen = size_int (len - 1);
29627 TREE_TYPE (t)
29628 = build_array_type (char_type_node, build_index_type (tlen));
29629 rtl = lookup_constant_def (t);
29630 if (!rtl || !MEM_P (rtl))
29631 return false;
29632 rtl = XEXP (rtl, 0);
29633 if (GET_CODE (rtl) == SYMBOL_REF
29634 && SYMBOL_REF_DECL (rtl)
29635 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29636 return false;
29637 vec_safe_push (used_rtx_array, rtl);
29638 *addr = rtl;
29639 return true;
29640 }
29641
29642 if (GET_CODE (rtl) == SYMBOL_REF
29643 && SYMBOL_REF_DECL (rtl))
29644 {
29645 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29646 {
29647 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29648 return false;
29649 }
29650 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29651 return false;
29652 }
29653
29654 if (GET_CODE (rtl) == CONST)
29655 {
29656 subrtx_ptr_iterator::array_type array;
29657 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29658 if (!resolve_one_addr (*iter))
29659 return false;
29660 }
29661
29662 return true;
29663 }
29664
29665 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29666 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29667 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29668
29669 static rtx
29670 string_cst_pool_decl (tree t)
29671 {
29672 rtx rtl = output_constant_def (t, 1);
29673 unsigned char *array;
29674 dw_loc_descr_ref l;
29675 tree decl;
29676 size_t len;
29677 dw_die_ref ref;
29678
29679 if (!rtl || !MEM_P (rtl))
29680 return NULL_RTX;
29681 rtl = XEXP (rtl, 0);
29682 if (GET_CODE (rtl) != SYMBOL_REF
29683 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29684 return NULL_RTX;
29685
29686 decl = SYMBOL_REF_DECL (rtl);
29687 if (!lookup_decl_die (decl))
29688 {
29689 len = TREE_STRING_LENGTH (t);
29690 vec_safe_push (used_rtx_array, rtl);
29691 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29692 array = ggc_vec_alloc<unsigned char> (len);
29693 memcpy (array, TREE_STRING_POINTER (t), len);
29694 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29695 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29696 l->dw_loc_oprnd2.v.val_vec.length = len;
29697 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29698 l->dw_loc_oprnd2.v.val_vec.array = array;
29699 add_AT_loc (ref, DW_AT_location, l);
29700 equate_decl_number_to_die (decl, ref);
29701 }
29702 return rtl;
29703 }
29704
29705 /* Helper function of resolve_addr_in_expr. LOC is
29706 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29707 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29708 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29709 with DW_OP_implicit_pointer if possible
29710    and return true; if unsuccessful, return false. */
29711
29712 static bool
29713 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29714 {
29715 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29716 HOST_WIDE_INT offset = 0;
29717 dw_die_ref ref = NULL;
29718 tree decl;
29719
29720 if (GET_CODE (rtl) == CONST
29721 && GET_CODE (XEXP (rtl, 0)) == PLUS
29722 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29723 {
29724 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29725 rtl = XEXP (XEXP (rtl, 0), 0);
29726 }
29727 if (GET_CODE (rtl) == CONST_STRING)
29728 {
29729 size_t len = strlen (XSTR (rtl, 0)) + 1;
29730 tree t = build_string (len, XSTR (rtl, 0));
29731 tree tlen = size_int (len - 1);
29732
29733 TREE_TYPE (t)
29734 = build_array_type (char_type_node, build_index_type (tlen));
29735 rtl = string_cst_pool_decl (t);
29736 if (!rtl)
29737 return false;
29738 }
29739 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29740 {
29741 decl = SYMBOL_REF_DECL (rtl);
29742 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29743 {
29744 ref = lookup_decl_die (decl);
29745 if (ref && (get_AT (ref, DW_AT_location)
29746 || get_AT (ref, DW_AT_const_value)))
29747 {
29748 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29749 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29750 loc->dw_loc_oprnd1.val_entry = NULL;
29751 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29752 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29753 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29754 loc->dw_loc_oprnd2.v.val_int = offset;
29755 return true;
29756 }
29757 }
29758 }
29759 return false;
29760 }
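
/* For example, an unresolved location of the form
     DW_OP_addr <sym + 16> DW_OP_stack_value
   where <sym> is a non-external variable whose DIE carries DW_AT_location
   or DW_AT_const_value, is rewritten in place as
     DW_OP_implicit_pointer <that variable's DIE> <16>
   telling the consumer that the value points 16 bytes into that object
   even though no actual address is available at run time.  */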
29761
29762 /* Helper function for resolve_addr; handle one location
29763    expression and return false if at least one CONST_STRING or SYMBOL_REF
29764    in it couldn't be resolved. */
29765
29766 static bool
29767 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29768 {
29769 dw_loc_descr_ref keep = NULL;
29770 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29771 switch (loc->dw_loc_opc)
29772 {
29773 case DW_OP_addr:
29774 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29775 {
29776 if ((prev == NULL
29777 || prev->dw_loc_opc == DW_OP_piece
29778 || prev->dw_loc_opc == DW_OP_bit_piece)
29779 && loc->dw_loc_next
29780 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29781 && (!dwarf_strict || dwarf_version >= 5)
29782 && optimize_one_addr_into_implicit_ptr (loc))
29783 break;
29784 return false;
29785 }
29786 break;
29787 case DW_OP_GNU_addr_index:
29788 case DW_OP_addrx:
29789 case DW_OP_GNU_const_index:
29790 case DW_OP_constx:
29791 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29792 || loc->dw_loc_opc == DW_OP_addrx)
29793 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29794 || loc->dw_loc_opc == DW_OP_constx)
29795 && loc->dtprel))
29796 {
29797 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29798 if (!resolve_one_addr (&rtl))
29799 return false;
29800 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29801 loc->dw_loc_oprnd1.val_entry
29802 = add_addr_table_entry (rtl, ate_kind_rtx);
29803 }
29804 break;
29805 case DW_OP_const4u:
29806 case DW_OP_const8u:
29807 if (loc->dtprel
29808 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29809 return false;
29810 break;
29811 case DW_OP_plus_uconst:
29812 if (size_of_loc_descr (loc)
29813 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29814 + 1
29815 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29816 {
29817 dw_loc_descr_ref repl
29818 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29819 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29820 add_loc_descr (&repl, loc->dw_loc_next);
29821 *loc = *repl;
29822 }
29823 break;
29824 case DW_OP_implicit_value:
29825 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29826 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29827 return false;
29828 break;
29829 case DW_OP_implicit_pointer:
29830 case DW_OP_GNU_implicit_pointer:
29831 case DW_OP_GNU_parameter_ref:
29832 case DW_OP_GNU_variable_value:
29833 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29834 {
29835 dw_die_ref ref
29836 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29837 if (ref == NULL)
29838 return false;
29839 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29840 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29841 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29842 }
29843 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29844 {
29845 if (prev == NULL
29846 && loc->dw_loc_next == NULL
29847 && AT_class (a) == dw_val_class_loc)
29848 switch (a->dw_attr)
29849 {
29850 		/* The following attributes allow both exprloc and reference,
29851 		   so if the whole expression is DW_OP_GNU_variable_value
29852 		   alone we can transform it into a reference. */
29853 case DW_AT_byte_size:
29854 case DW_AT_bit_size:
29855 case DW_AT_lower_bound:
29856 case DW_AT_upper_bound:
29857 case DW_AT_bit_stride:
29858 case DW_AT_count:
29859 case DW_AT_allocated:
29860 case DW_AT_associated:
29861 case DW_AT_byte_stride:
29862 a->dw_attr_val.val_class = dw_val_class_die_ref;
29863 a->dw_attr_val.val_entry = NULL;
29864 a->dw_attr_val.v.val_die_ref.die
29865 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29866 a->dw_attr_val.v.val_die_ref.external = 0;
29867 return true;
29868 default:
29869 break;
29870 }
29871 if (dwarf_strict)
29872 return false;
29873 }
29874 break;
29875 case DW_OP_const_type:
29876 case DW_OP_regval_type:
29877 case DW_OP_deref_type:
29878 case DW_OP_convert:
29879 case DW_OP_reinterpret:
29880 case DW_OP_GNU_const_type:
29881 case DW_OP_GNU_regval_type:
29882 case DW_OP_GNU_deref_type:
29883 case DW_OP_GNU_convert:
29884 case DW_OP_GNU_reinterpret:
29885 while (loc->dw_loc_next
29886 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29887 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29888 {
29889 dw_die_ref base1, base2;
29890 unsigned enc1, enc2, size1, size2;
29891 if (loc->dw_loc_opc == DW_OP_regval_type
29892 || loc->dw_loc_opc == DW_OP_deref_type
29893 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29894 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29895 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29896 else if (loc->dw_loc_oprnd1.val_class
29897 == dw_val_class_unsigned_const)
29898 break;
29899 else
29900 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29901 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29902 == dw_val_class_unsigned_const)
29903 break;
29904 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29905 gcc_assert (base1->die_tag == DW_TAG_base_type
29906 && base2->die_tag == DW_TAG_base_type);
29907 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29908 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29909 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29910 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29911 if (size1 == size2
29912 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29913 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29914 && loc != keep)
29915 || enc1 == enc2))
29916 {
29917 /* Optimize away next DW_OP_convert after
29918 adjusting LOC's base type die reference. */
29919 if (loc->dw_loc_opc == DW_OP_regval_type
29920 || loc->dw_loc_opc == DW_OP_deref_type
29921 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29922 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29923 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29924 else
29925 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29926 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29927 continue;
29928 }
29929 /* Don't change integer DW_OP_convert after e.g. floating
29930 point typed stack entry. */
29931 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29932 keep = loc->dw_loc_next;
29933 break;
29934 }
29935 break;
29936 default:
29937 break;
29938 }
29939 return true;
29940 }
29941
29942 /* Helper function of resolve_addr. DIE had a DW_AT_location of
29943    DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
29944    and the DW_OP_addr couldn't be resolved. resolve_addr has already
29945    removed the DW_AT_location attribute. This function attempts to
29946    add a new DW_AT_location attribute with DW_OP_implicit_pointer,
29947    or a DW_AT_const_value attribute, if possible. */
29948
29949 static void
29950 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29951 {
29952 if (!VAR_P (decl)
29953 || lookup_decl_die (decl) != die
29954 || DECL_EXTERNAL (decl)
29955 || !TREE_STATIC (decl)
29956 || DECL_INITIAL (decl) == NULL_TREE
29957 || DECL_P (DECL_INITIAL (decl))
29958 || get_AT (die, DW_AT_const_value))
29959 return;
29960
29961 tree init = DECL_INITIAL (decl);
29962 HOST_WIDE_INT offset = 0;
29963 /* For variables that have been optimized away and thus
29964 don't have a memory location, see if we can emit
29965 DW_AT_const_value instead. */
29966 if (tree_add_const_value_attribute (die, init))
29967 return;
29968 if (dwarf_strict && dwarf_version < 5)
29969 return;
29970 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29971 and ADDR_EXPR refers to a decl that has DW_AT_location or
29972 DW_AT_const_value (but isn't addressable, otherwise
29973 resolving the original DW_OP_addr wouldn't fail), see if
29974 we can add DW_OP_implicit_pointer. */
29975 STRIP_NOPS (init);
29976 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29977 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29978 {
29979 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29980 init = TREE_OPERAND (init, 0);
29981 STRIP_NOPS (init);
29982 }
29983 if (TREE_CODE (init) != ADDR_EXPR)
29984 return;
29985 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29986 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29987 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29988 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29989 && TREE_OPERAND (init, 0) != decl))
29990 {
29991 dw_die_ref ref;
29992 dw_loc_descr_ref l;
29993
29994 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29995 {
29996 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29997 if (!rtl)
29998 return;
29999 decl = SYMBOL_REF_DECL (rtl);
30000 }
30001 else
30002 decl = TREE_OPERAND (init, 0);
30003 ref = lookup_decl_die (decl);
30004 if (ref == NULL
30005 || (!get_AT (ref, DW_AT_location)
30006 && !get_AT (ref, DW_AT_const_value)))
30007 return;
30008 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30009 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30010 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30011 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30012 add_AT_loc (die, DW_AT_location, l);
30013 }
30014 }
30015
30016 /* Return NULL if L is a valid DWARF expression, or the first op
30017    that is not a valid DWARF expression operation. */
30018
30019 static dw_loc_descr_ref
30020 non_dwarf_expression (dw_loc_descr_ref l)
30021 {
30022 while (l)
30023 {
30024 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30025 return l;
30026 switch (l->dw_loc_opc)
30027 {
30028 case DW_OP_regx:
30029 case DW_OP_implicit_value:
30030 case DW_OP_stack_value:
30031 case DW_OP_implicit_pointer:
30032 case DW_OP_GNU_implicit_pointer:
30033 case DW_OP_GNU_parameter_ref:
30034 case DW_OP_piece:
30035 case DW_OP_bit_piece:
30036 return l;
30037 default:
30038 break;
30039 }
30040 l = l->dw_loc_next;
30041 }
30042 return NULL;
30043 }
30044
30045 /* Return adjusted copy of EXPR:
30046 If it is empty DWARF expression, return it.
30047 If it is valid non-empty DWARF expression,
30048 return copy of EXPR with DW_OP_deref appended to it.
30049 If it is DWARF expression followed by DW_OP_reg{N,x}, return
30050 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30051 If it is DWARF expression followed by DW_OP_stack_value, return
30052 copy of the DWARF expression without anything appended.
30053 Otherwise, return NULL. */
30054
30055 static dw_loc_descr_ref
30056 copy_deref_exprloc (dw_loc_descr_ref expr)
30057 {
30058 dw_loc_descr_ref tail = NULL;
30059
30060 if (expr == NULL)
30061 return NULL;
30062
30063 dw_loc_descr_ref l = non_dwarf_expression (expr);
30064 if (l && l->dw_loc_next)
30065 return NULL;
30066
30067 if (l)
30068 {
30069 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30070 tail = new_loc_descr ((enum dwarf_location_atom)
30071 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30072 0, 0);
30073 else
30074 switch (l->dw_loc_opc)
30075 {
30076 case DW_OP_regx:
30077 tail = new_loc_descr (DW_OP_bregx,
30078 l->dw_loc_oprnd1.v.val_unsigned, 0);
30079 break;
30080 case DW_OP_stack_value:
30081 break;
30082 default:
30083 return NULL;
30084 }
30085 }
30086 else
30087 tail = new_loc_descr (DW_OP_deref, 0, 0);
30088
30089 dw_loc_descr_ref ret = NULL, *p = &ret;
30090 while (expr != l)
30091 {
30092 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30093 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30094 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30095 p = &(*p)->dw_loc_next;
30096 expr = expr->dw_loc_next;
30097 }
30098 *p = tail;
30099 return ret;
30100 }
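
/* A few examples of the transformation above:
     DW_OP_fbreg <-16>                  becomes  DW_OP_fbreg <-16> DW_OP_deref
     DW_OP_regx <33>                    becomes  DW_OP_bregx <33> <0>
     DW_OP_breg6 <8> DW_OP_stack_value  becomes  DW_OP_breg6 <8>
   while an expression ending in e.g. DW_OP_piece or DW_OP_implicit_value
   is rejected by returning NULL.  */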
30101
30102 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30103    reference to a variable or argument, adjust it if needed and return:
30104    -1 if the DW_AT_string_length attribute (and any accompanying
30105       DW_AT_{string_length_,}byte_size attribute) should be removed,
30106    0 if the attribute should be kept, perhaps with minor modifications (no rescan needed),
30107    1 if the attribute has been successfully adjusted. */
30108
30109 static int
30110 optimize_string_length (dw_attr_node *a)
30111 {
30112 dw_loc_descr_ref l = AT_loc (a), lv;
30113 dw_die_ref die;
30114 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30115 {
30116 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30117 die = lookup_decl_die (decl);
30118 if (die)
30119 {
30120 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30121 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30122 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30123 }
30124 else
30125 return -1;
30126 }
30127 else
30128 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30129
30130 /* DWARF5 allows reference class, so we can then reference the DIE.
30131 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30132 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30133 {
30134 a->dw_attr_val.val_class = dw_val_class_die_ref;
30135 a->dw_attr_val.val_entry = NULL;
30136 a->dw_attr_val.v.val_die_ref.die = die;
30137 a->dw_attr_val.v.val_die_ref.external = 0;
30138 return 0;
30139 }
30140
30141 dw_attr_node *av = get_AT (die, DW_AT_location);
30142 dw_loc_list_ref d;
30143 bool non_dwarf_expr = false;
30144
30145 if (av == NULL)
30146 return dwarf_strict ? -1 : 0;
30147 switch (AT_class (av))
30148 {
30149 case dw_val_class_loc_list:
30150 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30151 if (d->expr && non_dwarf_expression (d->expr))
30152 non_dwarf_expr = true;
30153 break;
30154 case dw_val_class_view_list:
30155 gcc_unreachable ();
30156 case dw_val_class_loc:
30157 lv = AT_loc (av);
30158 if (lv == NULL)
30159 return dwarf_strict ? -1 : 0;
30160 if (non_dwarf_expression (lv))
30161 non_dwarf_expr = true;
30162 break;
30163 default:
30164 return dwarf_strict ? -1 : 0;
30165 }
30166
30167 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30168 into DW_OP_call4 or DW_OP_GNU_variable_value into
30169 DW_OP_call4 DW_OP_deref, do so. */
30170 if (!non_dwarf_expr
30171 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30172 {
30173 l->dw_loc_opc = DW_OP_call4;
30174 if (l->dw_loc_next)
30175 l->dw_loc_next = NULL;
30176 else
30177 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30178 return 0;
30179 }
30180
30181 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30182 copy over the DW_AT_location attribute from die to a. */
30183 if (l->dw_loc_next != NULL)
30184 {
30185 a->dw_attr_val = av->dw_attr_val;
30186 return 1;
30187 }
30188
30189 dw_loc_list_ref list, *p;
30190 switch (AT_class (av))
30191 {
30192 case dw_val_class_loc_list:
30193 p = &list;
30194 list = NULL;
30195 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30196 {
30197 lv = copy_deref_exprloc (d->expr);
30198 if (lv)
30199 {
30200 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30201 p = &(*p)->dw_loc_next;
30202 }
30203 else if (!dwarf_strict && d->expr)
30204 return 0;
30205 }
30206 if (list == NULL)
30207 return dwarf_strict ? -1 : 0;
30208 a->dw_attr_val.val_class = dw_val_class_loc_list;
30209 gen_llsym (list);
30210 *AT_loc_list_ptr (a) = list;
30211 return 1;
30212 case dw_val_class_loc:
30213 lv = copy_deref_exprloc (AT_loc (av));
30214 if (lv == NULL)
30215 return dwarf_strict ? -1 : 0;
30216 a->dw_attr_val.v.val_loc = lv;
30217 return 1;
30218 default:
30219 gcc_unreachable ();
30220 }
30221 }
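
/* Roughly summarizing the outcomes above: with DWARF 5 the
   DW_OP_GNU_variable_value DW_OP_stack_value form is simply turned into a
   DIE reference; otherwise, when the referenced object's location is a
   plain DWARF expression, the opcode becomes DW_OP_call4 (followed by
   DW_OP_deref for the bare DW_OP_GNU_variable_value form); failing that,
   the referenced DW_AT_location is copied over (run through
   copy_deref_exprloc for the bare form); and if none of this works the
   attribute is kept as-is unless dwarf_strict forces its removal.  */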
30222
30223 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30224    an address in the .rodata section if the string literal is emitted
30225    there; if it isn't found in .rodata, remove the containing location
30226    list, or replace DW_AT_const_value with DW_AT_location and an empty
30227    location expression. Similarly for SYMBOL_REFs, keep only those that
30228    refer to something that has been emitted in the current CU. */
30229
30230 static void
30231 resolve_addr (dw_die_ref die)
30232 {
30233 dw_die_ref c;
30234 dw_attr_node *a;
30235 dw_loc_list_ref *curr, *start, loc;
30236 unsigned ix;
30237 bool remove_AT_byte_size = false;
30238
30239 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30240 switch (AT_class (a))
30241 {
30242 case dw_val_class_loc_list:
30243 start = curr = AT_loc_list_ptr (a);
30244 loc = *curr;
30245 gcc_assert (loc);
30246 /* The same list can be referenced more than once. See if we have
30247 already recorded the result from a previous pass. */
30248 if (loc->replaced)
30249 *curr = loc->dw_loc_next;
30250 else if (!loc->resolved_addr)
30251 {
30252 /* As things stand, we do not expect or allow one die to
30253 reference a suffix of another die's location list chain.
30254 References must be identical or completely separate.
30255 There is therefore no need to cache the result of this
30256 pass on any list other than the first; doing so
30257 would lead to unnecessary writes. */
30258 while (*curr)
30259 {
30260 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30261 if (!resolve_addr_in_expr (a, (*curr)->expr))
30262 {
30263 dw_loc_list_ref next = (*curr)->dw_loc_next;
30264 dw_loc_descr_ref l = (*curr)->expr;
30265
30266 if (next && (*curr)->ll_symbol)
30267 {
30268 gcc_assert (!next->ll_symbol);
30269 next->ll_symbol = (*curr)->ll_symbol;
30270 next->vl_symbol = (*curr)->vl_symbol;
30271 }
30272 if (dwarf_split_debug_info)
30273 remove_loc_list_addr_table_entries (l);
30274 *curr = next;
30275 }
30276 else
30277 {
30278 mark_base_types ((*curr)->expr);
30279 curr = &(*curr)->dw_loc_next;
30280 }
30281 }
30282 if (loc == *start)
30283 loc->resolved_addr = 1;
30284 else
30285 {
30286 loc->replaced = 1;
30287 loc->dw_loc_next = *start;
30288 }
30289 }
30290 if (!*start)
30291 {
30292 remove_AT (die, a->dw_attr);
30293 ix--;
30294 }
30295 break;
30296 case dw_val_class_view_list:
30297 {
30298 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30299 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30300 dw_val_node *llnode
30301 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30302 /* If we no longer have a loclist, or it no longer needs
30303 views, drop this attribute. */
30304 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30305 {
30306 remove_AT (die, a->dw_attr);
30307 ix--;
30308 }
30309 break;
30310 }
30311 case dw_val_class_loc:
30312 {
30313 dw_loc_descr_ref l = AT_loc (a);
30314 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30315 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30316 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30317 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30318 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30319 with DW_FORM_ref referencing the same DIE as
30320 DW_OP_GNU_variable_value used to reference. */
30321 if (a->dw_attr == DW_AT_string_length
30322 && l
30323 && l->dw_loc_opc == DW_OP_GNU_variable_value
30324 && (l->dw_loc_next == NULL
30325 || (l->dw_loc_next->dw_loc_next == NULL
30326 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30327 {
30328 switch (optimize_string_length (a))
30329 {
30330 case -1:
30331 remove_AT (die, a->dw_attr);
30332 ix--;
30333 /* If we drop DW_AT_string_length, we need to drop also
30334 DW_AT_{string_length_,}byte_size. */
30335 remove_AT_byte_size = true;
30336 continue;
30337 default:
30338 break;
30339 case 1:
30340 /* Even if we keep the optimized DW_AT_string_length,
30341 it might have changed AT_class, so process it again. */
30342 ix--;
30343 continue;
30344 }
30345 }
30346 /* For -gdwarf-2 don't attempt to optimize
30347 DW_AT_data_member_location containing
30348 DW_OP_plus_uconst - older consumers might
30349 rely on it being that op instead of a more complex,
30350 but shorter, location description. */
30351 if ((dwarf_version > 2
30352 || a->dw_attr != DW_AT_data_member_location
30353 || l == NULL
30354 || l->dw_loc_opc != DW_OP_plus_uconst
30355 || l->dw_loc_next != NULL)
30356 && !resolve_addr_in_expr (a, l))
30357 {
30358 if (dwarf_split_debug_info)
30359 remove_loc_list_addr_table_entries (l);
30360 if (l != NULL
30361 && l->dw_loc_next == NULL
30362 && l->dw_loc_opc == DW_OP_addr
30363 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30364 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30365 && a->dw_attr == DW_AT_location)
30366 {
30367 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30368 remove_AT (die, a->dw_attr);
30369 ix--;
30370 optimize_location_into_implicit_ptr (die, decl);
30371 break;
30372 }
30373 if (a->dw_attr == DW_AT_string_length)
30374 /* If we drop DW_AT_string_length, we need to drop also
30375 DW_AT_{string_length_,}byte_size. */
30376 remove_AT_byte_size = true;
30377 remove_AT (die, a->dw_attr);
30378 ix--;
30379 }
30380 else
30381 mark_base_types (l);
30382 }
30383 break;
30384 case dw_val_class_addr:
30385 if (a->dw_attr == DW_AT_const_value
30386 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30387 {
30388 if (AT_index (a) != NOT_INDEXED)
30389 remove_addr_table_entry (a->dw_attr_val.val_entry);
30390 remove_AT (die, a->dw_attr);
30391 ix--;
30392 }
30393 if ((die->die_tag == DW_TAG_call_site
30394 && a->dw_attr == DW_AT_call_origin)
30395 || (die->die_tag == DW_TAG_GNU_call_site
30396 && a->dw_attr == DW_AT_abstract_origin))
30397 {
30398 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30399 dw_die_ref tdie = lookup_decl_die (tdecl);
30400 dw_die_ref cdie;
30401 if (tdie == NULL
30402 && DECL_EXTERNAL (tdecl)
30403 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30404 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30405 {
30406 dw_die_ref pdie = cdie;
30407 /* Make sure we don't add these DIEs into type units.
30408 We could emit skeleton DIEs for context (namespaces,
30409 outer structs/classes) and a skeleton DIE for the
30410 innermost context with DW_AT_signature pointing to the
30411 type unit. See PR78835. */
30412 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30413 pdie = pdie->die_parent;
30414 if (pdie == NULL)
30415 {
30416 /* Creating a full DIE for tdecl is overly expensive and
30417 at this point even wrong when in the LTO phase
30418 as it can end up generating new type DIEs we didn't
30419 output and thus optimize_external_refs will crash. */
30420 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30421 add_AT_flag (tdie, DW_AT_external, 1);
30422 add_AT_flag (tdie, DW_AT_declaration, 1);
30423 add_linkage_attr (tdie, tdecl);
30424 add_name_and_src_coords_attributes (tdie, tdecl, true);
30425 equate_decl_number_to_die (tdecl, tdie);
30426 }
30427 }
30428 if (tdie)
30429 {
30430 a->dw_attr_val.val_class = dw_val_class_die_ref;
30431 a->dw_attr_val.v.val_die_ref.die = tdie;
30432 a->dw_attr_val.v.val_die_ref.external = 0;
30433 }
30434 else
30435 {
30436 if (AT_index (a) != NOT_INDEXED)
30437 remove_addr_table_entry (a->dw_attr_val.val_entry);
30438 remove_AT (die, a->dw_attr);
30439 ix--;
30440 }
30441 }
30442 break;
30443 default:
30444 break;
30445 }
30446
30447 if (remove_AT_byte_size)
30448 remove_AT (die, dwarf_version >= 5
30449 ? DW_AT_string_length_byte_size
30450 : DW_AT_byte_size);
30451
30452 FOR_EACH_CHILD (die, c, resolve_addr (c));
30453 }
30454 \f
30455 /* Helper routines for optimize_location_lists.
30456    This pass tries to share identical location lists in the .debug_loc
30457 section. */
30458
30459 /* Iteratively hash operands of LOC opcode into HSTATE. */
30460
30461 static void
30462 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30463 {
30464 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30465 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30466
30467 switch (loc->dw_loc_opc)
30468 {
30469 case DW_OP_const4u:
30470 case DW_OP_const8u:
30471 if (loc->dtprel)
30472 goto hash_addr;
30473 /* FALLTHRU */
30474 case DW_OP_const1u:
30475 case DW_OP_const1s:
30476 case DW_OP_const2u:
30477 case DW_OP_const2s:
30478 case DW_OP_const4s:
30479 case DW_OP_const8s:
30480 case DW_OP_constu:
30481 case DW_OP_consts:
30482 case DW_OP_pick:
30483 case DW_OP_plus_uconst:
30484 case DW_OP_breg0:
30485 case DW_OP_breg1:
30486 case DW_OP_breg2:
30487 case DW_OP_breg3:
30488 case DW_OP_breg4:
30489 case DW_OP_breg5:
30490 case DW_OP_breg6:
30491 case DW_OP_breg7:
30492 case DW_OP_breg8:
30493 case DW_OP_breg9:
30494 case DW_OP_breg10:
30495 case DW_OP_breg11:
30496 case DW_OP_breg12:
30497 case DW_OP_breg13:
30498 case DW_OP_breg14:
30499 case DW_OP_breg15:
30500 case DW_OP_breg16:
30501 case DW_OP_breg17:
30502 case DW_OP_breg18:
30503 case DW_OP_breg19:
30504 case DW_OP_breg20:
30505 case DW_OP_breg21:
30506 case DW_OP_breg22:
30507 case DW_OP_breg23:
30508 case DW_OP_breg24:
30509 case DW_OP_breg25:
30510 case DW_OP_breg26:
30511 case DW_OP_breg27:
30512 case DW_OP_breg28:
30513 case DW_OP_breg29:
30514 case DW_OP_breg30:
30515 case DW_OP_breg31:
30516 case DW_OP_regx:
30517 case DW_OP_fbreg:
30518 case DW_OP_piece:
30519 case DW_OP_deref_size:
30520 case DW_OP_xderef_size:
30521 hstate.add_object (val1->v.val_int);
30522 break;
30523 case DW_OP_skip:
30524 case DW_OP_bra:
30525 {
30526 int offset;
30527
30528 gcc_assert (val1->val_class == dw_val_class_loc);
30529 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30530 hstate.add_object (offset);
30531 }
30532 break;
30533 case DW_OP_implicit_value:
30534 hstate.add_object (val1->v.val_unsigned);
30535 switch (val2->val_class)
30536 {
30537 case dw_val_class_const:
30538 hstate.add_object (val2->v.val_int);
30539 break;
30540 case dw_val_class_vec:
30541 {
30542 unsigned int elt_size = val2->v.val_vec.elt_size;
30543 unsigned int len = val2->v.val_vec.length;
30544
30545 hstate.add_int (elt_size);
30546 hstate.add_int (len);
30547 hstate.add (val2->v.val_vec.array, len * elt_size);
30548 }
30549 break;
30550 case dw_val_class_const_double:
30551 hstate.add_object (val2->v.val_double.low);
30552 hstate.add_object (val2->v.val_double.high);
30553 break;
30554 case dw_val_class_wide_int:
30555 hstate.add (val2->v.val_wide->get_val (),
30556 get_full_len (*val2->v.val_wide)
30557 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30558 break;
30559 case dw_val_class_addr:
30560 inchash::add_rtx (val2->v.val_addr, hstate);
30561 break;
30562 default:
30563 gcc_unreachable ();
30564 }
30565 break;
30566 case DW_OP_bregx:
30567 case DW_OP_bit_piece:
30568 hstate.add_object (val1->v.val_int);
30569 hstate.add_object (val2->v.val_int);
30570 break;
30571 case DW_OP_addr:
30572 hash_addr:
30573 if (loc->dtprel)
30574 {
30575 unsigned char dtprel = 0xd1;
30576 hstate.add_object (dtprel);
30577 }
30578 inchash::add_rtx (val1->v.val_addr, hstate);
30579 break;
30580 case DW_OP_GNU_addr_index:
30581 case DW_OP_addrx:
30582 case DW_OP_GNU_const_index:
30583 case DW_OP_constx:
30584 {
30585 if (loc->dtprel)
30586 {
30587 unsigned char dtprel = 0xd1;
30588 hstate.add_object (dtprel);
30589 }
30590 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30591 }
30592 break;
30593 case DW_OP_implicit_pointer:
30594 case DW_OP_GNU_implicit_pointer:
30595 hstate.add_int (val2->v.val_int);
30596 break;
30597 case DW_OP_entry_value:
30598 case DW_OP_GNU_entry_value:
30599 hstate.add_object (val1->v.val_loc);
30600 break;
30601 case DW_OP_regval_type:
30602 case DW_OP_deref_type:
30603 case DW_OP_GNU_regval_type:
30604 case DW_OP_GNU_deref_type:
30605 {
30606 unsigned int byte_size
30607 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30608 unsigned int encoding
30609 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30610 hstate.add_object (val1->v.val_int);
30611 hstate.add_object (byte_size);
30612 hstate.add_object (encoding);
30613 }
30614 break;
30615 case DW_OP_convert:
30616 case DW_OP_reinterpret:
30617 case DW_OP_GNU_convert:
30618 case DW_OP_GNU_reinterpret:
30619 if (val1->val_class == dw_val_class_unsigned_const)
30620 {
30621 hstate.add_object (val1->v.val_unsigned);
30622 break;
30623 }
30624 /* FALLTHRU */
30625 case DW_OP_const_type:
30626 case DW_OP_GNU_const_type:
30627 {
30628 unsigned int byte_size
30629 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30630 unsigned int encoding
30631 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30632 hstate.add_object (byte_size);
30633 hstate.add_object (encoding);
30634 if (loc->dw_loc_opc != DW_OP_const_type
30635 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30636 break;
30637 hstate.add_object (val2->val_class);
30638 switch (val2->val_class)
30639 {
30640 case dw_val_class_const:
30641 hstate.add_object (val2->v.val_int);
30642 break;
30643 case dw_val_class_vec:
30644 {
30645 unsigned int elt_size = val2->v.val_vec.elt_size;
30646 unsigned int len = val2->v.val_vec.length;
30647
30648 hstate.add_object (elt_size);
30649 hstate.add_object (len);
30650 hstate.add (val2->v.val_vec.array, len * elt_size);
30651 }
30652 break;
30653 case dw_val_class_const_double:
30654 hstate.add_object (val2->v.val_double.low);
30655 hstate.add_object (val2->v.val_double.high);
30656 break;
30657 case dw_val_class_wide_int:
30658 hstate.add (val2->v.val_wide->get_val (),
30659 get_full_len (*val2->v.val_wide)
30660 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30661 break;
30662 default:
30663 gcc_unreachable ();
30664 }
30665 }
30666 break;
30667
30668 default:
30669 /* Other codes have no operands. */
30670 break;
30671 }
30672 }
30673
30674 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30675
30676 static inline void
30677 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30678 {
30679 dw_loc_descr_ref l;
30680 bool sizes_computed = false;
30681 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30682 size_of_locs (loc);
30683
30684 for (l = loc; l != NULL; l = l->dw_loc_next)
30685 {
30686 enum dwarf_location_atom opc = l->dw_loc_opc;
30687 hstate.add_object (opc);
30688 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30689 {
30690 size_of_locs (loc);
30691 sizes_computed = true;
30692 }
30693 hash_loc_operands (l, hstate);
30694 }
30695 }
30696
30697 /* Compute hash of the whole location list LIST_HEAD. */
30698
30699 static inline void
30700 hash_loc_list (dw_loc_list_ref list_head)
30701 {
30702 dw_loc_list_ref curr = list_head;
30703 inchash::hash hstate;
30704
30705 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30706 {
30707 hstate.add (curr->begin, strlen (curr->begin) + 1);
30708 hstate.add (curr->end, strlen (curr->end) + 1);
30709 hstate.add_object (curr->vbegin);
30710 hstate.add_object (curr->vend);
30711 if (curr->section)
30712 hstate.add (curr->section, strlen (curr->section) + 1);
30713 hash_locs (curr->expr, hstate);
30714 }
30715 list_head->hash = hstate.end ();
30716 }
30717
30718 /* Return true if X and Y opcodes have the same operands. */
30719
30720 static inline bool
30721 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30722 {
30723 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30724 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30725 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30726 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30727
30728 switch (x->dw_loc_opc)
30729 {
30730 case DW_OP_const4u:
30731 case DW_OP_const8u:
30732 if (x->dtprel)
30733 goto hash_addr;
30734 /* FALLTHRU */
30735 case DW_OP_const1u:
30736 case DW_OP_const1s:
30737 case DW_OP_const2u:
30738 case DW_OP_const2s:
30739 case DW_OP_const4s:
30740 case DW_OP_const8s:
30741 case DW_OP_constu:
30742 case DW_OP_consts:
30743 case DW_OP_pick:
30744 case DW_OP_plus_uconst:
30745 case DW_OP_breg0:
30746 case DW_OP_breg1:
30747 case DW_OP_breg2:
30748 case DW_OP_breg3:
30749 case DW_OP_breg4:
30750 case DW_OP_breg5:
30751 case DW_OP_breg6:
30752 case DW_OP_breg7:
30753 case DW_OP_breg8:
30754 case DW_OP_breg9:
30755 case DW_OP_breg10:
30756 case DW_OP_breg11:
30757 case DW_OP_breg12:
30758 case DW_OP_breg13:
30759 case DW_OP_breg14:
30760 case DW_OP_breg15:
30761 case DW_OP_breg16:
30762 case DW_OP_breg17:
30763 case DW_OP_breg18:
30764 case DW_OP_breg19:
30765 case DW_OP_breg20:
30766 case DW_OP_breg21:
30767 case DW_OP_breg22:
30768 case DW_OP_breg23:
30769 case DW_OP_breg24:
30770 case DW_OP_breg25:
30771 case DW_OP_breg26:
30772 case DW_OP_breg27:
30773 case DW_OP_breg28:
30774 case DW_OP_breg29:
30775 case DW_OP_breg30:
30776 case DW_OP_breg31:
30777 case DW_OP_regx:
30778 case DW_OP_fbreg:
30779 case DW_OP_piece:
30780 case DW_OP_deref_size:
30781 case DW_OP_xderef_size:
30782 return valx1->v.val_int == valy1->v.val_int;
30783 case DW_OP_skip:
30784 case DW_OP_bra:
30785 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30786 can cause irrelevant differences in dw_loc_addr. */
30787 gcc_assert (valx1->val_class == dw_val_class_loc
30788 && valy1->val_class == dw_val_class_loc
30789 && (dwarf_split_debug_info
30790 || x->dw_loc_addr == y->dw_loc_addr));
30791 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30792 case DW_OP_implicit_value:
30793 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30794 || valx2->val_class != valy2->val_class)
30795 return false;
30796 switch (valx2->val_class)
30797 {
30798 case dw_val_class_const:
30799 return valx2->v.val_int == valy2->v.val_int;
30800 case dw_val_class_vec:
30801 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30802 && valx2->v.val_vec.length == valy2->v.val_vec.length
30803 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30804 valx2->v.val_vec.elt_size
30805 * valx2->v.val_vec.length) == 0;
30806 case dw_val_class_const_double:
30807 return valx2->v.val_double.low == valy2->v.val_double.low
30808 && valx2->v.val_double.high == valy2->v.val_double.high;
30809 case dw_val_class_wide_int:
30810 return *valx2->v.val_wide == *valy2->v.val_wide;
30811 case dw_val_class_addr:
30812 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30813 default:
30814 gcc_unreachable ();
30815 }
30816 case DW_OP_bregx:
30817 case DW_OP_bit_piece:
30818 return valx1->v.val_int == valy1->v.val_int
30819 && valx2->v.val_int == valy2->v.val_int;
30820 case DW_OP_addr:
30821 hash_addr:
30822 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30823 case DW_OP_GNU_addr_index:
30824 case DW_OP_addrx:
30825 case DW_OP_GNU_const_index:
30826 case DW_OP_constx:
30827 {
30828 rtx ax1 = valx1->val_entry->addr.rtl;
30829 rtx ay1 = valy1->val_entry->addr.rtl;
30830 return rtx_equal_p (ax1, ay1);
30831 }
30832 case DW_OP_implicit_pointer:
30833 case DW_OP_GNU_implicit_pointer:
30834 return valx1->val_class == dw_val_class_die_ref
30835 && valx1->val_class == valy1->val_class
30836 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30837 && valx2->v.val_int == valy2->v.val_int;
30838 case DW_OP_entry_value:
30839 case DW_OP_GNU_entry_value:
30840 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30841 case DW_OP_const_type:
30842 case DW_OP_GNU_const_type:
30843 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30844 || valx2->val_class != valy2->val_class)
30845 return false;
30846 switch (valx2->val_class)
30847 {
30848 case dw_val_class_const:
30849 return valx2->v.val_int == valy2->v.val_int;
30850 case dw_val_class_vec:
30851 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30852 && valx2->v.val_vec.length == valy2->v.val_vec.length
30853 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30854 valx2->v.val_vec.elt_size
30855 * valx2->v.val_vec.length) == 0;
30856 case dw_val_class_const_double:
30857 return valx2->v.val_double.low == valy2->v.val_double.low
30858 && valx2->v.val_double.high == valy2->v.val_double.high;
30859 case dw_val_class_wide_int:
30860 return *valx2->v.val_wide == *valy2->v.val_wide;
30861 default:
30862 gcc_unreachable ();
30863 }
30864 case DW_OP_regval_type:
30865 case DW_OP_deref_type:
30866 case DW_OP_GNU_regval_type:
30867 case DW_OP_GNU_deref_type:
30868 return valx1->v.val_int == valy1->v.val_int
30869 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30870 case DW_OP_convert:
30871 case DW_OP_reinterpret:
30872 case DW_OP_GNU_convert:
30873 case DW_OP_GNU_reinterpret:
30874 if (valx1->val_class != valy1->val_class)
30875 return false;
30876 if (valx1->val_class == dw_val_class_unsigned_const)
30877 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30878 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30879 case DW_OP_GNU_parameter_ref:
30880 return valx1->val_class == dw_val_class_die_ref
30881 && valx1->val_class == valy1->val_class
30882 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30883 default:
30884 /* Other codes have no operands. */
30885 return true;
30886 }
30887 }
30888
30889 /* Return true if DWARF location expressions X and Y are the same. */
30890
30891 static inline bool
30892 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30893 {
30894 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30895 if (x->dw_loc_opc != y->dw_loc_opc
30896 || x->dtprel != y->dtprel
30897 || !compare_loc_operands (x, y))
30898 break;
30899 return x == NULL && y == NULL;
30900 }
30901
30902 /* Hashtable helpers. */
30903
30904 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30905 {
30906 static inline hashval_t hash (const dw_loc_list_struct *);
30907 static inline bool equal (const dw_loc_list_struct *,
30908 const dw_loc_list_struct *);
30909 };
30910
30911 /* Return precomputed hash of location list X. */
30912
30913 inline hashval_t
30914 loc_list_hasher::hash (const dw_loc_list_struct *x)
30915 {
30916 return x->hash;
30917 }
30918
30919 /* Return true if location lists A and B are the same. */
30920
30921 inline bool
30922 loc_list_hasher::equal (const dw_loc_list_struct *a,
30923 const dw_loc_list_struct *b)
30924 {
30925 if (a == b)
30926 return 1;
30927 if (a->hash != b->hash)
30928 return 0;
30929 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30930 if (strcmp (a->begin, b->begin) != 0
30931 || strcmp (a->end, b->end) != 0
30932 || (a->section == NULL) != (b->section == NULL)
30933 || (a->section && strcmp (a->section, b->section) != 0)
30934 || a->vbegin != b->vbegin || a->vend != b->vend
30935 || !compare_locs (a->expr, b->expr))
30936 break;
30937 return a == NULL && b == NULL;
30938 }
30939
30940 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30941
30942
30943 /* Recursively optimize location lists referenced from DIE
30944 children and share them whenever possible. */
30945
30946 static void
30947 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30948 {
30949 dw_die_ref c;
30950 dw_attr_node *a;
30951 unsigned ix;
30952 dw_loc_list_struct **slot;
30953 bool drop_locviews = false;
30954 bool has_locviews = false;
30955
30956 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30957 if (AT_class (a) == dw_val_class_loc_list)
30958 {
30959 dw_loc_list_ref list = AT_loc_list (a);
30960 /* TODO: perform some optimizations here, before hashing
30961 	   it and storing it into the hash table.  */
30962 hash_loc_list (list);
30963 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30964 if (*slot == NULL)
30965 {
30966 *slot = list;
30967 if (loc_list_has_views (list))
30968 gcc_assert (list->vl_symbol);
30969 else if (list->vl_symbol)
30970 {
30971 drop_locviews = true;
30972 list->vl_symbol = NULL;
30973 }
30974 }
30975 else
30976 {
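	  /* An equivalent list is already in the hash table; share it.
	     If the shared copy has no location-view symbol, the
	     DW_AT_GNU_locviews attribute of this DIE is dropped below.  */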
30977 if (list->vl_symbol && !(*slot)->vl_symbol)
30978 drop_locviews = true;
30979 a->dw_attr_val.v.val_loc_list = *slot;
30980 }
30981 }
30982 else if (AT_class (a) == dw_val_class_view_list)
30983 {
30984 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30985 has_locviews = true;
30986 }
30987
30988
30989 if (drop_locviews && has_locviews)
30990 remove_AT (die, DW_AT_GNU_locviews);
30991
30992 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30993 }
30994
30995
30996 /* Recursively assign each location list a unique index into the debug_addr
30997 section. */
30998
30999 static void
31000 index_location_lists (dw_die_ref die)
31001 {
31002 dw_die_ref c;
31003 dw_attr_node *a;
31004 unsigned ix;
31005
31006 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31007 if (AT_class (a) == dw_val_class_loc_list)
31008 {
31009 dw_loc_list_ref list = AT_loc_list (a);
31010 dw_loc_list_ref curr;
31011 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31012 {
31013 	      /* Don't index an entry that has already been indexed
31014 		 or won't be output.  Make sure skip_loc_list_entry doesn't
31015 		 call size_of_locs, because that would create a circular
31016 		 dependency: index_location_lists would then require the
31017 		 address table indexes to already be computed, yet it adds
31018 		 new entries through add_addr_table_entry, and the index
31019 		 computation requires no new additions to the hash table.
31020 		 In the rare case of a DWARF[234] location expression
31021 		 >= 64KB, we'll just waste an unused address table entry for it.  */
31022 if (curr->begin_entry != NULL
31023 || skip_loc_list_entry (curr))
31024 continue;
31025
31026 curr->begin_entry
31027 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31028 }
31029 }
31030
31031 FOR_EACH_CHILD (die, c, index_location_lists (c));
31032 }
31033
31034 /* Optimize location lists referenced from DIE
31035 children and share them whenever possible. */
31036
31037 static void
31038 optimize_location_lists (dw_die_ref die)
31039 {
31040 loc_list_hash_type htab (500);
31041 optimize_location_lists_1 (die, &htab);
31042 }
31043 \f
31044 /* Traverse the limbo die list, and add parent/child links. The only
31045 dies without parents that should be here are concrete instances of
31046 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31047 For concrete instances, we can get the parent die from the abstract
31048 instance. */
31049
31050 static void
31051 flush_limbo_die_list (void)
31052 {
31053 limbo_die_node *node;
31054
31055 /* get_context_die calls force_decl_die, which can put new DIEs on the
31056 limbo list in LTO mode when nested functions are put in a different
31057 partition than that of their parent function. */
31058 while ((node = limbo_die_list))
31059 {
31060 dw_die_ref die = node->die;
31061 limbo_die_list = node->next;
31062
31063 if (die->die_parent == NULL)
31064 {
31065 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31066
31067 if (origin && origin->die_parent)
31068 add_child_die (origin->die_parent, die);
31069 else if (is_cu_die (die))
31070 ;
31071 else if (seen_error ())
31072 /* It's OK to be confused by errors in the input. */
31073 add_child_die (comp_unit_die (), die);
31074 else
31075 {
31076 /* In certain situations, the lexical block containing a
31077 nested function can be optimized away, which results
31078 in the nested function die being orphaned. Likewise
31079 with the return type of that nested function. Force
31080 this to be a child of the containing function.
31081
31082 It may happen that even the containing function got fully
31083 inlined and optimized out. In that case we are lost and
31084 	         assign the empty child.  This should not be a big issue as
31085 	         the function is likely unreachable too.  */
31086 gcc_assert (node->created_for);
31087
31088 if (DECL_P (node->created_for))
31089 origin = get_context_die (DECL_CONTEXT (node->created_for));
31090 else if (TYPE_P (node->created_for))
31091 origin = scope_die_for (node->created_for, comp_unit_die ());
31092 else
31093 origin = comp_unit_die ();
31094
31095 add_child_die (origin, die);
31096 }
31097 }
31098 }
31099 }
31100
31101 /* Reset DIEs so we can output them again. */
31102
31103 static void
31104 reset_dies (dw_die_ref die)
31105 {
31106 dw_die_ref c;
31107
31108 /* Remove stuff we re-generate. */
31109 die->die_mark = 0;
31110 die->die_offset = 0;
31111 die->die_abbrev = 0;
31112 remove_AT (die, DW_AT_sibling);
31113
31114 FOR_EACH_CHILD (die, c, reset_dies (c));
31115 }
31116
31117 /* Output stuff that dwarf requires at the end of every file,
31118 and generate the DWARF-2 debugging info. */
31119
31120 static void
31121 dwarf2out_finish (const char *)
31122 {
31123 comdat_type_node *ctnode;
31124 dw_die_ref main_comp_unit_die;
31125 unsigned char checksum[16];
31126 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31127
31128 /* Flush out any latecomers to the limbo party. */
31129 flush_limbo_die_list ();
31130
31131 if (inline_entry_data_table)
31132 gcc_assert (inline_entry_data_table->elements () == 0);
31133
31134 if (flag_checking)
31135 {
31136 verify_die (comp_unit_die ());
31137 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31138 verify_die (node->die);
31139 }
31140
31141 /* We shouldn't have any symbols with delayed asm names for
31142 DIEs generated after early finish. */
31143 gcc_assert (deferred_asm_name == NULL);
31144
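  /* Flush out attributes for template value parameter DIEs that were
     deferred until now.  */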
31145 gen_remaining_tmpl_value_param_die_attribute ();
31146
31147 if (flag_generate_lto || flag_generate_offload)
31148 {
31149 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31150
31151 /* Prune stuff so that dwarf2out_finish runs successfully
31152 for the fat part of the object. */
31153 reset_dies (comp_unit_die ());
31154 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31155 reset_dies (node->die);
31156
31157 hash_table<comdat_type_hasher> comdat_type_table (100);
31158 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31159 {
31160 comdat_type_node **slot
31161 = comdat_type_table.find_slot (ctnode, INSERT);
31162
31163 /* Don't reset types twice. */
31164 if (*slot != HTAB_EMPTY_ENTRY)
31165 continue;
31166
31167 	  /* Reset the comdat type unit's DIEs as well so they can be
31168 	     output again for the fat part of the object, matching the
31169 	     reset of the compile unit DIEs above.  */
31170 if (debug_info_level >= DINFO_LEVEL_TERSE)
31171 reset_dies (ctnode->root_die);
31172
31173 *slot = ctnode;
31174 }
31175
31176 /* Reset die CU symbol so we don't output it twice. */
31177 comp_unit_die ()->die_id.die_symbol = NULL;
31178
31179 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31180 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31181 if (have_macinfo)
31182 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31183
31184 /* Remove indirect string decisions. */
31185 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31186 }
31187
31188 #if ENABLE_ASSERT_CHECKING
31189 {
31190 dw_die_ref die = comp_unit_die (), c;
31191 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31192 }
31193 #endif
31194 resolve_addr (comp_unit_die ());
31195 move_marked_base_types ();
31196
31197 /* Initialize sections and labels used for actual assembler output. */
31198 unsigned generation = init_sections_and_labels (false);
31199
31200 /* Traverse the DIE's and add sibling attributes to those DIE's that
31201 have children. */
31202 add_sibling_attributes (comp_unit_die ());
31203 limbo_die_node *node;
31204 for (node = cu_die_list; node; node = node->next)
31205 add_sibling_attributes (node->die);
31206 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31207 add_sibling_attributes (ctnode->root_die);
31208
31209 /* When splitting DWARF info, we put some attributes in the
31210 skeleton compile_unit DIE that remains in the .o, while
31211 most attributes go in the DWO compile_unit_die. */
31212 if (dwarf_split_debug_info)
31213 {
31214 limbo_die_node *cu;
31215 main_comp_unit_die = gen_compile_unit_die (NULL);
31216 if (dwarf_version >= 5)
31217 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
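      /* gen_compile_unit_die put the new skeleton DIE on the limbo list;
	 move it to the head of the CU list so it is output together with
	 the other compile units.  */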
31218 cu = limbo_die_list;
31219 gcc_assert (cu->die == main_comp_unit_die);
31220 limbo_die_list = limbo_die_list->next;
31221 cu->next = cu_die_list;
31222 cu_die_list = cu;
31223 }
31224 else
31225 main_comp_unit_die = comp_unit_die ();
31226
31227 /* Output a terminator label for the .text section. */
31228 switch_to_section (text_section);
31229 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31230 if (cold_text_section)
31231 {
31232 switch_to_section (cold_text_section);
31233 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31234 }
31235
31236 /* We can only use the low/high_pc attributes if all of the code was
31237 in .text. */
31238 if (!have_multiple_function_sections
31239 || (dwarf_version < 3 && dwarf_strict))
31240 {
31241 /* Don't add if the CU has no associated code. */
31242 if (text_section_used)
31243 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31244 text_end_label, true);
31245 }
31246 else
31247 {
31248 unsigned fde_idx;
31249 dw_fde_ref fde;
31250 bool range_list_added = false;
31251
31252 if (text_section_used)
31253 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31254 text_end_label, &range_list_added, true);
31255 if (cold_text_section_used)
31256 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31257 cold_end_label, &range_list_added, true);
31258
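      /* Add range entries for every function that was emitted outside
	 the standard text and cold sections.  */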
31259 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31260 {
31261 if (DECL_IGNORED_P (fde->decl))
31262 continue;
31263 if (!fde->in_std_section)
31264 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31265 fde->dw_fde_end, &range_list_added,
31266 true);
31267 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31268 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31269 fde->dw_fde_second_end, &range_list_added,
31270 true);
31271 }
31272
31273 if (range_list_added)
31274 {
31275 /* We need to give .debug_loc and .debug_ranges an appropriate
31276 "base address". Use zero so that these addresses become
31277 absolute. Historically, we've emitted the unexpected
31278 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31279 Emit both to give time for other tools to adapt. */
31280 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31281 if (! dwarf_strict && dwarf_version < 4)
31282 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31283
31284 add_ranges (NULL);
31285 }
31286 }
31287
31288 /* AIX Assembler inserts the length, so adjust the reference to match the
31289 offset expected by debuggers. */
31290 strcpy (dl_section_ref, debug_line_section_label);
31291 if (XCOFF_DEBUGGING_INFO)
31292 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31293
31294 if (debug_info_level >= DINFO_LEVEL_TERSE)
31295 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31296 dl_section_ref);
31297
31298 if (have_macinfo)
31299 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31300 macinfo_section_label);
31301
31302 if (dwarf_split_debug_info)
31303 {
31304 if (have_location_lists)
31305 {
31306 /* Since we generate the loclists in the split DWARF .dwo
31307 file itself, we don't need to generate a loclists_base
31308 attribute for the split compile unit DIE. That attribute
31309 (and using relocatable sec_offset FORMs) isn't allowed
31310 for a split compile unit. Only if the .debug_loclists
31311 section was in the main file, would we need to generate a
31312 loclists_base attribute here (for the full or skeleton
31313 unit DIE). */
31314
31315 /* optimize_location_lists calculates the size of the lists,
31316 so index them first, and assign indices to the entries.
31317 Although optimize_location_lists will remove entries from
31318 the table, it only does so for duplicates, and therefore
31319 only reduces ref_counts to 1. */
31320 index_location_lists (comp_unit_die ());
31321 }
31322
31323 if (addr_index_table != NULL)
31324 {
31325 unsigned int index = 0;
31326 addr_index_table
31327 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31328 (&index);
31329 }
31330 }
31331
31332 loc_list_idx = 0;
31333 if (have_location_lists)
31334 {
31335 optimize_location_lists (comp_unit_die ());
31336 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31337 if (dwarf_version >= 5 && dwarf_split_debug_info)
31338 assign_location_list_indexes (comp_unit_die ());
31339 }
31340
31341 save_macinfo_strings ();
31342
31343 if (dwarf_split_debug_info)
31344 {
31345 unsigned int index = 0;
31346
31347 /* Add attributes common to skeleton compile_units and
31348 type_units. Because these attributes include strings, it
31349 must be done before freezing the string table. Top-level
31350 skeleton die attrs are added when the skeleton type unit is
31351 created, so ensure it is created by this point. */
31352 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31353 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31354 }
31355
31356 /* Output all of the compilation units. We put the main one last so that
31357 the offsets are available to output_pubnames. */
31358 for (node = cu_die_list; node; node = node->next)
31359 output_comp_unit (node->die, 0, NULL);
31360
31361 hash_table<comdat_type_hasher> comdat_type_table (100);
31362 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31363 {
31364 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31365
31366 /* Don't output duplicate types. */
31367 if (*slot != HTAB_EMPTY_ENTRY)
31368 continue;
31369
31370 /* Add a pointer to the line table for the main compilation unit
31371 so that the debugger can make sense of DW_AT_decl_file
31372 attributes. */
31373 if (debug_info_level >= DINFO_LEVEL_TERSE)
31374 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31375 (!dwarf_split_debug_info
31376 ? dl_section_ref
31377 : debug_skeleton_line_section_label));
31378
31379 output_comdat_type_unit (ctnode);
31380 *slot = ctnode;
31381 }
31382
31383 if (dwarf_split_debug_info)
31384 {
31385 int mark;
31386 struct md5_ctx ctx;
31387
31388 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31389 index_rnglists ();
31390
31391 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31392 md5_init_ctx (&ctx);
31393 mark = 0;
31394 die_checksum (comp_unit_die (), &ctx, &mark);
31395 unmark_all_dies (comp_unit_die ());
31396 md5_finish_ctx (&ctx, checksum);
31397
31398 if (dwarf_version < 5)
31399 {
31400 /* Use the first 8 bytes of the checksum as the dwo_id,
31401 and add it to both comp-unit DIEs. */
31402 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31403 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31404 }
31405
31406 /* Add the base offset of the ranges table to the skeleton
31407 comp-unit DIE. */
31408 if (!vec_safe_is_empty (ranges_table))
31409 {
31410 if (dwarf_version >= 5)
31411 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31412 ranges_base_label);
31413 else
31414 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31415 ranges_section_label);
31416 }
31417
31418 switch_to_section (debug_addr_section);
31419     /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31420        which GCC uses to implement -gsplit-dwarf as a GNU extension
31421        before DWARF5, didn't have a header for .debug_addr units.
31422        DWARF5 specifies a small header when address tables are used.  */
31423 if (dwarf_version >= 5)
31424 {
31425 unsigned int last_idx = 0;
31426 unsigned long addrs_length;
31427
31428 addr_index_table->traverse_noresize
31429 <unsigned int *, count_index_addrs> (&last_idx);
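	  /* The unit length excludes itself but covers the 2-byte version,
	     1-byte address size and 1-byte segment selector size fields
	     (hence the + 4) plus the address entries themselves.  */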
31430 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31431
31432 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31433 dw2_asm_output_data (4, 0xffffffff,
31434 "Escape value for 64-bit DWARF extension");
31435 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31436 "Length of Address Unit");
31437 dw2_asm_output_data (2, 5, "DWARF addr version");
31438 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31439 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31440 }
31441 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31442 output_addr_table ();
31443 }
31444
31445 /* Output the main compilation unit if non-empty or if .debug_macinfo
31446 or .debug_macro will be emitted. */
31447 output_comp_unit (comp_unit_die (), have_macinfo,
31448 dwarf_split_debug_info ? checksum : NULL);
31449
31450 if (dwarf_split_debug_info && info_section_emitted)
31451 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31452
31453 /* Output the abbreviation table. */
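  /* Slot zero of abbrev_die_table is a dummy entry, so a length of 1
     means no abbreviations were generated and the section can be
     omitted.  */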
31454 if (vec_safe_length (abbrev_die_table) != 1)
31455 {
31456 switch_to_section (debug_abbrev_section);
31457 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31458 output_abbrev_section ();
31459 }
31460
31461 /* Output location list section if necessary. */
31462 if (have_location_lists)
31463 {
31464 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31465 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31466 /* Output the location lists info. */
31467 switch_to_section (debug_loc_section);
31468 if (dwarf_version >= 5)
31469 {
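	  /* DWARF 5 prescribes a header for the location list section:
	     unit length, version, address size, segment selector size
	     and offset entry count.  */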
31470 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31471 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31472 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31473 dw2_asm_output_data (4, 0xffffffff,
31474 "Initial length escape value indicating "
31475 "64-bit DWARF extension");
31476 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31477 "Length of Location Lists");
31478 ASM_OUTPUT_LABEL (asm_out_file, l1);
31479 output_dwarf_version ();
31480 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31481 dw2_asm_output_data (1, 0, "Segment Size");
31482 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31483 "Offset Entry Count");
31484 }
31485 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31486 if (dwarf_version >= 5 && dwarf_split_debug_info)
31487 {
31488 unsigned int save_loc_list_idx = loc_list_idx;
31489 loc_list_idx = 0;
31490 output_loclists_offsets (comp_unit_die ());
31491 gcc_assert (save_loc_list_idx == loc_list_idx);
31492 }
31493 output_location_lists (comp_unit_die ());
31494 if (dwarf_version >= 5)
31495 ASM_OUTPUT_LABEL (asm_out_file, l2);
31496 }
31497
31498 output_pubtables ();
31499
31500 /* Output the address range information if a CU (.debug_info section)
31501 was emitted. We output an empty table even if we had no functions
31502      to put in it.  This is because the consumer has no way to tell the
31503 difference between an empty table that we omitted and failure to
31504 generate a table that would have contained data. */
31505 if (info_section_emitted)
31506 {
31507 switch_to_section (debug_aranges_section);
31508 output_aranges ();
31509 }
31510
31511 /* Output ranges section if necessary. */
31512 if (!vec_safe_is_empty (ranges_table))
31513 {
31514 if (dwarf_version >= 5)
31515 output_rnglists (generation);
31516 else
31517 output_ranges ();
31518 }
31519
31520 /* Have to end the macro section. */
31521 if (have_macinfo)
31522 {
31523 switch_to_section (debug_macinfo_section);
31524 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31525 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31526 : debug_skeleton_line_section_label, false);
31527 dw2_asm_output_data (1, 0, "End compilation unit");
31528 }
31529
31530 /* Output the source line correspondence table. We must do this
31531 even if there is no line information. Otherwise, on an empty
31532 translation unit, we will generate a present, but empty,
31533 .debug_info section. IRIX 6.5 `nm' will then complain when
31534 examining the file. This is done late so that any filenames
31535 used by the debug_info section are marked as 'used'. */
31536 switch_to_section (debug_line_section);
31537 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31538 if (! output_asm_line_debug_info ())
31539 output_line_info (false);
31540
31541 if (dwarf_split_debug_info && info_section_emitted)
31542 {
31543 switch_to_section (debug_skeleton_line_section);
31544 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31545 output_line_info (true);
31546 }
31547
31548 /* If we emitted any indirect strings, output the string table too. */
31549 if (debug_str_hash || skeleton_debug_str_hash)
31550 output_indirect_strings ();
31551 if (debug_line_str_hash)
31552 {
31553 switch_to_section (debug_line_str_section);
31554 const enum dwarf_form form = DW_FORM_line_strp;
31555 debug_line_str_hash->traverse<enum dwarf_form,
31556 output_indirect_string> (form);
31557 }
31558
31559 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31560 symview_upper_bound = 0;
31561 if (zero_view_p)
31562 bitmap_clear (zero_view_p);
31563 }
31564
31565 /* Returns a hash value for X (which really is a variable_value_struct). */
31566
31567 inline hashval_t
31568 variable_value_hasher::hash (variable_value_struct *x)
31569 {
31570 return (hashval_t) x->decl_id;
31571 }
31572
31573 /* Return nonzero if decl_id of variable_value_struct X is the same as
31574 UID of decl Y. */
31575
31576 inline bool
31577 variable_value_hasher::equal (variable_value_struct *x, tree y)
31578 {
31579 return x->decl_id == DECL_UID (y);
31580 }
31581
31582 /* Helper function for resolve_variable_value, handle
31583 DW_OP_GNU_variable_value in one location expression.
31584 Return true if exprloc has been changed into loclist. */
31585
31586 static bool
31587 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31588 {
31589 dw_loc_descr_ref next;
31590 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31591 {
31592 next = loc->dw_loc_next;
31593 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31594 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31595 continue;
31596
31597 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31598 if (DECL_CONTEXT (decl) != current_function_decl)
31599 continue;
31600
31601 dw_die_ref ref = lookup_decl_die (decl);
31602 if (ref)
31603 {
31604 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31605 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31606 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31607 continue;
31608 }
31609 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31610 if (l == NULL)
31611 continue;
31612 if (l->dw_loc_next)
31613 {
31614 if (AT_class (a) != dw_val_class_loc)
31615 continue;
31616 switch (a->dw_attr)
31617 {
31618 	      /* The following attributes allow both exprloc and loclist
31619 		 classes, so we can change them into a loclist.  */
31620 case DW_AT_location:
31621 case DW_AT_string_length:
31622 case DW_AT_return_addr:
31623 case DW_AT_data_member_location:
31624 case DW_AT_frame_base:
31625 case DW_AT_segment:
31626 case DW_AT_static_link:
31627 case DW_AT_use_location:
31628 case DW_AT_vtable_elem_location:
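	      /* Splice the location list into the original expression:
		 the operations before the DW_OP_GNU_variable_value are
		 prepended to each element of the list and the operations
		 after it are appended.  */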
31629 if (prev)
31630 {
31631 prev->dw_loc_next = NULL;
31632 prepend_loc_descr_to_each (l, AT_loc (a));
31633 }
31634 if (next)
31635 add_loc_descr_to_each (l, next);
31636 a->dw_attr_val.val_class = dw_val_class_loc_list;
31637 a->dw_attr_val.val_entry = NULL;
31638 a->dw_attr_val.v.val_loc_list = l;
31639 have_location_lists = true;
31640 return true;
31641 	      /* The following attributes allow both exprloc and reference,
31642 		 so if the whole expression is DW_OP_GNU_variable_value alone
31643 		 we could transform it into a reference.  */
31644 case DW_AT_byte_size:
31645 case DW_AT_bit_size:
31646 case DW_AT_lower_bound:
31647 case DW_AT_upper_bound:
31648 case DW_AT_bit_stride:
31649 case DW_AT_count:
31650 case DW_AT_allocated:
31651 case DW_AT_associated:
31652 case DW_AT_byte_stride:
31653 if (prev == NULL && next == NULL)
31654 break;
31655 /* FALLTHRU */
31656 default:
31657 if (dwarf_strict)
31658 continue;
31659 break;
31660 }
31661 	  /* Create a DW_TAG_variable that we can refer to.  */
31662 gen_decl_die (decl, NULL_TREE, NULL,
31663 lookup_decl_die (current_function_decl));
31664 ref = lookup_decl_die (decl);
31665 if (ref)
31666 {
31667 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31668 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31669 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31670 }
31671 continue;
31672 }
31673 if (prev)
31674 {
31675 prev->dw_loc_next = l->expr;
31676 add_loc_descr (&prev->dw_loc_next, next);
31677 free_loc_descr (loc, NULL);
31678 next = prev->dw_loc_next;
31679 }
31680 else
31681 {
31682 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31683 add_loc_descr (&loc, next);
31684 next = loc;
31685 }
31686 loc = prev;
31687 }
31688 return false;
31689 }
31690
31691 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31692
31693 static void
31694 resolve_variable_value (dw_die_ref die)
31695 {
31696 dw_attr_node *a;
31697 dw_loc_list_ref loc;
31698 unsigned ix;
31699
31700 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31701 switch (AT_class (a))
31702 {
31703 case dw_val_class_loc:
31704 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31705 break;
31706 /* FALLTHRU */
31707 case dw_val_class_loc_list:
31708 loc = AT_loc_list (a);
31709 gcc_assert (loc);
31710 for (; loc; loc = loc->dw_loc_next)
31711 resolve_variable_value_in_expr (a, loc->expr);
31712 break;
31713 default:
31714 break;
31715 }
31716 }
31717
31718 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31719 temporaries in the current function. */
31720
31721 static void
31722 resolve_variable_values (void)
31723 {
31724 if (!variable_value_hash || !current_function_decl)
31725 return;
31726
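  /* Look up the DIEs whose location expressions note_variable_value_in_expr
     recorded as referring to variables of the current function.  */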
31727 struct variable_value_struct *node
31728 = variable_value_hash->find_with_hash (current_function_decl,
31729 DECL_UID (current_function_decl));
31730
31731 if (node == NULL)
31732 return;
31733
31734 unsigned int i;
31735 dw_die_ref die;
31736 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31737 resolve_variable_value (die);
31738 }
31739
31740 /* Helper function for note_variable_value, handle one location
31741 expression. */
31742
31743 static void
31744 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31745 {
31746 for (; loc; loc = loc->dw_loc_next)
31747 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31748 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31749 {
31750 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31751 dw_die_ref ref = lookup_decl_die (decl);
31752 if (! ref && (flag_generate_lto || flag_generate_offload))
31753 {
31754 	    /* ??? This is somewhat of a hack because we do not create DIEs
31755 	       for variables not in BLOCK trees early, but when generating
31756 early LTO output we need the dw_val_class_decl_ref to be
31757 fully resolved. For fat LTO objects we'd also like to
31758 undo this after LTO dwarf output. */
31759 gcc_assert (DECL_CONTEXT (decl));
31760 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31761 gcc_assert (ctx != NULL);
31762 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31763 ref = lookup_decl_die (decl);
31764 gcc_assert (ref != NULL);
31765 }
31766 if (ref)
31767 {
31768 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31769 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31770 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31771 continue;
31772 }
31773 if (VAR_P (decl)
31774 && DECL_CONTEXT (decl)
31775 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31776 && lookup_decl_die (DECL_CONTEXT (decl)))
31777 {
31778 if (!variable_value_hash)
31779 variable_value_hash
31780 = hash_table<variable_value_hasher>::create_ggc (10);
31781
31782 tree fndecl = DECL_CONTEXT (decl);
31783 struct variable_value_struct *node;
31784 struct variable_value_struct **slot
31785 = variable_value_hash->find_slot_with_hash (fndecl,
31786 DECL_UID (fndecl),
31787 INSERT);
31788 if (*slot == NULL)
31789 {
31790 node = ggc_cleared_alloc<variable_value_struct> ();
31791 node->decl_id = DECL_UID (fndecl);
31792 *slot = node;
31793 }
31794 else
31795 node = *slot;
31796
31797 vec_safe_push (node->dies, die);
31798 }
31799 }
31800 }
31801
31802 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31803 with dw_val_class_decl_ref operand. */
31804
31805 static void
31806 note_variable_value (dw_die_ref die)
31807 {
31808 dw_die_ref c;
31809 dw_attr_node *a;
31810 dw_loc_list_ref loc;
31811 unsigned ix;
31812
31813 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31814 switch (AT_class (a))
31815 {
31816 case dw_val_class_loc_list:
31817 loc = AT_loc_list (a);
31818 gcc_assert (loc);
31819 if (!loc->noted_variable_value)
31820 {
31821 loc->noted_variable_value = 1;
31822 for (; loc; loc = loc->dw_loc_next)
31823 note_variable_value_in_expr (die, loc->expr);
31824 }
31825 break;
31826 case dw_val_class_loc:
31827 note_variable_value_in_expr (die, AT_loc (a));
31828 break;
31829 default:
31830 break;
31831 }
31832
31833 /* Mark children. */
31834 FOR_EACH_CHILD (die, c, note_variable_value (c));
31835 }
31836
31837 /* Perform any cleanups needed after the early debug generation pass
31838 has run. */
31839
31840 static void
31841 dwarf2out_early_finish (const char *filename)
31842 {
31843 set_early_dwarf s;
31844 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31845
31846   /* PCH might result in the DW_AT_producer string being restored from the
31847      header compilation, so always fill it with an empty string initially
31848      and overwrite it only here.  */
31849 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31850 producer_string = gen_producer_string ();
31851 producer->dw_attr_val.v.val_str->refcount--;
31852 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31853
31854 /* Add the name for the main input file now. We delayed this from
31855 dwarf2out_init to avoid complications with PCH. */
31856 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31857 add_comp_dir_attribute (comp_unit_die ());
31858
31859 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31860 DW_AT_comp_dir into .debug_line_str section. */
31861 if (!dwarf2out_as_loc_support
31862 && dwarf_version >= 5
31863 && DWARF5_USE_DEBUG_LINE_STR)
31864 {
31865 for (int i = 0; i < 2; i++)
31866 {
31867 dw_attr_node *a = get_AT (comp_unit_die (),
31868 i ? DW_AT_comp_dir : DW_AT_name);
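	  /* Strings no longer than an offset would gain nothing from being
	     moved to .debug_line_str, so leave them alone.  */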
31869 if (a == NULL
31870 || AT_class (a) != dw_val_class_str
31871 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31872 continue;
31873
31874 if (! debug_line_str_hash)
31875 debug_line_str_hash
31876 = hash_table<indirect_string_hasher>::create_ggc (10);
31877
31878 struct indirect_string_node *node
31879 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31880 set_indirect_string (node);
31881 node->form = DW_FORM_line_strp;
31882 a->dw_attr_val.v.val_str->refcount--;
31883 a->dw_attr_val.v.val_str = node;
31884 }
31885 }
31886
31887 /* With LTO early dwarf was really finished at compile-time, so make
31888 sure to adjust the phase after annotating the LTRANS CU DIE. */
31889 if (in_lto_p)
31890 {
31891 early_dwarf_finished = true;
31892 return;
31893 }
31894
31895 /* Walk through the list of incomplete types again, trying once more to
31896 emit full debugging info for them. */
31897 retry_incomplete_types ();
31898
31899 /* The point here is to flush out the limbo list so that it is empty
31900 and we don't need to stream it for LTO. */
31901 flush_limbo_die_list ();
31902
31903 gen_scheduled_generic_parms_dies ();
31904 gen_remaining_tmpl_value_param_die_attribute ();
31905
31906 /* Add DW_AT_linkage_name for all deferred DIEs. */
31907 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31908 {
31909 tree decl = node->created_for;
31910 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31911 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31912 ended up in deferred_asm_name before we knew it was
31913 constant and never written to disk. */
31914 && DECL_ASSEMBLER_NAME (decl))
31915 {
31916 add_linkage_attr (node->die, decl);
31917 move_linkage_attr (node->die);
31918 }
31919 }
31920 deferred_asm_name = NULL;
31921
31922 if (flag_eliminate_unused_debug_types)
31923 prune_unused_types ();
31924
31925 /* Generate separate COMDAT sections for type DIEs. */
31926 if (use_debug_types)
31927 {
31928 break_out_comdat_types (comp_unit_die ());
31929
31930 /* Each new type_unit DIE was added to the limbo die list when created.
31931 Since these have all been added to comdat_type_list, clear the
31932 limbo die list. */
31933 limbo_die_list = NULL;
31934
31935 /* For each new comdat type unit, copy declarations for incomplete
31936 types to make the new unit self-contained (i.e., no direct
31937 references to the main compile unit). */
31938 for (comdat_type_node *ctnode = comdat_type_list;
31939 ctnode != NULL; ctnode = ctnode->next)
31940 copy_decls_for_unworthy_types (ctnode->root_die);
31941 copy_decls_for_unworthy_types (comp_unit_die ());
31942
31943 /* In the process of copying declarations from one unit to another,
31944 we may have left some declarations behind that are no longer
31945 referenced. Prune them. */
31946 prune_unused_types ();
31947 }
31948
31949 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31950 with dw_val_class_decl_ref operand. */
31951 note_variable_value (comp_unit_die ());
31952 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31953 note_variable_value (node->die);
31954 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31955 ctnode = ctnode->next)
31956 note_variable_value (ctnode->root_die);
31957 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31958 note_variable_value (node->die);
31959
31960 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31961 both the main_cu and all skeleton TUs. Making this call unconditional
31962 would end up either adding a second copy of the AT_pubnames attribute, or
31963 requiring a special case in add_top_level_skeleton_die_attrs. */
31964 if (!dwarf_split_debug_info)
31965 add_AT_pubnames (comp_unit_die ());
31966
31967 /* The early debug phase is now finished. */
31968 early_dwarf_finished = true;
31969
31970 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31971 if ((!flag_generate_lto && !flag_generate_offload)
31972 /* FIXME: Disable debug info generation for PE-COFF targets since the
31973 copy_lto_debug_sections operation of the simple object support in
31974 libiberty is not implemented for them yet. */
31975 || TARGET_PECOFF)
31976 return;
31977
31978   /* Now that we are going to output for LTO, initialize sections and labels
31979      to the LTO variants.  We don't need a random-seed postfix like other
31980      LTO sections, as linking the LTO debug sections into one in a partial
31981      link is fine.  */
31982 init_sections_and_labels (true);
31983
31984 /* The output below is modeled after dwarf2out_finish with all
31985 location related output removed and some LTO specific changes.
31986 Some refactoring might make both smaller and easier to match up. */
31987
31988   /* Traverse the DIE's and add sibling attributes to those DIE's
31989 that have children. */
31990 add_sibling_attributes (comp_unit_die ());
31991 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31992 add_sibling_attributes (node->die);
31993 for (comdat_type_node *ctnode = comdat_type_list;
31994 ctnode != NULL; ctnode = ctnode->next)
31995 add_sibling_attributes (ctnode->root_die);
31996
31997 /* AIX Assembler inserts the length, so adjust the reference to match the
31998 offset expected by debuggers. */
31999 strcpy (dl_section_ref, debug_line_section_label);
32000 if (XCOFF_DEBUGGING_INFO)
32001 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32002
32003 if (debug_info_level >= DINFO_LEVEL_TERSE)
32004 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32005
32006 if (have_macinfo)
32007 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32008 macinfo_section_label);
32009
32010 save_macinfo_strings ();
32011
32012 if (dwarf_split_debug_info)
32013 {
32014 unsigned int index = 0;
32015 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32016 }
32017
32018 /* Output all of the compilation units. We put the main one last so that
32019 the offsets are available to output_pubnames. */
32020 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32021 output_comp_unit (node->die, 0, NULL);
32022
32023 hash_table<comdat_type_hasher> comdat_type_table (100);
32024 for (comdat_type_node *ctnode = comdat_type_list;
32025 ctnode != NULL; ctnode = ctnode->next)
32026 {
32027 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32028
32029 /* Don't output duplicate types. */
32030 if (*slot != HTAB_EMPTY_ENTRY)
32031 continue;
32032
32033 /* Add a pointer to the line table for the main compilation unit
32034 so that the debugger can make sense of DW_AT_decl_file
32035 attributes. */
32036 if (debug_info_level >= DINFO_LEVEL_TERSE)
32037 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32038 (!dwarf_split_debug_info
32039 ? debug_line_section_label
32040 : debug_skeleton_line_section_label));
32041
32042 output_comdat_type_unit (ctnode);
32043 *slot = ctnode;
32044 }
32045
32046 /* Stick a unique symbol to the main debuginfo section. */
32047 compute_comp_unit_symbol (comp_unit_die ());
32048
32049 /* Output the main compilation unit. We always need it if only for
32050 the CU symbol. */
32051 output_comp_unit (comp_unit_die (), true, NULL);
32052
32053 /* Output the abbreviation table. */
32054 if (vec_safe_length (abbrev_die_table) != 1)
32055 {
32056 switch_to_section (debug_abbrev_section);
32057 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32058 output_abbrev_section ();
32059 }
32060
32061 /* Have to end the macro section. */
32062 if (have_macinfo)
32063 {
32064 /* We have to save macinfo state if we need to output it again
32065 for the FAT part of the object. */
32066 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32067 if (flag_fat_lto_objects)
32068 macinfo_table = macinfo_table->copy ();
32069
32070 switch_to_section (debug_macinfo_section);
32071 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32072 output_macinfo (debug_line_section_label, true);
32073 dw2_asm_output_data (1, 0, "End compilation unit");
32074
32075 if (flag_fat_lto_objects)
32076 {
32077 vec_free (macinfo_table);
32078 macinfo_table = saved_macinfo_table;
32079 }
32080 }
32081
32082 /* Emit a skeleton debug_line section. */
32083 switch_to_section (debug_line_section);
32084 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32085 output_line_info (true);
32086
32087 /* If we emitted any indirect strings, output the string table too. */
32088 if (debug_str_hash || skeleton_debug_str_hash)
32089 output_indirect_strings ();
32090
32091 /* Switch back to the text section. */
32092 switch_to_section (text_section);
32093 }
32094
32095 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32096 within the same process. For use by toplev::finalize. */
32097
32098 void
32099 dwarf2out_c_finalize (void)
32100 {
32101 last_var_location_insn = NULL;
32102 cached_next_real_insn = NULL;
32103 used_rtx_array = NULL;
32104 incomplete_types = NULL;
32105 decl_scope_table = NULL;
32106 debug_info_section = NULL;
32107 debug_skeleton_info_section = NULL;
32108 debug_abbrev_section = NULL;
32109 debug_skeleton_abbrev_section = NULL;
32110 debug_aranges_section = NULL;
32111 debug_addr_section = NULL;
32112 debug_macinfo_section = NULL;
32113 debug_line_section = NULL;
32114 debug_skeleton_line_section = NULL;
32115 debug_loc_section = NULL;
32116 debug_pubnames_section = NULL;
32117 debug_pubtypes_section = NULL;
32118 debug_str_section = NULL;
32119 debug_line_str_section = NULL;
32120 debug_str_dwo_section = NULL;
32121 debug_str_offsets_section = NULL;
32122 debug_ranges_section = NULL;
32123 debug_frame_section = NULL;
32124 fde_vec = NULL;
32125 debug_str_hash = NULL;
32126 debug_line_str_hash = NULL;
32127 skeleton_debug_str_hash = NULL;
32128 dw2_string_counter = 0;
32129 have_multiple_function_sections = false;
32130 text_section_used = false;
32131 cold_text_section_used = false;
32132 cold_text_section = NULL;
32133 current_unit_personality = NULL;
32134
32135 early_dwarf = false;
32136 early_dwarf_finished = false;
32137
32138 next_die_offset = 0;
32139 single_comp_unit_die = NULL;
32140 comdat_type_list = NULL;
32141 limbo_die_list = NULL;
32142 file_table = NULL;
32143 decl_die_table = NULL;
32144 common_block_die_table = NULL;
32145 decl_loc_table = NULL;
32146 call_arg_locations = NULL;
32147 call_arg_loc_last = NULL;
32148 call_site_count = -1;
32149 tail_call_site_count = -1;
32150 cached_dw_loc_list_table = NULL;
32151 abbrev_die_table = NULL;
32152 delete dwarf_proc_stack_usage_map;
32153 dwarf_proc_stack_usage_map = NULL;
32154 line_info_label_num = 0;
32155 cur_line_info_table = NULL;
32156 text_section_line_info = NULL;
32157 cold_text_section_line_info = NULL;
32158 separate_line_info = NULL;
32159 info_section_emitted = false;
32160 pubname_table = NULL;
32161 pubtype_table = NULL;
32162 macinfo_table = NULL;
32163 ranges_table = NULL;
32164 ranges_by_label = NULL;
32165 rnglist_idx = 0;
32166 have_location_lists = false;
32167 loclabel_num = 0;
32168 poc_label_num = 0;
32169 last_emitted_file = NULL;
32170 label_num = 0;
32171 tmpl_value_parm_die_table = NULL;
32172 generic_type_instances = NULL;
32173 frame_pointer_fb_offset = 0;
32174 frame_pointer_fb_offset_valid = false;
32175 base_types.release ();
32176 XDELETEVEC (producer_string);
32177 producer_string = NULL;
32178 }
32179
32180 #include "gt-dwarf2out.h"