re PR debug/86462 (Quite huge debug info size increase introduced in r262511)
[gcc.git] gcc/dwarf2out.c
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47          information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
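/* An illustrative example (not something this file emits verbatim): for a
   typical x86-64 prologue

     push %rbp           ->  DW_CFA_advance_loc, DW_CFA_def_cfa_offset 16,
                             DW_CFA_offset r6 (rbp) at CFA-16
     mov  %rsp, %rbp     ->  DW_CFA_advance_loc, DW_CFA_def_cfa_register r6

   the CFA itself stays fixed at the value SP had just before the call insn;
   only the rule for computing it changes from SP+offset to RBP+offset.  */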
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148    completed at some later time.  incomplete_types needs to be a
149    vec<tree, va_gc> * because we want to tell the garbage collector about
150 it. */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* Pointers to various DWARF2 sections. */
154 static GTY(()) section *debug_info_section;
155 static GTY(()) section *debug_skeleton_info_section;
156 static GTY(()) section *debug_abbrev_section;
157 static GTY(()) section *debug_skeleton_abbrev_section;
158 static GTY(()) section *debug_aranges_section;
159 static GTY(()) section *debug_addr_section;
160 static GTY(()) section *debug_macinfo_section;
161 static const char *debug_macinfo_section_name;
162 static unsigned macinfo_label_base = 1;
163 static GTY(()) section *debug_line_section;
164 static GTY(()) section *debug_skeleton_line_section;
165 static GTY(()) section *debug_loc_section;
166 static GTY(()) section *debug_pubnames_section;
167 static GTY(()) section *debug_pubtypes_section;
168 static GTY(()) section *debug_str_section;
169 static GTY(()) section *debug_line_str_section;
170 static GTY(()) section *debug_str_dwo_section;
171 static GTY(()) section *debug_str_offsets_section;
172 static GTY(()) section *debug_ranges_section;
173 static GTY(()) section *debug_frame_section;
174
175 /* Maximum size (in bytes) of an artificially generated label. */
176 #define MAX_ARTIFICIAL_LABEL_BYTES 40
177
178 /* According to the (draft) DWARF 3 specification, the initial length
179 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
180 bytes are 0xffffffff, followed by the length stored in the next 8
181 bytes.
182
183 However, the SGI/MIPS ABI uses an initial length which is equal to
184 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
185
186 #ifndef DWARF_INITIAL_LENGTH_SIZE
187 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
188 #endif
189
190 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
191 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
192 #endif
193
194 /* Round SIZE up to the nearest BOUNDARY. */
195 #define DWARF_ROUND(SIZE,BOUNDARY) \
196 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
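/* For example, DWARF_ROUND (10, 4) evaluates to 12 and DWARF_ROUND (8, 4)
   stays 8.  */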
197
198 /* CIE identifier. */
199 #if HOST_BITS_PER_WIDE_INT >= 64
200 #define DWARF_CIE_ID \
201 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
202 #else
203 #define DWARF_CIE_ID DW_CIE_ID
204 #endif
205
206
207 /* A vector for a table that contains frame description
208 information for each routine. */
209 #define NOT_INDEXED (-1U)
210 #define NO_INDEX_ASSIGNED (-2U)
211
212 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
213
214 struct GTY((for_user)) indirect_string_node {
215 const char *str;
216 unsigned int refcount;
217 enum dwarf_form form;
218 char *label;
219 unsigned int index;
220 };
221
222 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
223 {
224 typedef const char *compare_type;
225
226 static hashval_t hash (indirect_string_node *);
227 static bool equal (indirect_string_node *, const char *);
228 };
229
230 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
231
232 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
233
234 /* With split_debug_info, both the comp_dir and dwo_name go in the
235 main object file, rather than the dwo, similar to the force_direct
236 parameter elsewhere but with additional complications:
237
238 1) The string is needed in both the main object file and the dwo.
239 That is, the comp_dir and dwo_name will appear in both places.
240
241 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
242 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
243
244 3) GCC chooses the form to use late, depending on the size and
245 reference count.
246
247    Rather than forcing all the debug string handling functions and
248 callers to deal with these complications, simply use a separate,
249 special-cased string table for any attribute that should go in the
250 main object file. This limits the complexity to just the places
251 that need it. */
252
253 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
254
255 static GTY(()) int dw2_string_counter;
256
257 /* True if the compilation unit places functions in more than one section. */
258 static GTY(()) bool have_multiple_function_sections = false;
259
260 /* Whether the default text and cold text sections have been used at all. */
261 static GTY(()) bool text_section_used = false;
262 static GTY(()) bool cold_text_section_used = false;
263
264 /* The default cold text section. */
265 static GTY(()) section *cold_text_section;
266
267 /* The DIE for C++14 'auto' in a function return type. */
268 static GTY(()) dw_die_ref auto_die;
269
270 /* The DIE for C++14 'decltype(auto)' in a function return type. */
271 static GTY(()) dw_die_ref decltype_auto_die;
272
273 /* Forward declarations for functions defined in this file. */
274
275 static void output_call_frame_info (int);
276 static void dwarf2out_note_section_used (void);
277
278 /* Personality decl of current unit. Used only when assembler does not support
279 personality CFI. */
280 static GTY(()) rtx current_unit_personality;
281
282 /* Whether an eh_frame section is required. */
283 static GTY(()) bool do_eh_frame = false;
284
285 /* .debug_rnglists next index. */
286 static unsigned int rnglist_idx;
287
288 /* Data and reference forms for relocatable data. */
289 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
290 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
291
292 #ifndef DEBUG_FRAME_SECTION
293 #define DEBUG_FRAME_SECTION ".debug_frame"
294 #endif
295
296 #ifndef FUNC_BEGIN_LABEL
297 #define FUNC_BEGIN_LABEL "LFB"
298 #endif
299
300 #ifndef FUNC_END_LABEL
301 #define FUNC_END_LABEL "LFE"
302 #endif
303
304 #ifndef PROLOGUE_END_LABEL
305 #define PROLOGUE_END_LABEL "LPE"
306 #endif
307
308 #ifndef EPILOGUE_BEGIN_LABEL
309 #define EPILOGUE_BEGIN_LABEL "LEB"
310 #endif
311
312 #ifndef FRAME_BEGIN_LABEL
313 #define FRAME_BEGIN_LABEL "Lframe"
314 #endif
315 #define CIE_AFTER_SIZE_LABEL "LSCIE"
316 #define CIE_END_LABEL "LECIE"
317 #define FDE_LABEL "LSFDE"
318 #define FDE_AFTER_SIZE_LABEL "LASFDE"
319 #define FDE_END_LABEL "LEFDE"
320 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
321 #define LINE_NUMBER_END_LABEL "LELT"
322 #define LN_PROLOG_AS_LABEL "LASLTP"
323 #define LN_PROLOG_END_LABEL "LELTP"
324 #define DIE_LABEL_PREFIX "DW"
325 \f
326 /* Match the base name of a file to the base name of a compilation unit. */
327
328 static int
329 matches_main_base (const char *path)
330 {
331 /* Cache the last query. */
332 static const char *last_path = NULL;
333 static int last_match = 0;
334 if (path != last_path)
335 {
336 const char *base;
337 int length = base_of_path (path, &base);
338 last_path = path;
339 last_match = (length == main_input_baselength
340 && memcmp (base, main_input_basename, length) == 0);
341 }
342 return last_match;
343 }
344
345 #ifdef DEBUG_DEBUG_STRUCT
346
347 static int
348 dump_struct_debug (tree type, enum debug_info_usage usage,
349 enum debug_struct_file criterion, int generic,
350 int matches, int result)
351 {
352 /* Find the type name. */
353 tree type_decl = TYPE_STUB_DECL (type);
354 tree t = type_decl;
355 const char *name = 0;
356 if (TREE_CODE (t) == TYPE_DECL)
357 t = DECL_NAME (t);
358 if (t)
359 name = IDENTIFIER_POINTER (t);
360
361 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
362 criterion,
363 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
364 matches ? "bas" : "hdr",
365 generic ? "gen" : "ord",
366 usage == DINFO_USAGE_DFN ? ";" :
367 usage == DINFO_USAGE_DIR_USE ? "." : "*",
368 result,
369 (void*) type_decl, name);
370 return result;
371 }
372 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
373 dump_struct_debug (type, usage, criterion, generic, matches, result)
374
375 #else
376
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 (result)
379
380 #endif
381
382 /* Get the number of HOST_WIDE_INTs needed to represent the precision
383 of the number. Some constants have a large uniform precision, so
384 we get the precision needed for the actual value of the number. */
385
386 static unsigned int
387 get_full_len (const wide_int &op)
388 {
389 int prec = wi::min_precision (op, UNSIGNED);
390 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
391 / HOST_BITS_PER_WIDE_INT);
392 }
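/* For instance, with a 64-bit HOST_WIDE_INT, a value whose minimum precision
   is 70 bits yields 2, while any value fitting in 64 bits yields 1.  */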
393
394 static bool
395 should_emit_struct_debug (tree type, enum debug_info_usage usage)
396 {
397 enum debug_struct_file criterion;
398 tree type_decl;
399 bool generic = lang_hooks.types.generic_p (type);
400
401 if (generic)
402 criterion = debug_struct_generic[usage];
403 else
404 criterion = debug_struct_ordinary[usage];
405
406 if (criterion == DINFO_STRUCT_FILE_NONE)
407 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
408 if (criterion == DINFO_STRUCT_FILE_ANY)
409 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
410
411 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
412
413 if (type_decl != NULL)
414 {
415 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
419 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
420 }
421
422 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
423 }
424 \f
425 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
426 switch to the data section instead, and write out a synthetic start label
427 for collect2 the first time around. */
428
429 static void
430 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
431 {
432 if (eh_frame_section == 0)
433 {
434 int flags;
435
436 if (EH_TABLES_CAN_BE_READ_ONLY)
437 {
438 int fde_encoding;
439 int per_encoding;
440 int lsda_encoding;
441
442 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
443 /*global=*/0);
444 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
445 /*global=*/1);
446 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
447 /*global=*/0);
448 flags = ((! flag_pic
449 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
450 && (fde_encoding & 0x70) != DW_EH_PE_aligned
451 && (per_encoding & 0x70) != DW_EH_PE_absptr
452 && (per_encoding & 0x70) != DW_EH_PE_aligned
453 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
454 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
455 ? 0 : SECTION_WRITE);
456 }
457 else
458 flags = SECTION_WRITE;
459
460 #ifdef EH_FRAME_SECTION_NAME
461 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
462 #else
463 eh_frame_section = ((flags == SECTION_WRITE)
464 ? data_section : readonly_data_section);
465 #endif /* EH_FRAME_SECTION_NAME */
466 }
467
468 switch_to_section (eh_frame_section);
469
470 #ifdef EH_FRAME_THROUGH_COLLECT2
471 /* We have no special eh_frame section. Emit special labels to guide
472 collect2. */
473 if (!back)
474 {
475 tree label = get_file_function_name ("F");
476 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
477 targetm.asm_out.globalize_label (asm_out_file,
478 IDENTIFIER_POINTER (label));
479 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
480 }
481 #endif
482 }
483
484 /* Switch [BACK] to the eh or debug frame table section, depending on
485 FOR_EH. */
486
487 static void
488 switch_to_frame_table_section (int for_eh, bool back)
489 {
490 if (for_eh)
491 switch_to_eh_frame_section (back);
492 else
493 {
494 if (!debug_frame_section)
495 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
496 SECTION_DEBUG, NULL);
497 switch_to_section (debug_frame_section);
498 }
499 }
500
501 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
502
503 enum dw_cfi_oprnd_type
504 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
505 {
506 switch (cfi)
507 {
508 case DW_CFA_nop:
509 case DW_CFA_GNU_window_save:
510 case DW_CFA_remember_state:
511 case DW_CFA_restore_state:
512 return dw_cfi_oprnd_unused;
513
514 case DW_CFA_set_loc:
515 case DW_CFA_advance_loc1:
516 case DW_CFA_advance_loc2:
517 case DW_CFA_advance_loc4:
518 case DW_CFA_MIPS_advance_loc8:
519 return dw_cfi_oprnd_addr;
520
521 case DW_CFA_offset:
522 case DW_CFA_offset_extended:
523 case DW_CFA_def_cfa:
524 case DW_CFA_offset_extended_sf:
525 case DW_CFA_def_cfa_sf:
526 case DW_CFA_restore:
527 case DW_CFA_restore_extended:
528 case DW_CFA_undefined:
529 case DW_CFA_same_value:
530 case DW_CFA_def_cfa_register:
531 case DW_CFA_register:
532 case DW_CFA_expression:
533 case DW_CFA_val_expression:
534 return dw_cfi_oprnd_reg_num;
535
536 case DW_CFA_def_cfa_offset:
537 case DW_CFA_GNU_args_size:
538 case DW_CFA_def_cfa_offset_sf:
539 return dw_cfi_oprnd_offset;
540
541 case DW_CFA_def_cfa_expression:
542 return dw_cfi_oprnd_loc;
543
544 default:
545 gcc_unreachable ();
546 }
547 }
548
549 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
550
551 enum dw_cfi_oprnd_type
552 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
553 {
554 switch (cfi)
555 {
556 case DW_CFA_def_cfa:
557 case DW_CFA_def_cfa_sf:
558 case DW_CFA_offset:
559 case DW_CFA_offset_extended_sf:
560 case DW_CFA_offset_extended:
561 return dw_cfi_oprnd_offset;
562
563 case DW_CFA_register:
564 return dw_cfi_oprnd_reg_num;
565
566 case DW_CFA_expression:
567 case DW_CFA_val_expression:
568 return dw_cfi_oprnd_loc;
569
570 case DW_CFA_def_cfa_expression:
571 return dw_cfi_oprnd_cfa_loc;
572
573 default:
574 return dw_cfi_oprnd_unused;
575 }
576 }
577
578 /* Output one FDE. */
579
580 static void
581 output_fde (dw_fde_ref fde, bool for_eh, bool second,
582 char *section_start_label, int fde_encoding, char *augmentation,
583 bool any_lsda_needed, int lsda_encoding)
584 {
585 const char *begin, *end;
586 static unsigned int j;
587 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
588
589 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
590 /* empty */ 0);
591 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
592 for_eh + j);
593 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
594 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
595 if (!XCOFF_DEBUGGING_INFO || for_eh)
596 {
597 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
598 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
599 " indicating 64-bit DWARF extension");
600 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
601 "FDE Length");
602 }
603 ASM_OUTPUT_LABEL (asm_out_file, l1);
604
605 if (for_eh)
606 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
607 else
608 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
609 debug_frame_section, "FDE CIE offset");
610
611 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
612 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
613
614 if (for_eh)
615 {
616 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
617 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
618 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
619 "FDE initial location");
620 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
621 end, begin, "FDE address range");
622 }
623 else
624 {
625 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
626 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
627 }
628
629 if (augmentation[0])
630 {
631 if (any_lsda_needed)
632 {
633 int size = size_of_encoded_value (lsda_encoding);
634
635 if (lsda_encoding == DW_EH_PE_aligned)
636 {
637 int offset = ( 4 /* Length */
638 + 4 /* CIE offset */
639 + 2 * size_of_encoded_value (fde_encoding)
640 + 1 /* Augmentation size */ );
641 int pad = -offset & (PTR_SIZE - 1);
642
643 size += pad;
644 gcc_assert (size_of_uleb128 (size) == 1);
645 }
646
647 dw2_asm_output_data_uleb128 (size, "Augmentation size");
648
649 if (fde->uses_eh_lsda)
650 {
651 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
652 fde->funcdef_number);
653 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
654 gen_rtx_SYMBOL_REF (Pmode, l1),
655 false,
656 "Language Specific Data Area");
657 }
658 else
659 {
660 if (lsda_encoding == DW_EH_PE_aligned)
661 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
662 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
663 "Language Specific Data Area (none)");
664 }
665 }
666 else
667 dw2_asm_output_data_uleb128 (0, "Augmentation size");
668 }
669
670 /* Loop through the Call Frame Instructions associated with this FDE. */
671 fde->dw_fde_current_label = begin;
672 {
673 size_t from, until, i;
674
675 from = 0;
676 until = vec_safe_length (fde->dw_fde_cfi);
677
678 if (fde->dw_fde_second_begin == NULL)
679 ;
680 else if (!second)
681 until = fde->dw_fde_switch_cfi_index;
682 else
683 from = fde->dw_fde_switch_cfi_index;
684
685 for (i = from; i < until; i++)
686 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
687 }
688
689 /* If we are to emit a ref/link from function bodies to their frame tables,
690 do it now. This is typically performed to make sure that tables
691 associated with functions are dragged with them and not discarded in
692 garbage collecting links. We need to do this on a per function basis to
693 cope with -ffunction-sections. */
694
695 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
696 /* Switch to the function section, emit the ref to the tables, and
697 switch *back* into the table section. */
698 switch_to_section (function_section (fde->decl));
699 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
700 switch_to_frame_table_section (for_eh, true);
701 #endif
702
703 /* Pad the FDE out to an address sized boundary. */
704 ASM_OUTPUT_ALIGN (asm_out_file,
705 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
706 ASM_OUTPUT_LABEL (asm_out_file, l2);
707
708 j += 2;
709 }
710
711 /* Return true if frame description entry FDE is needed for EH. */
712
713 static bool
714 fde_needed_for_eh_p (dw_fde_ref fde)
715 {
716 if (flag_asynchronous_unwind_tables)
717 return true;
718
719 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
720 return true;
721
722 if (fde->uses_eh_lsda)
723 return true;
724
725 /* If exceptions are enabled, we have collected nothrow info. */
726 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
727 return false;
728
729 return true;
730 }
731
732 /* Output the call frame information, which records how the frame
733    pointer (i.e. the CFA) is computed and where the saved registers
734    are stored.  */
735
736 static void
737 output_call_frame_info (int for_eh)
738 {
739 unsigned int i;
740 dw_fde_ref fde;
741 dw_cfi_ref cfi;
742 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
743 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
744 bool any_lsda_needed = false;
745 char augmentation[6];
746 int augmentation_size;
747 int fde_encoding = DW_EH_PE_absptr;
748 int per_encoding = DW_EH_PE_absptr;
749 int lsda_encoding = DW_EH_PE_absptr;
750 int return_reg;
751 rtx personality = NULL;
752 int dw_cie_version;
753
754 /* Don't emit a CIE if there won't be any FDEs. */
755 if (!fde_vec)
756 return;
757
758 /* Nothing to do if the assembler's doing it all. */
759 if (dwarf2out_do_cfi_asm ())
760 return;
761
762 /* If we don't have any functions we'll want to unwind out of, don't emit
763 any EH unwind information. If we make FDEs linkonce, we may have to
764 emit an empty label for an FDE that wouldn't otherwise be emitted. We
765 want to avoid having an FDE kept around when the function it refers to
766 is discarded. Example where this matters: a primary function template
767 in C++ requires EH information, an explicit specialization doesn't. */
768 if (for_eh)
769 {
770 bool any_eh_needed = false;
771
772 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
773 {
774 if (fde->uses_eh_lsda)
775 any_eh_needed = any_lsda_needed = true;
776 else if (fde_needed_for_eh_p (fde))
777 any_eh_needed = true;
778 else if (TARGET_USES_WEAK_UNWIND_INFO)
779 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
780 }
781
782 if (!any_eh_needed)
783 return;
784 }
785
786 /* We're going to be generating comments, so turn on app. */
787 if (flag_debug_asm)
788 app_enable ();
789
790 /* Switch to the proper frame section, first time. */
791 switch_to_frame_table_section (for_eh, false);
792
793 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
794 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
795
796 /* Output the CIE. */
797 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
798 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
799 if (!XCOFF_DEBUGGING_INFO || for_eh)
800 {
801 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
802 dw2_asm_output_data (4, 0xffffffff,
803 "Initial length escape value indicating 64-bit DWARF extension");
804 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
805 "Length of Common Information Entry");
806 }
807 ASM_OUTPUT_LABEL (asm_out_file, l1);
808
809 /* Now that the CIE pointer is PC-relative for EH,
810 use 0 to identify the CIE. */
811 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
812 (for_eh ? 0 : DWARF_CIE_ID),
813 "CIE Identifier Tag");
814
815 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
816 use CIE version 1, unless that would produce incorrect results
817 due to overflowing the return register column. */
818 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
819 dw_cie_version = 1;
820 if (return_reg >= 256 || dwarf_version > 2)
821 dw_cie_version = 3;
822 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
823
824 augmentation[0] = 0;
825 augmentation_size = 0;
826
827 personality = current_unit_personality;
828 if (for_eh)
829 {
830 char *p;
831
832 /* Augmentation:
833 z Indicates that a uleb128 is present to size the
834 augmentation section.
835 L Indicates the encoding (and thus presence) of
836 an LSDA pointer in the FDE augmentation.
837 R Indicates a non-default pointer encoding for
838 FDE code pointers.
839 P Indicates the presence of an encoding + language
840 personality routine in the CIE augmentation. */
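      /* As a concrete (illustrative) example: a unit with a personality
	 routine, functions that use an LSDA and a non-absptr FDE encoding
	 ends up with the augmentation string "zPLR"; a unit needing none of
	 these keeps the augmentation empty and skips the size uleb128.  */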
841
842 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
843 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
844 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
845
846 p = augmentation + 1;
847 if (personality)
848 {
849 *p++ = 'P';
850 augmentation_size += 1 + size_of_encoded_value (per_encoding);
851 assemble_external_libcall (personality);
852 }
853 if (any_lsda_needed)
854 {
855 *p++ = 'L';
856 augmentation_size += 1;
857 }
858 if (fde_encoding != DW_EH_PE_absptr)
859 {
860 *p++ = 'R';
861 augmentation_size += 1;
862 }
863 if (p > augmentation + 1)
864 {
865 augmentation[0] = 'z';
866 *p = '\0';
867 }
868
869 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
870 if (personality && per_encoding == DW_EH_PE_aligned)
871 {
872 int offset = ( 4 /* Length */
873 + 4 /* CIE Id */
874 + 1 /* CIE version */
875 + strlen (augmentation) + 1 /* Augmentation */
876 + size_of_uleb128 (1) /* Code alignment */
877 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
878 + 1 /* RA column */
879 + 1 /* Augmentation size */
880 + 1 /* Personality encoding */ );
881 int pad = -offset & (PTR_SIZE - 1);
882
883 augmentation_size += pad;
884
885 /* Augmentations should be small, so there's scarce need to
886 iterate for a solution. Die if we exceed one uleb128 byte. */
887 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
888 }
889 }
890
891 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
892 if (dw_cie_version >= 4)
893 {
894 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
895 dw2_asm_output_data (1, 0, "CIE Segment Size");
896 }
897 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
898 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
899 "CIE Data Alignment Factor");
900
901 if (dw_cie_version == 1)
902 dw2_asm_output_data (1, return_reg, "CIE RA Column");
903 else
904 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
905
906 if (augmentation[0])
907 {
908 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
909 if (personality)
910 {
911 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
912 eh_data_format_name (per_encoding));
913 dw2_asm_output_encoded_addr_rtx (per_encoding,
914 personality,
915 true, NULL);
916 }
917
918 if (any_lsda_needed)
919 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
920 eh_data_format_name (lsda_encoding));
921
922 if (fde_encoding != DW_EH_PE_absptr)
923 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
924 eh_data_format_name (fde_encoding));
925 }
926
927 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
928 output_cfi (cfi, NULL, for_eh);
929
930 /* Pad the CIE out to an address sized boundary. */
931 ASM_OUTPUT_ALIGN (asm_out_file,
932 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
933 ASM_OUTPUT_LABEL (asm_out_file, l2);
934
935   /* Loop through all of the FDEs.  */
936 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
937 {
938 unsigned int k;
939
940 /* Don't emit EH unwind info for leaf functions that don't need it. */
941 if (for_eh && !fde_needed_for_eh_p (fde))
942 continue;
943
944 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
945 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
946 augmentation, any_lsda_needed, lsda_encoding);
947 }
948
949 if (for_eh && targetm.terminate_dw2_eh_frame_info)
950 dw2_asm_output_data (4, 0, "End of Table");
951
952 /* Turn off app to make assembly quicker. */
953 if (flag_debug_asm)
954 app_disable ();
955 }
956
957 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
958
959 static void
960 dwarf2out_do_cfi_startproc (bool second)
961 {
962 int enc;
963 rtx ref;
964
965 fprintf (asm_out_file, "\t.cfi_startproc\n");
966
967 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
968 eh unwinders. */
969 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
970 return;
971
972 rtx personality = get_personality_function (current_function_decl);
973
974 if (personality)
975 {
976 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
977 ref = personality;
978
979 /* ??? The GAS support isn't entirely consistent. We have to
980 handle indirect support ourselves, but PC-relative is done
981 in the assembler. Further, the assembler can't handle any
982 of the weirder relocation types. */
983 if (enc & DW_EH_PE_indirect)
984 ref = dw2_force_const_mem (ref, true);
985
986 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
987 output_addr_const (asm_out_file, ref);
988 fputc ('\n', asm_out_file);
989 }
990
991 if (crtl->uses_eh_lsda)
992 {
993 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
994
995 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
996 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
997 current_function_funcdef_no);
998 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
999 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1000
1001 if (enc & DW_EH_PE_indirect)
1002 ref = dw2_force_const_mem (ref, true);
1003
1004 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1005 output_addr_const (asm_out_file, ref);
1006 fputc ('\n', asm_out_file);
1007 }
1008 }
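/* For reference, on a typical x86-64 ELF target the directives emitted above
   look something like (encodings and symbol names are illustrative and
   target dependent):

     .cfi_startproc
     .cfi_personality 0x9b,DW.ref.__gxx_personality_v0
     .cfi_lsda 0x1b,.LLSDA12

   for a function that has both a personality routine and an LSDA.  */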
1009
1010 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1011 this allocation may be done before pass_final. */
1012
1013 dw_fde_ref
1014 dwarf2out_alloc_current_fde (void)
1015 {
1016 dw_fde_ref fde;
1017
1018 fde = ggc_cleared_alloc<dw_fde_node> ();
1019 fde->decl = current_function_decl;
1020 fde->funcdef_number = current_function_funcdef_no;
1021 fde->fde_index = vec_safe_length (fde_vec);
1022 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1023 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1024 fde->nothrow = crtl->nothrow;
1025 fde->drap_reg = INVALID_REGNUM;
1026 fde->vdrap_reg = INVALID_REGNUM;
1027
1028 /* Record the FDE associated with this function. */
1029 cfun->fde = fde;
1030 vec_safe_push (fde_vec, fde);
1031
1032 return fde;
1033 }
1034
1035 /* Output a marker (i.e. a label) for the beginning of a function, before
1036 the prologue. */
1037
1038 void
1039 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1040 unsigned int column ATTRIBUTE_UNUSED,
1041 const char *file ATTRIBUTE_UNUSED)
1042 {
1043 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1044 char * dup_label;
1045 dw_fde_ref fde;
1046 section *fnsec;
1047 bool do_frame;
1048
1049 current_function_func_begin_label = NULL;
1050
1051 do_frame = dwarf2out_do_frame ();
1052
1053 /* ??? current_function_func_begin_label is also used by except.c for
1054 call-site information. We must emit this label if it might be used. */
1055 if (!do_frame
1056 && (!flag_exceptions
1057 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1058 return;
1059
1060 fnsec = function_section (current_function_decl);
1061 switch_to_section (fnsec);
1062 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1063 current_function_funcdef_no);
1064 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1065 current_function_funcdef_no);
1066 dup_label = xstrdup (label);
1067 current_function_func_begin_label = dup_label;
1068
1069 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1070 if (!do_frame)
1071 return;
1072
1073 /* Unlike the debug version, the EH version of frame unwind info is a per-
1074 function setting so we need to record whether we need it for the unit. */
1075 do_eh_frame |= dwarf2out_do_eh_frame ();
1076
1077 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1078 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1079 would include pass_dwarf2_frame. If we've not created the FDE yet,
1080 do so now. */
1081 fde = cfun->fde;
1082 if (fde == NULL)
1083 fde = dwarf2out_alloc_current_fde ();
1084
1085 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1086 fde->dw_fde_begin = dup_label;
1087 fde->dw_fde_current_label = dup_label;
1088 fde->in_std_section = (fnsec == text_section
1089 || (cold_text_section && fnsec == cold_text_section));
1090
1091 /* We only want to output line number information for the genuine dwarf2
1092 prologue case, not the eh frame case. */
1093 #ifdef DWARF2_DEBUGGING_INFO
1094 if (file)
1095 dwarf2out_source_line (line, column, file, 0, true);
1096 #endif
1097
1098 if (dwarf2out_do_cfi_asm ())
1099 dwarf2out_do_cfi_startproc (false);
1100 else
1101 {
1102 rtx personality = get_personality_function (current_function_decl);
1103 if (!current_unit_personality)
1104 current_unit_personality = personality;
1105
1106 /* We cannot keep a current personality per function as without CFI
1107 asm, at the point where we emit the CFI data, there is no current
1108 function anymore. */
1109 if (personality && current_unit_personality != personality)
1110 sorry ("multiple EH personalities are supported only with assemblers "
1111 "supporting .cfi_personality directive");
1112 }
1113 }
1114
1115 /* Output a marker (i.e. a label) for the end of the generated code
1116 for a function prologue. This gets called *after* the prologue code has
1117 been generated. */
1118
1119 void
1120 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1121 const char *file ATTRIBUTE_UNUSED)
1122 {
1123 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1124
1125   /* Output a label to mark the end of the prologue code generated for
1126      this function.  */
1127 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1128 current_function_funcdef_no);
1129 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1130 current_function_funcdef_no);
1131 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1132 }
1133
1134 /* Output a marker (i.e. a label) for the beginning of the generated code
1135    for a function epilogue.  This gets called *before* the epilogue code has
1136 been generated. */
1137
1138 void
1139 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1140 const char *file ATTRIBUTE_UNUSED)
1141 {
1142 dw_fde_ref fde = cfun->fde;
1143 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1144
1145 if (fde->dw_fde_vms_begin_epilogue)
1146 return;
1147
1148   /* Output a label to mark the beginning of the epilogue code generated
1149      for this function.  */
1150 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1151 current_function_funcdef_no);
1152 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1153 current_function_funcdef_no);
1154 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1155 }
1156
1157 /* Output a marker (i.e. a label) for the absolute end of the generated code
1158 for a function definition. This gets called *after* the epilogue code has
1159 been generated. */
1160
1161 void
1162 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1163 const char *file ATTRIBUTE_UNUSED)
1164 {
1165 dw_fde_ref fde;
1166 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1167
1168 last_var_location_insn = NULL;
1169 cached_next_real_insn = NULL;
1170
1171 if (dwarf2out_do_cfi_asm ())
1172 fprintf (asm_out_file, "\t.cfi_endproc\n");
1173
1174 /* Output a label to mark the endpoint of the code generated for this
1175 function. */
1176 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1177 current_function_funcdef_no);
1178 ASM_OUTPUT_LABEL (asm_out_file, label);
1179 fde = cfun->fde;
1180 gcc_assert (fde != NULL);
1181 if (fde->dw_fde_second_begin == NULL)
1182 fde->dw_fde_end = xstrdup (label);
1183 }
1184
1185 void
1186 dwarf2out_frame_finish (void)
1187 {
1188 /* Output call frame information. */
1189 if (targetm.debug_unwind_info () == UI_DWARF2)
1190 output_call_frame_info (0);
1191
1192 /* Output another copy for the unwinder. */
1193 if (do_eh_frame)
1194 output_call_frame_info (1);
1195 }
1196
1197 /* Note that the current function section is being used for code. */
1198
1199 static void
1200 dwarf2out_note_section_used (void)
1201 {
1202 section *sec = current_function_section ();
1203 if (sec == text_section)
1204 text_section_used = true;
1205 else if (sec == cold_text_section)
1206 cold_text_section_used = true;
1207 }
1208
1209 static void var_location_switch_text_section (void);
1210 static void set_cur_line_info_table (section *);
1211
1212 void
1213 dwarf2out_switch_text_section (void)
1214 {
1215 section *sect;
1216 dw_fde_ref fde = cfun->fde;
1217
1218 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1219
1220 if (!in_cold_section_p)
1221 {
1222 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1223 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1224 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1225 }
1226 else
1227 {
1228 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1229 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1230 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1231 }
1232 have_multiple_function_sections = true;
1233
1234 /* There is no need to mark used sections when not debugging. */
1235 if (cold_text_section != NULL)
1236 dwarf2out_note_section_used ();
1237
1238 if (dwarf2out_do_cfi_asm ())
1239 fprintf (asm_out_file, "\t.cfi_endproc\n");
1240
1241 /* Now do the real section switch. */
1242 sect = current_function_section ();
1243 switch_to_section (sect);
1244
1245 fde->second_in_std_section
1246 = (sect == text_section
1247 || (cold_text_section && sect == cold_text_section));
1248
1249 if (dwarf2out_do_cfi_asm ())
1250 dwarf2out_do_cfi_startproc (true);
1251
1252 var_location_switch_text_section ();
1253
1254 if (cold_text_section != NULL)
1255 set_cur_line_info_table (sect);
1256 }
1257 \f
1258 /* And now, the subset of the debugging information support code necessary
1259 for emitting location expressions. */
1260
1261 /* Data about a single source file. */
1262 struct GTY((for_user)) dwarf_file_data {
1263 const char * filename;
1264 int emitted_number;
1265 };
1266
1267 /* Describe an entry into the .debug_addr section. */
1268
1269 enum ate_kind {
1270 ate_kind_rtx,
1271 ate_kind_rtx_dtprel,
1272 ate_kind_label
1273 };
1274
1275 struct GTY((for_user)) addr_table_entry {
1276 enum ate_kind kind;
1277 unsigned int refcount;
1278 unsigned int index;
1279 union addr_table_entry_struct_union
1280 {
1281 rtx GTY ((tag ("0"))) rtl;
1282 char * GTY ((tag ("1"))) label;
1283 }
1284 GTY ((desc ("%1.kind"))) addr;
1285 };
1286
1287 typedef unsigned int var_loc_view;
1288
1289 /* Location lists are ranges + location descriptions for that range,
1290 so you can track variables that are in different places over
1291 their entire life. */
1292 typedef struct GTY(()) dw_loc_list_struct {
1293 dw_loc_list_ref dw_loc_next;
1294 const char *begin; /* Label and addr_entry for start of range */
1295 addr_table_entry *begin_entry;
1296 const char *end; /* Label for end of range */
1297 char *ll_symbol; /* Label for beginning of location list.
1298 Only on head of list. */
1299 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1300 const char *section; /* Section this loclist is relative to */
1301 dw_loc_descr_ref expr;
1302 var_loc_view vbegin, vend;
1303 hashval_t hash;
1304 /* True if all addresses in this and subsequent lists are known to be
1305 resolved. */
1306 bool resolved_addr;
1307 /* True if this list has been replaced by dw_loc_next. */
1308 bool replaced;
1309 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1310 section. */
1311 unsigned char emitted : 1;
1312 /* True if hash field is index rather than hash value. */
1313 unsigned char num_assigned : 1;
1314 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1315 unsigned char offset_emitted : 1;
1316 /* True if note_variable_value_in_expr has been called on it. */
1317 unsigned char noted_variable_value : 1;
1318 /* True if the range should be emitted even if begin and end
1319 are the same. */
1320 bool force;
1321 } dw_loc_list_node;
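/* For example, a variable that lives in a register for the first part of a
   function and is then spilled to the stack is described by a list of two
   nodes: each node carries its own begin/end labels and its own location
   expression, and only the head of the list gets an ll_symbol.  */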
1322
1323 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1324 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1325
1326 /* Convert a DWARF stack opcode into its string name. */
1327
1328 static const char *
1329 dwarf_stack_op_name (unsigned int op)
1330 {
1331 const char *name = get_DW_OP_name (op);
1332
1333 if (name != NULL)
1334 return name;
1335
1336 return "OP_<unknown>";
1337 }
1338
1339 /* Return TRUE iff we're to output location view lists as a separate
1340 attribute next to the location lists, as an extension compatible
1341 with DWARF 2 and above. */
1342
1343 static inline bool
1344 dwarf2out_locviews_in_attribute ()
1345 {
1346 return debug_variable_location_views == 1;
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as part of the
1350 location lists, as proposed for standardization after DWARF 5. */
1351
1352 static inline bool
1353 dwarf2out_locviews_in_loclist ()
1354 {
1355 #ifndef DW_LLE_view_pair
1356 return false;
1357 #else
1358 return debug_variable_location_views == -1;
1359 #endif
1360 }
1361
1362 /* Return a pointer to a newly allocated location description. Location
1363 descriptions are simple expression terms that can be strung
1364 together to form more complicated location (address) descriptions. */
1365
1366 static inline dw_loc_descr_ref
1367 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1368 unsigned HOST_WIDE_INT oprnd2)
1369 {
1370 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1371
1372 descr->dw_loc_opc = op;
1373 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1374 descr->dw_loc_oprnd1.val_entry = NULL;
1375 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1376 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1377 descr->dw_loc_oprnd2.val_entry = NULL;
1378 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1379
1380 return descr;
1381 }
1382
1383 /* Add a location description term to a location description expression. */
1384
1385 static inline void
1386 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1387 {
1388 dw_loc_descr_ref *d;
1389
1390 /* Find the end of the chain. */
1391 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1392 ;
1393
1394 *d = descr;
1395 }
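/* A minimal usage sketch (illustrative only): the location "the word at
   frame base - 16", i.e. *(fb - 16), would be built by chaining two terms:

     dw_loc_descr_ref loc = new_loc_descr (DW_OP_fbreg, -16, 0);
     add_loc_descr (&loc, new_loc_descr (DW_OP_deref, 0, 0));

   The chains actually emitted by this file are produced by loc_descriptor
   and mem_loc_descriptor further below.  */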
1396
1397 /* Compare two location operands for exact equality. */
1398
1399 static bool
1400 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1401 {
1402 if (a->val_class != b->val_class)
1403 return false;
1404 switch (a->val_class)
1405 {
1406 case dw_val_class_none:
1407 return true;
1408 case dw_val_class_addr:
1409 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1410
1411 case dw_val_class_offset:
1412 case dw_val_class_unsigned_const:
1413 case dw_val_class_const:
1414 case dw_val_class_unsigned_const_implicit:
1415 case dw_val_class_const_implicit:
1416 case dw_val_class_range_list:
1417 /* These are all HOST_WIDE_INT, signed or unsigned. */
1418 return a->v.val_unsigned == b->v.val_unsigned;
1419
1420 case dw_val_class_loc:
1421 return a->v.val_loc == b->v.val_loc;
1422 case dw_val_class_loc_list:
1423 return a->v.val_loc_list == b->v.val_loc_list;
1424 case dw_val_class_view_list:
1425 return a->v.val_view_list == b->v.val_view_list;
1426 case dw_val_class_die_ref:
1427 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1428 case dw_val_class_fde_ref:
1429 return a->v.val_fde_index == b->v.val_fde_index;
1430 case dw_val_class_symview:
1431 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1432 case dw_val_class_lbl_id:
1433 case dw_val_class_lineptr:
1434 case dw_val_class_macptr:
1435 case dw_val_class_loclistsptr:
1436 case dw_val_class_high_pc:
1437 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1438 case dw_val_class_str:
1439 return a->v.val_str == b->v.val_str;
1440 case dw_val_class_flag:
1441 return a->v.val_flag == b->v.val_flag;
1442 case dw_val_class_file:
1443 case dw_val_class_file_implicit:
1444 return a->v.val_file == b->v.val_file;
1445 case dw_val_class_decl_ref:
1446 return a->v.val_decl_ref == b->v.val_decl_ref;
1447
1448 case dw_val_class_const_double:
1449 return (a->v.val_double.high == b->v.val_double.high
1450 && a->v.val_double.low == b->v.val_double.low);
1451
1452 case dw_val_class_wide_int:
1453 return *a->v.val_wide == *b->v.val_wide;
1454
1455 case dw_val_class_vec:
1456 {
1457 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1458 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1459
1460 return (a_len == b_len
1461 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1462 }
1463
1464 case dw_val_class_data8:
1465 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1466
1467 case dw_val_class_vms_delta:
1468 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1469 	      && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1470
1471 case dw_val_class_discr_value:
1472 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1473 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1474 case dw_val_class_discr_list:
1475 /* It makes no sense comparing two discriminant value lists. */
1476 return false;
1477 }
1478 gcc_unreachable ();
1479 }
1480
1481 /* Compare two location atoms for exact equality. */
1482
1483 static bool
1484 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1485 {
1486 if (a->dw_loc_opc != b->dw_loc_opc)
1487 return false;
1488
1489 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1490 address size, but since we always allocate cleared storage it
1491 should be zero for other types of locations. */
1492 if (a->dtprel != b->dtprel)
1493 return false;
1494
1495 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1496 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1497 }
1498
1499 /* Compare two complete location expressions for exact equality. */
1500
1501 bool
1502 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1503 {
1504 while (1)
1505 {
1506 if (a == b)
1507 return true;
1508 if (a == NULL || b == NULL)
1509 return false;
1510 if (!loc_descr_equal_p_1 (a, b))
1511 return false;
1512
1513 a = a->dw_loc_next;
1514 b = b->dw_loc_next;
1515 }
1516 }
1517
1518
1519 /* Add a constant POLY_OFFSET to a location expression. */
1520
1521 static void
1522 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1523 {
1524 dw_loc_descr_ref loc;
1525 HOST_WIDE_INT *p;
1526
1527 gcc_assert (*list_head != NULL);
1528
1529 if (known_eq (poly_offset, 0))
1530 return;
1531
1532 /* Find the end of the chain. */
1533 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1534 ;
1535
1536 HOST_WIDE_INT offset;
1537 if (!poly_offset.is_constant (&offset))
1538 {
1539 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1540 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1541 return;
1542 }
1543
1544 p = NULL;
1545 if (loc->dw_loc_opc == DW_OP_fbreg
1546 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1547 p = &loc->dw_loc_oprnd1.v.val_int;
1548 else if (loc->dw_loc_opc == DW_OP_bregx)
1549 p = &loc->dw_loc_oprnd2.v.val_int;
1550
1551 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1552      offset.  Don't optimize if a signed integer overflow would happen.  */
1553 if (p != NULL
1554 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1555 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1556 *p += offset;
1557
1558 else if (offset > 0)
1559 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1560
1561 else
1562 {
1563 loc->dw_loc_next
1564 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1565 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1566 }
1567 }
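/* For example, adding +8 to an expression ending in DW_OP_fbreg -24 simply
   rewrites that term to DW_OP_fbreg -16, while adding +8 to an expression
   ending in DW_OP_deref appends DW_OP_plus_uconst 8 instead.  */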
1568
1569 /* Return a pointer to a newly allocated location description for
1570 REG and OFFSET. */
1571
1572 static inline dw_loc_descr_ref
1573 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1574 {
1575 HOST_WIDE_INT const_offset;
1576 if (offset.is_constant (&const_offset))
1577 {
1578 if (reg <= 31)
1579 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1580 const_offset, 0);
1581 else
1582 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1583 }
1584 else
1585 {
1586 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1587 loc_descr_plus_const (&ret, offset);
1588 return ret;
1589 }
1590 }
1591
1592 /* Add a constant OFFSET to a location list. */
1593
1594 static void
1595 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1596 {
1597 dw_loc_list_ref d;
1598 for (d = list_head; d != NULL; d = d->dw_loc_next)
1599 loc_descr_plus_const (&d->expr, offset);
1600 }
1601
1602 #define DWARF_REF_SIZE \
1603 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1604
1605 /* The number of bits that can be encoded by largest DW_FORM_dataN.
1606 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1607 DW_FORM_data16 with 128 bits. */
1608 #define DWARF_LARGEST_DATA_FORM_BITS \
1609 (dwarf_version >= 5 ? 128 : 64)
1610
1611 /* Utility inline function for construction of ops that were GNU extension
1612 before DWARF 5. */
1613 static inline enum dwarf_location_atom
1614 dwarf_OP (enum dwarf_location_atom op)
1615 {
1616 switch (op)
1617 {
1618 case DW_OP_implicit_pointer:
1619 if (dwarf_version < 5)
1620 return DW_OP_GNU_implicit_pointer;
1621 break;
1622
1623 case DW_OP_entry_value:
1624 if (dwarf_version < 5)
1625 return DW_OP_GNU_entry_value;
1626 break;
1627
1628 case DW_OP_const_type:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_const_type;
1631 break;
1632
1633 case DW_OP_regval_type:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_regval_type;
1636 break;
1637
1638 case DW_OP_deref_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_deref_type;
1641 break;
1642
1643 case DW_OP_convert:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_convert;
1646 break;
1647
1648 case DW_OP_reinterpret:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_reinterpret;
1651 break;
1652
1653 case DW_OP_addrx:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_addr_index;
1656 break;
1657
1658 case DW_OP_constx:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_const_index;
1661 break;
1662
1663 default:
1664 break;
1665 }
1666 return op;
1667 }
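/* For instance, dwarf_OP (DW_OP_entry_value) yields DW_OP_GNU_entry_value
   when targeting DWARF 2-4 and the standard DW_OP_entry_value for DWARF 5.  */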
1668
1669 /* Similarly for attributes. */
1670 static inline enum dwarf_attribute
1671 dwarf_AT (enum dwarf_attribute at)
1672 {
1673 switch (at)
1674 {
1675 case DW_AT_call_return_pc:
1676 if (dwarf_version < 5)
1677 return DW_AT_low_pc;
1678 break;
1679
1680 case DW_AT_call_tail_call:
1681 if (dwarf_version < 5)
1682 return DW_AT_GNU_tail_call;
1683 break;
1684
1685 case DW_AT_call_origin:
1686 if (dwarf_version < 5)
1687 return DW_AT_abstract_origin;
1688 break;
1689
1690 case DW_AT_call_target:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_call_site_target;
1693 break;
1694
1695 case DW_AT_call_target_clobbered:
1696 if (dwarf_version < 5)
1697 return DW_AT_GNU_call_site_target_clobbered;
1698 break;
1699
1700 case DW_AT_call_parameter:
1701 if (dwarf_version < 5)
1702 return DW_AT_abstract_origin;
1703 break;
1704
1705 case DW_AT_call_value:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_value;
1708 break;
1709
1710 case DW_AT_call_data_value:
1711 if (dwarf_version < 5)
1712 return DW_AT_GNU_call_site_data_value;
1713 break;
1714
1715 case DW_AT_call_all_calls:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_all_call_sites;
1718 break;
1719
1720 case DW_AT_call_all_tail_calls:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_all_tail_call_sites;
1723 break;
1724
1725 case DW_AT_dwo_name:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_dwo_name;
1728 break;
1729
1730 case DW_AT_addr_base:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_addr_base;
1733 break;
1734
1735 default:
1736 break;
1737 }
1738 return at;
1739 }
1740
1741 /* And similarly for tags. */
1742 static inline enum dwarf_tag
1743 dwarf_TAG (enum dwarf_tag tag)
1744 {
1745 switch (tag)
1746 {
1747 case DW_TAG_call_site:
1748 if (dwarf_version < 5)
1749 return DW_TAG_GNU_call_site;
1750 break;
1751
1752 case DW_TAG_call_site_parameter:
1753 if (dwarf_version < 5)
1754 return DW_TAG_GNU_call_site_parameter;
1755 break;
1756
1757 default:
1758 break;
1759 }
1760 return tag;
1761 }
1762
1763 /* And similarly for forms. */
1764 static inline enum dwarf_form
1765 dwarf_FORM (enum dwarf_form form)
1766 {
1767 switch (form)
1768 {
1769 case DW_FORM_addrx:
1770 if (dwarf_version < 5)
1771 return DW_FORM_GNU_addr_index;
1772 break;
1773
1774 case DW_FORM_strx:
1775 if (dwarf_version < 5)
1776 return DW_FORM_GNU_str_index;
1777 break;
1778
1779 default:
1780 break;
1781 }
1782 return form;
1783 }
1784
1785 static unsigned long int get_base_type_offset (dw_die_ref);
1786
1787 /* Return the size of a location descriptor. */
1788
1789 static unsigned long
1790 size_of_loc_descr (dw_loc_descr_ref loc)
1791 {
1792 unsigned long size = 1;
1793
1794 switch (loc->dw_loc_opc)
1795 {
1796 case DW_OP_addr:
1797 size += DWARF2_ADDR_SIZE;
1798 break;
1799 case DW_OP_GNU_addr_index:
1800 case DW_OP_addrx:
1801 case DW_OP_GNU_const_index:
1802 case DW_OP_constx:
1803 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1804 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1805 break;
1806 case DW_OP_const1u:
1807 case DW_OP_const1s:
1808 size += 1;
1809 break;
1810 case DW_OP_const2u:
1811 case DW_OP_const2s:
1812 size += 2;
1813 break;
1814 case DW_OP_const4u:
1815 case DW_OP_const4s:
1816 size += 4;
1817 break;
1818 case DW_OP_const8u:
1819 case DW_OP_const8s:
1820 size += 8;
1821 break;
1822 case DW_OP_constu:
1823 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1824 break;
1825 case DW_OP_consts:
1826 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1827 break;
1828 case DW_OP_pick:
1829 size += 1;
1830 break;
1831 case DW_OP_plus_uconst:
1832 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1833 break;
1834 case DW_OP_skip:
1835 case DW_OP_bra:
1836 size += 2;
1837 break;
1838 case DW_OP_breg0:
1839 case DW_OP_breg1:
1840 case DW_OP_breg2:
1841 case DW_OP_breg3:
1842 case DW_OP_breg4:
1843 case DW_OP_breg5:
1844 case DW_OP_breg6:
1845 case DW_OP_breg7:
1846 case DW_OP_breg8:
1847 case DW_OP_breg9:
1848 case DW_OP_breg10:
1849 case DW_OP_breg11:
1850 case DW_OP_breg12:
1851 case DW_OP_breg13:
1852 case DW_OP_breg14:
1853 case DW_OP_breg15:
1854 case DW_OP_breg16:
1855 case DW_OP_breg17:
1856 case DW_OP_breg18:
1857 case DW_OP_breg19:
1858 case DW_OP_breg20:
1859 case DW_OP_breg21:
1860 case DW_OP_breg22:
1861 case DW_OP_breg23:
1862 case DW_OP_breg24:
1863 case DW_OP_breg25:
1864 case DW_OP_breg26:
1865 case DW_OP_breg27:
1866 case DW_OP_breg28:
1867 case DW_OP_breg29:
1868 case DW_OP_breg30:
1869 case DW_OP_breg31:
1870 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1871 break;
1872 case DW_OP_regx:
1873 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1874 break;
1875 case DW_OP_fbreg:
1876 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1877 break;
1878 case DW_OP_bregx:
1879 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1880 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1881 break;
1882 case DW_OP_piece:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_bit_piece:
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1887 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1888 break;
1889 case DW_OP_deref_size:
1890 case DW_OP_xderef_size:
1891 size += 1;
1892 break;
1893 case DW_OP_call2:
1894 size += 2;
1895 break;
1896 case DW_OP_call4:
1897 size += 4;
1898 break;
1899 case DW_OP_call_ref:
1900 case DW_OP_GNU_variable_value:
1901 size += DWARF_REF_SIZE;
1902 break;
1903 case DW_OP_implicit_value:
1904 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1905 + loc->dw_loc_oprnd1.v.val_unsigned;
1906 break;
1907 case DW_OP_implicit_pointer:
1908 case DW_OP_GNU_implicit_pointer:
1909 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1910 break;
1911 case DW_OP_entry_value:
1912 case DW_OP_GNU_entry_value:
1913 {
1914 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1915 size += size_of_uleb128 (op_size) + op_size;
1916 break;
1917 }
1918 case DW_OP_const_type:
1919 case DW_OP_GNU_const_type:
1920 {
1921 unsigned long o
1922 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1923 size += size_of_uleb128 (o) + 1;
1924 switch (loc->dw_loc_oprnd2.val_class)
1925 {
1926 case dw_val_class_vec:
1927 size += loc->dw_loc_oprnd2.v.val_vec.length
1928 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1929 break;
1930 case dw_val_class_const:
1931 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1932 break;
1933 case dw_val_class_const_double:
1934 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1935 break;
1936 case dw_val_class_wide_int:
1937 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1938 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1939 break;
1940 default:
1941 gcc_unreachable ();
1942 }
1943 break;
1944 }
1945 case DW_OP_regval_type:
1946 case DW_OP_GNU_regval_type:
1947 {
1948 unsigned long o
1949 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1950 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1951 + size_of_uleb128 (o);
1952 }
1953 break;
1954 case DW_OP_deref_type:
1955 case DW_OP_GNU_deref_type:
1956 {
1957 unsigned long o
1958 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1959 size += 1 + size_of_uleb128 (o);
1960 }
1961 break;
1962 case DW_OP_convert:
1963 case DW_OP_reinterpret:
1964 case DW_OP_GNU_convert:
1965 case DW_OP_GNU_reinterpret:
1966 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1967 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1968 else
1969 {
1970 unsigned long o
1971 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1972 size += size_of_uleb128 (o);
1973 }
1974 break;
1975 case DW_OP_GNU_parameter_ref:
1976 size += 4;
1977 break;
1978 default:
1979 break;
1980 }
1981
1982 return size;
1983 }
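
/* A worked example (illustration only): DW_OP_plus_uconst with operand 400
   takes 1 byte for the opcode plus size_of_uleb128 (400) == 2 bytes for the
   operand, 3 bytes in total, while DW_OP_breg6 with offset -8 takes
   1 + size_of_sleb128 (-8) == 2 bytes.  */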
1984
1985 /* Return the size of a series of location descriptors. */
1986
1987 unsigned long
1988 size_of_locs (dw_loc_descr_ref loc)
1989 {
1990 dw_loc_descr_ref l;
1991 unsigned long size;
1992
1993 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
1994 field, to avoid writing to a PCH file. */
1995 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
1996 {
1997 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
1998 break;
1999 size += size_of_loc_descr (l);
2000 }
2001 if (! l)
2002 return size;
2003
2004 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2005 {
2006 l->dw_loc_addr = size;
2007 size += size_of_loc_descr (l);
2008 }
2009
2010 return size;
2011 }
2012
2013 /* Return the size of the value in a DW_AT_discr_value attribute. */
2014
2015 static int
2016 size_of_discr_value (dw_discr_value *discr_value)
2017 {
2018 if (discr_value->pos)
2019 return size_of_uleb128 (discr_value->v.uval);
2020 else
2021 return size_of_sleb128 (discr_value->v.sval);
2022 }
2023
2024 /* Return the size of the value in a DW_AT_discr_list attribute. */
2025
2026 static int
2027 size_of_discr_list (dw_discr_list_ref discr_list)
2028 {
2029 int size = 0;
2030
2031 for (dw_discr_list_ref list = discr_list;
2032 list != NULL;
2033 list = list->dw_discr_next)
2034 {
2035 /* One byte for the discriminant value descriptor, and then one or two
2036 LEB128 numbers, depending on whether it's a single case label or a
2037 range label. */
2038 size += 1;
2039 size += size_of_discr_value (&list->dw_discr_lower_bound);
2040 if (list->dw_discr_range != 0)
2041 size += size_of_discr_value (&list->dw_discr_upper_bound);
2042 }
2043 return size;
2044 }
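
/* A worked example (illustration only): an unsigned DW_DSC_range label
   covering 1..5 costs 1 byte for the descriptor plus size_of_uleb128 (1)
   + size_of_uleb128 (5) == 2 bytes, 3 bytes in total; a single unsigned
   DW_DSC_label case 7 costs 1 + size_of_uleb128 (7) == 2 bytes.  */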
2045
2046 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2047 static void get_ref_die_offset_label (char *, dw_die_ref);
2048 static unsigned long int get_ref_die_offset (dw_die_ref);
2049
2050 /* Output location description stack opcode's operands (if any).
2051 The for_eh_or_skip parameter controls whether register numbers are
2052 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2053 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2054 info). This should be suppressed for the cases that have not been converted
2055 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2056
2057 static void
2058 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2059 {
2060 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2061 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2062
2063 switch (loc->dw_loc_opc)
2064 {
2065 #ifdef DWARF2_DEBUGGING_INFO
2066 case DW_OP_const2u:
2067 case DW_OP_const2s:
2068 dw2_asm_output_data (2, val1->v.val_int, NULL);
2069 break;
2070 case DW_OP_const4u:
2071 if (loc->dtprel)
2072 {
2073 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2074 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2075 val1->v.val_addr);
2076 fputc ('\n', asm_out_file);
2077 break;
2078 }
2079 /* FALLTHRU */
2080 case DW_OP_const4s:
2081 dw2_asm_output_data (4, val1->v.val_int, NULL);
2082 break;
2083 case DW_OP_const8u:
2084 if (loc->dtprel)
2085 {
2086 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2087 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2088 val1->v.val_addr);
2089 fputc ('\n', asm_out_file);
2090 break;
2091 }
2092 /* FALLTHRU */
2093 case DW_OP_const8s:
2094 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2095 dw2_asm_output_data (8, val1->v.val_int, NULL);
2096 break;
2097 case DW_OP_skip:
2098 case DW_OP_bra:
2099 {
2100 int offset;
2101
2102 gcc_assert (val1->val_class == dw_val_class_loc);
2103 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2104
2105 dw2_asm_output_data (2, offset, NULL);
2106 }
2107 break;
2108 case DW_OP_implicit_value:
2109 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2110 switch (val2->val_class)
2111 {
2112 case dw_val_class_const:
2113 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2114 break;
2115 case dw_val_class_vec:
2116 {
2117 unsigned int elt_size = val2->v.val_vec.elt_size;
2118 unsigned int len = val2->v.val_vec.length;
2119 unsigned int i;
2120 unsigned char *p;
2121
2122 if (elt_size > sizeof (HOST_WIDE_INT))
2123 {
2124 elt_size /= 2;
2125 len *= 2;
2126 }
2127 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2128 i < len;
2129 i++, p += elt_size)
2130 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2131 "fp or vector constant word %u", i);
2132 }
2133 break;
2134 case dw_val_class_const_double:
2135 {
2136 unsigned HOST_WIDE_INT first, second;
2137
2138 if (WORDS_BIG_ENDIAN)
2139 {
2140 first = val2->v.val_double.high;
2141 second = val2->v.val_double.low;
2142 }
2143 else
2144 {
2145 first = val2->v.val_double.low;
2146 second = val2->v.val_double.high;
2147 }
2148 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2149 first, NULL);
2150 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2151 second, NULL);
2152 }
2153 break;
2154 case dw_val_class_wide_int:
2155 {
2156 int i;
2157 int len = get_full_len (*val2->v.val_wide);
2158 if (WORDS_BIG_ENDIAN)
2159 for (i = len - 1; i >= 0; --i)
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 val2->v.val_wide->elt (i), NULL);
2162 else
2163 for (i = 0; i < len; ++i)
2164 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2165 val2->v.val_wide->elt (i), NULL);
2166 }
2167 break;
2168 case dw_val_class_addr:
2169 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2170 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2171 break;
2172 default:
2173 gcc_unreachable ();
2174 }
2175 break;
2176 #else
2177 case DW_OP_const2u:
2178 case DW_OP_const2s:
2179 case DW_OP_const4u:
2180 case DW_OP_const4s:
2181 case DW_OP_const8u:
2182 case DW_OP_const8s:
2183 case DW_OP_skip:
2184 case DW_OP_bra:
2185 case DW_OP_implicit_value:
2186 /* We currently don't make any attempt to make sure these are
2187 aligned properly like we do for the main unwind info, so
2188 don't support emitting things larger than a byte if we're
2189 only doing unwinding. */
2190 gcc_unreachable ();
2191 #endif
2192 case DW_OP_const1u:
2193 case DW_OP_const1s:
2194 dw2_asm_output_data (1, val1->v.val_int, NULL);
2195 break;
2196 case DW_OP_constu:
2197 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2198 break;
2199 case DW_OP_consts:
2200 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2201 break;
2202 case DW_OP_pick:
2203 dw2_asm_output_data (1, val1->v.val_int, NULL);
2204 break;
2205 case DW_OP_plus_uconst:
2206 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2207 break;
2208 case DW_OP_breg0:
2209 case DW_OP_breg1:
2210 case DW_OP_breg2:
2211 case DW_OP_breg3:
2212 case DW_OP_breg4:
2213 case DW_OP_breg5:
2214 case DW_OP_breg6:
2215 case DW_OP_breg7:
2216 case DW_OP_breg8:
2217 case DW_OP_breg9:
2218 case DW_OP_breg10:
2219 case DW_OP_breg11:
2220 case DW_OP_breg12:
2221 case DW_OP_breg13:
2222 case DW_OP_breg14:
2223 case DW_OP_breg15:
2224 case DW_OP_breg16:
2225 case DW_OP_breg17:
2226 case DW_OP_breg18:
2227 case DW_OP_breg19:
2228 case DW_OP_breg20:
2229 case DW_OP_breg21:
2230 case DW_OP_breg22:
2231 case DW_OP_breg23:
2232 case DW_OP_breg24:
2233 case DW_OP_breg25:
2234 case DW_OP_breg26:
2235 case DW_OP_breg27:
2236 case DW_OP_breg28:
2237 case DW_OP_breg29:
2238 case DW_OP_breg30:
2239 case DW_OP_breg31:
2240 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2241 break;
2242 case DW_OP_regx:
2243 {
2244 unsigned r = val1->v.val_unsigned;
2245 if (for_eh_or_skip >= 0)
2246 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2247 gcc_assert (size_of_uleb128 (r)
2248 == size_of_uleb128 (val1->v.val_unsigned));
2249 dw2_asm_output_data_uleb128 (r, NULL);
2250 }
2251 break;
2252 case DW_OP_fbreg:
2253 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2254 break;
2255 case DW_OP_bregx:
2256 {
2257 unsigned r = val1->v.val_unsigned;
2258 if (for_eh_or_skip >= 0)
2259 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2260 gcc_assert (size_of_uleb128 (r)
2261 == size_of_uleb128 (val1->v.val_unsigned));
2262 dw2_asm_output_data_uleb128 (r, NULL);
2263 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2264 }
2265 break;
2266 case DW_OP_piece:
2267 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2268 break;
2269 case DW_OP_bit_piece:
2270 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2271 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2272 break;
2273 case DW_OP_deref_size:
2274 case DW_OP_xderef_size:
2275 dw2_asm_output_data (1, val1->v.val_int, NULL);
2276 break;
2277
2278 case DW_OP_addr:
2279 if (loc->dtprel)
2280 {
2281 if (targetm.asm_out.output_dwarf_dtprel)
2282 {
2283 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2284 DWARF2_ADDR_SIZE,
2285 val1->v.val_addr);
2286 fputc ('\n', asm_out_file);
2287 }
2288 else
2289 gcc_unreachable ();
2290 }
2291 else
2292 {
2293 #ifdef DWARF2_DEBUGGING_INFO
2294 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2295 #else
2296 gcc_unreachable ();
2297 #endif
2298 }
2299 break;
2300
2301 case DW_OP_GNU_addr_index:
2302 case DW_OP_addrx:
2303 case DW_OP_GNU_const_index:
2304 case DW_OP_constx:
2305 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2306 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2307 "(index into .debug_addr)");
2308 break;
2309
2310 case DW_OP_call2:
2311 case DW_OP_call4:
2312 {
2313 unsigned long die_offset
2314 = get_ref_die_offset (val1->v.val_die_ref.die);
2315 /* Make sure the offset has been computed and that we can encode it as
2316 an operand. */
2317 gcc_assert (die_offset > 0
2318 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2319 ? 0xffff
2320 : 0xffffffff));
2321 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2322 die_offset, NULL);
2323 }
2324 break;
2325
2326 case DW_OP_call_ref:
2327 case DW_OP_GNU_variable_value:
2328 {
2329 char label[MAX_ARTIFICIAL_LABEL_BYTES
2330 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2331 gcc_assert (val1->val_class == dw_val_class_die_ref);
2332 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2333 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2334 }
2335 break;
2336
2337 case DW_OP_implicit_pointer:
2338 case DW_OP_GNU_implicit_pointer:
2339 {
2340 char label[MAX_ARTIFICIAL_LABEL_BYTES
2341 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2342 gcc_assert (val1->val_class == dw_val_class_die_ref);
2343 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2344 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2345 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2346 }
2347 break;
2348
2349 case DW_OP_entry_value:
2350 case DW_OP_GNU_entry_value:
2351 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2352 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2353 break;
2354
2355 case DW_OP_const_type:
2356 case DW_OP_GNU_const_type:
2357 {
2358 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2359 gcc_assert (o);
2360 dw2_asm_output_data_uleb128 (o, NULL);
2361 switch (val2->val_class)
2362 {
2363 case dw_val_class_const:
2364 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2365 dw2_asm_output_data (1, l, NULL);
2366 dw2_asm_output_data (l, val2->v.val_int, NULL);
2367 break;
2368 case dw_val_class_vec:
2369 {
2370 unsigned int elt_size = val2->v.val_vec.elt_size;
2371 unsigned int len = val2->v.val_vec.length;
2372 unsigned int i;
2373 unsigned char *p;
2374
2375 l = len * elt_size;
2376 dw2_asm_output_data (1, l, NULL);
2377 if (elt_size > sizeof (HOST_WIDE_INT))
2378 {
2379 elt_size /= 2;
2380 len *= 2;
2381 }
2382 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2383 i < len;
2384 i++, p += elt_size)
2385 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2386 "fp or vector constant word %u", i);
2387 }
2388 break;
2389 case dw_val_class_const_double:
2390 {
2391 unsigned HOST_WIDE_INT first, second;
2392 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2393
2394 dw2_asm_output_data (1, 2 * l, NULL);
2395 if (WORDS_BIG_ENDIAN)
2396 {
2397 first = val2->v.val_double.high;
2398 second = val2->v.val_double.low;
2399 }
2400 else
2401 {
2402 first = val2->v.val_double.low;
2403 second = val2->v.val_double.high;
2404 }
2405 dw2_asm_output_data (l, first, NULL);
2406 dw2_asm_output_data (l, second, NULL);
2407 }
2408 break;
2409 case dw_val_class_wide_int:
2410 {
2411 int i;
2412 int len = get_full_len (*val2->v.val_wide);
2413 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2414
2415 dw2_asm_output_data (1, len * l, NULL);
2416 if (WORDS_BIG_ENDIAN)
2417 for (i = len - 1; i >= 0; --i)
2418 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2419 else
2420 for (i = 0; i < len; ++i)
2421 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2422 }
2423 break;
2424 default:
2425 gcc_unreachable ();
2426 }
2427 }
2428 break;
2429 case DW_OP_regval_type:
2430 case DW_OP_GNU_regval_type:
2431 {
2432 unsigned r = val1->v.val_unsigned;
2433 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2434 gcc_assert (o);
2435 if (for_eh_or_skip >= 0)
2436 {
2437 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2438 gcc_assert (size_of_uleb128 (r)
2439 == size_of_uleb128 (val1->v.val_unsigned));
2440 }
2441 dw2_asm_output_data_uleb128 (r, NULL);
2442 dw2_asm_output_data_uleb128 (o, NULL);
2443 }
2444 break;
2445 case DW_OP_deref_type:
2446 case DW_OP_GNU_deref_type:
2447 {
2448 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2449 gcc_assert (o);
2450 dw2_asm_output_data (1, val1->v.val_int, NULL);
2451 dw2_asm_output_data_uleb128 (o, NULL);
2452 }
2453 break;
2454 case DW_OP_convert:
2455 case DW_OP_reinterpret:
2456 case DW_OP_GNU_convert:
2457 case DW_OP_GNU_reinterpret:
2458 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2459 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2460 else
2461 {
2462 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2463 gcc_assert (o);
2464 dw2_asm_output_data_uleb128 (o, NULL);
2465 }
2466 break;
2467
2468 case DW_OP_GNU_parameter_ref:
2469 {
2470 unsigned long o;
2471 gcc_assert (val1->val_class == dw_val_class_die_ref);
2472 o = get_ref_die_offset (val1->v.val_die_ref.die);
2473 dw2_asm_output_data (4, o, NULL);
2474 }
2475 break;
2476
2477 default:
2478 /* Other codes have no operands. */
2479 break;
2480 }
2481 }
2482
2483 /* Output a sequence of location operations.
2484 The for_eh_or_skip parameter controls whether register numbers are
2485 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2486 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2487 info). This should be suppressed for the cases that have not been converted
2488 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2489
2490 void
2491 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2492 {
2493 for (; loc != NULL; loc = loc->dw_loc_next)
2494 {
2495 enum dwarf_location_atom opc = loc->dw_loc_opc;
2496 /* Output the opcode. */
2497 if (for_eh_or_skip >= 0
2498 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2499 {
2500 unsigned r = (opc - DW_OP_breg0);
2501 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2502 gcc_assert (r <= 31);
2503 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2504 }
2505 else if (for_eh_or_skip >= 0
2506 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2507 {
2508 unsigned r = (opc - DW_OP_reg0);
2509 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2510 gcc_assert (r <= 31);
2511 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2512 }
2513
2514 dw2_asm_output_data (1, opc,
2515 "%s", dwarf_stack_op_name (opc));
2516
2517 /* Output the operand(s) (if any). */
2518 output_loc_operands (loc, for_eh_or_skip);
2519 }
2520 }
2521
2522 /* Output location description stack opcode's operands (if any).
2523 The output is single bytes on a line, suitable for .cfi_escape. */
2524
2525 static void
2526 output_loc_operands_raw (dw_loc_descr_ref loc)
2527 {
2528 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2529 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2530
2531 switch (loc->dw_loc_opc)
2532 {
2533 case DW_OP_addr:
2534 case DW_OP_GNU_addr_index:
2535 case DW_OP_addrx:
2536 case DW_OP_GNU_const_index:
2537 case DW_OP_constx:
2538 case DW_OP_implicit_value:
2539 /* We cannot output addresses in .cfi_escape, only bytes. */
2540 gcc_unreachable ();
2541
2542 case DW_OP_const1u:
2543 case DW_OP_const1s:
2544 case DW_OP_pick:
2545 case DW_OP_deref_size:
2546 case DW_OP_xderef_size:
2547 fputc (',', asm_out_file);
2548 dw2_asm_output_data_raw (1, val1->v.val_int);
2549 break;
2550
2551 case DW_OP_const2u:
2552 case DW_OP_const2s:
2553 fputc (',', asm_out_file);
2554 dw2_asm_output_data_raw (2, val1->v.val_int);
2555 break;
2556
2557 case DW_OP_const4u:
2558 case DW_OP_const4s:
2559 fputc (',', asm_out_file);
2560 dw2_asm_output_data_raw (4, val1->v.val_int);
2561 break;
2562
2563 case DW_OP_const8u:
2564 case DW_OP_const8s:
2565 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2566 fputc (',', asm_out_file);
2567 dw2_asm_output_data_raw (8, val1->v.val_int);
2568 break;
2569
2570 case DW_OP_skip:
2571 case DW_OP_bra:
2572 {
2573 int offset;
2574
2575 gcc_assert (val1->val_class == dw_val_class_loc);
2576 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2577
2578 fputc (',', asm_out_file);
2579 dw2_asm_output_data_raw (2, offset);
2580 }
2581 break;
2582
2583 case DW_OP_regx:
2584 {
2585 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2586 gcc_assert (size_of_uleb128 (r)
2587 == size_of_uleb128 (val1->v.val_unsigned));
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_uleb128_raw (r);
2590 }
2591 break;
2592
2593 case DW_OP_constu:
2594 case DW_OP_plus_uconst:
2595 case DW_OP_piece:
2596 fputc (',', asm_out_file);
2597 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2598 break;
2599
2600 case DW_OP_bit_piece:
2601 fputc (',', asm_out_file);
2602 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2603 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2604 break;
2605
2606 case DW_OP_consts:
2607 case DW_OP_breg0:
2608 case DW_OP_breg1:
2609 case DW_OP_breg2:
2610 case DW_OP_breg3:
2611 case DW_OP_breg4:
2612 case DW_OP_breg5:
2613 case DW_OP_breg6:
2614 case DW_OP_breg7:
2615 case DW_OP_breg8:
2616 case DW_OP_breg9:
2617 case DW_OP_breg10:
2618 case DW_OP_breg11:
2619 case DW_OP_breg12:
2620 case DW_OP_breg13:
2621 case DW_OP_breg14:
2622 case DW_OP_breg15:
2623 case DW_OP_breg16:
2624 case DW_OP_breg17:
2625 case DW_OP_breg18:
2626 case DW_OP_breg19:
2627 case DW_OP_breg20:
2628 case DW_OP_breg21:
2629 case DW_OP_breg22:
2630 case DW_OP_breg23:
2631 case DW_OP_breg24:
2632 case DW_OP_breg25:
2633 case DW_OP_breg26:
2634 case DW_OP_breg27:
2635 case DW_OP_breg28:
2636 case DW_OP_breg29:
2637 case DW_OP_breg30:
2638 case DW_OP_breg31:
2639 case DW_OP_fbreg:
2640 fputc (',', asm_out_file);
2641 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2642 break;
2643
2644 case DW_OP_bregx:
2645 {
2646 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2647 gcc_assert (size_of_uleb128 (r)
2648 == size_of_uleb128 (val1->v.val_unsigned));
2649 fputc (',', asm_out_file);
2650 dw2_asm_output_data_uleb128_raw (r);
2651 fputc (',', asm_out_file);
2652 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2653 }
2654 break;
2655
2656 case DW_OP_implicit_pointer:
2657 case DW_OP_entry_value:
2658 case DW_OP_const_type:
2659 case DW_OP_regval_type:
2660 case DW_OP_deref_type:
2661 case DW_OP_convert:
2662 case DW_OP_reinterpret:
2663 case DW_OP_GNU_implicit_pointer:
2664 case DW_OP_GNU_entry_value:
2665 case DW_OP_GNU_const_type:
2666 case DW_OP_GNU_regval_type:
2667 case DW_OP_GNU_deref_type:
2668 case DW_OP_GNU_convert:
2669 case DW_OP_GNU_reinterpret:
2670 case DW_OP_GNU_parameter_ref:
2671 gcc_unreachable ();
2672 break;
2673
2674 default:
2675 /* Other codes have no operands. */
2676 break;
2677 }
2678 }
2679
2680 void
2681 output_loc_sequence_raw (dw_loc_descr_ref loc)
2682 {
2683 while (1)
2684 {
2685 enum dwarf_location_atom opc = loc->dw_loc_opc;
2686 /* Output the opcode. */
2687 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2688 {
2689 unsigned r = (opc - DW_OP_breg0);
2690 r = DWARF2_FRAME_REG_OUT (r, 1);
2691 gcc_assert (r <= 31);
2692 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2693 }
2694 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2695 {
2696 unsigned r = (opc - DW_OP_reg0);
2697 r = DWARF2_FRAME_REG_OUT (r, 1);
2698 gcc_assert (r <= 31);
2699 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2700 }
2701 /* Output the opcode. */
2702 fprintf (asm_out_file, "%#x", opc);
2703 output_loc_operands_raw (loc);
2704
2705 if (!loc->dw_loc_next)
2706 break;
2707 loc = loc->dw_loc_next;
2708
2709 fputc (',', asm_out_file);
2710 }
2711 }
2712
2713 /* This function builds a dwarf location descriptor sequence from a
2714 dw_cfa_location, adding the given OFFSET to the result of the
2715 expression. */
2716
2717 struct dw_loc_descr_node *
2718 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2719 {
2720 struct dw_loc_descr_node *head, *tmp;
2721
2722 offset += cfa->offset;
2723
2724 if (cfa->indirect)
2725 {
2726 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2727 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2728 head->dw_loc_oprnd1.val_entry = NULL;
2729 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2730 add_loc_descr (&head, tmp);
2731 loc_descr_plus_const (&head, offset);
2732 }
2733 else
2734 head = new_reg_loc_descr (cfa->reg, offset);
2735
2736 return head;
2737 }
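
/* Illustrative shape of the result (not code from this file): for an
   indirect CFA located at [reg + base_offset] this builds roughly

     DW_OP_breg<reg> <base_offset>; DW_OP_deref; OFFSET applied via
     loc_descr_plus_const

   while the direct case is just DW_OP_breg<reg> <offset> (or DW_OP_bregx
   for larger register numbers, as chosen by new_reg_loc_descr).  */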
2738
2739 /* This function builds a dwarf location descriptor sequence for
2740 the address at OFFSET from the CFA when the stack is aligned to
2741 ALIGNMENT bytes. */
2742
2743 struct dw_loc_descr_node *
2744 build_cfa_aligned_loc (dw_cfa_location *cfa,
2745 poly_int64 offset, HOST_WIDE_INT alignment)
2746 {
2747 struct dw_loc_descr_node *head;
2748 unsigned int dwarf_fp
2749 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2750
2751 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2752 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2753 {
2754 head = new_reg_loc_descr (dwarf_fp, 0);
2755 add_loc_descr (&head, int_loc_descriptor (alignment));
2756 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2757 loc_descr_plus_const (&head, offset);
2758 }
2759 else
2760 head = new_reg_loc_descr (dwarf_fp, offset);
2761 return head;
2762 }
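
/* Illustrative shape of the realigned case (not code from this file):
   DW_OP_breg<fp> 0, the ALIGNMENT constant pushed by int_loc_descriptor,
   DW_OP_and, then OFFSET applied via loc_descr_plus_const; the unaligned
   case is simply DW_OP_breg<fp> <offset>.  */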
2763 \f
2764 /* And now, the support for symbolic debugging information. */
2765
2766 /* .debug_str support. */
2767
2768 static void dwarf2out_init (const char *);
2769 static void dwarf2out_finish (const char *);
2770 static void dwarf2out_early_finish (const char *);
2771 static void dwarf2out_assembly_start (void);
2772 static void dwarf2out_define (unsigned int, const char *);
2773 static void dwarf2out_undef (unsigned int, const char *);
2774 static void dwarf2out_start_source_file (unsigned, const char *);
2775 static void dwarf2out_end_source_file (unsigned);
2776 static void dwarf2out_function_decl (tree);
2777 static void dwarf2out_begin_block (unsigned, unsigned);
2778 static void dwarf2out_end_block (unsigned, unsigned);
2779 static bool dwarf2out_ignore_block (const_tree);
2780 static void dwarf2out_early_global_decl (tree);
2781 static void dwarf2out_late_global_decl (tree);
2782 static void dwarf2out_type_decl (tree, int);
2783 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2784 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2785 dw_die_ref);
2786 static void dwarf2out_abstract_function (tree);
2787 static void dwarf2out_var_location (rtx_insn *);
2788 static void dwarf2out_inline_entry (tree);
2789 static void dwarf2out_size_function (tree);
2790 static void dwarf2out_begin_function (tree);
2791 static void dwarf2out_end_function (unsigned int);
2792 static void dwarf2out_register_main_translation_unit (tree unit);
2793 static void dwarf2out_set_name (tree, tree);
2794 static void dwarf2out_register_external_die (tree decl, const char *sym,
2795 unsigned HOST_WIDE_INT off);
2796 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2797 unsigned HOST_WIDE_INT *off);
2798
2799 /* The debug hooks structure. */
2800
2801 const struct gcc_debug_hooks dwarf2_debug_hooks =
2802 {
2803 dwarf2out_init,
2804 dwarf2out_finish,
2805 dwarf2out_early_finish,
2806 dwarf2out_assembly_start,
2807 dwarf2out_define,
2808 dwarf2out_undef,
2809 dwarf2out_start_source_file,
2810 dwarf2out_end_source_file,
2811 dwarf2out_begin_block,
2812 dwarf2out_end_block,
2813 dwarf2out_ignore_block,
2814 dwarf2out_source_line,
2815 dwarf2out_begin_prologue,
2816 #if VMS_DEBUGGING_INFO
2817 dwarf2out_vms_end_prologue,
2818 dwarf2out_vms_begin_epilogue,
2819 #else
2820 debug_nothing_int_charstar,
2821 debug_nothing_int_charstar,
2822 #endif
2823 dwarf2out_end_epilogue,
2824 dwarf2out_begin_function,
2825 dwarf2out_end_function, /* end_function */
2826 dwarf2out_register_main_translation_unit,
2827 dwarf2out_function_decl, /* function_decl */
2828 dwarf2out_early_global_decl,
2829 dwarf2out_late_global_decl,
2830 dwarf2out_type_decl, /* type_decl */
2831 dwarf2out_imported_module_or_decl,
2832 dwarf2out_die_ref_for_decl,
2833 dwarf2out_register_external_die,
2834 debug_nothing_tree, /* deferred_inline_function */
2835 /* The DWARF 2 backend tries to reduce debugging bloat by not
2836 emitting the abstract description of inline functions until
2837 something tries to reference them. */
2838 dwarf2out_abstract_function, /* outlining_inline_function */
2839 debug_nothing_rtx_code_label, /* label */
2840 debug_nothing_int, /* handle_pch */
2841 dwarf2out_var_location,
2842 dwarf2out_inline_entry, /* inline_entry */
2843 dwarf2out_size_function, /* size_function */
2844 dwarf2out_switch_text_section,
2845 dwarf2out_set_name,
2846 1, /* start_end_main_source_file */
2847 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2848 };
2849
2850 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2851 {
2852 dwarf2out_init,
2853 debug_nothing_charstar,
2854 debug_nothing_charstar,
2855 dwarf2out_assembly_start,
2856 debug_nothing_int_charstar,
2857 debug_nothing_int_charstar,
2858 debug_nothing_int_charstar,
2859 debug_nothing_int,
2860 debug_nothing_int_int, /* begin_block */
2861 debug_nothing_int_int, /* end_block */
2862 debug_true_const_tree, /* ignore_block */
2863 dwarf2out_source_line, /* source_line */
2864 debug_nothing_int_int_charstar, /* begin_prologue */
2865 debug_nothing_int_charstar, /* end_prologue */
2866 debug_nothing_int_charstar, /* begin_epilogue */
2867 debug_nothing_int_charstar, /* end_epilogue */
2868 debug_nothing_tree, /* begin_function */
2869 debug_nothing_int, /* end_function */
2870 debug_nothing_tree, /* register_main_translation_unit */
2871 debug_nothing_tree, /* function_decl */
2872 debug_nothing_tree, /* early_global_decl */
2873 debug_nothing_tree, /* late_global_decl */
2874 debug_nothing_tree_int, /* type_decl */
2875 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2876 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2877 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2878 debug_nothing_tree, /* deferred_inline_function */
2879 debug_nothing_tree, /* outlining_inline_function */
2880 debug_nothing_rtx_code_label, /* label */
2881 debug_nothing_int, /* handle_pch */
2882 debug_nothing_rtx_insn, /* var_location */
2883 debug_nothing_tree, /* inline_entry */
2884 debug_nothing_tree, /* size_function */
2885 debug_nothing_void, /* switch_text_section */
2886 debug_nothing_tree_tree, /* set_name */
2887 0, /* start_end_main_source_file */
2888 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2889 };
2890 \f
2891 /* NOTE: In the comments in this file, many references are made to
2892 "Debugging Information Entries". This term is abbreviated as `DIE'
2893 throughout the remainder of this file. */
2894
2895 /* An internal representation of the DWARF output is built, and then
2896 walked to generate the DWARF debugging info. The walk of the internal
2897 representation is done after the entire program has been compiled.
2898 The types below are used to describe the internal representation. */
2899
2900 /* Whether to put type DIEs into their own section .debug_types instead
2901 of making them part of the .debug_info section. Only supported for
2902 Dwarf V4 or higher, and only when the user hasn't disabled them
2903 through -fno-debug-types-section. It is more efficient to put them
2904 in separate comdat sections, since the linker will then be able to
2905 remove duplicates. But not all tools support .debug_types sections
2906 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2907 type units are instead emitted as DW_UT_type units in .debug_info. */
2908
2909 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
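
/* For example (a usage note, not code from this file): compiling with
   -gdwarf-4 -fdebug-types-section is expected to make use_debug_types true,
   so type DIEs go into comdat .debug_types sections; without the flag they
   stay in .debug_info.  */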
2910
2911 /* Various DIE's use offsets relative to the beginning of the
2912 .debug_info section to refer to each other. */
2913
2914 typedef long int dw_offset;
2915
2916 struct comdat_type_node;
2917
2918 /* The entries in the line_info table more-or-less mirror the opcodes
2919 that are used in the real dwarf line table. Arrays of these entries
2920 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2921 supported. */
2922
2923 enum dw_line_info_opcode {
2924 /* Emit DW_LNE_set_address; the operand is the label index. */
2925 LI_set_address,
2926
2927 /* Emit a row to the matrix with the given line. This may be done
2928 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2929 special opcodes. */
2930 LI_set_line,
2931
2932 /* Emit a DW_LNS_set_file. */
2933 LI_set_file,
2934
2935 /* Emit a DW_LNS_set_column. */
2936 LI_set_column,
2937
2938 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2939 LI_negate_stmt,
2940
2941 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2942 LI_set_prologue_end,
2943 LI_set_epilogue_begin,
2944
2945 /* Emit a DW_LNE_set_discriminator. */
2946 LI_set_discriminator,
2947
2948 /* Output a Fixed Advance PC; the target PC is the label index; the
2949 base PC is the previous LI_adv_address or LI_set_address entry.
2950 We only use this when emitting debug views without assembler
2951 support, at explicit user request. Ideally, we should only use
2952 it when the offset might be zero but we can't tell: it's the only
2953 way to maybe change the PC without resetting the view number. */
2954 LI_adv_address
2955 };
2956
2957 typedef struct GTY(()) dw_line_info_struct {
2958 enum dw_line_info_opcode opcode;
2959 unsigned int val;
2960 } dw_line_info_entry;
2961
2962
2963 struct GTY(()) dw_line_info_table {
2964 /* The label that marks the end of this section. */
2965 const char *end_label;
2966
2967 /* The values for the last row of the matrix, as collected in the table.
2968 These are used to minimize the changes to the next row. */
2969 unsigned int file_num;
2970 unsigned int line_num;
2971 unsigned int column_num;
2972 int discrim_num;
2973 bool is_stmt;
2974 bool in_use;
2975
2976 /* This denotes the NEXT view number.
2977
2978 If it is 0, it is known that the NEXT view will be the first view
2979 at the given PC.
2980
2981 If it is -1, we're forcing the view number to be reset, e.g. at a
2982 function entry.
2983
2984 The meaning of other nonzero values depends on whether we're
2985 computing views internally or leaving it for the assembler to do
2986 so. If we're emitting them internally, view denotes the view
2987 number since the last known advance of PC. If we're leaving it
2988 for the assembler, it denotes the LVU label number that we're
2989 going to ask the assembler to assign. */
2990 var_loc_view view;
2991
2992 /* This counts the number of symbolic views emitted in this table
2993 since the latest view reset. Its max value, over all tables,
2994 sets symview_upper_bound. */
2995 var_loc_view symviews_since_reset;
2996
2997 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
2998 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
2999 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3000 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3001
3002 vec<dw_line_info_entry, va_gc> *entries;
3003 };
3004
3005 /* This is an upper bound for view numbers that the assembler may
3006 assign to symbolic views output in this translation. It is used to
3007 decide how big a field to use to represent view numbers in
3008 symview-classed attributes. */
3009
3010 static var_loc_view symview_upper_bound;
3011
3012 /* If we're keeping track of location views and their reset points, and
3013 INSN is a reset point (i.e., it necessarily advances the PC), mark
3014 the next view in TABLE as reset. */
3015
3016 static void
3017 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3018 {
3019 if (!debug_internal_reset_location_views)
3020 return;
3021
3022 /* Maybe turn (part of?) this test into a default target hook. */
3023 int reset = 0;
3024
3025 if (targetm.reset_location_view)
3026 reset = targetm.reset_location_view (insn);
3027
3028 if (reset)
3029 ;
3030 else if (JUMP_TABLE_DATA_P (insn))
3031 reset = 1;
3032 else if (GET_CODE (insn) == USE
3033 || GET_CODE (insn) == CLOBBER
3034 || GET_CODE (insn) == ASM_INPUT
3035 || asm_noperands (insn) >= 0)
3036 ;
3037 else if (get_attr_min_length (insn) > 0)
3038 reset = 1;
3039
3040 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3041 RESET_NEXT_VIEW (table->view);
3042 }
3043
3044 /* Each DIE attribute has a field specifying the attribute kind,
3045 a link to the next attribute in the chain, and an attribute value.
3046 Attributes are typically linked below the DIE they modify. */
3047
3048 typedef struct GTY(()) dw_attr_struct {
3049 enum dwarf_attribute dw_attr;
3050 dw_val_node dw_attr_val;
3051 }
3052 dw_attr_node;
3053
3054
3055 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3056 The children of each node form a circular list linked by
3057 die_sib. die_child points to the node *before* the "first" child node. */
3058
3059 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3060 union die_symbol_or_type_node
3061 {
3062 const char * GTY ((tag ("0"))) die_symbol;
3063 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3064 }
3065 GTY ((desc ("%0.comdat_type_p"))) die_id;
3066 vec<dw_attr_node, va_gc> *die_attr;
3067 dw_die_ref die_parent;
3068 dw_die_ref die_child;
3069 dw_die_ref die_sib;
3070 dw_die_ref die_definition; /* ref from a specification to its definition */
3071 dw_offset die_offset;
3072 unsigned long die_abbrev;
3073 int die_mark;
3074 unsigned int decl_id;
3075 enum dwarf_tag die_tag;
3076 /* Die is used and must not be pruned as unused. */
3077 BOOL_BITFIELD die_perennial_p : 1;
3078 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3079 /* For an external ref to die_symbol if die_offset contains an extra
3080 offset to that symbol. */
3081 BOOL_BITFIELD with_offset : 1;
3082 /* Whether this DIE was removed from the DIE tree, for example via
3083 prune_unused_types. We don't consider those present from the
3084 DIE lookup routines. */
3085 BOOL_BITFIELD removed : 1;
3086 /* Lots of spare bits. */
3087 }
3088 die_node;
3089
3090 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3091 static bool early_dwarf;
3092 static bool early_dwarf_finished;
3093 struct set_early_dwarf {
3094 bool saved;
3095 set_early_dwarf () : saved(early_dwarf)
3096 {
3097 gcc_assert (! early_dwarf_finished);
3098 early_dwarf = true;
3099 }
3100 ~set_early_dwarf () { early_dwarf = saved; }
3101 };
3102
3103 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3104 #define FOR_EACH_CHILD(die, c, expr) do { \
3105 c = die->die_child; \
3106 if (c) do { \
3107 c = c->die_sib; \
3108 expr; \
3109 } while (c != die->die_child); \
3110 } while (0)
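
/* For instance (an illustrative sketch, not code from this file), counting
   the direct children of a DIE:

     dw_die_ref c;
     unsigned n = 0;
     FOR_EACH_CHILD (die, c, n++);

   Since die_child points at the node before the "first" child, the macro
   steps to c->die_sib before evaluating 'expr' and stops once it wraps back
   around to die_child.  */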
3111
3112 /* The pubname structure */
3113
3114 typedef struct GTY(()) pubname_struct {
3115 dw_die_ref die;
3116 const char *name;
3117 }
3118 pubname_entry;
3119
3120
3121 struct GTY(()) dw_ranges {
3122 const char *label;
3123 /* If this is positive, it's a block number, otherwise it's a
3124 bitwise-negated index into dw_ranges_by_label. */
3125 int num;
3126 /* Index for the range list for DW_FORM_rnglistx. */
3127 unsigned int idx : 31;
3128 /* True if this range might be in a different section
3129 from the previous entry. */
3130 unsigned int maybe_new_sec : 1;
3131 };
3132
3133 /* A structure to hold a macinfo entry. */
3134
3135 typedef struct GTY(()) macinfo_struct {
3136 unsigned char code;
3137 unsigned HOST_WIDE_INT lineno;
3138 const char *info;
3139 }
3140 macinfo_entry;
3141
3142
3143 struct GTY(()) dw_ranges_by_label {
3144 const char *begin;
3145 const char *end;
3146 };
3147
3148 /* The comdat type node structure. */
3149 struct GTY(()) comdat_type_node
3150 {
3151 dw_die_ref root_die;
3152 dw_die_ref type_die;
3153 dw_die_ref skeleton_die;
3154 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3155 comdat_type_node *next;
3156 };
3157
3158 /* A list of DIEs for which we can't determine ancestry (parent_die
3159 field) just yet. Later in dwarf2out_finish we will fill in the
3160 missing bits. */
3161 typedef struct GTY(()) limbo_die_struct {
3162 dw_die_ref die;
3163 /* The tree for which this DIE was created. We use this to
3164 determine ancestry later. */
3165 tree created_for;
3166 struct limbo_die_struct *next;
3167 }
3168 limbo_die_node;
3169
3170 typedef struct skeleton_chain_struct
3171 {
3172 dw_die_ref old_die;
3173 dw_die_ref new_die;
3174 struct skeleton_chain_struct *parent;
3175 }
3176 skeleton_chain_node;
3177
3178 /* Define a macro which returns nonzero for a TYPE_DECL which was
3179 implicitly generated for a type.
3180
3181 Note that, unlike the C front-end (which generates a NULL named
3182 TYPE_DECL node for each complete tagged type, each array type,
3183 and each function type node created), the C++ front-end generates
3184 a _named_ TYPE_DECL node for each tagged type node created.
3185 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3186 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3187 front-end, but for each type, tagged or not. */
3188
3189 #define TYPE_DECL_IS_STUB(decl) \
3190 (DECL_NAME (decl) == NULL_TREE \
3191 || (DECL_ARTIFICIAL (decl) \
3192 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3193 /* This is necessary for stub decls that \
3194 appear in nested inline functions. */ \
3195 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3196 && (decl_ultimate_origin (decl) \
3197 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3198
3199 /* Information concerning the compilation unit's programming
3200 language, and compiler version. */
3201
3202 /* Fixed size portion of the DWARF compilation unit header. */
3203 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3204 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3205 + (dwarf_version >= 5 ? 4 : 3))
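
/* A worked figure, assuming 32-bit DWARF (DWARF_INITIAL_LENGTH_SIZE == 4,
   DWARF_OFFSET_SIZE == 4): 4 + 4 + 4 == 12 bytes for a DWARF 5 header
   (version, unit type and address size) and 4 + 4 + 3 == 11 bytes for
   earlier versions (no unit type byte).  */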
3206
3207 /* Fixed size portion of the DWARF comdat type unit header. */
3208 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3209 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3210 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3211
3212 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3213 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3214 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3215
3216 /* Fixed size portion of public names info. */
3217 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3218
3219 /* Fixed size portion of the address range info. */
3220 #define DWARF_ARANGES_HEADER_SIZE \
3221 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3222 DWARF2_ADDR_SIZE * 2) \
3223 - DWARF_INITIAL_LENGTH_SIZE)
3224
3225 /* Size of padding portion in the address range info. It must be
3226 aligned to twice the pointer size. */
3227 #define DWARF_ARANGES_PAD_SIZE \
3228 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3229 DWARF2_ADDR_SIZE * 2) \
3230 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
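
/* A worked example, assuming 32-bit DWARF and 8-byte addresses: the raw
   header is 4 + 4 + 4 == 12 bytes, which DWARF_ROUND pads up to 16 so that
   the address/length pairs that follow stay aligned to 2 * DWARF2_ADDR_SIZE,
   giving DWARF_ARANGES_HEADER_SIZE == 12 and DWARF_ARANGES_PAD_SIZE == 4.  */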
3231
3232 /* Use assembler line directives if available. */
3233 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3234 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3235 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3236 #else
3237 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3238 #endif
3239 #endif
3240
3241 /* Use assembler views in line directives if available. */
3242 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3243 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3244 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3245 #else
3246 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3247 #endif
3248 #endif
3249
3250 /* Return true if GCC configure detected assembler support for .loc. */
3251
3252 bool
3253 dwarf2out_default_as_loc_support (void)
3254 {
3255 return DWARF2_ASM_LINE_DEBUG_INFO;
3256 #if (GCC_VERSION >= 3000)
3257 # undef DWARF2_ASM_LINE_DEBUG_INFO
3258 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3259 #endif
3260 }
3261
3262 /* Return true if GCC configure detected assembler support for views
3263 in .loc directives. */
3264
3265 bool
3266 dwarf2out_default_as_locview_support (void)
3267 {
3268 return DWARF2_ASM_VIEW_DEBUG_INFO;
3269 #if (GCC_VERSION >= 3000)
3270 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3271 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3272 #endif
3273 }
3274
3275 /* A bit is set in zero_view_p if we are using the assembler-supported
3276 view computation, and it refers to a view identifier for which we
3277 will not emit a label because it is known to map to a view number
3278 zero. We won't allocate the bitmap if we're not using assembler
3279 support for location views, but we have to make the variable
3280 visible for GGC and for code that will be optimized out for lack of
3281 support but that's still parsed and compiled. We could abstract it
3282 out with macros, but it's not worth it. */
3283 static GTY(()) bitmap zero_view_p;
3284
3285 /* Evaluate to TRUE iff N is known to identify the first location view
3286 at its PC. When not using assembler location view computation,
3287 that must be view number zero. Otherwise, zero_view_p is allocated
3288 and the view label numbers recorded in it are the ones known to be
3289 zero. */
3290 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3291 || (N) == (var_loc_view)-1 \
3292 || (zero_view_p \
3293 && bitmap_bit_p (zero_view_p, (N))))
3294
3295 /* Return true iff we're to emit .loc directives for the assembler to
3296 generate line number sections.
3297
3298 When we're not emitting views, all we need from the assembler is
3299 support for .loc directives.
3300
3301 If we are emitting views, we can only use the assembler's .loc
3302 support if it also supports views.
3303
3304 When the compiler is emitting the line number programs and
3305 computing view numbers itself, it resets view numbers at known PC
3306 changes and counts from that, and then it emits view numbers as
3307 literal constants in locviewlists. There are cases in which the
3308 compiler is not sure about PC changes, e.g. when extra alignment is
3309 requested for a label. In these cases, the compiler may not reset
3310 the view counter, and the potential PC advance in the line number
3311 program will use an opcode that does not reset the view counter
3312 even if the PC actually changes, so that compiler and debug info
3313 consumer can keep view numbers in sync.
3314
3315 When the compiler defers view computation to the assembler, it
3316 emits symbolic view numbers in locviewlists, with the exception of
3317 views known to be zero (forced resets, or reset after
3318 compiler-visible PC changes): instead of emitting symbols for
3319 these, we emit literal zero and assert the assembler agrees with
3320 the compiler's assessment. We could use symbolic views everywhere,
3321 instead of special-casing zero views, but then we'd be unable to
3322 optimize out locviewlists that contain only zeros. */
3323
3324 static bool
3325 output_asm_line_debug_info (void)
3326 {
3327 return (dwarf2out_as_loc_support
3328 && (dwarf2out_as_locview_support
3329 || !debug_variable_location_views));
3330 }
3331
3332 /* Minimum line offset in a special line info. opcode.
3333 This value was chosen to give a reasonable range of values. */
3334 #define DWARF_LINE_BASE -10
3335
3336 /* First special line opcode - leave room for the standard opcodes. */
3337 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3338
3339 /* Range of line offsets in a special line info. opcode. */
3340 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
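
/* With these parameters a special opcode encodes a line and address advance
   as (line_delta - DWARF_LINE_BASE) + DWARF_LINE_RANGE * operation_advance
   + DWARF_LINE_OPCODE_BASE, the standard DWARF formula; for instance,
   advancing the line by 1 with no address advance would use opcode
   (1 - (-10)) + 0 + 13 == 24, assuming DW_LNS_set_isa == 12 so that
   DWARF_LINE_OPCODE_BASE == 13.  */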
3341
3342 /* Flag that indicates the initial value of the is_stmt_start flag.
3343 In the present implementation, we do not mark any lines as
3344 the beginning of a source statement, because that information
3345 is not made available by the GCC front-end. */
3346 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3347
3348 /* Maximum number of operations per instruction bundle. */
3349 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3350 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3351 #endif
3352
3353 /* This location is used by calc_die_sizes() to keep track of
3354 the offset of each DIE within the .debug_info section. */
3355 static unsigned long next_die_offset;
3356
3357 /* Record the root of the DIE's built for the current compilation unit. */
3358 static GTY(()) dw_die_ref single_comp_unit_die;
3359
3360 /* A list of type DIEs that have been separated into comdat sections. */
3361 static GTY(()) comdat_type_node *comdat_type_list;
3362
3363 /* A list of CU DIEs that have been separated. */
3364 static GTY(()) limbo_die_node *cu_die_list;
3365
3366 /* A list of DIEs with a NULL parent waiting to be relocated. */
3367 static GTY(()) limbo_die_node *limbo_die_list;
3368
3369 /* A list of DIEs for which we may have to generate
3370 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3371 static GTY(()) limbo_die_node *deferred_asm_name;
3372
3373 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3374 {
3375 typedef const char *compare_type;
3376
3377 static hashval_t hash (dwarf_file_data *);
3378 static bool equal (dwarf_file_data *, const char *);
3379 };
3380
3381 /* Filenames referenced by this compilation unit. */
3382 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3383
3384 struct decl_die_hasher : ggc_ptr_hash<die_node>
3385 {
3386 typedef tree compare_type;
3387
3388 static hashval_t hash (die_node *);
3389 static bool equal (die_node *, tree);
3390 };
3391 /* A hash table of references to DIE's that describe declarations.
3392 The key is a DECL_UID() which is a unique number identifying each decl. */
3393 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3394
3395 struct GTY ((for_user)) variable_value_struct {
3396 unsigned int decl_id;
3397 vec<dw_die_ref, va_gc> *dies;
3398 };
3399
3400 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3401 {
3402 typedef tree compare_type;
3403
3404 static hashval_t hash (variable_value_struct *);
3405 static bool equal (variable_value_struct *, tree);
3406 };
3407 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3408 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3409 the DECL_CONTEXT of the referenced VAR_DECLs. */
3410 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3411
3412 struct block_die_hasher : ggc_ptr_hash<die_struct>
3413 {
3414 static hashval_t hash (die_struct *);
3415 static bool equal (die_struct *, die_struct *);
3416 };
3417
3418 /* A hash table of references to DIE's that describe COMMON blocks.
3419 The key is DECL_UID() ^ die_parent. */
3420 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3421
3422 typedef struct GTY(()) die_arg_entry_struct {
3423 dw_die_ref die;
3424 tree arg;
3425 } die_arg_entry;
3426
3427
3428 /* Node of the variable location list. */
3429 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3430 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3431 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3432 in mode of the EXPR_LIST node and first EXPR_LIST operand
3433 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3434 location or NULL for padding. For larger bitsizes,
3435 mode is 0 and first operand is a CONCAT with bitsize
3436 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3437 NULL as second operand. */
3438 rtx GTY (()) loc;
3439 const char * GTY (()) label;
3440 struct var_loc_node * GTY (()) next;
3441 var_loc_view view;
3442 };
3443
3444 /* Variable location list. */
3445 struct GTY ((for_user)) var_loc_list_def {
3446 struct var_loc_node * GTY (()) first;
3447
3448 /* Pointer to the last or second-to-last element of the
3449 chained list. If the list is empty, both first and
3450 last are NULL. If the list contains just one node,
3451 or the last node is certainly not redundant, this points
3452 to the last node; otherwise it points to the last but one.
3453 Do not mark it for GC because it is marked through the chain. */
3454 struct var_loc_node * GTY ((skip ("%h"))) last;
3455
3456 /* Pointer to the last element before a section switch;
3457 if NULL, either sections weren't switched or first
3458 is after the section switch. */
3459 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3460
3461 /* DECL_UID of the variable decl. */
3462 unsigned int decl_id;
3463 };
3464 typedef struct var_loc_list_def var_loc_list;
3465
3466 /* Call argument location list. */
3467 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3468 rtx GTY (()) call_arg_loc_note;
3469 const char * GTY (()) label;
3470 tree GTY (()) block;
3471 bool tail_call_p;
3472 rtx GTY (()) symbol_ref;
3473 struct call_arg_loc_node * GTY (()) next;
3474 };
3475
3476
3477 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3478 {
3479 typedef const_tree compare_type;
3480
3481 static hashval_t hash (var_loc_list *);
3482 static bool equal (var_loc_list *, const_tree);
3483 };
3484
3485 /* Table of decl location linked lists. */
3486 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3487
3488 /* Head and tail of call_arg_loc chain. */
3489 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3490 static struct call_arg_loc_node *call_arg_loc_last;
3491
3492 /* Number of call sites in the current function. */
3493 static int call_site_count = -1;
3494 /* Number of tail call sites in the current function. */
3495 static int tail_call_site_count = -1;
3496
3497 /* A cached location list. */
3498 struct GTY ((for_user)) cached_dw_loc_list_def {
3499 /* The DECL_UID of the decl that this entry describes. */
3500 unsigned int decl_id;
3501
3502 /* The cached location list. */
3503 dw_loc_list_ref loc_list;
3504 };
3505 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3506
3507 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3508 {
3509
3510 typedef const_tree compare_type;
3511
3512 static hashval_t hash (cached_dw_loc_list *);
3513 static bool equal (cached_dw_loc_list *, const_tree);
3514 };
3515
3516 /* Table of cached location lists. */
3517 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3518
3519 /* A vector of references to DIE's that are uniquely identified by their tag,
3520 presence/absence of children DIE's, and list of attribute/value pairs. */
3521 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3522
3523 /* A hash map to remember the stack usage for DWARF procedures. The value
3524 stored is the stack size difference between before the DWARF procedure
3525 invocation and after it returned. In other words, for a DWARF procedure
3526 that consumes N stack slots and that pushes M ones, this stores M - N. */
3527 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3528
3529 /* A global counter for generating labels for line number data. */
3530 static unsigned int line_info_label_num;
3531
3532 /* The current table to which we should emit line number information
3533 for the current function. This will be set up at the beginning of
3534 assembly for the function. */
3535 static GTY(()) dw_line_info_table *cur_line_info_table;
3536
3537 /* The two default tables of line number info. */
3538 static GTY(()) dw_line_info_table *text_section_line_info;
3539 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3540
3541 /* The set of all non-default tables of line number info. */
3542 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3543
3544 /* A flag telling the pubnames/pubtypes export whether there is an info
3545 section to refer to. */
3546 static bool info_section_emitted;
3547
3548 /* A pointer to the base of a table that contains a list of publicly
3549 accessible names. */
3550 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3551
3552 /* A pointer to the base of a table that contains a list of publicly
3553 accessible types. */
3554 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3555
3556 /* A pointer to the base of a table that contains a list of macro
3557 defines/undefines (and file start/end markers). */
3558 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3559
3560 /* True if .debug_macinfo or .debug_macros section is going to be
3561 emitted. */
3562 #define have_macinfo \
3563 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3564 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3565 && !macinfo_table->is_empty ())
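/* Worked example of the condition above (illustrative only): have_macinfo is
   true only for -g3 builds (debug_info_level >= DINFO_LEVEL_VERBOSE) on a
   target with usable DWARF sections, and only if at least one macro entry was
   actually recorded; a plain -g2 compilation therefore never emits a
   .debug_macinfo or .debug_macro section.  */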
3566
3567 /* Vector of dies for which we should generate .debug_ranges info. */
3568 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3569
3570 /* Vector of pairs of labels referenced in ranges_table. */
3571 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3572
3573 /* Whether we have location lists that need outputting. */
3574 static GTY(()) bool have_location_lists;
3575
3576 /* Unique label counter. */
3577 static GTY(()) unsigned int loclabel_num;
3578
3579 /* Unique label counter for point-of-call tables. */
3580 static GTY(()) unsigned int poc_label_num;
3581
3582 /* The last file entry emitted by maybe_emit_file(). */
3583 static GTY(()) struct dwarf_file_data * last_emitted_file;
3584
3585 /* Number of internal labels generated by gen_internal_sym(). */
3586 static GTY(()) int label_num;
3587
3588 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3589
3590 /* Instances of generic types for which we need to generate debug
3591 info that describes their generic parameters and arguments. That
3592 generation needs to happen once all types are properly laid out so
3593 we do it at the end of compilation. */
3594 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3595
3596 /* Offset from the "steady-state frame pointer" to the frame base,
3597 within the current function. */
3598 static poly_int64 frame_pointer_fb_offset;
3599 static bool frame_pointer_fb_offset_valid;
3600
3601 static vec<dw_die_ref> base_types;
3602
3603 /* Flags to represent a set of attribute classes for attributes that represent
3604 a scalar value (bounds, pointers, ...). */
3605 enum dw_scalar_form
3606 {
3607 dw_scalar_form_constant = 0x01,
3608 dw_scalar_form_exprloc = 0x02,
3609 dw_scalar_form_reference = 0x04
3610 };
3611
3612 /* Forward declarations for functions defined in this file. */
3613
3614 static int is_pseudo_reg (const_rtx);
3615 static tree type_main_variant (tree);
3616 static int is_tagged_type (const_tree);
3617 static const char *dwarf_tag_name (unsigned);
3618 static const char *dwarf_attr_name (unsigned);
3619 static const char *dwarf_form_name (unsigned);
3620 static tree decl_ultimate_origin (const_tree);
3621 static tree decl_class_context (tree);
3622 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3623 static inline enum dw_val_class AT_class (dw_attr_node *);
3624 static inline unsigned int AT_index (dw_attr_node *);
3625 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3626 static inline unsigned AT_flag (dw_attr_node *);
3627 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3628 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3629 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3630 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3631 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3632 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3633 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3634 unsigned int, unsigned char *);
3635 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3636 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3637 static inline const char *AT_string (dw_attr_node *);
3638 static enum dwarf_form AT_string_form (dw_attr_node *);
3639 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3640 static void add_AT_specification (dw_die_ref, dw_die_ref);
3641 static inline dw_die_ref AT_ref (dw_attr_node *);
3642 static inline int AT_ref_external (dw_attr_node *);
3643 static inline void set_AT_ref_external (dw_attr_node *, int);
3644 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3645 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3646 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3647 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3648 dw_loc_list_ref);
3649 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3650 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3651 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3652 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3653 static void remove_addr_table_entry (addr_table_entry *);
3654 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3655 static inline rtx AT_addr (dw_attr_node *);
3656 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3657 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3658 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3659 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3660 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3661 const char *);
3662 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3663 unsigned HOST_WIDE_INT);
3664 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3665 unsigned long, bool);
3666 static inline const char *AT_lbl (dw_attr_node *);
3667 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3668 static const char *get_AT_low_pc (dw_die_ref);
3669 static const char *get_AT_hi_pc (dw_die_ref);
3670 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3671 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3672 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3673 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3674 static bool is_cxx (void);
3675 static bool is_cxx (const_tree);
3676 static bool is_fortran (void);
3677 static bool is_ada (void);
3678 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3679 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3680 static void add_child_die (dw_die_ref, dw_die_ref);
3681 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3682 static dw_die_ref lookup_type_die (tree);
3683 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3684 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3685 static void equate_type_number_to_die (tree, dw_die_ref);
3686 static dw_die_ref lookup_decl_die (tree);
3687 static var_loc_list *lookup_decl_loc (const_tree);
3688 static void equate_decl_number_to_die (tree, dw_die_ref);
3689 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3690 static void print_spaces (FILE *);
3691 static void print_die (dw_die_ref, FILE *);
3692 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3693 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3694 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3695 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3696 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3697 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3698 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3699 struct md5_ctx *, int *);
3700 struct checksum_attributes;
3701 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3702 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3703 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3704 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3705 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3706 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3707 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3708 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3709 static int is_type_die (dw_die_ref);
3710 static int is_comdat_die (dw_die_ref);
3711 static inline bool is_template_instantiation (dw_die_ref);
3712 static int is_declaration_die (dw_die_ref);
3713 static int should_move_die_to_comdat (dw_die_ref);
3714 static dw_die_ref clone_as_declaration (dw_die_ref);
3715 static dw_die_ref clone_die (dw_die_ref);
3716 static dw_die_ref clone_tree (dw_die_ref);
3717 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3718 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3719 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3720 static dw_die_ref generate_skeleton (dw_die_ref);
3721 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3722 dw_die_ref,
3723 dw_die_ref);
3724 static void break_out_comdat_types (dw_die_ref);
3725 static void copy_decls_for_unworthy_types (dw_die_ref);
3726
3727 static void add_sibling_attributes (dw_die_ref);
3728 static void output_location_lists (dw_die_ref);
3729 static int constant_size (unsigned HOST_WIDE_INT);
3730 static unsigned long size_of_die (dw_die_ref);
3731 static void calc_die_sizes (dw_die_ref);
3732 static void calc_base_type_die_sizes (void);
3733 static void mark_dies (dw_die_ref);
3734 static void unmark_dies (dw_die_ref);
3735 static void unmark_all_dies (dw_die_ref);
3736 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3737 static unsigned long size_of_aranges (void);
3738 static enum dwarf_form value_format (dw_attr_node *);
3739 static void output_value_format (dw_attr_node *);
3740 static void output_abbrev_section (void);
3741 static void output_die_abbrevs (unsigned long, dw_die_ref);
3742 static void output_die (dw_die_ref);
3743 static void output_compilation_unit_header (enum dwarf_unit_type);
3744 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3745 static void output_comdat_type_unit (comdat_type_node *);
3746 static const char *dwarf2_name (tree, int);
3747 static void add_pubname (tree, dw_die_ref);
3748 static void add_enumerator_pubname (const char *, dw_die_ref);
3749 static void add_pubname_string (const char *, dw_die_ref);
3750 static void add_pubtype (tree, dw_die_ref);
3751 static void output_pubnames (vec<pubname_entry, va_gc> *);
3752 static void output_aranges (void);
3753 static unsigned int add_ranges (const_tree, bool = false);
3754 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3755 bool *, bool);
3756 static void output_ranges (void);
3757 static dw_line_info_table *new_line_info_table (void);
3758 static void output_line_info (bool);
3759 static void output_file_names (void);
3760 static dw_die_ref base_type_die (tree, bool);
3761 static int is_base_type (tree);
3762 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3763 static int decl_quals (const_tree);
3764 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3765 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3766 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3767 static int type_is_enum (const_tree);
3768 static unsigned int dbx_reg_number (const_rtx);
3769 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3770 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3771 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3772 enum var_init_status);
3773 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3774 enum var_init_status);
3775 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3776 enum var_init_status);
3777 static int is_based_loc (const_rtx);
3778 static bool resolve_one_addr (rtx *);
3779 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3780 enum var_init_status);
3781 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3782 enum var_init_status);
3783 struct loc_descr_context;
3784 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3785 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3786 static dw_loc_list_ref loc_list_from_tree (tree, int,
3787 struct loc_descr_context *);
3788 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3789 struct loc_descr_context *);
3790 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3791 static tree field_type (const_tree);
3792 static unsigned int simple_type_align_in_bits (const_tree);
3793 static unsigned int simple_decl_align_in_bits (const_tree);
3794 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3795 struct vlr_context;
3796 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3797 HOST_WIDE_INT *);
3798 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3799 dw_loc_list_ref);
3800 static void add_data_member_location_attribute (dw_die_ref, tree,
3801 struct vlr_context *);
3802 static bool add_const_value_attribute (dw_die_ref, rtx);
3803 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3804 static void insert_wide_int (const wide_int &, unsigned char *, int);
3805 static void insert_float (const_rtx, unsigned char *);
3806 static rtx rtl_for_decl_location (tree);
3807 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3808 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3809 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3810 static void add_name_attribute (dw_die_ref, const char *);
3811 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3812 static void add_comp_dir_attribute (dw_die_ref);
3813 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3814 struct loc_descr_context *);
3815 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3816 struct loc_descr_context *);
3817 static void add_subscript_info (dw_die_ref, tree, bool);
3818 static void add_byte_size_attribute (dw_die_ref, tree);
3819 static void add_alignment_attribute (dw_die_ref, tree);
3820 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3821 struct vlr_context *);
3822 static void add_bit_size_attribute (dw_die_ref, tree);
3823 static void add_prototyped_attribute (dw_die_ref, tree);
3824 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3825 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3826 static void add_src_coords_attributes (dw_die_ref, tree);
3827 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3828 static void add_discr_value (dw_die_ref, dw_discr_value *);
3829 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3830 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3831 static dw_die_ref scope_die_for (tree, dw_die_ref);
3832 static inline int local_scope_p (dw_die_ref);
3833 static inline int class_scope_p (dw_die_ref);
3834 static inline int class_or_namespace_scope_p (dw_die_ref);
3835 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3836 static void add_calling_convention_attribute (dw_die_ref, tree);
3837 static const char *type_tag (const_tree);
3838 static tree member_declared_type (const_tree);
3839 #if 0
3840 static const char *decl_start_label (tree);
3841 #endif
3842 static void gen_array_type_die (tree, dw_die_ref);
3843 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3844 #if 0
3845 static void gen_entry_point_die (tree, dw_die_ref);
3846 #endif
3847 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3848 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3849 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3850 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3851 static void gen_formal_types_die (tree, dw_die_ref);
3852 static void gen_subprogram_die (tree, dw_die_ref);
3853 static void gen_variable_die (tree, tree, dw_die_ref);
3854 static void gen_const_die (tree, dw_die_ref);
3855 static void gen_label_die (tree, dw_die_ref);
3856 static void gen_lexical_block_die (tree, dw_die_ref);
3857 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3858 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3859 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3860 static dw_die_ref gen_compile_unit_die (const char *);
3861 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3862 static void gen_member_die (tree, dw_die_ref);
3863 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3864 enum debug_info_usage);
3865 static void gen_subroutine_type_die (tree, dw_die_ref);
3866 static void gen_typedef_die (tree, dw_die_ref);
3867 static void gen_type_die (tree, dw_die_ref);
3868 static void gen_block_die (tree, dw_die_ref);
3869 static void decls_for_scope (tree, dw_die_ref);
3870 static bool is_naming_typedef_decl (const_tree);
3871 static inline dw_die_ref get_context_die (tree);
3872 static void gen_namespace_die (tree, dw_die_ref);
3873 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3874 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3875 static dw_die_ref force_decl_die (tree);
3876 static dw_die_ref force_type_die (tree);
3877 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3878 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3879 static struct dwarf_file_data * lookup_filename (const char *);
3880 static void retry_incomplete_types (void);
3881 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3882 static void gen_generic_params_dies (tree);
3883 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3884 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3885 static void splice_child_die (dw_die_ref, dw_die_ref);
3886 static int file_info_cmp (const void *, const void *);
3887 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3888 const char *, var_loc_view, const char *);
3889 static void output_loc_list (dw_loc_list_ref);
3890 static char *gen_internal_sym (const char *);
3891 static bool want_pubnames (void);
3892
3893 static void prune_unmark_dies (dw_die_ref);
3894 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3895 static void prune_unused_types_mark (dw_die_ref, int);
3896 static void prune_unused_types_walk (dw_die_ref);
3897 static void prune_unused_types_walk_attribs (dw_die_ref);
3898 static void prune_unused_types_prune (dw_die_ref);
3899 static void prune_unused_types (void);
3900 static int maybe_emit_file (struct dwarf_file_data *fd);
3901 static inline const char *AT_vms_delta1 (dw_attr_node *);
3902 static inline const char *AT_vms_delta2 (dw_attr_node *);
3903 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3904 const char *, const char *);
3905 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3906 static void gen_remaining_tmpl_value_param_die_attribute (void);
3907 static bool generic_type_p (tree);
3908 static void schedule_generic_params_dies_gen (tree t);
3909 static void gen_scheduled_generic_parms_dies (void);
3910 static void resolve_variable_values (void);
3911
3912 static const char *comp_dir_string (void);
3913
3914 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3915
3916 /* enum for tracking thread-local variables whose address is really an offset
3917 relative to the TLS pointer, which will need link-time relocation, but will
3918 not need relocation by the DWARF consumer. */
3919
3920 enum dtprel_bool
3921 {
3922 dtprel_false = 0,
3923 dtprel_true = 1
3924 };
3925
3926 /* Return the operator to use for an address of a variable. For dtprel_true, we
3927 use DW_OP_const*. For regular variables, which need both link-time
3928 relocation and consumer-level relocation (e.g., to account for shared objects
3929 loaded at a random address), we use DW_OP_addr*. */
3930
3931 static inline enum dwarf_location_atom
3932 dw_addr_op (enum dtprel_bool dtprel)
3933 {
3934 if (dtprel == dtprel_true)
3935 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3936 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3937 else
3938 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3939 }
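/* Worked example (illustrative, derived from the function above): with
   -gsplit-dwarf a TLS offset (dtprel_true) is emitted via
   dwarf_OP (DW_OP_constx); without split debug info on a 64-bit target
   (DWARF2_ADDR_SIZE == 8) the same offset would instead use DW_OP_const8u,
   while a regular variable address uses DW_OP_addr, or
   dwarf_OP (DW_OP_addrx) when splitting.  */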
3940
3941 /* Return a pointer to a newly allocated address location description. If
3942 dwarf_split_debug_info is true, then record the address with the appropriate
3943 relocation. */
3944 static inline dw_loc_descr_ref
3945 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3946 {
3947 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3948
3949 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3950 ref->dw_loc_oprnd1.v.val_addr = addr;
3951 ref->dtprel = dtprel;
3952 if (dwarf_split_debug_info)
3953 ref->dw_loc_oprnd1.val_entry
3954 = add_addr_table_entry (addr,
3955 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3956 else
3957 ref->dw_loc_oprnd1.val_entry = NULL;
3958
3959 return ref;
3960 }
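/* Minimal usage sketch (hypothetical caller, for illustration only; DECL and
   DIE are assumed to exist and DECL to have a symbolic address):

     rtx sym = XEXP (DECL_RTL (decl), 0);
     dw_loc_descr_ref descr = new_addr_loc_descr (sym, dtprel_false);
     add_AT_loc (die, DW_AT_location, descr);

   With dwarf_split_debug_info the address is also recorded in the
   .debug_addr table so it can later be referenced by index.  */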
3961
3962 /* Section names used to hold DWARF debugging information. */
3963
3964 #ifndef DEBUG_INFO_SECTION
3965 #define DEBUG_INFO_SECTION ".debug_info"
3966 #endif
3967 #ifndef DEBUG_DWO_INFO_SECTION
3968 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3969 #endif
3970 #ifndef DEBUG_LTO_INFO_SECTION
3971 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3972 #endif
3973 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3974 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3975 #endif
3976 #ifndef DEBUG_ABBREV_SECTION
3977 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3978 #endif
3979 #ifndef DEBUG_LTO_ABBREV_SECTION
3980 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3981 #endif
3982 #ifndef DEBUG_DWO_ABBREV_SECTION
3983 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3984 #endif
3985 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3986 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3987 #endif
3988 #ifndef DEBUG_ARANGES_SECTION
3989 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3990 #endif
3991 #ifndef DEBUG_ADDR_SECTION
3992 #define DEBUG_ADDR_SECTION ".debug_addr"
3993 #endif
3994 #ifndef DEBUG_MACINFO_SECTION
3995 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3996 #endif
3997 #ifndef DEBUG_LTO_MACINFO_SECTION
3998 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3999 #endif
4000 #ifndef DEBUG_DWO_MACINFO_SECTION
4001 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4002 #endif
4003 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4004 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4005 #endif
4006 #ifndef DEBUG_MACRO_SECTION
4007 #define DEBUG_MACRO_SECTION ".debug_macro"
4008 #endif
4009 #ifndef DEBUG_LTO_MACRO_SECTION
4010 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4011 #endif
4012 #ifndef DEBUG_DWO_MACRO_SECTION
4013 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4014 #endif
4015 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4016 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4017 #endif
4018 #ifndef DEBUG_LINE_SECTION
4019 #define DEBUG_LINE_SECTION ".debug_line"
4020 #endif
4021 #ifndef DEBUG_LTO_LINE_SECTION
4022 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4023 #endif
4024 #ifndef DEBUG_DWO_LINE_SECTION
4025 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4026 #endif
4027 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4028 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4029 #endif
4030 #ifndef DEBUG_LOC_SECTION
4031 #define DEBUG_LOC_SECTION ".debug_loc"
4032 #endif
4033 #ifndef DEBUG_DWO_LOC_SECTION
4034 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4035 #endif
4036 #ifndef DEBUG_LOCLISTS_SECTION
4037 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4038 #endif
4039 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4040 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4041 #endif
4042 #ifndef DEBUG_PUBNAMES_SECTION
4043 #define DEBUG_PUBNAMES_SECTION \
4044 ((debug_generate_pub_sections == 2) \
4045 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4046 #endif
4047 #ifndef DEBUG_PUBTYPES_SECTION
4048 #define DEBUG_PUBTYPES_SECTION \
4049 ((debug_generate_pub_sections == 2) \
4050 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4051 #endif
4052 #ifndef DEBUG_STR_OFFSETS_SECTION
4053 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4054 #endif
4055 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4056 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4057 #endif
4058 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4059 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4060 #endif
4061 #ifndef DEBUG_STR_SECTION
4062 #define DEBUG_STR_SECTION ".debug_str"
4063 #endif
4064 #ifndef DEBUG_LTO_STR_SECTION
4065 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4066 #endif
4067 #ifndef DEBUG_STR_DWO_SECTION
4068 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4069 #endif
4070 #ifndef DEBUG_LTO_STR_DWO_SECTION
4071 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4072 #endif
4073 #ifndef DEBUG_RANGES_SECTION
4074 #define DEBUG_RANGES_SECTION ".debug_ranges"
4075 #endif
4076 #ifndef DEBUG_RNGLISTS_SECTION
4077 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4078 #endif
4079 #ifndef DEBUG_LINE_STR_SECTION
4080 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4081 #endif
4082 #ifndef DEBUG_LTO_LINE_STR_SECTION
4083 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4084 #endif
4085
4086 /* Standard ELF section names for compiled code and data. */
4087 #ifndef TEXT_SECTION_NAME
4088 #define TEXT_SECTION_NAME ".text"
4089 #endif
4090
4091 /* Section flags for .debug_str section. */
4092 #define DEBUG_STR_SECTION_FLAGS \
4093 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4094 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4095 : SECTION_DEBUG)
4096
4097 /* Section flags for .debug_str.dwo section. */
4098 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4099
4100 /* Attribute used to refer to the macro section. */
4101 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4102 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
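/* For example, following the macro above: -gdwarf-5 selects DW_AT_macros,
   a strict pre-DWARF-5 setting such as -gdwarf-4 -gstrict-dwarf selects
   DW_AT_macro_info, and the default non-strict pre-DWARF-5 case selects the
   GNU extension DW_AT_GNU_macros.  */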
4103
4104 /* Labels we insert at the beginning of sections, which we can reference
4105 instead of the section names themselves. */
4106
4107 #ifndef TEXT_SECTION_LABEL
4108 #define TEXT_SECTION_LABEL "Ltext"
4109 #endif
4110 #ifndef COLD_TEXT_SECTION_LABEL
4111 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4112 #endif
4113 #ifndef DEBUG_LINE_SECTION_LABEL
4114 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4115 #endif
4116 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4117 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4118 #endif
4119 #ifndef DEBUG_INFO_SECTION_LABEL
4120 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4121 #endif
4122 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4123 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4124 #endif
4125 #ifndef DEBUG_ABBREV_SECTION_LABEL
4126 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4127 #endif
4128 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4129 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4130 #endif
4131 #ifndef DEBUG_ADDR_SECTION_LABEL
4132 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4133 #endif
4134 #ifndef DEBUG_LOC_SECTION_LABEL
4135 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4136 #endif
4137 #ifndef DEBUG_RANGES_SECTION_LABEL
4138 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4139 #endif
4140 #ifndef DEBUG_MACINFO_SECTION_LABEL
4141 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4142 #endif
4143 #ifndef DEBUG_MACRO_SECTION_LABEL
4144 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4145 #endif
4146 #define SKELETON_COMP_DIE_ABBREV 1
4147 #define SKELETON_TYPE_DIE_ABBREV 2
4148
4149 /* Definitions of defaults for formats and names of various special
4150 (artificial) labels which may be generated within this file (when the -g
4151 option is used and DWARF2_DEBUGGING_INFO is in effect).
4152 If necessary, these may be overridden from within the tm.h file, but
4153 typically, overriding these defaults is unnecessary. */
4154
4155 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4156 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4170
4171 #ifndef TEXT_END_LABEL
4172 #define TEXT_END_LABEL "Letext"
4173 #endif
4174 #ifndef COLD_END_LABEL
4175 #define COLD_END_LABEL "Letext_cold"
4176 #endif
4177 #ifndef BLOCK_BEGIN_LABEL
4178 #define BLOCK_BEGIN_LABEL "LBB"
4179 #endif
4180 #ifndef BLOCK_INLINE_ENTRY_LABEL
4181 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4182 #endif
4183 #ifndef BLOCK_END_LABEL
4184 #define BLOCK_END_LABEL "LBE"
4185 #endif
4186 #ifndef LINE_CODE_LABEL
4187 #define LINE_CODE_LABEL "LM"
4188 #endif
4189
4190 \f
4191 /* Return the root of the DIE's built for the current compilation unit. */
4192 static dw_die_ref
4193 comp_unit_die (void)
4194 {
4195 if (!single_comp_unit_die)
4196 single_comp_unit_die = gen_compile_unit_die (NULL);
4197 return single_comp_unit_die;
4198 }
4199
4200 /* We allow a language front-end to designate a function that is to be
4201 called to "demangle" any name before it is put into a DIE. */
4202
4203 static const char *(*demangle_name_func) (const char *);
4204
4205 void
4206 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4207 {
4208 demangle_name_func = func;
4209 }
4210
4211 /* Test if rtl node points to a pseudo register. */
4212
4213 static inline int
4214 is_pseudo_reg (const_rtx rtl)
4215 {
4216 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4217 || (GET_CODE (rtl) == SUBREG
4218 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4219 }
4220
4221 /* Return a reference to a type, with its const and volatile qualifiers
4222 removed. */
4223
4224 static inline tree
4225 type_main_variant (tree type)
4226 {
4227 type = TYPE_MAIN_VARIANT (type);
4228
4229 /* ??? There really should be only one main variant among any group of
4230 variants of a given type (and all of the MAIN_VARIANT values for all
4231 members of the group should point to that one type) but sometimes the C
4232 front-end messes this up for array types, so we work around that bug
4233 here. */
4234 if (TREE_CODE (type) == ARRAY_TYPE)
4235 while (type != TYPE_MAIN_VARIANT (type))
4236 type = TYPE_MAIN_VARIANT (type);
4237
4238 return type;
4239 }
4240
4241 /* Return nonzero if the given type node represents a tagged type. */
4242
4243 static inline int
4244 is_tagged_type (const_tree type)
4245 {
4246 enum tree_code code = TREE_CODE (type);
4247
4248 return (code == RECORD_TYPE || code == UNION_TYPE
4249 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4250 }
4251
4252 /* Set LABEL to debug_info_section_label + the die_offset of the DIE reference REF. */
4253
4254 static void
4255 get_ref_die_offset_label (char *label, dw_die_ref ref)
4256 {
4257 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4258 }
4259
4260 /* Return die_offset of a DIE reference to a base type. */
4261
4262 static unsigned long int
4263 get_base_type_offset (dw_die_ref ref)
4264 {
4265 if (ref->die_offset)
4266 return ref->die_offset;
4267 if (comp_unit_die ()->die_abbrev)
4268 {
4269 calc_base_type_die_sizes ();
4270 gcc_assert (ref->die_offset);
4271 }
4272 return ref->die_offset;
4273 }
4274
4275 /* Return die_offset of a DIE reference other than base type. */
4276
4277 static unsigned long int
4278 get_ref_die_offset (dw_die_ref ref)
4279 {
4280 gcc_assert (ref->die_offset);
4281 return ref->die_offset;
4282 }
4283
4284 /* Convert a DIE tag into its string name. */
4285
4286 static const char *
4287 dwarf_tag_name (unsigned int tag)
4288 {
4289 const char *name = get_DW_TAG_name (tag);
4290
4291 if (name != NULL)
4292 return name;
4293
4294 return "DW_TAG_<unknown>";
4295 }
4296
4297 /* Convert a DWARF attribute code into its string name. */
4298
4299 static const char *
4300 dwarf_attr_name (unsigned int attr)
4301 {
4302 const char *name;
4303
4304 switch (attr)
4305 {
4306 #if VMS_DEBUGGING_INFO
4307 case DW_AT_HP_prologue:
4308 return "DW_AT_HP_prologue";
4309 #else
4310 case DW_AT_MIPS_loop_unroll_factor:
4311 return "DW_AT_MIPS_loop_unroll_factor";
4312 #endif
4313
4314 #if VMS_DEBUGGING_INFO
4315 case DW_AT_HP_epilogue:
4316 return "DW_AT_HP_epilogue";
4317 #else
4318 case DW_AT_MIPS_stride:
4319 return "DW_AT_MIPS_stride";
4320 #endif
4321 }
4322
4323 name = get_DW_AT_name (attr);
4324
4325 if (name != NULL)
4326 return name;
4327
4328 return "DW_AT_<unknown>";
4329 }
4330
4331 /* Convert a DWARF value form code into its string name. */
4332
4333 static const char *
4334 dwarf_form_name (unsigned int form)
4335 {
4336 const char *name = get_DW_FORM_name (form);
4337
4338 if (name != NULL)
4339 return name;
4340
4341 return "DW_FORM_<unknown>";
4342 }
4343 \f
4344 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4345 instance of an inlined instance of a decl which is local to an inline
4346 function, so we have to trace all of the way back through the origin chain
4347 to find out what sort of node actually served as the original seed for the
4348 given block. */
4349
4350 static tree
4351 decl_ultimate_origin (const_tree decl)
4352 {
4353 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4354 return NULL_TREE;
4355
4356 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4357 we're trying to output the abstract instance of this function. */
4358 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4359 return NULL_TREE;
4360
4361 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4362 most distant ancestor, this should never happen. */
4363 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4364
4365 return DECL_ABSTRACT_ORIGIN (decl);
4366 }
4367
4368 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4369 of a virtual function may refer to a base class, so we check the 'this'
4370 parameter. */
4371
4372 static tree
4373 decl_class_context (tree decl)
4374 {
4375 tree context = NULL_TREE;
4376
4377 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4378 context = DECL_CONTEXT (decl);
4379 else
4380 context = TYPE_MAIN_VARIANT
4381 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4382
4383 if (context && !TYPE_P (context))
4384 context = NULL_TREE;
4385
4386 return context;
4387 }
4388 \f
4389 /* Add an attribute/value pair to a DIE. */
4390
4391 static inline void
4392 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4393 {
4394 /* Maybe this should be an assert? */
4395 if (die == NULL)
4396 return;
4397
4398 if (flag_checking)
4399 {
4400 /* Check we do not add duplicate attrs. Can't use get_AT here
4401 because that recurses to the specification/abstract origin DIE. */
4402 dw_attr_node *a;
4403 unsigned ix;
4404 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4405 gcc_assert (a->dw_attr != attr->dw_attr);
4406 }
4407
4408 vec_safe_reserve (die->die_attr, 1);
4409 vec_safe_push (die->die_attr, *attr);
4410 }
4411
4412 static inline enum dw_val_class
4413 AT_class (dw_attr_node *a)
4414 {
4415 return a->dw_attr_val.val_class;
4416 }
4417
4418 /* Return the index for any attribute that will be referenced with a
4419 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4420 indices are stored in dw_attr_val.v.val_str for reference counting
4421 pruning. */
4422
4423 static inline unsigned int
4424 AT_index (dw_attr_node *a)
4425 {
4426 if (AT_class (a) == dw_val_class_str)
4427 return a->dw_attr_val.v.val_str->index;
4428 else if (a->dw_attr_val.val_entry != NULL)
4429 return a->dw_attr_val.val_entry->index;
4430 return NOT_INDEXED;
4431 }
4432
4433 /* Add a flag value attribute to a DIE. */
4434
4435 static inline void
4436 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4437 {
4438 dw_attr_node attr;
4439
4440 attr.dw_attr = attr_kind;
4441 attr.dw_attr_val.val_class = dw_val_class_flag;
4442 attr.dw_attr_val.val_entry = NULL;
4443 attr.dw_attr_val.v.val_flag = flag;
4444 add_dwarf_attr (die, &attr);
4445 }
4446
4447 static inline unsigned
4448 AT_flag (dw_attr_node *a)
4449 {
4450 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4451 return a->dw_attr_val.v.val_flag;
4452 }
4453
4454 /* Add a signed integer attribute value to a DIE. */
4455
4456 static inline void
4457 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4458 {
4459 dw_attr_node attr;
4460
4461 attr.dw_attr = attr_kind;
4462 attr.dw_attr_val.val_class = dw_val_class_const;
4463 attr.dw_attr_val.val_entry = NULL;
4464 attr.dw_attr_val.v.val_int = int_val;
4465 add_dwarf_attr (die, &attr);
4466 }
4467
4468 static inline HOST_WIDE_INT
4469 AT_int (dw_attr_node *a)
4470 {
4471 gcc_assert (a && (AT_class (a) == dw_val_class_const
4472 || AT_class (a) == dw_val_class_const_implicit));
4473 return a->dw_attr_val.v.val_int;
4474 }
4475
4476 /* Add an unsigned integer attribute value to a DIE. */
4477
4478 static inline void
4479 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4480 unsigned HOST_WIDE_INT unsigned_val)
4481 {
4482 dw_attr_node attr;
4483
4484 attr.dw_attr = attr_kind;
4485 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4486 attr.dw_attr_val.val_entry = NULL;
4487 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4488 add_dwarf_attr (die, &attr);
4489 }
4490
4491 static inline unsigned HOST_WIDE_INT
4492 AT_unsigned (dw_attr_node *a)
4493 {
4494 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4495 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4496 return a->dw_attr_val.v.val_unsigned;
4497 }
4498
4499 /* Add an unsigned wide integer attribute value to a DIE. */
4500
4501 static inline void
4502 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4503 const wide_int& w)
4504 {
4505 dw_attr_node attr;
4506
4507 attr.dw_attr = attr_kind;
4508 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4509 attr.dw_attr_val.val_entry = NULL;
4510 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4511 *attr.dw_attr_val.v.val_wide = w;
4512 add_dwarf_attr (die, &attr);
4513 }
4514
4515 /* Add an unsigned double integer attribute value to a DIE. */
4516
4517 static inline void
4518 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4519 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4520 {
4521 dw_attr_node attr;
4522
4523 attr.dw_attr = attr_kind;
4524 attr.dw_attr_val.val_class = dw_val_class_const_double;
4525 attr.dw_attr_val.val_entry = NULL;
4526 attr.dw_attr_val.v.val_double.high = high;
4527 attr.dw_attr_val.v.val_double.low = low;
4528 add_dwarf_attr (die, &attr);
4529 }
4530
4531 /* Add a constant-data vector (LENGTH elements of ELT_SIZE bytes each, e.g. a floating-point or vector constant) as an attribute value to a DIE. */
4532
4533 static inline void
4534 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4535 unsigned int length, unsigned int elt_size, unsigned char *array)
4536 {
4537 dw_attr_node attr;
4538
4539 attr.dw_attr = attr_kind;
4540 attr.dw_attr_val.val_class = dw_val_class_vec;
4541 attr.dw_attr_val.val_entry = NULL;
4542 attr.dw_attr_val.v.val_vec.length = length;
4543 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4544 attr.dw_attr_val.v.val_vec.array = array;
4545 add_dwarf_attr (die, &attr);
4546 }
4547
4548 /* Add an 8-byte data attribute value to a DIE. */
4549
4550 static inline void
4551 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4552 unsigned char data8[8])
4553 {
4554 dw_attr_node attr;
4555
4556 attr.dw_attr = attr_kind;
4557 attr.dw_attr_val.val_class = dw_val_class_data8;
4558 attr.dw_attr_val.val_entry = NULL;
4559 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4560 add_dwarf_attr (die, &attr);
4561 }
4562
4563 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4564 dwarf_split_debug_info, address attributes in dies destined for the
4565 final executable have force_direct set to avoid using indexed
4566 references. */
4567
4568 static inline void
4569 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4570 bool force_direct)
4571 {
4572 dw_attr_node attr;
4573 char * lbl_id;
4574
4575 lbl_id = xstrdup (lbl_low);
4576 attr.dw_attr = DW_AT_low_pc;
4577 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4578 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4579 if (dwarf_split_debug_info && !force_direct)
4580 attr.dw_attr_val.val_entry
4581 = add_addr_table_entry (lbl_id, ate_kind_label);
4582 else
4583 attr.dw_attr_val.val_entry = NULL;
4584 add_dwarf_attr (die, &attr);
4585
4586 attr.dw_attr = DW_AT_high_pc;
4587 if (dwarf_version < 4)
4588 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4589 else
4590 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4591 lbl_id = xstrdup (lbl_high);
4592 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4593 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4594 && dwarf_split_debug_info && !force_direct)
4595 attr.dw_attr_val.val_entry
4596 = add_addr_table_entry (lbl_id, ate_kind_label);
4597 else
4598 attr.dw_attr_val.val_entry = NULL;
4599 add_dwarf_attr (die, &attr);
4600 }
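/* Typical use (illustrative sketch; the labels are assumed to have been
   generated elsewhere, e.g. with ASM_GENERATE_INTERNAL_LABEL):

     char lo[MAX_ARTIFICIAL_LABEL_BYTES], hi[MAX_ARTIFICIAL_LABEL_BYTES];
     ...
     add_AT_low_high_pc (subr_die, lo, hi, false);

   For dwarf_version >= 4 the high-pc value is classed as
   dw_val_class_high_pc, so it can be emitted as an offset from low-pc rather
   than as an absolute address.  */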
4601
4602 /* Hash and equality functions for debug_str_hash. */
4603
4604 hashval_t
4605 indirect_string_hasher::hash (indirect_string_node *x)
4606 {
4607 return htab_hash_string (x->str);
4608 }
4609
4610 bool
4611 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4612 {
4613 return strcmp (x1->str, x2) == 0;
4614 }
4615
4616 /* Add STR to the given string hash table. */
4617
4618 static struct indirect_string_node *
4619 find_AT_string_in_table (const char *str,
4620 hash_table<indirect_string_hasher> *table)
4621 {
4622 struct indirect_string_node *node;
4623
4624 indirect_string_node **slot
4625 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4626 if (*slot == NULL)
4627 {
4628 node = ggc_cleared_alloc<indirect_string_node> ();
4629 node->str = ggc_strdup (str);
4630 *slot = node;
4631 }
4632 else
4633 node = *slot;
4634
4635 node->refcount++;
4636 return node;
4637 }
4638
4639 /* Add STR to the indirect string hash table. */
4640
4641 static struct indirect_string_node *
4642 find_AT_string (const char *str)
4643 {
4644 if (! debug_str_hash)
4645 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4646
4647 return find_AT_string_in_table (str, debug_str_hash);
4648 }
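/* Illustrative example (hypothetical strings): two DIEs that both add a
   DW_AT_name of "size_t" end up sharing a single indirect_string_node with
   refcount == 2; that refcount later feeds the inline-vs-.debug_str decision
   made in find_string_form.  */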
4649
4650 /* Add a string attribute value to a DIE. */
4651
4652 static inline void
4653 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4654 {
4655 dw_attr_node attr;
4656 struct indirect_string_node *node;
4657
4658 node = find_AT_string (str);
4659
4660 attr.dw_attr = attr_kind;
4661 attr.dw_attr_val.val_class = dw_val_class_str;
4662 attr.dw_attr_val.val_entry = NULL;
4663 attr.dw_attr_val.v.val_str = node;
4664 add_dwarf_attr (die, &attr);
4665 }
4666
4667 static inline const char *
4668 AT_string (dw_attr_node *a)
4669 {
4670 gcc_assert (a && AT_class (a) == dw_val_class_str);
4671 return a->dw_attr_val.v.val_str->str;
4672 }
4673
4674 /* Call this function directly to bypass AT_string_form's logic that
4675 might otherwise put the string inline in the DIE. */
4676
4677 static void
4678 set_indirect_string (struct indirect_string_node *node)
4679 {
4680 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4681 /* If the string is already indirect, this is a no-op. */
4682 if (node->form == DW_FORM_strp
4683 || node->form == DW_FORM_line_strp
4684 || node->form == dwarf_FORM (DW_FORM_strx))
4685 {
4686 gcc_assert (node->label);
4687 return;
4688 }
4689 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4690 ++dw2_string_counter;
4691 node->label = xstrdup (label);
4692
4693 if (!dwarf_split_debug_info)
4694 {
4695 node->form = DW_FORM_strp;
4696 node->index = NOT_INDEXED;
4697 }
4698 else
4699 {
4700 node->form = dwarf_FORM (DW_FORM_strx);
4701 node->index = NO_INDEX_ASSIGNED;
4702 }
4703 }
4704
4705 /* A helper function for dwarf2out_finish, called to reset indirect
4706 string decisions done for early LTO dwarf output before fat object
4707 dwarf output. */
4708
4709 int
4710 reset_indirect_string (indirect_string_node **h, void *)
4711 {
4712 struct indirect_string_node *node = *h;
4713 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4714 {
4715 free (node->label);
4716 node->label = NULL;
4717 node->form = (dwarf_form) 0;
4718 node->index = 0;
4719 }
4720 return 1;
4721 }
4722
4723 /* Find out whether a string should be output inline in DIE
4724 or out-of-line in .debug_str section. */
4725
4726 static enum dwarf_form
4727 find_string_form (struct indirect_string_node *node)
4728 {
4729 unsigned int len;
4730
4731 if (node->form)
4732 return node->form;
4733
4734 len = strlen (node->str) + 1;
4735
4736 /* If the string is no longer than the size of the reference, it is
4737 always better to put it inline. */
4738 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4739 return node->form = DW_FORM_string;
4740
4741 /* If we cannot expect the linker to merge strings in the .debug_str
4742 section, only put the string into .debug_str if doing so pays off
4743 even within this single module. */
4744 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4745 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4746 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4747 return node->form = DW_FORM_string;
4748
4749 set_indirect_string (node);
4750
4751 return node->form;
4752 }
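/* Worked example of the heuristic above (assuming DWARF_OFFSET_SIZE == 4):
   the string "int" occupies 4 bytes including the terminating NUL, which is
   not larger than a 4-byte .debug_str offset, so it stays inline as
   DW_FORM_string.  A 20-byte string referenced from 3 DIEs costs
   20 * 3 = 60 bytes inline but only 20 + 3 * 4 = 32 bytes via .debug_str,
   so it becomes indirect (DW_FORM_strp, or DW_FORM_strx with
   -gsplit-dwarf).  */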
4753
4754 /* Find out whether the string referenced from the attribute should be
4755 output inline in DIE or out-of-line in .debug_str section. */
4756
4757 static enum dwarf_form
4758 AT_string_form (dw_attr_node *a)
4759 {
4760 gcc_assert (a && AT_class (a) == dw_val_class_str);
4761 return find_string_form (a->dw_attr_val.v.val_str);
4762 }
4763
4764 /* Add a DIE reference attribute value to a DIE. */
4765
4766 static inline void
4767 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4768 {
4769 dw_attr_node attr;
4770 gcc_checking_assert (targ_die != NULL);
4771
4772 /* With LTO we can end up trying to reference something we didn't create
4773 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4774 if (targ_die == NULL)
4775 return;
4776
4777 attr.dw_attr = attr_kind;
4778 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4779 attr.dw_attr_val.val_entry = NULL;
4780 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4781 attr.dw_attr_val.v.val_die_ref.external = 0;
4782 add_dwarf_attr (die, &attr);
4783 }
4784
4785 /* Change DIE reference REF to point to NEW_DIE instead. */
4786
4787 static inline void
4788 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4789 {
4790 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4791 ref->dw_attr_val.v.val_die_ref.die = new_die;
4792 ref->dw_attr_val.v.val_die_ref.external = 0;
4793 }
4794
4795 /* Add an AT_specification attribute to a DIE, and also make the back
4796 pointer from the specification to the definition. */
4797
4798 static inline void
4799 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4800 {
4801 add_AT_die_ref (die, DW_AT_specification, targ_die);
4802 gcc_assert (!targ_die->die_definition);
4803 targ_die->die_definition = die;
4804 }
4805
4806 static inline dw_die_ref
4807 AT_ref (dw_attr_node *a)
4808 {
4809 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4810 return a->dw_attr_val.v.val_die_ref.die;
4811 }
4812
4813 static inline int
4814 AT_ref_external (dw_attr_node *a)
4815 {
4816 if (a && AT_class (a) == dw_val_class_die_ref)
4817 return a->dw_attr_val.v.val_die_ref.external;
4818
4819 return 0;
4820 }
4821
4822 static inline void
4823 set_AT_ref_external (dw_attr_node *a, int i)
4824 {
4825 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4826 a->dw_attr_val.v.val_die_ref.external = i;
4827 }
4828
4829 /* Add an FDE reference attribute value to a DIE. */
4830
4831 static inline void
4832 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4833 {
4834 dw_attr_node attr;
4835
4836 attr.dw_attr = attr_kind;
4837 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4838 attr.dw_attr_val.val_entry = NULL;
4839 attr.dw_attr_val.v.val_fde_index = targ_fde;
4840 add_dwarf_attr (die, &attr);
4841 }
4842
4843 /* Add a location description attribute value to a DIE. */
4844
4845 static inline void
4846 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4847 {
4848 dw_attr_node attr;
4849
4850 attr.dw_attr = attr_kind;
4851 attr.dw_attr_val.val_class = dw_val_class_loc;
4852 attr.dw_attr_val.val_entry = NULL;
4853 attr.dw_attr_val.v.val_loc = loc;
4854 add_dwarf_attr (die, &attr);
4855 }
4856
4857 static inline dw_loc_descr_ref
4858 AT_loc (dw_attr_node *a)
4859 {
4860 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4861 return a->dw_attr_val.v.val_loc;
4862 }
4863
4864 static inline void
4865 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4866 {
4867 dw_attr_node attr;
4868
4869 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4870 return;
4871
4872 attr.dw_attr = attr_kind;
4873 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4874 attr.dw_attr_val.val_entry = NULL;
4875 attr.dw_attr_val.v.val_loc_list = loc_list;
4876 add_dwarf_attr (die, &attr);
4877 have_location_lists = true;
4878 }
4879
4880 static inline dw_loc_list_ref
4881 AT_loc_list (dw_attr_node *a)
4882 {
4883 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4884 return a->dw_attr_val.v.val_loc_list;
4885 }
4886
4887 /* Add a view list attribute to DIE. It must have a DW_AT_location
4888 attribute, because the view list complements the location list. */
4889
4890 static inline void
4891 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4892 {
4893 dw_attr_node attr;
4894
4895 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4896 return;
4897
4898 attr.dw_attr = attr_kind;
4899 attr.dw_attr_val.val_class = dw_val_class_view_list;
4900 attr.dw_attr_val.val_entry = NULL;
4901 attr.dw_attr_val.v.val_view_list = die;
4902 add_dwarf_attr (die, &attr);
4903 gcc_checking_assert (get_AT (die, DW_AT_location));
4904 gcc_assert (have_location_lists);
4905 }
4906
4907 /* Return a pointer to the location list referenced by the attribute.
4908 If the named attribute is a view list, look up the corresponding
4909 DW_AT_location attribute and return its location list. */
4910
4911 static inline dw_loc_list_ref *
4912 AT_loc_list_ptr (dw_attr_node *a)
4913 {
4914 gcc_assert (a);
4915 switch (AT_class (a))
4916 {
4917 case dw_val_class_loc_list:
4918 return &a->dw_attr_val.v.val_loc_list;
4919 case dw_val_class_view_list:
4920 {
4921 dw_attr_node *l;
4922 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4923 if (!l)
4924 return NULL;
4925 gcc_checking_assert (l + 1 == a);
4926 return AT_loc_list_ptr (l);
4927 }
4928 default:
4929 gcc_unreachable ();
4930 }
4931 }
4932
4933 /* Return the location attribute value associated with a view list
4934 attribute value. */
4935
4936 static inline dw_val_node *
4937 view_list_to_loc_list_val_node (dw_val_node *val)
4938 {
4939 gcc_assert (val->val_class == dw_val_class_view_list);
4940 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4941 if (!loc)
4942 return NULL;
4943 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4944 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4945 return &loc->dw_attr_val;
4946 }
4947
4948 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4949 {
4950 static hashval_t hash (addr_table_entry *);
4951 static bool equal (addr_table_entry *, addr_table_entry *);
4952 };
4953
4954 /* Table of entries into the .debug_addr section. */
4955
4956 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4957
4958 /* Hash an address_table_entry. */
4959
4960 hashval_t
4961 addr_hasher::hash (addr_table_entry *a)
4962 {
4963 inchash::hash hstate;
4964 switch (a->kind)
4965 {
4966 case ate_kind_rtx:
4967 hstate.add_int (0);
4968 break;
4969 case ate_kind_rtx_dtprel:
4970 hstate.add_int (1);
4971 break;
4972 case ate_kind_label:
4973 return htab_hash_string (a->addr.label);
4974 default:
4975 gcc_unreachable ();
4976 }
4977 inchash::add_rtx (a->addr.rtl, hstate);
4978 return hstate.end ();
4979 }
4980
4981 /* Determine equality for two address_table_entries. */
4982
4983 bool
4984 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4985 {
4986 if (a1->kind != a2->kind)
4987 return 0;
4988 switch (a1->kind)
4989 {
4990 case ate_kind_rtx:
4991 case ate_kind_rtx_dtprel:
4992 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4993 case ate_kind_label:
4994 return strcmp (a1->addr.label, a2->addr.label) == 0;
4995 default:
4996 gcc_unreachable ();
4997 }
4998 }
4999
5000 /* Initialize an addr_table_entry. */
5001
5002 void
5003 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5004 {
5005 e->kind = kind;
5006 switch (kind)
5007 {
5008 case ate_kind_rtx:
5009 case ate_kind_rtx_dtprel:
5010 e->addr.rtl = (rtx) addr;
5011 break;
5012 case ate_kind_label:
5013 e->addr.label = (char *) addr;
5014 break;
5015 }
5016 e->refcount = 0;
5017 e->index = NO_INDEX_ASSIGNED;
5018 }
5019
5020 /* Add an entry for ADDR of kind KIND to the address table, creating it
5021 if necessary. Defer setting an index until output time. */
5022
5023 static addr_table_entry *
5024 add_addr_table_entry (void *addr, enum ate_kind kind)
5025 {
5026 addr_table_entry *node;
5027 addr_table_entry finder;
5028
5029 gcc_assert (dwarf_split_debug_info);
5030 if (! addr_index_table)
5031 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5032 init_addr_table_entry (&finder, kind, addr);
5033 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5034
5035 if (*slot == HTAB_EMPTY_ENTRY)
5036 {
5037 node = ggc_cleared_alloc<addr_table_entry> ();
5038 init_addr_table_entry (node, kind, addr);
5039 *slot = node;
5040 }
5041 else
5042 node = *slot;
5043
5044 node->refcount++;
5045 return node;
5046 }
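/* Note on behavior, derived from the code above: adding the same address or
   label twice returns the same addr_table_entry with its refcount bumped to
   2.  Entries whose refcount later drops back to 0 are skipped when indices
   are assigned, so they consume no .debug_addr slot.  */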
5047
5048 /* Remove an entry from the addr table by decrementing its refcount.
5049 Strictly, decrementing the refcount would be enough, but the
5050 assertion that the entry is actually in the table has found
5051 bugs. */
5052
5053 static void
5054 remove_addr_table_entry (addr_table_entry *entry)
5055 {
5056 gcc_assert (dwarf_split_debug_info && addr_index_table);
5057 /* After an index is assigned, the table is frozen. */
5058 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5059 entry->refcount--;
5060 }
5061
5062 /* Given a location list, remove all addresses it refers to from the
5063 address_table. */
5064
5065 static void
5066 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5067 {
5068 for (; descr; descr = descr->dw_loc_next)
5069 if (descr->dw_loc_oprnd1.val_entry != NULL)
5070 {
5071 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5072 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5073 }
5074 }
5075
5076 /* A helper function for dwarf2out_finish called through
5077 htab_traverse. Assign an addr_table_entry its index. All entries
5078 must be collected into the table when this function is called,
5079 because the indexing code relies on htab_traverse to traverse nodes
5080 in the same order for each run. */
5081
5082 int
5083 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5084 {
5085 addr_table_entry *node = *h;
5086
5087 /* Don't index unreferenced nodes. */
5088 if (node->refcount == 0)
5089 return 1;
5090
5091 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5092 node->index = *index;
5093 *index += 1;
5094
5095 return 1;
5096 }
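
/* Illustrative sketch, not part of GCC: a minimal standalone model of the
   .debug_addr index table handled above. The names and values below are
   made up for exposition. Entries are reference-counted as attributes that
   use them are added or removed; at output time only entries that are still
   referenced receive indices, in table order. */
#if 0 /* Standalone model; compile separately, e.g. "cc addr_model.c". */
#include <stdio.h>

#define NO_INDEX (-1)

struct entry { const char *label; int refcount; int index; };

/* Mimic add_addr_table_entry: bump the refcount of a table slot. */
static void add_ref (struct entry *e) { e->refcount++; }

/* Mimic remove_addr_table_entry: only drop the refcount; the entry
   stays in the table. */
static void remove_ref (struct entry *e) { e->refcount--; }

/* Mimic index_addr_table_entry: hand out indices to still-referenced
   entries in table order. */
static void assign_indices (struct entry *tab, int n)
{
  int next = 0;
  for (int i = 0; i < n; i++)
    if (tab[i].refcount > 0)
      tab[i].index = next++;
}

int main (void)
{
  struct entry tab[3] = { { "sym_a", 0, NO_INDEX },
                          { "sym_b", 0, NO_INDEX },
                          { "sym_c", 0, NO_INDEX } };
  add_ref (&tab[0]);
  add_ref (&tab[1]);
  add_ref (&tab[2]);
  remove_ref (&tab[1]);      /* The attribute referencing it went away. */
  assign_indices (tab, 3);
  for (int i = 0; i < 3; i++)
    printf ("%s -> %d\n", tab[i].label, tab[i].index);
  /* Prints: sym_a -> 0, sym_b -> -1 (unreferenced), sym_c -> 1. */
  return 0;
}
#endif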
5097
5098 /* Add an address constant attribute value to a DIE. When using
5099 dwarf_split_debug_info, address attributes in dies destined for the
5100 final executable should be direct references--setting the parameter
5101 force_direct ensures this behavior. */
5102
5103 static inline void
5104 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5105 bool force_direct)
5106 {
5107 dw_attr_node attr;
5108
5109 attr.dw_attr = attr_kind;
5110 attr.dw_attr_val.val_class = dw_val_class_addr;
5111 attr.dw_attr_val.v.val_addr = addr;
5112 if (dwarf_split_debug_info && !force_direct)
5113 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5114 else
5115 attr.dw_attr_val.val_entry = NULL;
5116 add_dwarf_attr (die, &attr);
5117 }
5118
5119 /* Get the RTX from an address DIE attribute. */
5120
5121 static inline rtx
5122 AT_addr (dw_attr_node *a)
5123 {
5124 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5125 return a->dw_attr_val.v.val_addr;
5126 }
5127
5128 /* Add a file attribute value to a DIE. */
5129
5130 static inline void
5131 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5132 struct dwarf_file_data *fd)
5133 {
5134 dw_attr_node attr;
5135
5136 attr.dw_attr = attr_kind;
5137 attr.dw_attr_val.val_class = dw_val_class_file;
5138 attr.dw_attr_val.val_entry = NULL;
5139 attr.dw_attr_val.v.val_file = fd;
5140 add_dwarf_attr (die, &attr);
5141 }
5142
5143 /* Get the dwarf_file_data from a file DIE attribute. */
5144
5145 static inline struct dwarf_file_data *
5146 AT_file (dw_attr_node *a)
5147 {
5148 gcc_assert (a && (AT_class (a) == dw_val_class_file
5149 || AT_class (a) == dw_val_class_file_implicit));
5150 return a->dw_attr_val.v.val_file;
5151 }
5152
5153 /* Add a vms delta attribute value to a DIE. */
5154
5155 static inline void
5156 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5157 const char *lbl1, const char *lbl2)
5158 {
5159 dw_attr_node attr;
5160
5161 attr.dw_attr = attr_kind;
5162 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5163 attr.dw_attr_val.val_entry = NULL;
5164 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5165 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5166 add_dwarf_attr (die, &attr);
5167 }
5168
5169 /* Add a symbolic view identifier attribute value to a DIE. */
5170
5171 static inline void
5172 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5173 const char *view_label)
5174 {
5175 dw_attr_node attr;
5176
5177 attr.dw_attr = attr_kind;
5178 attr.dw_attr_val.val_class = dw_val_class_symview;
5179 attr.dw_attr_val.val_entry = NULL;
5180 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5181 add_dwarf_attr (die, &attr);
5182 }
5183
5184 /* Add a label identifier attribute value to a DIE. */
5185
5186 static inline void
5187 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5188 const char *lbl_id)
5189 {
5190 dw_attr_node attr;
5191
5192 attr.dw_attr = attr_kind;
5193 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5194 attr.dw_attr_val.val_entry = NULL;
5195 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5196 if (dwarf_split_debug_info)
5197 attr.dw_attr_val.val_entry
5198 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5199 ate_kind_label);
5200 add_dwarf_attr (die, &attr);
5201 }
5202
5203 /* Add a section offset attribute value to a DIE, an offset into the
5204 debug_line section. */
5205
5206 static inline void
5207 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5208 const char *label)
5209 {
5210 dw_attr_node attr;
5211
5212 attr.dw_attr = attr_kind;
5213 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5214 attr.dw_attr_val.val_entry = NULL;
5215 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5216 add_dwarf_attr (die, &attr);
5217 }
5218
5219 /* Add a section offset attribute value to a DIE, an offset into the
5220 debug_loclists section. */
5221
5222 static inline void
5223 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5224 const char *label)
5225 {
5226 dw_attr_node attr;
5227
5228 attr.dw_attr = attr_kind;
5229 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5230 attr.dw_attr_val.val_entry = NULL;
5231 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5232 add_dwarf_attr (die, &attr);
5233 }
5234
5235 /* Add a section offset attribute value to a DIE, an offset into the
5236 debug_macinfo section. */
5237
5238 static inline void
5239 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5240 const char *label)
5241 {
5242 dw_attr_node attr;
5243
5244 attr.dw_attr = attr_kind;
5245 attr.dw_attr_val.val_class = dw_val_class_macptr;
5246 attr.dw_attr_val.val_entry = NULL;
5247 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5248 add_dwarf_attr (die, &attr);
5249 }
5250
5251 /* Add an offset attribute value to a DIE. */
5252
5253 static inline void
5254 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5255 unsigned HOST_WIDE_INT offset)
5256 {
5257 dw_attr_node attr;
5258
5259 attr.dw_attr = attr_kind;
5260 attr.dw_attr_val.val_class = dw_val_class_offset;
5261 attr.dw_attr_val.val_entry = NULL;
5262 attr.dw_attr_val.v.val_offset = offset;
5263 add_dwarf_attr (die, &attr);
5264 }
5265
5266 /* Add a range_list attribute value to a DIE. When using
5267 dwarf_split_debug_info, address attributes in dies destined for the
5268 final executable should be direct references--setting the parameter
5269 force_direct ensures this behavior. */
5270
5271 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5272 #define RELOCATED_OFFSET (NULL)
5273
5274 static void
5275 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5276 long unsigned int offset, bool force_direct)
5277 {
5278 dw_attr_node attr;
5279
5280 attr.dw_attr = attr_kind;
5281 attr.dw_attr_val.val_class = dw_val_class_range_list;
5282 /* For the range_list attribute, use val_entry to store whether the
5283 offset should follow split-debug-info or normal semantics. This
5284 value is read in output_range_list_offset. */
5285 if (dwarf_split_debug_info && !force_direct)
5286 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5287 else
5288 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5289 attr.dw_attr_val.v.val_offset = offset;
5290 add_dwarf_attr (die, &attr);
5291 }
5292
5293 /* Return the start label of a delta attribute. */
5294
5295 static inline const char *
5296 AT_vms_delta1 (dw_attr_node *a)
5297 {
5298 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5299 return a->dw_attr_val.v.val_vms_delta.lbl1;
5300 }
5301
5302 /* Return the end label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta2 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl2;
5309 }
5310
5311 static inline const char *
5312 AT_lbl (dw_attr_node *a)
5313 {
5314 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5315 || AT_class (a) == dw_val_class_lineptr
5316 || AT_class (a) == dw_val_class_macptr
5317 || AT_class (a) == dw_val_class_loclistsptr
5318 || AT_class (a) == dw_val_class_high_pc));
5319 return a->dw_attr_val.v.val_lbl_id;
5320 }
5321
5322 /* Get the attribute of type attr_kind. */
5323
5324 static dw_attr_node *
5325 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5326 {
5327 dw_attr_node *a;
5328 unsigned ix;
5329 dw_die_ref spec = NULL;
5330
5331 if (! die)
5332 return NULL;
5333
5334 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5335 if (a->dw_attr == attr_kind)
5336 return a;
5337 else if (a->dw_attr == DW_AT_specification
5338 || a->dw_attr == DW_AT_abstract_origin)
5339 spec = AT_ref (a);
5340
5341 if (spec)
5342 return get_AT (spec, attr_kind);
5343
5344 return NULL;
5345 }
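
/* Illustrative sketch, not part of GCC: get_AT above finds an attribute
   either on the DIE itself or, failing that, on the DIE referenced through
   DW_AT_specification / DW_AT_abstract_origin. A minimal standalone model
   of that lookup order, with made-up attribute names and values: */
#if 0 /* Standalone model; compile separately. */
#include <stdio.h>
#include <string.h>

struct attr { const char *name; const char *value; };
struct die
{
  struct attr attrs[4];
  int n_attrs;
  struct die *spec;  /* Stands in for DW_AT_specification/abstract_origin. */
};

static const char *
get_attr (struct die *d, const char *name)
{
  if (!d)
    return NULL;
  for (int i = 0; i < d->n_attrs; i++)
    if (strcmp (d->attrs[i].name, name) == 0)
      return d->attrs[i].value;
  return get_attr (d->spec, name);  /* Fall back to the referenced DIE. */
}

int main (void)
{
  struct die decl = { { { "DW_AT_name", "f" }, { "DW_AT_decl_line", "42" } },
                      2, NULL };
  struct die def = { { { "DW_AT_low_pc", ".Lfoo" } }, 1, &decl };
  printf ("%s\n", get_attr (&def, "DW_AT_name"));  /* Prints: f */
  return 0;
}
#endif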
5346
5347 /* Returns the parent of the declaration of DIE. */
5348
5349 static dw_die_ref
5350 get_die_parent (dw_die_ref die)
5351 {
5352 dw_die_ref t;
5353
5354 if (!die)
5355 return NULL;
5356
5357 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5358 || (t = get_AT_ref (die, DW_AT_specification)))
5359 die = t;
5360
5361 return die->die_parent;
5362 }
5363
5364 /* Return the "low pc" attribute value, typically associated with a subprogram
5365 DIE. Return null if the "low pc" attribute is either not present, or if it
5366 cannot be represented as an assembler label identifier. */
5367
5368 static inline const char *
5369 get_AT_low_pc (dw_die_ref die)
5370 {
5371 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5372
5373 return a ? AT_lbl (a) : NULL;
5374 }
5375
5376 /* Return the "high pc" attribute value, typically associated with a subprogram
5377 DIE. Return null if the "high pc" attribute is either not present, or if it
5378 cannot be represented as an assembler label identifier. */
5379
5380 static inline const char *
5381 get_AT_hi_pc (dw_die_ref die)
5382 {
5383 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5384
5385 return a ? AT_lbl (a) : NULL;
5386 }
5387
5388 /* Return the value of the string attribute designated by ATTR_KIND, or
5389 NULL if it is not present. */
5390
5391 static inline const char *
5392 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5393 {
5394 dw_attr_node *a = get_AT (die, attr_kind);
5395
5396 return a ? AT_string (a) : NULL;
5397 }
5398
5399 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5400 if it is not present. */
5401
5402 static inline int
5403 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5404 {
5405 dw_attr_node *a = get_AT (die, attr_kind);
5406
5407 return a ? AT_flag (a) : 0;
5408 }
5409
5410 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5411 if it is not present. */
5412
5413 static inline unsigned
5414 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5415 {
5416 dw_attr_node *a = get_AT (die, attr_kind);
5417
5418 return a ? AT_unsigned (a) : 0;
5419 }
5420
5421 static inline dw_die_ref
5422 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5423 {
5424 dw_attr_node *a = get_AT (die, attr_kind);
5425
5426 return a ? AT_ref (a) : NULL;
5427 }
5428
5429 static inline struct dwarf_file_data *
5430 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5431 {
5432 dw_attr_node *a = get_AT (die, attr_kind);
5433
5434 return a ? AT_file (a) : NULL;
5435 }
5436
5437 /* Return TRUE if the language is C++. */
5438
5439 static inline bool
5440 is_cxx (void)
5441 {
5442 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5443
5444 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5445 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5446 }
5447
5448 /* Return TRUE if DECL was created by the C++ frontend. */
5449
5450 static bool
5451 is_cxx (const_tree decl)
5452 {
5453 if (in_lto_p)
5454 {
5455 const_tree context = get_ultimate_context (decl);
5456 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5457 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5458 }
5459 return is_cxx ();
5460 }
5461
5462 /* Return TRUE if the language is Fortran. */
5463
5464 static inline bool
5465 is_fortran (void)
5466 {
5467 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5468
5469 return (lang == DW_LANG_Fortran77
5470 || lang == DW_LANG_Fortran90
5471 || lang == DW_LANG_Fortran95
5472 || lang == DW_LANG_Fortran03
5473 || lang == DW_LANG_Fortran08);
5474 }
5475
5476 static inline bool
5477 is_fortran (const_tree decl)
5478 {
5479 if (in_lto_p)
5480 {
5481 const_tree context = get_ultimate_context (decl);
5482 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5483 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5484 "GNU Fortran", 11) == 0
5485 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5486 "GNU F77") == 0);
5487 }
5488 return is_fortran ();
5489 }
5490
5491 /* Return TRUE if the language is Ada. */
5492
5493 static inline bool
5494 is_ada (void)
5495 {
5496 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5497
5498 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5499 }
5500
5501 /* Remove the specified attribute if present. Return TRUE if removal
5502 was successful. */
5503
5504 static bool
5505 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5506 {
5507 dw_attr_node *a;
5508 unsigned ix;
5509
5510 if (! die)
5511 return false;
5512
5513 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5514 if (a->dw_attr == attr_kind)
5515 {
5516 if (AT_class (a) == dw_val_class_str)
5517 if (a->dw_attr_val.v.val_str->refcount)
5518 a->dw_attr_val.v.val_str->refcount--;
5519
5520 /* vec::ordered_remove should help reduce the number of abbrevs
5521 that are needed. */
5522 die->die_attr->ordered_remove (ix);
5523 return true;
5524 }
5525 return false;
5526 }
5527
5528 /* Remove CHILD from its parent. PREV must have the property that
5529 PREV->DIE_SIB == CHILD. Clears CHILD's die_sib but otherwise leaves CHILD alone. */
5530
5531 static void
5532 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5533 {
5534 gcc_assert (child->die_parent == prev->die_parent);
5535 gcc_assert (prev->die_sib == child);
5536 if (prev == child)
5537 {
5538 gcc_assert (child->die_parent->die_child == child);
5539 prev = NULL;
5540 }
5541 else
5542 prev->die_sib = child->die_sib;
5543 if (child->die_parent->die_child == child)
5544 child->die_parent->die_child = prev;
5545 child->die_sib = NULL;
5546 }
5547
5548 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5549 PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib but otherwise leaves OLD_CHILD alone. */
5550
5551 static void
5552 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5553 {
5554 dw_die_ref parent = old_child->die_parent;
5555
5556 gcc_assert (parent == prev->die_parent);
5557 gcc_assert (prev->die_sib == old_child);
5558
5559 new_child->die_parent = parent;
5560 if (prev == old_child)
5561 {
5562 gcc_assert (parent->die_child == old_child);
5563 new_child->die_sib = new_child;
5564 }
5565 else
5566 {
5567 prev->die_sib = new_child;
5568 new_child->die_sib = old_child->die_sib;
5569 }
5570 if (old_child->die_parent->die_child == old_child)
5571 old_child->die_parent->die_child = new_child;
5572 old_child->die_sib = NULL;
5573 }
5574
5575 /* Move all children from OLD_PARENT to NEW_PARENT. */
5576
5577 static void
5578 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5579 {
5580 dw_die_ref c;
5581 new_parent->die_child = old_parent->die_child;
5582 old_parent->die_child = NULL;
5583 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5584 }
5585
5586 /* Remove all child DIEs whose die_tag is TAG. Do nothing if no child
5587 matches TAG. */
5588
5589 static void
5590 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5591 {
5592 dw_die_ref c;
5593
5594 c = die->die_child;
5595 if (c) do {
5596 dw_die_ref prev = c;
5597 c = c->die_sib;
5598 while (c->die_tag == tag)
5599 {
5600 remove_child_with_prev (c, prev);
5601 c->die_parent = NULL;
5602 /* Might have removed every child. */
5603 if (die->die_child == NULL)
5604 return;
5605 c = prev->die_sib;
5606 }
5607 } while (c != die->die_child);
5608 }
5609
5610 /* Add a CHILD_DIE as the last child of DIE. */
5611
5612 static void
5613 add_child_die (dw_die_ref die, dw_die_ref child_die)
5614 {
5615 /* FIXME this should probably be an assert. */
5616 if (! die || ! child_die)
5617 return;
5618 gcc_assert (die != child_die);
5619
5620 child_die->die_parent = die;
5621 if (die->die_child)
5622 {
5623 child_die->die_sib = die->die_child->die_sib;
5624 die->die_child->die_sib = child_die;
5625 }
5626 else
5627 child_die->die_sib = child_die;
5628 die->die_child = child_die;
5629 }
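
/* Illustrative sketch, not part of GCC: the children of a DIE form a
   circular singly linked list in which die_child points to the LAST child
   and each child's die_sib points to the next one, with the last child's
   die_sib wrapping around to the first. The standalone model below (made-up
   names) shows the append done by add_child_die and a FOR_EACH_CHILD-style
   walk over the result. */
#if 0 /* Standalone model; compile separately. */
#include <stdio.h>

struct node { const char *name; struct node *sib; };
struct parent { struct node *child; };  /* Points at the LAST child. */

/* Same shape as add_child_die: append C as the last child of P. */
static void append_child (struct parent *p, struct node *c)
{
  if (p->child)
    {
      c->sib = p->child->sib;  /* New last child points at the first. */
      p->child->sib = c;
    }
  else
    c->sib = c;                /* A sole child points at itself. */
  p->child = c;
}

int main (void)
{
  struct parent p = { NULL };
  struct node a = { "a", NULL }, b = { "b", NULL }, c = { "c", NULL };
  append_child (&p, &a);
  append_child (&p, &b);
  append_child (&p, &c);
  /* FOR_EACH_CHILD-style walk: start at child->sib (the first child) and
     stop once the last child has been visited. */
  struct node *n = p.child;
  do
    {
      n = n->sib;
      printf ("%s ", n->name);  /* Prints: a b c */
    }
  while (n != p.child);
  printf ("\n");
  return 0;
}
#endif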
5630
5631 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5632
5633 static void
5634 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5635 dw_die_ref after_die)
5636 {
5637 gcc_assert (die
5638 && child_die
5639 && after_die
5640 && die->die_child
5641 && die != child_die);
5642
5643 child_die->die_parent = die;
5644 child_die->die_sib = after_die->die_sib;
5645 after_die->die_sib = child_die;
5646 if (die->die_child == after_die)
5647 die->die_child = child_die;
5648 }
5649
5650 /* Unassociate CHILD from its parent, and make its parent be
5651 NEW_PARENT. */
5652
5653 static void
5654 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5655 {
5656 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5657 if (p->die_sib == child)
5658 {
5659 remove_child_with_prev (child, p);
5660 break;
5661 }
5662 add_child_die (new_parent, child);
5663 }
5664
5665 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5666 is the specification, to the end of PARENT's list of children.
5667 This is done by removing and re-adding it. */
5668
5669 static void
5670 splice_child_die (dw_die_ref parent, dw_die_ref child)
5671 {
5672 /* We want the declaration DIE from inside the class, not the
5673 specification DIE at toplevel. */
5674 if (child->die_parent != parent)
5675 {
5676 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5677
5678 if (tmp)
5679 child = tmp;
5680 }
5681
5682 gcc_assert (child->die_parent == parent
5683 || (child->die_parent
5684 == get_AT_ref (parent, DW_AT_specification)));
5685
5686 reparent_child (child, parent);
5687 }
5688
5689 /* Create and return a new die with TAG_VALUE as tag. */
5690
5691 static inline dw_die_ref
5692 new_die_raw (enum dwarf_tag tag_value)
5693 {
5694 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5695 die->die_tag = tag_value;
5696 return die;
5697 }
5698
5699 /* Create and return a new die with a parent of PARENT_DIE. If
5700 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5701 associated tree T must be supplied to determine parenthood
5702 later. */
5703
5704 static inline dw_die_ref
5705 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5706 {
5707 dw_die_ref die = new_die_raw (tag_value);
5708
5709 if (parent_die != NULL)
5710 add_child_die (parent_die, die);
5711 else
5712 {
5713 limbo_die_node *limbo_node;
5714
5715 /* No DIEs created after early dwarf should end up in limbo,
5716 because the limbo list should not persist past LTO
5717 streaming. */
5718 if (tag_value != DW_TAG_compile_unit
5719 /* These are allowed because they're generated while
5720 breaking out COMDAT units late. */
5721 && tag_value != DW_TAG_type_unit
5722 && tag_value != DW_TAG_skeleton_unit
5723 && !early_dwarf
5724 /* Allow nested functions to live in limbo because they will
5725 only temporarily live there, as decls_for_scope will fix
5726 them up. */
5727 && (TREE_CODE (t) != FUNCTION_DECL
5728 || !decl_function_context (t))
5729 /* Same as nested functions above but for types. Types that
5730 are local to a function will be fixed in
5731 decls_for_scope. */
5732 && (!RECORD_OR_UNION_TYPE_P (t)
5733 || !TYPE_CONTEXT (t)
5734 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5735 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5736 especially in the ltrans stage, but once we implement LTO
5737 dwarf streaming, we should remove this exception. */
5738 && !in_lto_p)
5739 {
5740 fprintf (stderr, "symbol ended up in limbo too late:");
5741 debug_generic_stmt (t);
5742 gcc_unreachable ();
5743 }
5744
5745 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5746 limbo_node->die = die;
5747 limbo_node->created_for = t;
5748 limbo_node->next = limbo_die_list;
5749 limbo_die_list = limbo_node;
5750 }
5751
5752 return die;
5753 }
5754
5755 /* Return the DIE associated with the given type specifier. */
5756
5757 static inline dw_die_ref
5758 lookup_type_die (tree type)
5759 {
5760 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5761 if (die && die->removed)
5762 {
5763 TYPE_SYMTAB_DIE (type) = NULL;
5764 return NULL;
5765 }
5766 return die;
5767 }
5768
5769 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5770 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5771 anonymous type instead of the one of the naming typedef. */
5772
5773 static inline dw_die_ref
5774 strip_naming_typedef (tree type, dw_die_ref type_die)
5775 {
5776 if (type
5777 && TREE_CODE (type) == RECORD_TYPE
5778 && type_die
5779 && type_die->die_tag == DW_TAG_typedef
5780 && is_naming_typedef_decl (TYPE_NAME (type)))
5781 type_die = get_AT_ref (type_die, DW_AT_type);
5782 return type_die;
5783 }
5784
5785 /* Like lookup_type_die, but if type is an anonymous type named by a
5786 typedef[1], return the DIE of the anonymous type instead of the one of
5787 the naming typedef. This is because in gen_typedef_die, we
5788 equated the anonymous struct named by the typedef with the DIE of
5789 the naming typedef. So by default, lookup_type_die on an anonymous
5790 struct yields the DIE of the naming typedef.
5791
5792 [1]: Read the comment of is_naming_typedef_decl to learn about what
5793 a naming typedef is. */
5794
5795 static inline dw_die_ref
5796 lookup_type_die_strip_naming_typedef (tree type)
5797 {
5798 dw_die_ref die = lookup_type_die (type);
5799 return strip_naming_typedef (type, die);
5800 }
5801
5802 /* Equate a DIE to a given type specifier. */
5803
5804 static inline void
5805 equate_type_number_to_die (tree type, dw_die_ref type_die)
5806 {
5807 TYPE_SYMTAB_DIE (type) = type_die;
5808 }
5809
5810 /* Returns a hash value for X (which really is a die_struct). */
5811
5812 inline hashval_t
5813 decl_die_hasher::hash (die_node *x)
5814 {
5815 return (hashval_t) x->decl_id;
5816 }
5817
5818 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5819
5820 inline bool
5821 decl_die_hasher::equal (die_node *x, tree y)
5822 {
5823 return (x->decl_id == DECL_UID (y));
5824 }
5825
5826 /* Return the DIE associated with a given declaration. */
5827
5828 static inline dw_die_ref
5829 lookup_decl_die (tree decl)
5830 {
5831 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5832 NO_INSERT);
5833 if (!die)
5834 return NULL;
5835 if ((*die)->removed)
5836 {
5837 decl_die_table->clear_slot (die);
5838 return NULL;
5839 }
5840 return *die;
5841 }
5842
5843
5844 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5845 style reference. Return true if we found one referring to a DIE for
5846 DECL, otherwise return false. */
5847
5848 static bool
5849 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5850 unsigned HOST_WIDE_INT *off)
5851 {
5852 dw_die_ref die;
5853
5854 if ((flag_wpa || flag_incremental_link == INCREMENTAL_LINK_LTO)
5855 && !decl_die_table)
5856 return false;
5857
5858 if (TREE_CODE (decl) == BLOCK)
5859 die = BLOCK_DIE (decl);
5860 else
5861 die = lookup_decl_die (decl);
5862 if (!die)
5863 return false;
5864
5865 /* During WPA stage and incremental linking we currently use DIEs
5866 to store the decl <-> label + offset map. That's quite inefficient
5867 but it works for now. */
5868 if (flag_wpa
5869 || flag_incremental_link == INCREMENTAL_LINK_LTO)
5870 {
5871 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5872 if (!ref)
5873 {
5874 gcc_assert (die == comp_unit_die ());
5875 return false;
5876 }
5877 *off = ref->die_offset;
5878 *sym = ref->die_id.die_symbol;
5879 return true;
5880 }
5881
5882 /* Similar to get_ref_die_offset_label, but using the "correct"
5883 label. */
5884 *off = die->die_offset;
5885 while (die->die_parent)
5886 die = die->die_parent;
5887 /* For the containing CU DIE we compute a die_symbol in
5888 compute_comp_unit_symbol. */
5889 gcc_assert (die->die_tag == DW_TAG_compile_unit
5890 && die->die_id.die_symbol != NULL);
5891 *sym = die->die_id.die_symbol;
5892 return true;
5893 }
5894
5895 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5896
5897 static void
5898 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5899 const char *symbol, HOST_WIDE_INT offset)
5900 {
5901 /* Create a fake DIE that contains the reference. Don't use
5902 new_die because we don't want to end up in the limbo list. */
5903 dw_die_ref ref = new_die_raw (die->die_tag);
5904 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5905 ref->die_offset = offset;
5906 ref->with_offset = 1;
5907 add_AT_die_ref (die, attr_kind, ref);
5908 }
5909
5910 /* Create a DIE for DECL if required and add a reference to a DIE
5911 at SYMBOL + OFFSET which contains attributes dumped early. */
5912
5913 static void
5914 dwarf2out_register_external_die (tree decl, const char *sym,
5915 unsigned HOST_WIDE_INT off)
5916 {
5917 if (debug_info_level == DINFO_LEVEL_NONE)
5918 return;
5919
5920 if ((flag_wpa
5921 || flag_incremental_link == INCREMENTAL_LINK_LTO) && !decl_die_table)
5922 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5923
5924 dw_die_ref die
5925 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5926 gcc_assert (!die);
5927
5928 tree ctx;
5929 dw_die_ref parent = NULL;
5930 /* Need to look up a DIE for the decl's context - the containing
5931 function or translation unit. */
5932 if (TREE_CODE (decl) == BLOCK)
5933 {
5934 ctx = BLOCK_SUPERCONTEXT (decl);
5935 /* ??? We do not output DIEs for all scopes thus skip as
5936 many DIEs as needed. */
5937 while (TREE_CODE (ctx) == BLOCK
5938 && !BLOCK_DIE (ctx))
5939 ctx = BLOCK_SUPERCONTEXT (ctx);
5940 }
5941 else
5942 ctx = DECL_CONTEXT (decl);
5943 /* Peel types in the context stack. */
5944 while (ctx && TYPE_P (ctx))
5945 ctx = TYPE_CONTEXT (ctx);
5946 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5947 if (debug_info_level <= DINFO_LEVEL_TERSE)
5948 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5949 ctx = DECL_CONTEXT (ctx);
5950 if (ctx)
5951 {
5952 if (TREE_CODE (ctx) == BLOCK)
5953 parent = BLOCK_DIE (ctx);
5954 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5955 /* Keep the 1:1 association during WPA. */
5956 && !flag_wpa
5957 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5958 /* Otherwise all late annotations go to the main CU which
5959 imports the original CUs. */
5960 parent = comp_unit_die ();
5961 else if (TREE_CODE (ctx) == FUNCTION_DECL
5962 && TREE_CODE (decl) != PARM_DECL
5963 && TREE_CODE (decl) != BLOCK)
5964 /* Leave function local entities parent determination to when
5965 we process scope vars. */
5966 ;
5967 else
5968 parent = lookup_decl_die (ctx);
5969 }
5970 else
5971 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5972 Handle this case gracefully by globalizing stuff. */
5973 parent = comp_unit_die ();
5974 /* Create a DIE "stub". */
5975 switch (TREE_CODE (decl))
5976 {
5977 case TRANSLATION_UNIT_DECL:
5978 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
5979 {
5980 die = comp_unit_die ();
5981 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5982 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5983 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5984 to create a DIE for the original CUs. */
5985 return;
5986 }
5987 /* Keep the 1:1 association during WPA. */
5988 die = new_die (DW_TAG_compile_unit, NULL, decl);
5989 break;
5990 case NAMESPACE_DECL:
5991 if (is_fortran (decl))
5992 die = new_die (DW_TAG_module, parent, decl);
5993 else
5994 die = new_die (DW_TAG_namespace, parent, decl);
5995 break;
5996 case FUNCTION_DECL:
5997 die = new_die (DW_TAG_subprogram, parent, decl);
5998 break;
5999 case VAR_DECL:
6000 die = new_die (DW_TAG_variable, parent, decl);
6001 break;
6002 case RESULT_DECL:
6003 die = new_die (DW_TAG_variable, parent, decl);
6004 break;
6005 case PARM_DECL:
6006 die = new_die (DW_TAG_formal_parameter, parent, decl);
6007 break;
6008 case CONST_DECL:
6009 die = new_die (DW_TAG_constant, parent, decl);
6010 break;
6011 case LABEL_DECL:
6012 die = new_die (DW_TAG_label, parent, decl);
6013 break;
6014 case BLOCK:
6015 die = new_die (DW_TAG_lexical_block, parent, decl);
6016 break;
6017 default:
6018 gcc_unreachable ();
6019 }
6020 if (TREE_CODE (decl) == BLOCK)
6021 BLOCK_DIE (decl) = die;
6022 else
6023 equate_decl_number_to_die (decl, die);
6024
6025 /* Add a reference to the DIE providing early debug at $sym + off. */
6026 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6027 }
6028
6029 /* Returns a hash value for X (which really is a var_loc_list). */
6030
6031 inline hashval_t
6032 decl_loc_hasher::hash (var_loc_list *x)
6033 {
6034 return (hashval_t) x->decl_id;
6035 }
6036
6037 /* Return nonzero if decl_id of var_loc_list X is the same as
6038 UID of decl *Y. */
6039
6040 inline bool
6041 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6042 {
6043 return (x->decl_id == DECL_UID (y));
6044 }
6045
6046 /* Return the var_loc list associated with a given declaration. */
6047
6048 static inline var_loc_list *
6049 lookup_decl_loc (const_tree decl)
6050 {
6051 if (!decl_loc_table)
6052 return NULL;
6053 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6054 }
6055
6056 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6057
6058 inline hashval_t
6059 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6060 {
6061 return (hashval_t) x->decl_id;
6062 }
6063
6064 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6065 UID of decl *Y. */
6066
6067 inline bool
6068 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6069 {
6070 return (x->decl_id == DECL_UID (y));
6071 }
6072
6073 /* Equate a DIE to a particular declaration. */
6074
6075 static void
6076 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6077 {
6078 unsigned int decl_id = DECL_UID (decl);
6079
6080 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6081 decl_die->decl_id = decl_id;
6082 }
6083
6084 /* Return how many bits PIECE EXPR_LIST covers. */
6085
6086 static HOST_WIDE_INT
6087 decl_piece_bitsize (rtx piece)
6088 {
6089 int ret = (int) GET_MODE (piece);
6090 if (ret)
6091 return ret;
6092 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6093 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6094 return INTVAL (XEXP (XEXP (piece, 0), 0));
6095 }
6096
6097 /* Return a pointer to the slot holding the location note in PIECE EXPR_LIST. */
6098
6099 static rtx *
6100 decl_piece_varloc_ptr (rtx piece)
6101 {
6102 if ((int) GET_MODE (piece))
6103 return &XEXP (piece, 0);
6104 else
6105 return &XEXP (XEXP (piece, 0), 1);
6106 }
6107
6108 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6109 NEXT is the chain of following piece nodes. */
6110
6111 static rtx_expr_list *
6112 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6113 {
6114 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6115 return alloc_EXPR_LIST (bitsize, loc_note, next);
6116 else
6117 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6118 GEN_INT (bitsize),
6119 loc_note), next);
6120 }
6121
6122 /* Return rtx that should be stored into loc field for
6123 LOC_NOTE and BITPOS/BITSIZE. */
6124
6125 static rtx
6126 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6127 HOST_WIDE_INT bitsize)
6128 {
6129 if (bitsize != -1)
6130 {
6131 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6132 if (bitpos != 0)
6133 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6134 }
6135 return loc_note;
6136 }
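
/* Illustrative sketch, not part of GCC: construct_piece_list above describes
   part of a variable as a chain of (bitsize, location) pieces in which an
   empty location stands for padding. The standalone model below (made-up
   names, plain structs instead of the EXPR_LIST RTL used above) shows the
   chain built for a 16-bit piece that starts 8 bits into the variable. */
#if 0 /* Simplified standalone model; compile separately. */
#include <stdio.h>
#include <stdlib.h>

struct piece { long bitsize; const char *loc; struct piece *next; };

static struct piece *
make_piece (const char *loc, long bitsize, struct piece *next)
{
  struct piece *p = (struct piece *) malloc (sizeof *p);
  p->bitsize = bitsize;
  p->loc = loc;        /* NULL stands for padding (no known location). */
  p->next = next;
  return p;
}

/* Mirror the shape of construct_piece_list: wrap LOC_NOTE in a piece of
   BITSIZE bits and prepend padding when BITPOS is non-zero. */
static struct piece *
build (const char *loc_note, long bitpos, long bitsize)
{
  struct piece *p = make_piece (loc_note, bitsize, NULL);
  if (bitpos != 0)
    p = make_piece (NULL, bitpos, p);
  return p;
}

int main (void)
{
  /* A 16-bit piece of the variable lives in "reg3", 8 bits in. */
  for (struct piece *p = build ("reg3", 8, 16); p; p = p->next)
    printf ("%ld bits: %s\n", p->bitsize, p->loc ? p->loc : "<padding>");
  /* Prints:  8 bits: <padding>
             16 bits: reg3      */
  return 0;
}
#endif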
6137
6138 /* This function either modifies location piece list *DEST in
6139 place (if SRC and INNER are NULL), or copies location piece list
6140 *SRC to *DEST while modifying it. The piece at BITPOS is changed
6141 to contain LOC_NOTE; any pieces overlapping it are removed (or,
6142 when copying, not copied) and if needed some padding around it is added.
6143 When modifying in place, DEST should point to EXPR_LIST where
6144 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6145 to the start of the whole list and INNER points to the EXPR_LIST
6146 where earlier pieces cover PIECE_BITPOS bits. */
6147
6148 static void
6149 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6150 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6151 HOST_WIDE_INT bitsize, rtx loc_note)
6152 {
6153 HOST_WIDE_INT diff;
6154 bool copy = inner != NULL;
6155
6156 if (copy)
6157 {
6158 /* First copy all nodes preceding the current bitpos. */
6159 while (src != inner)
6160 {
6161 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6162 decl_piece_bitsize (*src), NULL_RTX);
6163 dest = &XEXP (*dest, 1);
6164 src = &XEXP (*src, 1);
6165 }
6166 }
6167 /* Add padding if needed. */
6168 if (bitpos != piece_bitpos)
6169 {
6170 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6171 copy ? NULL_RTX : *dest);
6172 dest = &XEXP (*dest, 1);
6173 }
6174 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6175 {
6176 gcc_assert (!copy);
6177 /* A piece with correct bitpos and bitsize already exists;
6178 just update the location for it and return. */
6179 *decl_piece_varloc_ptr (*dest) = loc_note;
6180 return;
6181 }
6182 /* Add the piece that changed. */
6183 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6184 dest = &XEXP (*dest, 1);
6185 /* Skip over pieces that overlap it. */
6186 diff = bitpos - piece_bitpos + bitsize;
6187 if (!copy)
6188 src = dest;
6189 while (diff > 0 && *src)
6190 {
6191 rtx piece = *src;
6192 diff -= decl_piece_bitsize (piece);
6193 if (copy)
6194 src = &XEXP (piece, 1);
6195 else
6196 {
6197 *src = XEXP (piece, 1);
6198 free_EXPR_LIST_node (piece);
6199 }
6200 }
6201 /* Add padding if needed. */
6202 if (diff < 0 && *src)
6203 {
6204 if (!copy)
6205 dest = src;
6206 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 }
6209 if (!copy)
6210 return;
6211 /* Finally copy all nodes following it. */
6212 while (*src)
6213 {
6214 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6215 decl_piece_bitsize (*src), NULL_RTX);
6216 dest = &XEXP (*dest, 1);
6217 src = &XEXP (*src, 1);
6218 }
6219 }
6220
6221 /* Add a variable location node to the linked list for DECL. */
6222
6223 static struct var_loc_node *
6224 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6225 {
6226 unsigned int decl_id;
6227 var_loc_list *temp;
6228 struct var_loc_node *loc = NULL;
6229 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6230
6231 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6232 {
6233 tree realdecl = DECL_DEBUG_EXPR (decl);
6234 if (handled_component_p (realdecl)
6235 || (TREE_CODE (realdecl) == MEM_REF
6236 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6237 {
6238 bool reverse;
6239 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6240 &bitsize, &reverse);
6241 if (!innerdecl
6242 || !DECL_P (innerdecl)
6243 || DECL_IGNORED_P (innerdecl)
6244 || TREE_STATIC (innerdecl)
6245 || bitsize == 0
6246 || bitpos + bitsize > 256)
6247 return NULL;
6248 decl = innerdecl;
6249 }
6250 }
6251
6252 decl_id = DECL_UID (decl);
6253 var_loc_list **slot
6254 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6255 if (*slot == NULL)
6256 {
6257 temp = ggc_cleared_alloc<var_loc_list> ();
6258 temp->decl_id = decl_id;
6259 *slot = temp;
6260 }
6261 else
6262 temp = *slot;
6263
6264 /* For PARM_DECLs try to keep around the original incoming value,
6265 even if that means we'll emit a zero-range .debug_loc entry. */
6266 if (temp->last
6267 && temp->first == temp->last
6268 && TREE_CODE (decl) == PARM_DECL
6269 && NOTE_P (temp->first->loc)
6270 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6271 && DECL_INCOMING_RTL (decl)
6272 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6273 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6274 == GET_CODE (DECL_INCOMING_RTL (decl))
6275 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6276 && (bitsize != -1
6277 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6278 NOTE_VAR_LOCATION_LOC (loc_note))
6279 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6280 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6281 {
6282 loc = ggc_cleared_alloc<var_loc_node> ();
6283 temp->first->next = loc;
6284 temp->last = loc;
6285 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6286 }
6287 else if (temp->last)
6288 {
6289 struct var_loc_node *last = temp->last, *unused = NULL;
6290 rtx *piece_loc = NULL, last_loc_note;
6291 HOST_WIDE_INT piece_bitpos = 0;
6292 if (last->next)
6293 {
6294 last = last->next;
6295 gcc_assert (last->next == NULL);
6296 }
6297 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6298 {
6299 piece_loc = &last->loc;
6300 do
6301 {
6302 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6303 if (piece_bitpos + cur_bitsize > bitpos)
6304 break;
6305 piece_bitpos += cur_bitsize;
6306 piece_loc = &XEXP (*piece_loc, 1);
6307 }
6308 while (*piece_loc);
6309 }
6310 /* TEMP->LAST here is a pointer either to the last-but-one or to the
6311 last element in the chained list; LAST is a pointer to the
6312 last element. */
6313 if (label && strcmp (last->label, label) == 0 && last->view == view)
6314 {
6315 /* For SRA optimized variables if there weren't any real
6316 insns since last note, just modify the last node. */
6317 if (piece_loc != NULL)
6318 {
6319 adjust_piece_list (piece_loc, NULL, NULL,
6320 bitpos, piece_bitpos, bitsize, loc_note);
6321 return NULL;
6322 }
6323 /* If the last note doesn't cover any instructions, remove it. */
6324 if (temp->last != last)
6325 {
6326 temp->last->next = NULL;
6327 unused = last;
6328 last = temp->last;
6329 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6330 }
6331 else
6332 {
6333 gcc_assert (temp->first == temp->last
6334 || (temp->first->next == temp->last
6335 && TREE_CODE (decl) == PARM_DECL));
6336 memset (temp->last, '\0', sizeof (*temp->last));
6337 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6338 return temp->last;
6339 }
6340 }
6341 if (bitsize == -1 && NOTE_P (last->loc))
6342 last_loc_note = last->loc;
6343 else if (piece_loc != NULL
6344 && *piece_loc != NULL_RTX
6345 && piece_bitpos == bitpos
6346 && decl_piece_bitsize (*piece_loc) == bitsize)
6347 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6348 else
6349 last_loc_note = NULL_RTX;
6350 /* If the current location is the same as the end of the list,
6351 and either both or neither of the locations is uninitialized,
6352 we have nothing to do. */
6353 if (last_loc_note == NULL_RTX
6354 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6355 NOTE_VAR_LOCATION_LOC (loc_note)))
6356 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6357 != NOTE_VAR_LOCATION_STATUS (loc_note))
6358 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6359 == VAR_INIT_STATUS_UNINITIALIZED)
6360 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6361 == VAR_INIT_STATUS_UNINITIALIZED))))
6362 {
6363 /* Add LOC to the end of list and update LAST. If the last
6364 element of the list has been removed above, reuse its
6365 memory for the new node, otherwise allocate a new one. */
6366 if (unused)
6367 {
6368 loc = unused;
6369 memset (loc, '\0', sizeof (*loc));
6370 }
6371 else
6372 loc = ggc_cleared_alloc<var_loc_node> ();
6373 if (bitsize == -1 || piece_loc == NULL)
6374 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6375 else
6376 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6377 bitpos, piece_bitpos, bitsize, loc_note);
6378 last->next = loc;
6379 /* Ensure TEMP->LAST will point either to the new last but one
6380 element of the chain, or to the last element in it. */
6381 if (last != temp->last)
6382 temp->last = last;
6383 }
6384 else if (unused)
6385 ggc_free (unused);
6386 }
6387 else
6388 {
6389 loc = ggc_cleared_alloc<var_loc_node> ();
6390 temp->first = loc;
6391 temp->last = loc;
6392 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6393 }
6394 return loc;
6395 }
6396 \f
6397 /* Keep track of the number of spaces used to indent the
6398 output of the debugging routines that print the structure of
6399 the DIE internal representation. */
6400 static int print_indent;
6401
6402 /* Indent the line the number of spaces given by print_indent. */
6403
6404 static inline void
6405 print_spaces (FILE *outfile)
6406 {
6407 fprintf (outfile, "%*s", print_indent, "");
6408 }
6409
6410 /* Print a type signature in hex. */
6411
6412 static inline void
6413 print_signature (FILE *outfile, char *sig)
6414 {
6415 int i;
6416
6417 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6418 fprintf (outfile, "%02x", sig[i] & 0xff);
6419 }
6420
6421 static inline void
6422 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6423 {
6424 if (discr_value->pos)
6425 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6426 else
6427 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6428 }
6429
6430 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6431
6432 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6433 RECURSE, output location descriptor operations. */
6434
6435 static void
6436 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6437 {
6438 switch (val->val_class)
6439 {
6440 case dw_val_class_addr:
6441 fprintf (outfile, "address");
6442 break;
6443 case dw_val_class_offset:
6444 fprintf (outfile, "offset");
6445 break;
6446 case dw_val_class_loc:
6447 fprintf (outfile, "location descriptor");
6448 if (val->v.val_loc == NULL)
6449 fprintf (outfile, " -> <null>\n");
6450 else if (recurse)
6451 {
6452 fprintf (outfile, ":\n");
6453 print_indent += 4;
6454 print_loc_descr (val->v.val_loc, outfile);
6455 print_indent -= 4;
6456 }
6457 else
6458 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6459 break;
6460 case dw_val_class_loc_list:
6461 fprintf (outfile, "location list -> label:%s",
6462 val->v.val_loc_list->ll_symbol);
6463 break;
6464 case dw_val_class_view_list:
6465 val = view_list_to_loc_list_val_node (val);
6466 fprintf (outfile, "location list with views -> labels:%s and %s",
6467 val->v.val_loc_list->ll_symbol,
6468 val->v.val_loc_list->vl_symbol);
6469 break;
6470 case dw_val_class_range_list:
6471 fprintf (outfile, "range list");
6472 break;
6473 case dw_val_class_const:
6474 case dw_val_class_const_implicit:
6475 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6476 break;
6477 case dw_val_class_unsigned_const:
6478 case dw_val_class_unsigned_const_implicit:
6479 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6480 break;
6481 case dw_val_class_const_double:
6482 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6483 HOST_WIDE_INT_PRINT_UNSIGNED")",
6484 val->v.val_double.high,
6485 val->v.val_double.low);
6486 break;
6487 case dw_val_class_wide_int:
6488 {
6489 int i = val->v.val_wide->get_len ();
6490 fprintf (outfile, "constant (");
6491 gcc_assert (i > 0);
6492 if (val->v.val_wide->elt (i - 1) == 0)
6493 fprintf (outfile, "0x");
6494 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6495 val->v.val_wide->elt (--i));
6496 while (--i >= 0)
6497 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6498 val->v.val_wide->elt (i));
6499 fprintf (outfile, ")");
6500 break;
6501 }
6502 case dw_val_class_vec:
6503 fprintf (outfile, "floating-point or vector constant");
6504 break;
6505 case dw_val_class_flag:
6506 fprintf (outfile, "%u", val->v.val_flag);
6507 break;
6508 case dw_val_class_die_ref:
6509 if (val->v.val_die_ref.die != NULL)
6510 {
6511 dw_die_ref die = val->v.val_die_ref.die;
6512
6513 if (die->comdat_type_p)
6514 {
6515 fprintf (outfile, "die -> signature: ");
6516 print_signature (outfile,
6517 die->die_id.die_type_node->signature);
6518 }
6519 else if (die->die_id.die_symbol)
6520 {
6521 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6522 if (die->with_offset)
6523 fprintf (outfile, " + %ld", die->die_offset);
6524 }
6525 else
6526 fprintf (outfile, "die -> %ld", die->die_offset);
6527 fprintf (outfile, " (%p)", (void *) die);
6528 }
6529 else
6530 fprintf (outfile, "die -> <null>");
6531 break;
6532 case dw_val_class_vms_delta:
6533 fprintf (outfile, "delta: @slotcount(%s-%s)",
6534 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6535 break;
6536 case dw_val_class_symview:
6537 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6538 break;
6539 case dw_val_class_lbl_id:
6540 case dw_val_class_lineptr:
6541 case dw_val_class_macptr:
6542 case dw_val_class_loclistsptr:
6543 case dw_val_class_high_pc:
6544 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6545 break;
6546 case dw_val_class_str:
6547 if (val->v.val_str->str != NULL)
6548 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6549 else
6550 fprintf (outfile, "<null>");
6551 break;
6552 case dw_val_class_file:
6553 case dw_val_class_file_implicit:
6554 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6555 val->v.val_file->emitted_number);
6556 break;
6557 case dw_val_class_data8:
6558 {
6559 int i;
6560
6561 for (i = 0; i < 8; i++)
6562 fprintf (outfile, "%02x", val->v.val_data8[i]);
6563 break;
6564 }
6565 case dw_val_class_discr_value:
6566 print_discr_value (outfile, &val->v.val_discr_value);
6567 break;
6568 case dw_val_class_discr_list:
6569 for (dw_discr_list_ref node = val->v.val_discr_list;
6570 node != NULL;
6571 node = node->dw_discr_next)
6572 {
6573 if (node->dw_discr_range)
6574 {
6575 fprintf (outfile, " .. ");
6576 print_discr_value (outfile, &node->dw_discr_lower_bound);
6577 print_discr_value (outfile, &node->dw_discr_upper_bound);
6578 }
6579 else
6580 print_discr_value (outfile, &node->dw_discr_lower_bound);
6581
6582 if (node->dw_discr_next != NULL)
6583 fprintf (outfile, " | ");
6584 }
6585 default:
6586 break;
6587 }
6588 }
6589
6590 /* Likewise, for a DIE attribute. */
6591
6592 static void
6593 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6594 {
6595 print_dw_val (&a->dw_attr_val, recurse, outfile);
6596 }
6597
6598
6599 /* Print the list of operations in the LOC location description to OUTFILE. This
6600 routine is a debugging aid only. */
6601
6602 static void
6603 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6604 {
6605 dw_loc_descr_ref l = loc;
6606
6607 if (loc == NULL)
6608 {
6609 print_spaces (outfile);
6610 fprintf (outfile, "<null>\n");
6611 return;
6612 }
6613
6614 for (l = loc; l != NULL; l = l->dw_loc_next)
6615 {
6616 print_spaces (outfile);
6617 fprintf (outfile, "(%p) %s",
6618 (void *) l,
6619 dwarf_stack_op_name (l->dw_loc_opc));
6620 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6621 {
6622 fprintf (outfile, " ");
6623 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6624 }
6625 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6626 {
6627 fprintf (outfile, ", ");
6628 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6629 }
6630 fprintf (outfile, "\n");
6631 }
6632 }
6633
6634 /* Print the information associated with a given DIE, and its children.
6635 This routine is a debugging aid only. */
6636
6637 static void
6638 print_die (dw_die_ref die, FILE *outfile)
6639 {
6640 dw_attr_node *a;
6641 dw_die_ref c;
6642 unsigned ix;
6643
6644 print_spaces (outfile);
6645 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6646 die->die_offset, dwarf_tag_name (die->die_tag),
6647 (void*) die);
6648 print_spaces (outfile);
6649 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6650 fprintf (outfile, " offset: %ld", die->die_offset);
6651 fprintf (outfile, " mark: %d\n", die->die_mark);
6652
6653 if (die->comdat_type_p)
6654 {
6655 print_spaces (outfile);
6656 fprintf (outfile, " signature: ");
6657 print_signature (outfile, die->die_id.die_type_node->signature);
6658 fprintf (outfile, "\n");
6659 }
6660
6661 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6662 {
6663 print_spaces (outfile);
6664 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6665
6666 print_attribute (a, true, outfile);
6667 fprintf (outfile, "\n");
6668 }
6669
6670 if (die->die_child != NULL)
6671 {
6672 print_indent += 4;
6673 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6674 print_indent -= 4;
6675 }
6676 if (print_indent == 0)
6677 fprintf (outfile, "\n");
6678 }
6679
6680 /* Print the list of operations in the LOC location description. */
6681
6682 DEBUG_FUNCTION void
6683 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6684 {
6685 print_loc_descr (loc, stderr);
6686 }
6687
6688 /* Print the information collected for a given DIE. */
6689
6690 DEBUG_FUNCTION void
6691 debug_dwarf_die (dw_die_ref die)
6692 {
6693 print_die (die, stderr);
6694 }
6695
6696 DEBUG_FUNCTION void
6697 debug (die_struct &ref)
6698 {
6699 print_die (&ref, stderr);
6700 }
6701
6702 DEBUG_FUNCTION void
6703 debug (die_struct *ptr)
6704 {
6705 if (ptr)
6706 debug (*ptr);
6707 else
6708 fprintf (stderr, "<nil>\n");
6709 }
6710
6711
6712 /* Print all DWARF information collected for the compilation unit.
6713 This routine is a debugging aid only. */
6714
6715 DEBUG_FUNCTION void
6716 debug_dwarf (void)
6717 {
6718 print_indent = 0;
6719 print_die (comp_unit_die (), stderr);
6720 }
6721
6722 /* Verify the DIE tree structure. */
6723
6724 DEBUG_FUNCTION void
6725 verify_die (dw_die_ref die)
6726 {
6727 gcc_assert (!die->die_mark);
6728 if (die->die_parent == NULL
6729 && die->die_sib == NULL)
6730 return;
6731 /* Verify the die_sib list is cyclic. */
6732 dw_die_ref x = die;
6733 do
6734 {
6735 x->die_mark = 1;
6736 x = x->die_sib;
6737 }
6738 while (x && !x->die_mark);
6739 gcc_assert (x == die);
6740 x = die;
6741 do
6742 {
6743 /* Verify all dies have the same parent. */
6744 gcc_assert (x->die_parent == die->die_parent);
6745 if (x->die_child)
6746 {
6747 /* Verify the child has the proper parent and recurse. */
6748 gcc_assert (x->die_child->die_parent == x);
6749 verify_die (x->die_child);
6750 }
6751 x->die_mark = 0;
6752 x = x->die_sib;
6753 }
6754 while (x && x->die_mark);
6755 }
6756
6757 /* Sanity checks on DIEs. */
6758
6759 static void
6760 check_die (dw_die_ref die)
6761 {
6762 unsigned ix;
6763 dw_attr_node *a;
6764 bool inline_found = false;
6765 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6766 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6767 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6768 {
6769 switch (a->dw_attr)
6770 {
6771 case DW_AT_inline:
6772 if (a->dw_attr_val.v.val_unsigned)
6773 inline_found = true;
6774 break;
6775 case DW_AT_location:
6776 ++n_location;
6777 break;
6778 case DW_AT_low_pc:
6779 ++n_low_pc;
6780 break;
6781 case DW_AT_high_pc:
6782 ++n_high_pc;
6783 break;
6784 case DW_AT_artificial:
6785 ++n_artificial;
6786 break;
6787 case DW_AT_decl_column:
6788 ++n_decl_column;
6789 break;
6790 case DW_AT_decl_line:
6791 ++n_decl_line;
6792 break;
6793 case DW_AT_decl_file:
6794 ++n_decl_file;
6795 break;
6796 default:
6797 break;
6798 }
6799 }
6800 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6801 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6802 {
6803 fprintf (stderr, "Duplicate attributes in DIE:\n");
6804 debug_dwarf_die (die);
6805 gcc_unreachable ();
6806 }
6807 if (inline_found)
6808 {
6809 /* A debugging information entry that is a member of an abstract
6810 instance tree [that has DW_AT_inline] should not contain any
6811 attributes which describe aspects of the subroutine which vary
6812 between distinct inlined expansions or distinct out-of-line
6813 expansions. */
6814 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6815 gcc_assert (a->dw_attr != DW_AT_low_pc
6816 && a->dw_attr != DW_AT_high_pc
6817 && a->dw_attr != DW_AT_location
6818 && a->dw_attr != DW_AT_frame_base
6819 && a->dw_attr != DW_AT_call_all_calls
6820 && a->dw_attr != DW_AT_GNU_all_call_sites);
6821 }
6822 }
6823 \f
6824 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6825 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6826 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6827
6828 /* Calculate the checksum of a location expression. */
6829
6830 static inline void
6831 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6832 {
6833 int tem;
6834 inchash::hash hstate;
6835 hashval_t hash;
6836
6837 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6838 CHECKSUM (tem);
6839 hash_loc_operands (loc, hstate);
6840 hash = hstate.end();
6841 CHECKSUM (hash);
6842 }
6843
6844 /* Calculate the checksum of an attribute. */
6845
6846 static void
6847 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6848 {
6849 dw_loc_descr_ref loc;
6850 rtx r;
6851
6852 CHECKSUM (at->dw_attr);
6853
6854 /* We don't care that this was compiled with a different compiler
6855 snapshot; if the output is the same, that's what matters. */
6856 if (at->dw_attr == DW_AT_producer)
6857 return;
6858
6859 switch (AT_class (at))
6860 {
6861 case dw_val_class_const:
6862 case dw_val_class_const_implicit:
6863 CHECKSUM (at->dw_attr_val.v.val_int);
6864 break;
6865 case dw_val_class_unsigned_const:
6866 case dw_val_class_unsigned_const_implicit:
6867 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6868 break;
6869 case dw_val_class_const_double:
6870 CHECKSUM (at->dw_attr_val.v.val_double);
6871 break;
6872 case dw_val_class_wide_int:
6873 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6874 get_full_len (*at->dw_attr_val.v.val_wide)
6875 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6876 break;
6877 case dw_val_class_vec:
6878 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6879 (at->dw_attr_val.v.val_vec.length
6880 * at->dw_attr_val.v.val_vec.elt_size));
6881 break;
6882 case dw_val_class_flag:
6883 CHECKSUM (at->dw_attr_val.v.val_flag);
6884 break;
6885 case dw_val_class_str:
6886 CHECKSUM_STRING (AT_string (at));
6887 break;
6888
6889 case dw_val_class_addr:
6890 r = AT_addr (at);
6891 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6892 CHECKSUM_STRING (XSTR (r, 0));
6893 break;
6894
6895 case dw_val_class_offset:
6896 CHECKSUM (at->dw_attr_val.v.val_offset);
6897 break;
6898
6899 case dw_val_class_loc:
6900 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6901 loc_checksum (loc, ctx);
6902 break;
6903
6904 case dw_val_class_die_ref:
6905 die_checksum (AT_ref (at), ctx, mark);
6906 break;
6907
6908 case dw_val_class_fde_ref:
6909 case dw_val_class_vms_delta:
6910 case dw_val_class_symview:
6911 case dw_val_class_lbl_id:
6912 case dw_val_class_lineptr:
6913 case dw_val_class_macptr:
6914 case dw_val_class_loclistsptr:
6915 case dw_val_class_high_pc:
6916 break;
6917
6918 case dw_val_class_file:
6919 case dw_val_class_file_implicit:
6920 CHECKSUM_STRING (AT_file (at)->filename);
6921 break;
6922
6923 case dw_val_class_data8:
6924 CHECKSUM (at->dw_attr_val.v.val_data8);
6925 break;
6926
6927 default:
6928 break;
6929 }
6930 }
6931
6932 /* Calculate the checksum of a DIE. */
6933
6934 static void
6935 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6936 {
6937 dw_die_ref c;
6938 dw_attr_node *a;
6939 unsigned ix;
6940
6941 /* To avoid infinite recursion. */
6942 if (die->die_mark)
6943 {
6944 CHECKSUM (die->die_mark);
6945 return;
6946 }
6947 die->die_mark = ++(*mark);
6948
6949 CHECKSUM (die->die_tag);
6950
6951 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6952 attr_checksum (a, ctx, mark);
6953
6954 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6955 }
6956
6957 #undef CHECKSUM
6958 #undef CHECKSUM_BLOCK
6959 #undef CHECKSUM_STRING
6960
6961 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6962 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6963 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6964 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6965 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6966 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6967 #define CHECKSUM_ATTR(FOO) \
6968 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6969
6970 /* Calculate the checksum of a number in signed LEB128 format. */
6971
6972 static void
6973 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6974 {
6975 unsigned char byte;
6976 bool more;
6977
6978 while (1)
6979 {
6980 byte = (value & 0x7f);
6981 value >>= 7;
6982 more = !((value == 0 && (byte & 0x40) == 0)
6983 || (value == -1 && (byte & 0x40) != 0));
6984 if (more)
6985 byte |= 0x80;
6986 CHECKSUM (byte);
6987 if (!more)
6988 break;
6989 }
6990 }
6991
6992 /* Calculate the checksum of a number in unsigned LEB128 format. */
6993
6994 static void
6995 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6996 {
6997 while (1)
6998 {
6999 unsigned char byte = (value & 0x7f);
7000 value >>= 7;
7001 if (value != 0)
7002 /* More bytes to follow. */
7003 byte |= 0x80;
7004 CHECKSUM (byte);
7005 if (value == 0)
7006 break;
7007 }
7008 }
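
/* As a worked example of the encodings above: the unsigned value 624485
   is fed to the MD5 context as the bytes 0xe5 0x8e 0x26, and the signed
   value -2 as the single byte 0x7e.  Two producers that agree on the
   abstract value therefore always contribute identical bytes to the
   checksum, independent of host word size.  */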
7009
7010 /* Checksum the context of the DIE. This adds the names of any
7011 surrounding namespaces or structures to the checksum. */
7012
7013 static void
7014 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7015 {
7016 const char *name;
7017 dw_die_ref spec;
7018 int tag = die->die_tag;
7019
7020 if (tag != DW_TAG_namespace
7021 && tag != DW_TAG_structure_type
7022 && tag != DW_TAG_class_type)
7023 return;
7024
7025 name = get_AT_string (die, DW_AT_name);
7026
7027 spec = get_AT_ref (die, DW_AT_specification);
7028 if (spec != NULL)
7029 die = spec;
7030
7031 if (die->die_parent != NULL)
7032 checksum_die_context (die->die_parent, ctx);
7033
7034 CHECKSUM_ULEB128 ('C');
7035 CHECKSUM_ULEB128 (tag);
7036 if (name != NULL)
7037 CHECKSUM_STRING (name);
7038 }
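
/* For example, for a type S defined inside namespace N, checksum_die_context
   called on S's parent contributes 'C', DW_TAG_namespace and the string "N",
   so the signature of N::S differs from that of an otherwise identical type
   defined at another scope.  */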
7039
7040 /* Calculate the checksum of a location expression. */
7041
7042 static inline void
7043 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7044 {
7045 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7046 were emitted as a DW_FORM_sdata instead of a location expression. */
7047 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7048 {
7049 CHECKSUM_ULEB128 (DW_FORM_sdata);
7050 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7051 return;
7052 }
7053
7054 /* Otherwise, just checksum the raw location expression. */
7055 while (loc != NULL)
7056 {
7057 inchash::hash hstate;
7058 hashval_t hash;
7059
7060 CHECKSUM_ULEB128 (loc->dtprel);
7061 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7062 hash_loc_operands (loc, hstate);
7063 hash = hstate.end ();
7064 CHECKSUM (hash);
7065 loc = loc->dw_loc_next;
7066 }
7067 }
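
/* The special case above exists because DWARF allows an attribute such as
   DW_AT_data_member_location to be given either as a plain constant or as a
   location expression consisting of a single DW_OP_plus_uconst; hashing the
   lone DW_OP_plus_uconst as if it were a DW_FORM_sdata constant keeps the
   type signature the same whichever encoding was chosen.  */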
7068
7069 /* Calculate the checksum of an attribute. */
7070
7071 static void
7072 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7073 struct md5_ctx *ctx, int *mark)
7074 {
7075 dw_loc_descr_ref loc;
7076 rtx r;
7077
7078 if (AT_class (at) == dw_val_class_die_ref)
7079 {
7080 dw_die_ref target_die = AT_ref (at);
7081
7082 /* For pointer and reference types, we checksum only the (qualified)
7083 name of the target type (if there is a name). For friend entries,
7084 we checksum only the (qualified) name of the target type or function.
7085 This allows the checksum to remain the same whether the target type
7086 is complete or not. */
7087 if ((at->dw_attr == DW_AT_type
7088 && (tag == DW_TAG_pointer_type
7089 || tag == DW_TAG_reference_type
7090 || tag == DW_TAG_rvalue_reference_type
7091 || tag == DW_TAG_ptr_to_member_type))
7092 || (at->dw_attr == DW_AT_friend
7093 && tag == DW_TAG_friend))
7094 {
7095 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7096
7097 if (name_attr != NULL)
7098 {
7099 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7100
7101 if (decl == NULL)
7102 decl = target_die;
7103 CHECKSUM_ULEB128 ('N');
7104 CHECKSUM_ULEB128 (at->dw_attr);
7105 if (decl->die_parent != NULL)
7106 checksum_die_context (decl->die_parent, ctx);
7107 CHECKSUM_ULEB128 ('E');
7108 CHECKSUM_STRING (AT_string (name_attr));
7109 return;
7110 }
7111 }
7112
7113 /* For all other references to another DIE, we check to see if the
7114 target DIE has already been visited. If it has, we emit a
7115 backward reference; if not, we descend recursively. */
7116 if (target_die->die_mark > 0)
7117 {
7118 CHECKSUM_ULEB128 ('R');
7119 CHECKSUM_ULEB128 (at->dw_attr);
7120 CHECKSUM_ULEB128 (target_die->die_mark);
7121 }
7122 else
7123 {
7124 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7125
7126 if (decl == NULL)
7127 decl = target_die;
7128 target_die->die_mark = ++(*mark);
7129 CHECKSUM_ULEB128 ('T');
7130 CHECKSUM_ULEB128 (at->dw_attr);
7131 if (decl->die_parent != NULL)
7132 checksum_die_context (decl->die_parent, ctx);
7133 die_checksum_ordered (target_die, ctx, mark);
7134 }
7135 return;
7136 }
7137
7138 CHECKSUM_ULEB128 ('A');
7139 CHECKSUM_ULEB128 (at->dw_attr);
7140
7141 switch (AT_class (at))
7142 {
7143 case dw_val_class_const:
7144 case dw_val_class_const_implicit:
7145 CHECKSUM_ULEB128 (DW_FORM_sdata);
7146 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7147 break;
7148
7149 case dw_val_class_unsigned_const:
7150 case dw_val_class_unsigned_const_implicit:
7151 CHECKSUM_ULEB128 (DW_FORM_sdata);
7152 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7153 break;
7154
7155 case dw_val_class_const_double:
7156 CHECKSUM_ULEB128 (DW_FORM_block);
7157 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7158 CHECKSUM (at->dw_attr_val.v.val_double);
7159 break;
7160
7161 case dw_val_class_wide_int:
7162 CHECKSUM_ULEB128 (DW_FORM_block);
7163 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7164 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7165 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7166 get_full_len (*at->dw_attr_val.v.val_wide)
7167 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7168 break;
7169
7170 case dw_val_class_vec:
7171 CHECKSUM_ULEB128 (DW_FORM_block);
7172 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7173 * at->dw_attr_val.v.val_vec.elt_size);
7174 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7175 (at->dw_attr_val.v.val_vec.length
7176 * at->dw_attr_val.v.val_vec.elt_size));
7177 break;
7178
7179 case dw_val_class_flag:
7180 CHECKSUM_ULEB128 (DW_FORM_flag);
7181 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7182 break;
7183
7184 case dw_val_class_str:
7185 CHECKSUM_ULEB128 (DW_FORM_string);
7186 CHECKSUM_STRING (AT_string (at));
7187 break;
7188
7189 case dw_val_class_addr:
7190 r = AT_addr (at);
7191 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7192 CHECKSUM_ULEB128 (DW_FORM_string);
7193 CHECKSUM_STRING (XSTR (r, 0));
7194 break;
7195
7196 case dw_val_class_offset:
7197 CHECKSUM_ULEB128 (DW_FORM_sdata);
7198 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7199 break;
7200
7201 case dw_val_class_loc:
7202 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7203 loc_checksum_ordered (loc, ctx);
7204 break;
7205
7206 case dw_val_class_fde_ref:
7207 case dw_val_class_symview:
7208 case dw_val_class_lbl_id:
7209 case dw_val_class_lineptr:
7210 case dw_val_class_macptr:
7211 case dw_val_class_loclistsptr:
7212 case dw_val_class_high_pc:
7213 break;
7214
7215 case dw_val_class_file:
7216 case dw_val_class_file_implicit:
7217 CHECKSUM_ULEB128 (DW_FORM_string);
7218 CHECKSUM_STRING (AT_file (at)->filename);
7219 break;
7220
7221 case dw_val_class_data8:
7222 CHECKSUM (at->dw_attr_val.v.val_data8);
7223 break;
7224
7225 default:
7226 break;
7227 }
7228 }
7229
7230 struct checksum_attributes
7231 {
7232 dw_attr_node *at_name;
7233 dw_attr_node *at_type;
7234 dw_attr_node *at_friend;
7235 dw_attr_node *at_accessibility;
7236 dw_attr_node *at_address_class;
7237 dw_attr_node *at_alignment;
7238 dw_attr_node *at_allocated;
7239 dw_attr_node *at_artificial;
7240 dw_attr_node *at_associated;
7241 dw_attr_node *at_binary_scale;
7242 dw_attr_node *at_bit_offset;
7243 dw_attr_node *at_bit_size;
7244 dw_attr_node *at_bit_stride;
7245 dw_attr_node *at_byte_size;
7246 dw_attr_node *at_byte_stride;
7247 dw_attr_node *at_const_value;
7248 dw_attr_node *at_containing_type;
7249 dw_attr_node *at_count;
7250 dw_attr_node *at_data_location;
7251 dw_attr_node *at_data_member_location;
7252 dw_attr_node *at_decimal_scale;
7253 dw_attr_node *at_decimal_sign;
7254 dw_attr_node *at_default_value;
7255 dw_attr_node *at_digit_count;
7256 dw_attr_node *at_discr;
7257 dw_attr_node *at_discr_list;
7258 dw_attr_node *at_discr_value;
7259 dw_attr_node *at_encoding;
7260 dw_attr_node *at_endianity;
7261 dw_attr_node *at_explicit;
7262 dw_attr_node *at_is_optional;
7263 dw_attr_node *at_location;
7264 dw_attr_node *at_lower_bound;
7265 dw_attr_node *at_mutable;
7266 dw_attr_node *at_ordering;
7267 dw_attr_node *at_picture_string;
7268 dw_attr_node *at_prototyped;
7269 dw_attr_node *at_small;
7270 dw_attr_node *at_segment;
7271 dw_attr_node *at_string_length;
7272 dw_attr_node *at_string_length_bit_size;
7273 dw_attr_node *at_string_length_byte_size;
7274 dw_attr_node *at_threads_scaled;
7275 dw_attr_node *at_upper_bound;
7276 dw_attr_node *at_use_location;
7277 dw_attr_node *at_use_UTF8;
7278 dw_attr_node *at_variable_parameter;
7279 dw_attr_node *at_virtuality;
7280 dw_attr_node *at_visibility;
7281 dw_attr_node *at_vtable_elem_location;
7282 };
7283
7284 /* Collect the attributes that we will want to use for the checksum. */
7285
7286 static void
7287 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7288 {
7289 dw_attr_node *a;
7290 unsigned ix;
7291
7292 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7293 {
7294 switch (a->dw_attr)
7295 {
7296 case DW_AT_name:
7297 attrs->at_name = a;
7298 break;
7299 case DW_AT_type:
7300 attrs->at_type = a;
7301 break;
7302 case DW_AT_friend:
7303 attrs->at_friend = a;
7304 break;
7305 case DW_AT_accessibility:
7306 attrs->at_accessibility = a;
7307 break;
7308 case DW_AT_address_class:
7309 attrs->at_address_class = a;
7310 break;
7311 case DW_AT_alignment:
7312 attrs->at_alignment = a;
7313 break;
7314 case DW_AT_allocated:
7315 attrs->at_allocated = a;
7316 break;
7317 case DW_AT_artificial:
7318 attrs->at_artificial = a;
7319 break;
7320 case DW_AT_associated:
7321 attrs->at_associated = a;
7322 break;
7323 case DW_AT_binary_scale:
7324 attrs->at_binary_scale = a;
7325 break;
7326 case DW_AT_bit_offset:
7327 attrs->at_bit_offset = a;
7328 break;
7329 case DW_AT_bit_size:
7330 attrs->at_bit_size = a;
7331 break;
7332 case DW_AT_bit_stride:
7333 attrs->at_bit_stride = a;
7334 break;
7335 case DW_AT_byte_size:
7336 attrs->at_byte_size = a;
7337 break;
7338 case DW_AT_byte_stride:
7339 attrs->at_byte_stride = a;
7340 break;
7341 case DW_AT_const_value:
7342 attrs->at_const_value = a;
7343 break;
7344 case DW_AT_containing_type:
7345 attrs->at_containing_type = a;
7346 break;
7347 case DW_AT_count:
7348 attrs->at_count = a;
7349 break;
7350 case DW_AT_data_location:
7351 attrs->at_data_location = a;
7352 break;
7353 case DW_AT_data_member_location:
7354 attrs->at_data_member_location = a;
7355 break;
7356 case DW_AT_decimal_scale:
7357 attrs->at_decimal_scale = a;
7358 break;
7359 case DW_AT_decimal_sign:
7360 attrs->at_decimal_sign = a;
7361 break;
7362 case DW_AT_default_value:
7363 attrs->at_default_value = a;
7364 break;
7365 case DW_AT_digit_count:
7366 attrs->at_digit_count = a;
7367 break;
7368 case DW_AT_discr:
7369 attrs->at_discr = a;
7370 break;
7371 case DW_AT_discr_list:
7372 attrs->at_discr_list = a;
7373 break;
7374 case DW_AT_discr_value:
7375 attrs->at_discr_value = a;
7376 break;
7377 case DW_AT_encoding:
7378 attrs->at_encoding = a;
7379 break;
7380 case DW_AT_endianity:
7381 attrs->at_endianity = a;
7382 break;
7383 case DW_AT_explicit:
7384 attrs->at_explicit = a;
7385 break;
7386 case DW_AT_is_optional:
7387 attrs->at_is_optional = a;
7388 break;
7389 case DW_AT_location:
7390 attrs->at_location = a;
7391 break;
7392 case DW_AT_lower_bound:
7393 attrs->at_lower_bound = a;
7394 break;
7395 case DW_AT_mutable:
7396 attrs->at_mutable = a;
7397 break;
7398 case DW_AT_ordering:
7399 attrs->at_ordering = a;
7400 break;
7401 case DW_AT_picture_string:
7402 attrs->at_picture_string = a;
7403 break;
7404 case DW_AT_prototyped:
7405 attrs->at_prototyped = a;
7406 break;
7407 case DW_AT_small:
7408 attrs->at_small = a;
7409 break;
7410 case DW_AT_segment:
7411 attrs->at_segment = a;
7412 break;
7413 case DW_AT_string_length:
7414 attrs->at_string_length = a;
7415 break;
7416 case DW_AT_string_length_bit_size:
7417 attrs->at_string_length_bit_size = a;
7418 break;
7419 case DW_AT_string_length_byte_size:
7420 attrs->at_string_length_byte_size = a;
7421 break;
7422 case DW_AT_threads_scaled:
7423 attrs->at_threads_scaled = a;
7424 break;
7425 case DW_AT_upper_bound:
7426 attrs->at_upper_bound = a;
7427 break;
7428 case DW_AT_use_location:
7429 attrs->at_use_location = a;
7430 break;
7431 case DW_AT_use_UTF8:
7432 attrs->at_use_UTF8 = a;
7433 break;
7434 case DW_AT_variable_parameter:
7435 attrs->at_variable_parameter = a;
7436 break;
7437 case DW_AT_virtuality:
7438 attrs->at_virtuality = a;
7439 break;
7440 case DW_AT_visibility:
7441 attrs->at_visibility = a;
7442 break;
7443 case DW_AT_vtable_elem_location:
7444 attrs->at_vtable_elem_location = a;
7445 break;
7446 default:
7447 break;
7448 }
7449 }
7450 }
7451
7452 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7453
7454 static void
7455 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7456 {
7457 dw_die_ref c;
7458 dw_die_ref decl;
7459 struct checksum_attributes attrs;
7460
7461 CHECKSUM_ULEB128 ('D');
7462 CHECKSUM_ULEB128 (die->die_tag);
7463
7464 memset (&attrs, 0, sizeof (attrs));
7465
7466 decl = get_AT_ref (die, DW_AT_specification);
7467 if (decl != NULL)
7468 collect_checksum_attributes (&attrs, decl);
7469 collect_checksum_attributes (&attrs, die);
7470
7471 CHECKSUM_ATTR (attrs.at_name);
7472 CHECKSUM_ATTR (attrs.at_accessibility);
7473 CHECKSUM_ATTR (attrs.at_address_class);
7474 CHECKSUM_ATTR (attrs.at_allocated);
7475 CHECKSUM_ATTR (attrs.at_artificial);
7476 CHECKSUM_ATTR (attrs.at_associated);
7477 CHECKSUM_ATTR (attrs.at_binary_scale);
7478 CHECKSUM_ATTR (attrs.at_bit_offset);
7479 CHECKSUM_ATTR (attrs.at_bit_size);
7480 CHECKSUM_ATTR (attrs.at_bit_stride);
7481 CHECKSUM_ATTR (attrs.at_byte_size);
7482 CHECKSUM_ATTR (attrs.at_byte_stride);
7483 CHECKSUM_ATTR (attrs.at_const_value);
7484 CHECKSUM_ATTR (attrs.at_containing_type);
7485 CHECKSUM_ATTR (attrs.at_count);
7486 CHECKSUM_ATTR (attrs.at_data_location);
7487 CHECKSUM_ATTR (attrs.at_data_member_location);
7488 CHECKSUM_ATTR (attrs.at_decimal_scale);
7489 CHECKSUM_ATTR (attrs.at_decimal_sign);
7490 CHECKSUM_ATTR (attrs.at_default_value);
7491 CHECKSUM_ATTR (attrs.at_digit_count);
7492 CHECKSUM_ATTR (attrs.at_discr);
7493 CHECKSUM_ATTR (attrs.at_discr_list);
7494 CHECKSUM_ATTR (attrs.at_discr_value);
7495 CHECKSUM_ATTR (attrs.at_encoding);
7496 CHECKSUM_ATTR (attrs.at_endianity);
7497 CHECKSUM_ATTR (attrs.at_explicit);
7498 CHECKSUM_ATTR (attrs.at_is_optional);
7499 CHECKSUM_ATTR (attrs.at_location);
7500 CHECKSUM_ATTR (attrs.at_lower_bound);
7501 CHECKSUM_ATTR (attrs.at_mutable);
7502 CHECKSUM_ATTR (attrs.at_ordering);
7503 CHECKSUM_ATTR (attrs.at_picture_string);
7504 CHECKSUM_ATTR (attrs.at_prototyped);
7505 CHECKSUM_ATTR (attrs.at_small);
7506 CHECKSUM_ATTR (attrs.at_segment);
7507 CHECKSUM_ATTR (attrs.at_string_length);
7508 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7509 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7510 CHECKSUM_ATTR (attrs.at_threads_scaled);
7511 CHECKSUM_ATTR (attrs.at_upper_bound);
7512 CHECKSUM_ATTR (attrs.at_use_location);
7513 CHECKSUM_ATTR (attrs.at_use_UTF8);
7514 CHECKSUM_ATTR (attrs.at_variable_parameter);
7515 CHECKSUM_ATTR (attrs.at_virtuality);
7516 CHECKSUM_ATTR (attrs.at_visibility);
7517 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7518 CHECKSUM_ATTR (attrs.at_type);
7519 CHECKSUM_ATTR (attrs.at_friend);
7520 CHECKSUM_ATTR (attrs.at_alignment);
7521
7522 /* Checksum the child DIEs. */
7523 c = die->die_child;
7524 if (c) do {
7525 dw_attr_node *name_attr;
7526
7527 c = c->die_sib;
7528 name_attr = get_AT (c, DW_AT_name);
7529 if (is_template_instantiation (c))
7530 {
7531 /* Ignore instantiations of member type and function templates. */
7532 }
7533 else if (name_attr != NULL
7534 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7535 {
7536 /* Use a shallow checksum for named nested types and member
7537 functions. */
7538 CHECKSUM_ULEB128 ('S');
7539 CHECKSUM_ULEB128 (c->die_tag);
7540 CHECKSUM_STRING (AT_string (name_attr));
7541 }
7542 else
7543 {
7544 /* Use a deep checksum for other children. */
7545 /* Mark this DIE so it gets processed when unmarking. */
7546 if (c->die_mark == 0)
7547 c->die_mark = -1;
7548 die_checksum_ordered (c, ctx, mark);
7549 }
7550 } while (c != die->die_child);
7551
7552 CHECKSUM_ULEB128 (0);
7553 }
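
/* Unlike die_checksum above, die_checksum_ordered hashes a fixed subset of
   attributes in the canonical order given by the CHECKSUM_ATTR calls and
   terminates the child list with a 0, so the resulting signature does not
   depend on the order in which attributes happened to be added to the DIE.  */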
7554
7555 /* Add a type name and tag to a hash. */
7556 static void
7557 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7558 {
7559 CHECKSUM_ULEB128 (tag);
7560 CHECKSUM_STRING (name);
7561 }
7562
7563 #undef CHECKSUM
7564 #undef CHECKSUM_STRING
7565 #undef CHECKSUM_ATTR
7566 #undef CHECKSUM_SLEB128
7567 #undef CHECKSUM_ULEB128
7568
7569 /* Generate the type signature for DIE. This is computed by generating an
7570 MD5 checksum over the DIE's tag, its relevant attributes, and its
7571 children. Attributes that are references to other DIEs are processed
7572 by recursion, using the MARK field to prevent infinite recursion.
7573 If the DIE is nested inside a namespace or another type, we also
7574 need to include that context in the signature. The lower 64 bits
7575 of the resulting MD5 checksum comprise the signature. */
7576
7577 static void
7578 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7579 {
7580 int mark;
7581 const char *name;
7582 unsigned char checksum[16];
7583 struct md5_ctx ctx;
7584 dw_die_ref decl;
7585 dw_die_ref parent;
7586
7587 name = get_AT_string (die, DW_AT_name);
7588 decl = get_AT_ref (die, DW_AT_specification);
7589 parent = get_die_parent (die);
7590
7591 /* First, compute a signature for just the type name (and its surrounding
7592 context, if any). This is stored in the type unit DIE for link-time
7593 ODR (one-definition rule) checking. */
7594
7595 if (is_cxx () && name != NULL)
7596 {
7597 md5_init_ctx (&ctx);
7598
7599 /* Checksum the names of surrounding namespaces and structures. */
7600 if (parent != NULL)
7601 checksum_die_context (parent, &ctx);
7602
7603 /* Checksum the current DIE. */
7604 die_odr_checksum (die->die_tag, name, &ctx);
7605 md5_finish_ctx (&ctx, checksum);
7606
7607 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7608 }
7609
7610 /* Next, compute the complete type signature. */
7611
7612 md5_init_ctx (&ctx);
7613 mark = 1;
7614 die->die_mark = mark;
7615
7616 /* Checksum the names of surrounding namespaces and structures. */
7617 if (parent != NULL)
7618 checksum_die_context (parent, &ctx);
7619
7620 /* Checksum the DIE and its children. */
7621 die_checksum_ordered (die, &ctx, &mark);
7622 unmark_all_dies (die);
7623 md5_finish_ctx (&ctx, checksum);
7624
7625 /* Store the signature in the type node and link the type DIE and the
7626 type node together. */
7627 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7628 DWARF_TYPE_SIGNATURE_SIZE);
7629 die->comdat_type_p = true;
7630 die->die_id.die_type_node = type_node;
7631 type_node->type_die = die;
7632
7633 /* If the DIE is a specification, link its declaration to the type node
7634 as well. */
7635 if (decl != NULL)
7636 {
7637 decl->comdat_type_p = true;
7638 decl->die_id.die_type_node = type_node;
7639 }
7640 }
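
/* The trailing DWARF_TYPE_SIGNATURE_SIZE (8) bytes of the MD5 digest computed
   above become the signature stored in the type unit header and used by
   DW_FORM_ref_sig8 references, so identical definitions of a type in
   different translation units receive the same signature and their COMDAT
   type units can be deduplicated at link time.  */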
7641
7642 /* Do the location expressions look the same? */
7643 static inline int
7644 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7645 {
7646 return loc1->dw_loc_opc == loc2->dw_loc_opc
7647 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7648 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7649 }
7650
7651 /* Do the values look the same? */
7652 static int
7653 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7654 {
7655 dw_loc_descr_ref loc1, loc2;
7656 rtx r1, r2;
7657
7658 if (v1->val_class != v2->val_class)
7659 return 0;
7660
7661 switch (v1->val_class)
7662 {
7663 case dw_val_class_const:
7664 case dw_val_class_const_implicit:
7665 return v1->v.val_int == v2->v.val_int;
7666 case dw_val_class_unsigned_const:
7667 case dw_val_class_unsigned_const_implicit:
7668 return v1->v.val_unsigned == v2->v.val_unsigned;
7669 case dw_val_class_const_double:
7670 return v1->v.val_double.high == v2->v.val_double.high
7671 && v1->v.val_double.low == v2->v.val_double.low;
7672 case dw_val_class_wide_int:
7673 return *v1->v.val_wide == *v2->v.val_wide;
7674 case dw_val_class_vec:
7675 if (v1->v.val_vec.length != v2->v.val_vec.length
7676 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7677 return 0;
7678 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7679 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7680 return 0;
7681 return 1;
7682 case dw_val_class_flag:
7683 return v1->v.val_flag == v2->v.val_flag;
7684 case dw_val_class_str:
7685 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7686
7687 case dw_val_class_addr:
7688 r1 = v1->v.val_addr;
7689 r2 = v2->v.val_addr;
7690 if (GET_CODE (r1) != GET_CODE (r2))
7691 return 0;
7692 return rtx_equal_p (r1, r2);
7693
7694 case dw_val_class_offset:
7695 return v1->v.val_offset == v2->v.val_offset;
7696
7697 case dw_val_class_loc:
7698 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7699 loc1 && loc2;
7700 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7701 if (!same_loc_p (loc1, loc2, mark))
7702 return 0;
7703 return !loc1 && !loc2;
7704
7705 case dw_val_class_die_ref:
7706 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7707
7708 case dw_val_class_symview:
7709 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7710
7711 case dw_val_class_fde_ref:
7712 case dw_val_class_vms_delta:
7713 case dw_val_class_lbl_id:
7714 case dw_val_class_lineptr:
7715 case dw_val_class_macptr:
7716 case dw_val_class_loclistsptr:
7717 case dw_val_class_high_pc:
7718 return 1;
7719
7720 case dw_val_class_file:
7721 case dw_val_class_file_implicit:
7722 return v1->v.val_file == v2->v.val_file;
7723
7724 case dw_val_class_data8:
7725 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7726
7727 default:
7728 return 1;
7729 }
7730 }
7731
7732 /* Do the attributes look the same? */
7733
7734 static int
7735 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7736 {
7737 if (at1->dw_attr != at2->dw_attr)
7738 return 0;
7739
7740 /* We don't care that this was compiled with a different compiler
7741 snapshot; if the output is the same, that's what matters. */
7742 if (at1->dw_attr == DW_AT_producer)
7743 return 1;
7744
7745 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7746 }
7747
7748 /* Do the DIEs look the same? */
7749
7750 static int
7751 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7752 {
7753 dw_die_ref c1, c2;
7754 dw_attr_node *a1;
7755 unsigned ix;
7756
7757 /* To avoid infinite recursion. */
7758 if (die1->die_mark)
7759 return die1->die_mark == die2->die_mark;
7760 die1->die_mark = die2->die_mark = ++(*mark);
7761
7762 if (die1->die_tag != die2->die_tag)
7763 return 0;
7764
7765 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7766 return 0;
7767
7768 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7769 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7770 return 0;
7771
7772 c1 = die1->die_child;
7773 c2 = die2->die_child;
7774 if (! c1)
7775 {
7776 if (c2)
7777 return 0;
7778 }
7779 else
7780 for (;;)
7781 {
7782 if (!same_die_p (c1, c2, mark))
7783 return 0;
7784 c1 = c1->die_sib;
7785 c2 = c2->die_sib;
7786 if (c1 == die1->die_child)
7787 {
7788 if (c2 == die2->die_child)
7789 break;
7790 else
7791 return 0;
7792 }
7793 }
7794
7795 return 1;
7796 }
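
/* Note that same_die_p relies on the marks assigned above for cycle
   detection: corresponding DIEs in the two trees receive the same mark
   value, so a back reference inside one tree compares equal only to the
   matching back reference in the other, which lets mutually recursive
   type graphs compare correctly.  */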
7797
7798 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7799 children, and set die_symbol. */
7800
7801 static void
7802 compute_comp_unit_symbol (dw_die_ref unit_die)
7803 {
7804 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7805 const char *base = die_name ? lbasename (die_name) : "anonymous";
7806 char *name = XALLOCAVEC (char, strlen (base) + 64);
7807 char *p;
7808 int i, mark;
7809 unsigned char checksum[16];
7810 struct md5_ctx ctx;
7811
7812 /* Compute the checksum of the DIE, then append part of it as hex digits to
7813 the base filename of the unit. */
7814
7815 md5_init_ctx (&ctx);
7816 mark = 0;
7817 die_checksum (unit_die, &ctx, &mark);
7818 unmark_all_dies (unit_die);
7819 md5_finish_ctx (&ctx, checksum);
7820
7821 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7822 not start with a letter but with anything valid for filenames and
7823 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7824 character is not a letter. */
7825 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7826 clean_symbol_name (name);
7827
7828 p = name + strlen (name);
7829 for (i = 0; i < 4; i++)
7830 {
7831 sprintf (p, "%.2x", checksum[i]);
7832 p += 2;
7833 }
7834
7835 unit_die->die_id.die_symbol = xstrdup (name);
7836 }
7837
7838 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7839
7840 static int
7841 is_type_die (dw_die_ref die)
7842 {
7843 switch (die->die_tag)
7844 {
7845 case DW_TAG_array_type:
7846 case DW_TAG_class_type:
7847 case DW_TAG_interface_type:
7848 case DW_TAG_enumeration_type:
7849 case DW_TAG_pointer_type:
7850 case DW_TAG_reference_type:
7851 case DW_TAG_rvalue_reference_type:
7852 case DW_TAG_string_type:
7853 case DW_TAG_structure_type:
7854 case DW_TAG_subroutine_type:
7855 case DW_TAG_union_type:
7856 case DW_TAG_ptr_to_member_type:
7857 case DW_TAG_set_type:
7858 case DW_TAG_subrange_type:
7859 case DW_TAG_base_type:
7860 case DW_TAG_const_type:
7861 case DW_TAG_file_type:
7862 case DW_TAG_packed_type:
7863 case DW_TAG_volatile_type:
7864 case DW_TAG_typedef:
7865 return 1;
7866 default:
7867 return 0;
7868 }
7869 }
7870
7871 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7872 Basically, we want to choose the bits that are likely to be shared between
7873 compilations (types) and leave out the bits that are specific to individual
7874 compilations (functions). */
7875
7876 static int
7877 is_comdat_die (dw_die_ref c)
7878 {
7879 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7880 we do for stabs. The advantage is a greater likelihood of sharing between
7881 objects that don't include headers in the same order (and therefore would
7882 put the base types in a different comdat). jason 8/28/00 */
7883
7884 if (c->die_tag == DW_TAG_base_type)
7885 return 0;
7886
7887 if (c->die_tag == DW_TAG_pointer_type
7888 || c->die_tag == DW_TAG_reference_type
7889 || c->die_tag == DW_TAG_rvalue_reference_type
7890 || c->die_tag == DW_TAG_const_type
7891 || c->die_tag == DW_TAG_volatile_type)
7892 {
7893 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7894
7895 return t ? is_comdat_die (t) : 0;
7896 }
7897
7898 return is_type_die (c);
7899 }
7900
7901 /* Returns true iff C is a compile-unit DIE. */
7902
7903 static inline bool
7904 is_cu_die (dw_die_ref c)
7905 {
7906 return c && (c->die_tag == DW_TAG_compile_unit
7907 || c->die_tag == DW_TAG_skeleton_unit);
7908 }
7909
7910 /* Returns true iff C is a unit DIE of some sort. */
7911
7912 static inline bool
7913 is_unit_die (dw_die_ref c)
7914 {
7915 return c && (c->die_tag == DW_TAG_compile_unit
7916 || c->die_tag == DW_TAG_partial_unit
7917 || c->die_tag == DW_TAG_type_unit
7918 || c->die_tag == DW_TAG_skeleton_unit);
7919 }
7920
7921 /* Returns true iff C is a namespace DIE. */
7922
7923 static inline bool
7924 is_namespace_die (dw_die_ref c)
7925 {
7926 return c && c->die_tag == DW_TAG_namespace;
7927 }
7928
7929 /* Returns true iff C is a class or structure DIE. */
7930
7931 static inline bool
7932 is_class_die (dw_die_ref c)
7933 {
7934 return c && (c->die_tag == DW_TAG_class_type
7935 || c->die_tag == DW_TAG_structure_type);
7936 }
7937
7938 /* Return non-zero if this DIE is a template parameter. */
7939
7940 static inline bool
7941 is_template_parameter (dw_die_ref die)
7942 {
7943 switch (die->die_tag)
7944 {
7945 case DW_TAG_template_type_param:
7946 case DW_TAG_template_value_param:
7947 case DW_TAG_GNU_template_template_param:
7948 case DW_TAG_GNU_template_parameter_pack:
7949 return true;
7950 default:
7951 return false;
7952 }
7953 }
7954
7955 /* Return non-zero if this DIE represents a template instantiation. */
7956
7957 static inline bool
7958 is_template_instantiation (dw_die_ref die)
7959 {
7960 dw_die_ref c;
7961
7962 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7963 return false;
7964 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7965 return false;
7966 }
7967
7968 static char *
7969 gen_internal_sym (const char *prefix)
7970 {
7971 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7972
7973 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7974 return xstrdup (buf);
7975 }
7976
7977 /* Return non-zero if this DIE is a declaration. */
7978
7979 static int
7980 is_declaration_die (dw_die_ref die)
7981 {
7982 dw_attr_node *a;
7983 unsigned ix;
7984
7985 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7986 if (a->dw_attr == DW_AT_declaration)
7987 return 1;
7988
7989 return 0;
7990 }
7991
7992 /* Return non-zero if this DIE is nested inside a subprogram. */
7993
7994 static int
7995 is_nested_in_subprogram (dw_die_ref die)
7996 {
7997 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7998
7999 if (decl == NULL)
8000 decl = die;
8001 return local_scope_p (decl);
8002 }
8003
8004 /* Return non-zero if this DIE contains a defining declaration of a
8005 subprogram. */
8006
8007 static int
8008 contains_subprogram_definition (dw_die_ref die)
8009 {
8010 dw_die_ref c;
8011
8012 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8013 return 1;
8014 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8015 return 0;
8016 }
8017
8018 /* Return non-zero if this is a type DIE that should be moved to a
8019 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8020 unit type. */
8021
8022 static int
8023 should_move_die_to_comdat (dw_die_ref die)
8024 {
8025 switch (die->die_tag)
8026 {
8027 case DW_TAG_class_type:
8028 case DW_TAG_structure_type:
8029 case DW_TAG_enumeration_type:
8030 case DW_TAG_union_type:
8031 /* Don't move declarations, inlined instances, types nested in a
8032 subprogram, or types that contain subprogram definitions. */
8033 if (is_declaration_die (die)
8034 || get_AT (die, DW_AT_abstract_origin)
8035 || is_nested_in_subprogram (die)
8036 || contains_subprogram_definition (die))
8037 return 0;
8038 return 1;
8039 case DW_TAG_array_type:
8040 case DW_TAG_interface_type:
8041 case DW_TAG_pointer_type:
8042 case DW_TAG_reference_type:
8043 case DW_TAG_rvalue_reference_type:
8044 case DW_TAG_string_type:
8045 case DW_TAG_subroutine_type:
8046 case DW_TAG_ptr_to_member_type:
8047 case DW_TAG_set_type:
8048 case DW_TAG_subrange_type:
8049 case DW_TAG_base_type:
8050 case DW_TAG_const_type:
8051 case DW_TAG_file_type:
8052 case DW_TAG_packed_type:
8053 case DW_TAG_volatile_type:
8054 case DW_TAG_typedef:
8055 default:
8056 return 0;
8057 }
8058 }
8059
8060 /* Make a clone of DIE. */
8061
8062 static dw_die_ref
8063 clone_die (dw_die_ref die)
8064 {
8065 dw_die_ref clone = new_die_raw (die->die_tag);
8066 dw_attr_node *a;
8067 unsigned ix;
8068
8069 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8070 add_dwarf_attr (clone, a);
8071
8072 return clone;
8073 }
8074
8075 /* Make a clone of the tree rooted at DIE. */
8076
8077 static dw_die_ref
8078 clone_tree (dw_die_ref die)
8079 {
8080 dw_die_ref c;
8081 dw_die_ref clone = clone_die (die);
8082
8083 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8084
8085 return clone;
8086 }
8087
8088 /* Make a clone of DIE as a declaration. */
8089
8090 static dw_die_ref
8091 clone_as_declaration (dw_die_ref die)
8092 {
8093 dw_die_ref clone;
8094 dw_die_ref decl;
8095 dw_attr_node *a;
8096 unsigned ix;
8097
8098 /* If the DIE is already a declaration, just clone it. */
8099 if (is_declaration_die (die))
8100 return clone_die (die);
8101
8102 /* If the DIE is a specification, just clone its declaration DIE. */
8103 decl = get_AT_ref (die, DW_AT_specification);
8104 if (decl != NULL)
8105 {
8106 clone = clone_die (decl);
8107 if (die->comdat_type_p)
8108 add_AT_die_ref (clone, DW_AT_signature, die);
8109 return clone;
8110 }
8111
8112 clone = new_die_raw (die->die_tag);
8113
8114 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8115 {
8116 /* We don't want to copy over all attributes.
8117 For example we don't want DW_AT_byte_size because otherwise we will no
8118 longer have a declaration and GDB will treat it as a definition. */
8119
8120 switch (a->dw_attr)
8121 {
8122 case DW_AT_abstract_origin:
8123 case DW_AT_artificial:
8124 case DW_AT_containing_type:
8125 case DW_AT_external:
8126 case DW_AT_name:
8127 case DW_AT_type:
8128 case DW_AT_virtuality:
8129 case DW_AT_linkage_name:
8130 case DW_AT_MIPS_linkage_name:
8131 add_dwarf_attr (clone, a);
8132 break;
8133 case DW_AT_byte_size:
8134 case DW_AT_alignment:
8135 default:
8136 break;
8137 }
8138 }
8139
8140 if (die->comdat_type_p)
8141 add_AT_die_ref (clone, DW_AT_signature, die);
8142
8143 add_AT_flag (clone, DW_AT_declaration, 1);
8144 return clone;
8145 }
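
/* clone_as_declaration deliberately copies only identity attributes (name,
   type, linkage name, ...) and adds DW_AT_declaration, so the clone left in
   the main CU is an incomplete declaration that consumers resolve to the
   full definition in the type unit rather than a second definition.  */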
8146
8147
8148 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8149
8150 struct decl_table_entry
8151 {
8152 dw_die_ref orig;
8153 dw_die_ref copy;
8154 };
8155
8156 /* Helpers to manipulate hash table of copied declarations. */
8157
8158 /* Hashtable helpers. */
8159
8160 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8161 {
8162 typedef die_struct *compare_type;
8163 static inline hashval_t hash (const decl_table_entry *);
8164 static inline bool equal (const decl_table_entry *, const die_struct *);
8165 };
8166
8167 inline hashval_t
8168 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8169 {
8170 return htab_hash_pointer (entry->orig);
8171 }
8172
8173 inline bool
8174 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8175 const die_struct *entry2)
8176 {
8177 return entry1->orig == entry2;
8178 }
8179
8180 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8181
8182 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8183 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8184 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8185 to check if the ancestor has already been copied into UNIT. */
8186
8187 static dw_die_ref
8188 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8189 decl_hash_type *decl_table)
8190 {
8191 dw_die_ref parent = die->die_parent;
8192 dw_die_ref new_parent = unit;
8193 dw_die_ref copy;
8194 decl_table_entry **slot = NULL;
8195 struct decl_table_entry *entry = NULL;
8196
8197 if (decl_table)
8198 {
8199 /* Check if the entry has already been copied to UNIT. */
8200 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8201 INSERT);
8202 if (*slot != HTAB_EMPTY_ENTRY)
8203 {
8204 entry = *slot;
8205 return entry->copy;
8206 }
8207
8208 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8209 entry = XCNEW (struct decl_table_entry);
8210 entry->orig = die;
8211 entry->copy = NULL;
8212 *slot = entry;
8213 }
8214
8215 if (parent != NULL)
8216 {
8217 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8218 if (spec != NULL)
8219 parent = spec;
8220 if (!is_unit_die (parent))
8221 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8222 }
8223
8224 copy = clone_as_declaration (die);
8225 add_child_die (new_parent, copy);
8226
8227 if (decl_table)
8228 {
8229 /* Record the pointer to the copy. */
8230 entry->copy = copy;
8231 }
8232
8233 return copy;
8234 }
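
/* The DECL_TABLE memoization in copy_ancestor_tree guarantees that each
   ancestor DIE is cloned at most once per type unit; a later reference to
   the same context reuses the existing copy, so deeply shared namespace and
   class contexts are not duplicated.  */
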
8235 /* Copy the declaration context to the new type unit DIE. This includes
8236 any surrounding namespace or type declarations. If the DIE has an
8237 AT_specification attribute, it also includes attributes and children
8238 attached to the specification, and returns a pointer to the original
8239 parent of the declaration DIE. Returns NULL otherwise. */
8240
8241 static dw_die_ref
8242 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8243 {
8244 dw_die_ref decl;
8245 dw_die_ref new_decl;
8246 dw_die_ref orig_parent = NULL;
8247
8248 decl = get_AT_ref (die, DW_AT_specification);
8249 if (decl == NULL)
8250 decl = die;
8251 else
8252 {
8253 unsigned ix;
8254 dw_die_ref c;
8255 dw_attr_node *a;
8256
8257 /* The original DIE will be changed to a declaration, and must
8258 be moved to be a child of the original declaration DIE. */
8259 orig_parent = decl->die_parent;
8260
8261 /* Copy the type node pointer from the new DIE to the original
8262 declaration DIE so we can forward references later. */
8263 decl->comdat_type_p = true;
8264 decl->die_id.die_type_node = die->die_id.die_type_node;
8265
8266 remove_AT (die, DW_AT_specification);
8267
8268 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8269 {
8270 if (a->dw_attr != DW_AT_name
8271 && a->dw_attr != DW_AT_declaration
8272 && a->dw_attr != DW_AT_external)
8273 add_dwarf_attr (die, a);
8274 }
8275
8276 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8277 }
8278
8279 if (decl->die_parent != NULL
8280 && !is_unit_die (decl->die_parent))
8281 {
8282 new_decl = copy_ancestor_tree (unit, decl, NULL);
8283 if (new_decl != NULL)
8284 {
8285 remove_AT (new_decl, DW_AT_signature);
8286 add_AT_specification (die, new_decl);
8287 }
8288 }
8289
8290 return orig_parent;
8291 }
8292
8293 /* Generate the skeleton ancestor tree for the given NODE, then clone
8294 the DIE and add the clone into the tree. */
8295
8296 static void
8297 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8298 {
8299 if (node->new_die != NULL)
8300 return;
8301
8302 node->new_die = clone_as_declaration (node->old_die);
8303
8304 if (node->parent != NULL)
8305 {
8306 generate_skeleton_ancestor_tree (node->parent);
8307 add_child_die (node->parent->new_die, node->new_die);
8308 }
8309 }
8310
8311 /* Generate a skeleton tree of DIEs containing any declarations that are
8312 found in the original tree. We traverse the tree looking for declaration
8313 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8314
8315 static void
8316 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8317 {
8318 skeleton_chain_node node;
8319 dw_die_ref c;
8320 dw_die_ref first;
8321 dw_die_ref prev = NULL;
8322 dw_die_ref next = NULL;
8323
8324 node.parent = parent;
8325
8326 first = c = parent->old_die->die_child;
8327 if (c)
8328 next = c->die_sib;
8329 if (c) do {
8330 if (prev == NULL || prev->die_sib == c)
8331 prev = c;
8332 c = next;
8333 next = (c == first ? NULL : c->die_sib);
8334 node.old_die = c;
8335 node.new_die = NULL;
8336 if (is_declaration_die (c))
8337 {
8338 if (is_template_instantiation (c))
8339 {
8340 /* Instantiated templates do not need to be cloned into the
8341 type unit. Just move the DIE and its children back to
8342 the skeleton tree (in the main CU). */
8343 remove_child_with_prev (c, prev);
8344 add_child_die (parent->new_die, c);
8345 c = prev;
8346 }
8347 else if (c->comdat_type_p)
8348 {
8349 /* This is the skeleton of a type broken out by an earlier
8350 break_out_comdat_types pass. Clone the existing DIE, but keep
8351 the children under the original (which is in the main CU). */
8352 dw_die_ref clone = clone_die (c);
8353
8354 replace_child (c, clone, prev);
8355 generate_skeleton_ancestor_tree (parent);
8356 add_child_die (parent->new_die, c);
8357 c = clone;
8358 continue;
8359 }
8360 else
8361 {
8362 /* Clone the existing DIE, move the original to the skeleton
8363 tree (which is in the main CU), and put the clone, with
8364 all the original's children, where the original came from
8365 (which is about to be moved to the type unit). */
8366 dw_die_ref clone = clone_die (c);
8367 move_all_children (c, clone);
8368
8369 /* If the original has a DW_AT_object_pointer attribute,
8370 it would now point to a child DIE just moved to the
8371 cloned tree, so we need to remove that attribute from
8372 the original. */
8373 remove_AT (c, DW_AT_object_pointer);
8374
8375 replace_child (c, clone, prev);
8376 generate_skeleton_ancestor_tree (parent);
8377 add_child_die (parent->new_die, c);
8378 node.old_die = clone;
8379 node.new_die = c;
8380 c = clone;
8381 }
8382 }
8383 generate_skeleton_bottom_up (&node);
8384 } while (next != NULL);
8385 }
8386
8387 /* Wrapper function for generate_skeleton_bottom_up. */
8388
8389 static dw_die_ref
8390 generate_skeleton (dw_die_ref die)
8391 {
8392 skeleton_chain_node node;
8393
8394 node.old_die = die;
8395 node.new_die = NULL;
8396 node.parent = NULL;
8397
8398 /* If this type definition is nested inside another type,
8399 and is not an instantiation of a template, always leave
8400 at least a declaration in its place. */
8401 if (die->die_parent != NULL
8402 && is_type_die (die->die_parent)
8403 && !is_template_instantiation (die))
8404 node.new_die = clone_as_declaration (die);
8405
8406 generate_skeleton_bottom_up (&node);
8407 return node.new_die;
8408 }
8409
8410 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8411 declaration. The original DIE is moved to a new compile unit so that
8412 existing references to it follow it to the new location. If any of the
8413 original DIE's descendants is a declaration, we need to replace the
8414 original DIE with a skeleton tree and move the declarations back into the
8415 skeleton tree. */
8416
8417 static dw_die_ref
8418 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8419 dw_die_ref prev)
8420 {
8421 dw_die_ref skeleton, orig_parent;
8422
8423 /* Copy the declaration context to the type unit DIE. If the returned
8424 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8425 that DIE. */
8426 orig_parent = copy_declaration_context (unit, child);
8427
8428 skeleton = generate_skeleton (child);
8429 if (skeleton == NULL)
8430 remove_child_with_prev (child, prev);
8431 else
8432 {
8433 skeleton->comdat_type_p = true;
8434 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8435
8436 /* If the original DIE was a specification, we need to put
8437 the skeleton under the parent DIE of the declaration.
8438 This leaves the original declaration in the tree, but
8439 it will be pruned later since there are no longer any
8440 references to it. */
8441 if (orig_parent != NULL)
8442 {
8443 remove_child_with_prev (child, prev);
8444 add_child_die (orig_parent, skeleton);
8445 }
8446 else
8447 replace_child (child, skeleton, prev);
8448 }
8449
8450 return skeleton;
8451 }
8452
8453 static void
8454 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8455 comdat_type_node *type_node,
8456 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8457
8458 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8459 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8460 DWARF procedure references in the DW_AT_location attribute. */
8461
8462 static dw_die_ref
8463 copy_dwarf_procedure (dw_die_ref die,
8464 comdat_type_node *type_node,
8465 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8466 {
8467 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8468
8469 /* DWARF procedures are not supposed to have children... */
8470 gcc_assert (die->die_child == NULL);
8471
8472 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8473 gcc_assert (vec_safe_length (die->die_attr) == 1
8474 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8475
8476 /* Do not copy DWARF procedures more than once. */
8477 bool existed;
8478 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8479 if (existed)
8480 return die_copy;
8481
8482 die_copy = clone_die (die);
8483 add_child_die (type_node->root_die, die_copy);
8484 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8485 return die_copy;
8486 }
8487
8488 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8489 procedures in DIE's attributes. */
8490
8491 static void
8492 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8493 comdat_type_node *type_node,
8494 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8495 {
8496 dw_attr_node *a;
8497 unsigned i;
8498
8499 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8500 {
8501 dw_loc_descr_ref loc;
8502
8503 if (a->dw_attr_val.val_class != dw_val_class_loc)
8504 continue;
8505
8506 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8507 {
8508 switch (loc->dw_loc_opc)
8509 {
8510 case DW_OP_call2:
8511 case DW_OP_call4:
8512 case DW_OP_call_ref:
8513 gcc_assert (loc->dw_loc_oprnd1.val_class
8514 == dw_val_class_die_ref);
8515 loc->dw_loc_oprnd1.v.val_die_ref.die
8516 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8517 type_node,
8518 copied_dwarf_procs);
8519
8520 default:
8521 break;
8522 }
8523 }
8524 }
8525 }
8526
8527 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8528 rewrite references to point to the copies.
8529
8530 References are looked for in DIE's attributes and, recursively, in the
8531 attributes of all its children that are location descriptions.
8532 COPIED_DWARF_PROCS maps each original DWARF procedure to its copy; it is
8533 used to avoid copying the same DWARF procedure twice under TYPE_NODE. */
8534
8535 static void
8536 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8537 comdat_type_node *type_node,
8538 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8539 {
8540 dw_die_ref c;
8541
8542 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8543 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8544 type_node,
8545 copied_dwarf_procs));
8546 }
8547
8548 /* Traverse the DIE and set up additional .debug_types or .debug_info
8549 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8550 section. */
8551
8552 static void
8553 break_out_comdat_types (dw_die_ref die)
8554 {
8555 dw_die_ref c;
8556 dw_die_ref first;
8557 dw_die_ref prev = NULL;
8558 dw_die_ref next = NULL;
8559 dw_die_ref unit = NULL;
8560
8561 first = c = die->die_child;
8562 if (c)
8563 next = c->die_sib;
8564 if (c) do {
8565 if (prev == NULL || prev->die_sib == c)
8566 prev = c;
8567 c = next;
8568 next = (c == first ? NULL : c->die_sib);
8569 if (should_move_die_to_comdat (c))
8570 {
8571 dw_die_ref replacement;
8572 comdat_type_node *type_node;
8573
8574 /* Break out nested types into their own type units. */
8575 break_out_comdat_types (c);
8576
8577 /* Create a new type unit DIE as the root for the new tree, and
8578 add it to the list of comdat types. */
8579 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8580 add_AT_unsigned (unit, DW_AT_language,
8581 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8582 type_node = ggc_cleared_alloc<comdat_type_node> ();
8583 type_node->root_die = unit;
8584 type_node->next = comdat_type_list;
8585 comdat_type_list = type_node;
8586
8587 /* Generate the type signature. */
8588 generate_type_signature (c, type_node);
8589
8590 /* Copy the declaration context, attributes, and children of the
8591 declaration into the new type unit DIE, then remove this DIE
8592 from the main CU (or replace it with a skeleton if necessary). */
8593 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8594 type_node->skeleton_die = replacement;
8595
8596 /* Add the DIE to the new compunit. */
8597 add_child_die (unit, c);
8598
8599 /* Types can reference DWARF procedures for type size or data location
8600 expressions. Calls in DWARF expressions cannot target procedures
8601 that are not in the same section. So we must copy DWARF procedures
8602 along with this type and then rewrite references to them. */
8603 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8604 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8605
8606 if (replacement != NULL)
8607 c = replacement;
8608 }
8609 else if (c->die_tag == DW_TAG_namespace
8610 || c->die_tag == DW_TAG_class_type
8611 || c->die_tag == DW_TAG_structure_type
8612 || c->die_tag == DW_TAG_union_type)
8613 {
8614 /* Look for nested types that can be broken out. */
8615 break_out_comdat_types (c);
8616 }
8617 } while (next != NULL);
8618 }
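
/* After break_out_comdat_types runs on a compile unit, each eligible type
   has been moved, together with any DWARF procedures it references, under
   its own DW_TAG_type_unit, while a skeleton or declaration carrying the
   comdat type node stays behind in the original CU so that existing
   references are resolved through the 8-byte type signature.  */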
8619
8620 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8621 Enter all the cloned children into the hash table decl_table. */
8622
8623 static dw_die_ref
8624 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8625 {
8626 dw_die_ref c;
8627 dw_die_ref clone;
8628 struct decl_table_entry *entry;
8629 decl_table_entry **slot;
8630
8631 if (die->die_tag == DW_TAG_subprogram)
8632 clone = clone_as_declaration (die);
8633 else
8634 clone = clone_die (die);
8635
8636 slot = decl_table->find_slot_with_hash (die,
8637 htab_hash_pointer (die), INSERT);
8638
8639 /* Assert that DIE isn't in the hash table yet. If it were already there,
8640 its ancestors would necessarily be there as well, and clone_tree_partial
8641 wouldn't have been called. */
8642 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8643
8644 entry = XCNEW (struct decl_table_entry);
8645 entry->orig = die;
8646 entry->copy = clone;
8647 *slot = entry;
8648
8649 if (die->die_tag != DW_TAG_subprogram)
8650 FOR_EACH_CHILD (die, c,
8651 add_child_die (clone, clone_tree_partial (c, decl_table)));
8652
8653 return clone;
8654 }
8655
8656 /* Walk the DIE and its children, looking for references to incomplete
8657 or trivial types that are unmarked (i.e., that are not in the current
8658 type_unit). */
8659
8660 static void
8661 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8662 {
8663 dw_die_ref c;
8664 dw_attr_node *a;
8665 unsigned ix;
8666
8667 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8668 {
8669 if (AT_class (a) == dw_val_class_die_ref)
8670 {
8671 dw_die_ref targ = AT_ref (a);
8672 decl_table_entry **slot;
8673 struct decl_table_entry *entry;
8674
8675 if (targ->die_mark != 0 || targ->comdat_type_p)
8676 continue;
8677
8678 slot = decl_table->find_slot_with_hash (targ,
8679 htab_hash_pointer (targ),
8680 INSERT);
8681
8682 if (*slot != HTAB_EMPTY_ENTRY)
8683 {
8684 /* TARG has already been copied, so we just need to
8685 modify the reference to point to the copy. */
8686 entry = *slot;
8687 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8688 }
8689 else
8690 {
8691 dw_die_ref parent = unit;
8692 dw_die_ref copy = clone_die (targ);
8693
8694 /* Record in DECL_TABLE that TARG has been copied.
8695 Need to do this now, before the recursive call,
8696 because DECL_TABLE may be expanded and SLOT
8697 would no longer be a valid pointer. */
8698 entry = XCNEW (struct decl_table_entry);
8699 entry->orig = targ;
8700 entry->copy = copy;
8701 *slot = entry;
8702
8703 /* If TARG is not a declaration DIE, we need to copy its
8704 children. */
8705 if (!is_declaration_die (targ))
8706 {
8707 FOR_EACH_CHILD (
8708 targ, c,
8709 add_child_die (copy,
8710 clone_tree_partial (c, decl_table)));
8711 }
8712
8713 /* Make sure the cloned tree is marked as part of the
8714 type unit. */
8715 mark_dies (copy);
8716
8717 /* If TARG has surrounding context, copy its ancestor tree
8718 into the new type unit. */
8719 if (targ->die_parent != NULL
8720 && !is_unit_die (targ->die_parent))
8721 parent = copy_ancestor_tree (unit, targ->die_parent,
8722 decl_table);
8723
8724 add_child_die (parent, copy);
8725 a->dw_attr_val.v.val_die_ref.die = copy;
8726
8727 /* Make sure the newly-copied DIE is walked. If it was
8728 installed in a previously-added context, it won't
8729 get visited otherwise. */
8730 if (parent != unit)
8731 {
8732 /* Find the highest point of the newly-added tree,
8733 mark each node along the way, and walk from there. */
8734 parent->die_mark = 1;
8735 while (parent->die_parent
8736 && parent->die_parent->die_mark == 0)
8737 {
8738 parent = parent->die_parent;
8739 parent->die_mark = 1;
8740 }
8741 copy_decls_walk (unit, parent, decl_table);
8742 }
8743 }
8744 }
8745 }
8746
8747 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8748 }
8749
8750 /* Copy declarations for "unworthy" types into the new comdat section.
8751 Incomplete types, modified types, and certain other types aren't broken
8752 out into comdat sections of their own, so they don't have a signature,
8753 and we need to copy the declaration into the same section so that we
8754 don't have an external reference. */
8755
8756 static void
8757 copy_decls_for_unworthy_types (dw_die_ref unit)
8758 {
8759 mark_dies (unit);
8760 decl_hash_type decl_table (10);
8761 copy_decls_walk (unit, unit, &decl_table);
8762 unmark_dies (unit);
8763 }
8764
8765 /* Traverse the DIE and add a sibling attribute if it may have the
8766 effect of speeding up access to siblings. To save some space,
8767 avoid generating sibling attributes for DIE's without children. */
8768
8769 static void
8770 add_sibling_attributes (dw_die_ref die)
8771 {
8772 dw_die_ref c;
8773
8774 if (! die->die_child)
8775 return;
8776
8777 if (die->die_parent && die != die->die_parent->die_child)
8778 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8779
8780 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8781 }
8782
8783 /* Output all location lists for the DIE and its children. */
8784
8785 static void
8786 output_location_lists (dw_die_ref die)
8787 {
8788 dw_die_ref c;
8789 dw_attr_node *a;
8790 unsigned ix;
8791
8792 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8793 if (AT_class (a) == dw_val_class_loc_list)
8794 output_loc_list (AT_loc_list (a));
8795
8796 FOR_EACH_CHILD (die, c, output_location_lists (c));
8797 }
8798
8799 /* During assign_location_list_indexes and output_loclists_offsets this is
8800 the current index; afterwards, the number of assigned indexes (i.e. how
8801 large the .debug_loclists* offset table should be). */
8802 static unsigned int loc_list_idx;
8803
8804 /* Output all location list offsets for the DIE and its children. */
8805
8806 static void
8807 output_loclists_offsets (dw_die_ref die)
8808 {
8809 dw_die_ref c;
8810 dw_attr_node *a;
8811 unsigned ix;
8812
8813 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8814 if (AT_class (a) == dw_val_class_loc_list)
8815 {
8816 dw_loc_list_ref l = AT_loc_list (a);
8817 if (l->offset_emitted)
8818 continue;
8819 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8820 loc_section_label, NULL);
8821 gcc_assert (l->hash == loc_list_idx);
8822 loc_list_idx++;
8823 l->offset_emitted = true;
8824 }
8825
8826 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8827 }
8828
8829 /* Recursively set indexes of location lists. */
8830
8831 static void
8832 assign_location_list_indexes (dw_die_ref die)
8833 {
8834 dw_die_ref c;
8835 dw_attr_node *a;
8836 unsigned ix;
8837
8838 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8839 if (AT_class (a) == dw_val_class_loc_list)
8840 {
8841 dw_loc_list_ref list = AT_loc_list (a);
8842 if (!list->num_assigned)
8843 {
8844 list->num_assigned = true;
8845 list->hash = loc_list_idx++;
8846 }
8847 }
8848
8849 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8850 }
8851
8852 /* We want to limit the number of external references, because they are
8853 larger than local references: a relocation takes multiple words, and
8854 even a sig8 reference is always eight bytes, whereas a local reference
8855 can be as small as one byte (though GCC usually uses the 4-byte DW_FORM_ref4).
8856 So if we encounter multiple external references to the same type DIE, we
8857 make a local typedef stub for it and redirect all references there.
8858
8859 This is the element of the hash table for keeping track of these
8860 references. */
8861
8862 struct external_ref
8863 {
8864 dw_die_ref type;
8865 dw_die_ref stub;
8866 unsigned n_refs;
8867 };
8868
8869 /* Hashtable helpers. */
8870
8871 struct external_ref_hasher : free_ptr_hash <external_ref>
8872 {
8873 static inline hashval_t hash (const external_ref *);
8874 static inline bool equal (const external_ref *, const external_ref *);
8875 };
8876
8877 inline hashval_t
8878 external_ref_hasher::hash (const external_ref *r)
8879 {
8880 dw_die_ref die = r->type;
8881 hashval_t h = 0;
8882
8883 /* We can't use the address of the DIE for hashing, because
8884 that will make the order of the stub DIEs non-deterministic. */
8885 if (! die->comdat_type_p)
8886 /* We have a symbol; use it to compute a hash. */
8887 h = htab_hash_string (die->die_id.die_symbol);
8888 else
8889 {
8890 /* We have a type signature; use a subset of the bits as the hash.
8891 The 8-byte signature is at least as large as hashval_t. */
8892 comdat_type_node *type_node = die->die_id.die_type_node;
8893 memcpy (&h, type_node->signature, sizeof (h));
8894 }
8895 return h;
8896 }
8897
8898 inline bool
8899 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8900 {
8901 return r1->type == r2->type;
8902 }
8903
8904 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8905
8906 /* Return a pointer to the external_ref for references to DIE. */
8907
8908 static struct external_ref *
8909 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8910 {
8911 struct external_ref ref, *ref_p;
8912 external_ref **slot;
8913
8914 ref.type = die;
8915 slot = map->find_slot (&ref, INSERT);
8916 if (*slot != HTAB_EMPTY_ENTRY)
8917 return *slot;
8918
8919 ref_p = XCNEW (struct external_ref);
8920 ref_p->type = die;
8921 *slot = ref_p;
8922 return ref_p;
8923 }
8924
8925 /* Subroutine of optimize_external_refs, below.
8926
8927 If we see a type skeleton, record it as our stub. If we see external
8928 references, remember how many we've seen. */
8929
8930 static void
8931 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8932 {
8933 dw_die_ref c;
8934 dw_attr_node *a;
8935 unsigned ix;
8936 struct external_ref *ref_p;
8937
8938 if (is_type_die (die)
8939 && (c = get_AT_ref (die, DW_AT_signature)))
8940 {
8941 /* This is a local skeleton; use it for local references. */
8942 ref_p = lookup_external_ref (map, c);
8943 ref_p->stub = die;
8944 }
8945
8946 /* Scan the DIE references, and remember any that refer to DIEs from
8947 other CUs (i.e. those which are not marked). */
8948 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8949 if (AT_class (a) == dw_val_class_die_ref
8950 && (c = AT_ref (a))->die_mark == 0
8951 && is_type_die (c))
8952 {
8953 ref_p = lookup_external_ref (map, c);
8954 ref_p->n_refs++;
8955 }
8956
8957 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8958 }
8959
8960 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8961 points to an external_ref, DATA is the CU we're processing. If we don't
8962 already have a local stub, and we have multiple refs, build a stub. */
8963
8964 int
8965 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8966 {
8967 struct external_ref *ref_p = *slot;
8968
8969 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8970 {
8971 /* We have multiple references to this type, so build a small stub.
8972 Both of these forms are a bit dodgy from the perspective of the
8973 DWARF standard, since technically they should have names. */
8974 dw_die_ref cu = data;
8975 dw_die_ref type = ref_p->type;
8976 dw_die_ref stub = NULL;
8977
8978 if (type->comdat_type_p)
8979 {
8980 /* If we refer to this type via sig8, use AT_signature. */
8981 stub = new_die (type->die_tag, cu, NULL_TREE);
8982 add_AT_die_ref (stub, DW_AT_signature, type);
8983 }
8984 else
8985 {
8986 /* Otherwise, use a typedef with no name. */
8987 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8988 add_AT_die_ref (stub, DW_AT_type, type);
8989 }
8990
8991 stub->die_mark++;
8992 ref_p->stub = stub;
8993 }
8994 return 1;
8995 }
8996
8997 /* DIE is a unit; look through all the DIE references to see if there are
8998 any external references to types, and if so, create local stubs for
8999 them which will be applied in build_abbrev_table. This is useful because
9000 references to local DIEs are smaller. */
9001
9002 static external_ref_hash_type *
9003 optimize_external_refs (dw_die_ref die)
9004 {
9005 external_ref_hash_type *map = new external_ref_hash_type (10);
9006 optimize_external_refs_1 (die, map);
9007 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9008 return map;
9009 }
9010
9011 /* The following 4 variables are temporaries that are computed only during the
9012 build_abbrev_table call and used and released during the following
9013 optimize_abbrev_table call. */
9014
9015 /* First abbrev_id that can be optimized based on usage. */
9016 static unsigned int abbrev_opt_start;
9017
9018 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9019 abbrev_id smaller than this, because they must be already sized
9020 during build_abbrev_table). */
9021 static unsigned int abbrev_opt_base_type_end;
9022
9023 /* Vector of usage counts during build_abbrev_table. Indexed by
9024 abbrev_id - abbrev_opt_start. */
9025 static vec<unsigned int> abbrev_usage_count;
9026
9027 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9028 static vec<dw_die_ref> sorted_abbrev_dies;
9029
9030 /* The format of each DIE (and its attribute value pairs) is encoded in an
9031 abbreviation table. This routine builds the abbreviation table and assigns
9032 a unique abbreviation id for each abbreviation entry. The children of each
9033 die are visited recursively. */
9034
9035 static void
9036 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9037 {
9038 unsigned int abbrev_id = 0;
9039 dw_die_ref c;
9040 dw_attr_node *a;
9041 unsigned ix;
9042 dw_die_ref abbrev;
9043
9044 /* Scan the DIE references, and replace any that refer to
9045 DIEs from other CUs (i.e. those which are not marked) with
9046 the local stubs we built in optimize_external_refs. */
9047 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9048 if (AT_class (a) == dw_val_class_die_ref
9049 && (c = AT_ref (a))->die_mark == 0)
9050 {
9051 struct external_ref *ref_p;
9052 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9053
9054 ref_p = lookup_external_ref (extern_map, c);
9055 if (ref_p->stub && ref_p->stub != die)
9056 change_AT_die_ref (a, ref_p->stub);
9057 else
9058 /* We aren't changing this reference, so mark it external. */
9059 set_AT_ref_external (a, 1);
9060 }
9061
9062 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9063 {
9064 dw_attr_node *die_a, *abbrev_a;
9065 unsigned ix;
9066 bool ok = true;
9067
9068 if (abbrev_id == 0)
9069 continue;
9070 if (abbrev->die_tag != die->die_tag)
9071 continue;
9072 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9073 continue;
9074
9075 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9076 continue;
9077
9078 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9079 {
9080 abbrev_a = &(*abbrev->die_attr)[ix];
9081 if ((abbrev_a->dw_attr != die_a->dw_attr)
9082 || (value_format (abbrev_a) != value_format (die_a)))
9083 {
9084 ok = false;
9085 break;
9086 }
9087 }
9088 if (ok)
9089 break;
9090 }
9091
9092 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9093 {
9094 vec_safe_push (abbrev_die_table, die);
9095 if (abbrev_opt_start)
9096 abbrev_usage_count.safe_push (0);
9097 }
9098 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9099 {
9100 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9101 sorted_abbrev_dies.safe_push (die);
9102 }
9103
9104 die->die_abbrev = abbrev_id;
9105 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9106 }
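/* For illustration: two DW_TAG_member DIEs that both carry DW_AT_name
   (DW_FORM_strp), DW_AT_type (DW_FORM_ref4) and DW_AT_data_member_location
   (DW_FORM_data1) compare equal attribute-by-attribute in the loop above and
   therefore share one abbreviation entry; only the attribute values are
   repeated per DIE in .debug_info.  */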
9107
9108 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9109 by die_abbrev's usage count, from the most commonly used
9110 abbreviation to the least. */
9111
9112 static int
9113 die_abbrev_cmp (const void *p1, const void *p2)
9114 {
9115 dw_die_ref die1 = *(const dw_die_ref *) p1;
9116 dw_die_ref die2 = *(const dw_die_ref *) p2;
9117
9118 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9119 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9120
9121 if (die1->die_abbrev >= abbrev_opt_base_type_end
9122 && die2->die_abbrev >= abbrev_opt_base_type_end)
9123 {
9124 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9125 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9126 return -1;
9127 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9128 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9129 return 1;
9130 }
9131
9132 /* Stabilize the sort. */
9133 if (die1->die_abbrev < die2->die_abbrev)
9134 return -1;
9135 if (die1->die_abbrev > die2->die_abbrev)
9136 return 1;
9137
9138 return 0;
9139 }
9140
9141 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9142 of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9143 into dw_val_class_const_implicit or
9144 dw_val_class_unsigned_const_implicit. */
9145
9146 static void
9147 optimize_implicit_const (unsigned int first_id, unsigned int end,
9148 vec<bool> &implicit_consts)
9149 {
9150 /* It never makes sense if there is just one DIE using the abbreviation. */
9151 if (end < first_id + 2)
9152 return;
9153
9154 dw_attr_node *a;
9155 unsigned ix, i;
9156 dw_die_ref die = sorted_abbrev_dies[first_id];
9157 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9158 if (implicit_consts[ix])
9159 {
9160 enum dw_val_class new_class = dw_val_class_none;
9161 switch (AT_class (a))
9162 {
9163 case dw_val_class_unsigned_const:
9164 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9165 continue;
9166
9167 /* The .debug_abbrev section will grow by
9168 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9169 in all the DIEs using that abbreviation. */
9170 if (constant_size (AT_unsigned (a)) * (end - first_id)
9171 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9172 continue;
9173
9174 new_class = dw_val_class_unsigned_const_implicit;
9175 break;
9176
9177 case dw_val_class_const:
9178 new_class = dw_val_class_const_implicit;
9179 break;
9180
9181 case dw_val_class_file:
9182 new_class = dw_val_class_file_implicit;
9183 break;
9184
9185 default:
9186 continue;
9187 }
9188 for (i = first_id; i < end; i++)
9189 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9190 = new_class;
9191 }
9192 }
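/* Illustrative numbers: if 100 DIEs share an abbreviation whose
   DW_AT_byte_size is 4 in every one of them, moving the value into the
   abbreviation as DW_FORM_implicit_const adds one sleb128 byte to
   .debug_abbrev and removes one DW_FORM_data1 byte from each of the 100 DIEs
   in .debug_info, a net saving of 99 bytes.  */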
9193
9194 /* Attempt to optimize the abbreviation table for abbreviations with ids of
9195 abbrev_opt_start and above. */
9196
9197 static void
9198 optimize_abbrev_table (void)
9199 {
9200 if (abbrev_opt_start
9201 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9202 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9203 {
9204 auto_vec<bool, 32> implicit_consts;
9205 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9206
9207 unsigned int abbrev_id = abbrev_opt_start - 1;
9208 unsigned int first_id = ~0U;
9209 unsigned int last_abbrev_id = 0;
9210 unsigned int i;
9211 dw_die_ref die;
9212 if (abbrev_opt_base_type_end > abbrev_opt_start)
9213 abbrev_id = abbrev_opt_base_type_end - 1;
9214 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9215 most commonly used abbreviations come first. */
9216 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9217 {
9218 dw_attr_node *a;
9219 unsigned ix;
9220
9221 /* If calc_base_type_die_sizes has been called, the CU and
9222 base types after it can't be optimized, because we've already
9223 calculated their DIE offsets. We've sorted them first. */
9224 if (die->die_abbrev < abbrev_opt_base_type_end)
9225 continue;
9226 if (die->die_abbrev != last_abbrev_id)
9227 {
9228 last_abbrev_id = die->die_abbrev;
9229 if (dwarf_version >= 5 && first_id != ~0U)
9230 optimize_implicit_const (first_id, i, implicit_consts);
9231 abbrev_id++;
9232 (*abbrev_die_table)[abbrev_id] = die;
9233 if (dwarf_version >= 5)
9234 {
9235 first_id = i;
9236 implicit_consts.truncate (0);
9237
9238 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9239 switch (AT_class (a))
9240 {
9241 case dw_val_class_const:
9242 case dw_val_class_unsigned_const:
9243 case dw_val_class_file:
9244 implicit_consts.safe_push (true);
9245 break;
9246 default:
9247 implicit_consts.safe_push (false);
9248 break;
9249 }
9250 }
9251 }
9252 else if (dwarf_version >= 5)
9253 {
9254 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9255 if (!implicit_consts[ix])
9256 continue;
9257 else
9258 {
9259 dw_attr_node *other_a
9260 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9261 if (!dw_val_equal_p (&a->dw_attr_val,
9262 &other_a->dw_attr_val))
9263 implicit_consts[ix] = false;
9264 }
9265 }
9266 die->die_abbrev = abbrev_id;
9267 }
9268 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9269 if (dwarf_version >= 5 && first_id != ~0U)
9270 optimize_implicit_const (first_id, i, implicit_consts);
9271 }
9272
9273 abbrev_opt_start = 0;
9274 abbrev_opt_base_type_end = 0;
9275 abbrev_usage_count.release ();
9276 sorted_abbrev_dies.release ();
9277 }
9278 \f
9279 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9280
9281 static int
9282 constant_size (unsigned HOST_WIDE_INT value)
9283 {
9284 int log;
9285
9286 if (value == 0)
9287 log = 0;
9288 else
9289 log = floor_log2 (value);
9290
9291 log = log / 8;
9292 log = 1 << (floor_log2 (log) + 1);
9293
9294 return log;
9295 }
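/* For reference, the values produced by constant_size above:
   constant_size (0) == 1, constant_size (255) == 1, constant_size (256) == 2,
   constant_size (65536) == 4 and constant_size of anything needing more than
   32 bits == 8; the result is always 1, 2, 4 or 8.  */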
9296
9297 /* Return the size of a DIE as it is represented in the
9298 .debug_info section. */
9299
9300 static unsigned long
9301 size_of_die (dw_die_ref die)
9302 {
9303 unsigned long size = 0;
9304 dw_attr_node *a;
9305 unsigned ix;
9306 enum dwarf_form form;
9307
9308 size += size_of_uleb128 (die->die_abbrev);
9309 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9310 {
9311 switch (AT_class (a))
9312 {
9313 case dw_val_class_addr:
9314 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9315 {
9316 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9317 size += size_of_uleb128 (AT_index (a));
9318 }
9319 else
9320 size += DWARF2_ADDR_SIZE;
9321 break;
9322 case dw_val_class_offset:
9323 size += DWARF_OFFSET_SIZE;
9324 break;
9325 case dw_val_class_loc:
9326 {
9327 unsigned long lsize = size_of_locs (AT_loc (a));
9328
9329 /* Block length. */
9330 if (dwarf_version >= 4)
9331 size += size_of_uleb128 (lsize);
9332 else
9333 size += constant_size (lsize);
9334 size += lsize;
9335 }
9336 break;
9337 case dw_val_class_loc_list:
9338 case dw_val_class_view_list:
9339 if (dwarf_split_debug_info && dwarf_version >= 5)
9340 {
9341 gcc_assert (AT_loc_list (a)->num_assigned);
9342 size += size_of_uleb128 (AT_loc_list (a)->hash);
9343 }
9344 else
9345 size += DWARF_OFFSET_SIZE;
9346 break;
9347 case dw_val_class_range_list:
9348 if (value_format (a) == DW_FORM_rnglistx)
9349 {
9350 gcc_assert (rnglist_idx);
9351 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9352 size += size_of_uleb128 (r->idx);
9353 }
9354 else
9355 size += DWARF_OFFSET_SIZE;
9356 break;
9357 case dw_val_class_const:
9358 size += size_of_sleb128 (AT_int (a));
9359 break;
9360 case dw_val_class_unsigned_const:
9361 {
9362 int csize = constant_size (AT_unsigned (a));
9363 if (dwarf_version == 3
9364 && a->dw_attr == DW_AT_data_member_location
9365 && csize >= 4)
9366 size += size_of_uleb128 (AT_unsigned (a));
9367 else
9368 size += csize;
9369 }
9370 break;
9371 case dw_val_class_symview:
9372 if (symview_upper_bound <= 0xff)
9373 size += 1;
9374 else if (symview_upper_bound <= 0xffff)
9375 size += 2;
9376 else if (symview_upper_bound <= 0xffffffff)
9377 size += 4;
9378 else
9379 size += 8;
9380 break;
9381 case dw_val_class_const_implicit:
9382 case dw_val_class_unsigned_const_implicit:
9383 case dw_val_class_file_implicit:
9384 /* These occupy no size in the DIE, just an extra sleb128 in
9385 .debug_abbrev. */
9386 break;
9387 case dw_val_class_const_double:
9388 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9389 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9390 size++; /* block */
9391 break;
9392 case dw_val_class_wide_int:
9393 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9394 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9395 if (get_full_len (*a->dw_attr_val.v.val_wide)
9396 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9397 size++; /* block */
9398 break;
9399 case dw_val_class_vec:
9400 size += constant_size (a->dw_attr_val.v.val_vec.length
9401 * a->dw_attr_val.v.val_vec.elt_size)
9402 + a->dw_attr_val.v.val_vec.length
9403 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9404 break;
9405 case dw_val_class_flag:
9406 if (dwarf_version >= 4)
9407 /* Currently all add_AT_flag calls pass in 1 as last argument,
9408 so DW_FORM_flag_present can be used. If that ever changes,
9409 we'll need to use DW_FORM_flag and have some optimization
9410 in build_abbrev_table that will change those to
9411 DW_FORM_flag_present if it is set to 1 in all DIEs using
9412 the same abbrev entry. */
9413 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9414 else
9415 size += 1;
9416 break;
9417 case dw_val_class_die_ref:
9418 if (AT_ref_external (a))
9419 {
9420 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9421 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9422 is sized by target address length, whereas in DWARF3
9423 it's always sized as an offset. */
9424 if (use_debug_types)
9425 size += DWARF_TYPE_SIGNATURE_SIZE;
9426 else if (dwarf_version == 2)
9427 size += DWARF2_ADDR_SIZE;
9428 else
9429 size += DWARF_OFFSET_SIZE;
9430 }
9431 else
9432 size += DWARF_OFFSET_SIZE;
9433 break;
9434 case dw_val_class_fde_ref:
9435 size += DWARF_OFFSET_SIZE;
9436 break;
9437 case dw_val_class_lbl_id:
9438 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9439 {
9440 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9441 size += size_of_uleb128 (AT_index (a));
9442 }
9443 else
9444 size += DWARF2_ADDR_SIZE;
9445 break;
9446 case dw_val_class_lineptr:
9447 case dw_val_class_macptr:
9448 case dw_val_class_loclistsptr:
9449 size += DWARF_OFFSET_SIZE;
9450 break;
9451 case dw_val_class_str:
9452 form = AT_string_form (a);
9453 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9454 size += DWARF_OFFSET_SIZE;
9455 else if (form == dwarf_FORM (DW_FORM_strx))
9456 size += size_of_uleb128 (AT_index (a));
9457 else
9458 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9459 break;
9460 case dw_val_class_file:
9461 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9462 break;
9463 case dw_val_class_data8:
9464 size += 8;
9465 break;
9466 case dw_val_class_vms_delta:
9467 size += DWARF_OFFSET_SIZE;
9468 break;
9469 case dw_val_class_high_pc:
9470 size += DWARF2_ADDR_SIZE;
9471 break;
9472 case dw_val_class_discr_value:
9473 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9474 break;
9475 case dw_val_class_discr_list:
9476 {
9477 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9478
9479 /* This is a block, so we have the block length and then its
9480 data. */
9481 size += constant_size (block_size) + block_size;
9482 }
9483 break;
9484 default:
9485 gcc_unreachable ();
9486 }
9487 }
9488
9489 return size;
9490 }
9491
9492 /* Size the debugging information associated with a given DIE. Visits the
9493 DIE's children recursively. Updates the global variable next_die_offset
9494 each time through. Uses the current value of next_die_offset to update the
9495 die_offset field in each DIE. */
9496
9497 static void
9498 calc_die_sizes (dw_die_ref die)
9499 {
9500 dw_die_ref c;
9501
9502 gcc_assert (die->die_offset == 0
9503 || (unsigned long int) die->die_offset == next_die_offset);
9504 die->die_offset = next_die_offset;
9505 next_die_offset += size_of_die (die);
9506
9507 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9508
9509 if (die->die_child != NULL)
9510 /* Count the null byte used to terminate sibling lists. */
9511 next_die_offset += 1;
9512 }
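/* Worked example (sizes are hypothetical): with a 32-bit DWARF 4 CU header of
   11 bytes, the compile unit DIE gets die_offset 11; if size_of_die returns
   24 for it, its first child gets die_offset 35, and one extra byte is
   counted after the last child of every DIE that has children, for the
   terminating null entry.  */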
9513
9514 /* Size just the base type children at the start of the CU.
9515 This is needed because build_abbrev_table needs to size locs
9516 and sizing of type based stack ops needs to know die_offset
9517 values for the base types. */
9518
9519 static void
9520 calc_base_type_die_sizes (void)
9521 {
9522 unsigned long die_offset = (dwarf_split_debug_info
9523 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9524 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9525 unsigned int i;
9526 dw_die_ref base_type;
9527 #if ENABLE_ASSERT_CHECKING
9528 dw_die_ref prev = comp_unit_die ()->die_child;
9529 #endif
9530
9531 die_offset += size_of_die (comp_unit_die ());
9532 for (i = 0; base_types.iterate (i, &base_type); i++)
9533 {
9534 #if ENABLE_ASSERT_CHECKING
9535 gcc_assert (base_type->die_offset == 0
9536 && prev->die_sib == base_type
9537 && base_type->die_child == NULL
9538 && base_type->die_abbrev);
9539 prev = base_type;
9540 #endif
9541 if (abbrev_opt_start
9542 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9543 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9544 base_type->die_offset = die_offset;
9545 die_offset += size_of_die (base_type);
9546 }
9547 }
9548
9549 /* Set the marks for a die and its children. We do this so
9550 that we know whether or not a reference needs to use FORM_ref_addr; only
9551 DIEs in the same CU will be marked. We used to clear out the offset
9552 and use that as the flag, but ran into ordering problems. */
9553
9554 static void
9555 mark_dies (dw_die_ref die)
9556 {
9557 dw_die_ref c;
9558
9559 gcc_assert (!die->die_mark);
9560
9561 die->die_mark = 1;
9562 FOR_EACH_CHILD (die, c, mark_dies (c));
9563 }
9564
9565 /* Clear the marks for a die and its children. */
9566
9567 static void
9568 unmark_dies (dw_die_ref die)
9569 {
9570 dw_die_ref c;
9571
9572 if (! use_debug_types)
9573 gcc_assert (die->die_mark);
9574
9575 die->die_mark = 0;
9576 FOR_EACH_CHILD (die, c, unmark_dies (c));
9577 }
9578
9579 /* Clear the marks for a die, its children and referred dies. */
9580
9581 static void
9582 unmark_all_dies (dw_die_ref die)
9583 {
9584 dw_die_ref c;
9585 dw_attr_node *a;
9586 unsigned ix;
9587
9588 if (!die->die_mark)
9589 return;
9590 die->die_mark = 0;
9591
9592 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9593
9594 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9595 if (AT_class (a) == dw_val_class_die_ref)
9596 unmark_all_dies (AT_ref (a));
9597 }
9598
9599 /* Calculate if the entry should appear in the final output file. It may be
9600 from a pruned type. */
9601
9602 static bool
9603 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9604 {
9605 /* By limiting gnu pubnames to definitions only, gold can generate a
9606 gdb index without entries for declarations, which don't include
9607 enough information to be useful. */
9608 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9609 return false;
9610
9611 if (table == pubname_table)
9612 {
9613 /* Enumerator names are part of the pubname table, but the
9614 parent DW_TAG_enumeration_type die may have been pruned.
9615 Don't output them if that is the case. */
9616 if (p->die->die_tag == DW_TAG_enumerator &&
9617 (p->die->die_parent == NULL
9618 || !p->die->die_parent->die_perennial_p))
9619 return false;
9620
9621 /* Everything else in the pubname table is included. */
9622 return true;
9623 }
9624
9625 /* The pubtypes table shouldn't include types that have been
9626 pruned. */
9627 return (p->die->die_offset != 0
9628 || !flag_eliminate_unused_debug_types);
9629 }
9630
9631 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9632 generated for the compilation unit. */
9633
9634 static unsigned long
9635 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9636 {
9637 unsigned long size;
9638 unsigned i;
9639 pubname_entry *p;
9640 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9641
9642 size = DWARF_PUBNAMES_HEADER_SIZE;
9643 FOR_EACH_VEC_ELT (*names, i, p)
9644 if (include_pubname_in_output (names, p))
9645 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9646
9647 size += DWARF_OFFSET_SIZE;
9648 return size;
9649 }
9650
9651 /* Return the size of the information in the .debug_aranges section. */
9652
9653 static unsigned long
9654 size_of_aranges (void)
9655 {
9656 unsigned long size;
9657
9658 size = DWARF_ARANGES_HEADER_SIZE;
9659
9660 /* Count the address/length pair for this compilation unit. */
9661 if (text_section_used)
9662 size += 2 * DWARF2_ADDR_SIZE;
9663 if (cold_text_section_used)
9664 size += 2 * DWARF2_ADDR_SIZE;
9665 if (have_multiple_function_sections)
9666 {
9667 unsigned fde_idx;
9668 dw_fde_ref fde;
9669
9670 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9671 {
9672 if (DECL_IGNORED_P (fde->decl))
9673 continue;
9674 if (!fde->in_std_section)
9675 size += 2 * DWARF2_ADDR_SIZE;
9676 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9677 size += 2 * DWARF2_ADDR_SIZE;
9678 }
9679 }
9680
9681 /* Count the two zero words used to terminate the address range table. */
9682 size += 2 * DWARF2_ADDR_SIZE;
9683 return size;
9684 }
9685 \f
9686 /* Select the encoding of an attribute value. */
9687
9688 static enum dwarf_form
9689 value_format (dw_attr_node *a)
9690 {
9691 switch (AT_class (a))
9692 {
9693 case dw_val_class_addr:
9694 /* Only very few attributes allow DW_FORM_addr. */
9695 switch (a->dw_attr)
9696 {
9697 case DW_AT_low_pc:
9698 case DW_AT_high_pc:
9699 case DW_AT_entry_pc:
9700 case DW_AT_trampoline:
9701 return (AT_index (a) == NOT_INDEXED
9702 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9703 default:
9704 break;
9705 }
9706 switch (DWARF2_ADDR_SIZE)
9707 {
9708 case 1:
9709 return DW_FORM_data1;
9710 case 2:
9711 return DW_FORM_data2;
9712 case 4:
9713 return DW_FORM_data4;
9714 case 8:
9715 return DW_FORM_data8;
9716 default:
9717 gcc_unreachable ();
9718 }
9719 case dw_val_class_loc_list:
9720 case dw_val_class_view_list:
9721 if (dwarf_split_debug_info
9722 && dwarf_version >= 5
9723 && AT_loc_list (a)->num_assigned)
9724 return DW_FORM_loclistx;
9725 /* FALLTHRU */
9726 case dw_val_class_range_list:
9727 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9728 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9729 care about sizes of .debug* sections in shared libraries and
9730 executables and don't take into account relocations that affect just
9731 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9732 table in the .debug_rnglists section. */
9733 if (dwarf_split_debug_info
9734 && dwarf_version >= 5
9735 && AT_class (a) == dw_val_class_range_list
9736 && rnglist_idx
9737 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9738 return DW_FORM_rnglistx;
9739 if (dwarf_version >= 4)
9740 return DW_FORM_sec_offset;
9741 /* FALLTHRU */
9742 case dw_val_class_vms_delta:
9743 case dw_val_class_offset:
9744 switch (DWARF_OFFSET_SIZE)
9745 {
9746 case 4:
9747 return DW_FORM_data4;
9748 case 8:
9749 return DW_FORM_data8;
9750 default:
9751 gcc_unreachable ();
9752 }
9753 case dw_val_class_loc:
9754 if (dwarf_version >= 4)
9755 return DW_FORM_exprloc;
9756 switch (constant_size (size_of_locs (AT_loc (a))))
9757 {
9758 case 1:
9759 return DW_FORM_block1;
9760 case 2:
9761 return DW_FORM_block2;
9762 case 4:
9763 return DW_FORM_block4;
9764 default:
9765 gcc_unreachable ();
9766 }
9767 case dw_val_class_const:
9768 return DW_FORM_sdata;
9769 case dw_val_class_unsigned_const:
9770 switch (constant_size (AT_unsigned (a)))
9771 {
9772 case 1:
9773 return DW_FORM_data1;
9774 case 2:
9775 return DW_FORM_data2;
9776 case 4:
9777 /* In DWARF3 DW_AT_data_member_location with
9778 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9779 constant, so we need to use DW_FORM_udata if we need
9780 a large constant. */
9781 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9782 return DW_FORM_udata;
9783 return DW_FORM_data4;
9784 case 8:
9785 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9786 return DW_FORM_udata;
9787 return DW_FORM_data8;
9788 default:
9789 gcc_unreachable ();
9790 }
9791 case dw_val_class_const_implicit:
9792 case dw_val_class_unsigned_const_implicit:
9793 case dw_val_class_file_implicit:
9794 return DW_FORM_implicit_const;
9795 case dw_val_class_const_double:
9796 switch (HOST_BITS_PER_WIDE_INT)
9797 {
9798 case 8:
9799 return DW_FORM_data2;
9800 case 16:
9801 return DW_FORM_data4;
9802 case 32:
9803 return DW_FORM_data8;
9804 case 64:
9805 if (dwarf_version >= 5)
9806 return DW_FORM_data16;
9807 /* FALLTHRU */
9808 default:
9809 return DW_FORM_block1;
9810 }
9811 case dw_val_class_wide_int:
9812 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9813 {
9814 case 8:
9815 return DW_FORM_data1;
9816 case 16:
9817 return DW_FORM_data2;
9818 case 32:
9819 return DW_FORM_data4;
9820 case 64:
9821 return DW_FORM_data8;
9822 case 128:
9823 if (dwarf_version >= 5)
9824 return DW_FORM_data16;
9825 /* FALLTHRU */
9826 default:
9827 return DW_FORM_block1;
9828 }
9829 case dw_val_class_symview:
9830 /* ??? We might use uleb128, but then we'd have to compute
9831 .debug_info offsets in the assembler. */
9832 if (symview_upper_bound <= 0xff)
9833 return DW_FORM_data1;
9834 else if (symview_upper_bound <= 0xffff)
9835 return DW_FORM_data2;
9836 else if (symview_upper_bound <= 0xffffffff)
9837 return DW_FORM_data4;
9838 else
9839 return DW_FORM_data8;
9840 case dw_val_class_vec:
9841 switch (constant_size (a->dw_attr_val.v.val_vec.length
9842 * a->dw_attr_val.v.val_vec.elt_size))
9843 {
9844 case 1:
9845 return DW_FORM_block1;
9846 case 2:
9847 return DW_FORM_block2;
9848 case 4:
9849 return DW_FORM_block4;
9850 default:
9851 gcc_unreachable ();
9852 }
9853 case dw_val_class_flag:
9854 if (dwarf_version >= 4)
9855 {
9856 /* Currently all add_AT_flag calls pass in 1 as last argument,
9857 so DW_FORM_flag_present can be used. If that ever changes,
9858 we'll need to use DW_FORM_flag and have some optimization
9859 in build_abbrev_table that will change those to
9860 DW_FORM_flag_present if it is set to 1 in all DIEs using
9861 the same abbrev entry. */
9862 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9863 return DW_FORM_flag_present;
9864 }
9865 return DW_FORM_flag;
9866 case dw_val_class_die_ref:
9867 if (AT_ref_external (a))
9868 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9869 else
9870 return DW_FORM_ref4;
9871 case dw_val_class_fde_ref:
9872 return DW_FORM_data4;
9873 case dw_val_class_lbl_id:
9874 return (AT_index (a) == NOT_INDEXED
9875 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9876 case dw_val_class_lineptr:
9877 case dw_val_class_macptr:
9878 case dw_val_class_loclistsptr:
9879 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data4;
9880 case dw_val_class_str:
9881 return AT_string_form (a);
9882 case dw_val_class_file:
9883 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9884 {
9885 case 1:
9886 return DW_FORM_data1;
9887 case 2:
9888 return DW_FORM_data2;
9889 case 4:
9890 return DW_FORM_data4;
9891 default:
9892 gcc_unreachable ();
9893 }
9894
9895 case dw_val_class_data8:
9896 return DW_FORM_data8;
9897
9898 case dw_val_class_high_pc:
9899 switch (DWARF2_ADDR_SIZE)
9900 {
9901 case 1:
9902 return DW_FORM_data1;
9903 case 2:
9904 return DW_FORM_data2;
9905 case 4:
9906 return DW_FORM_data4;
9907 case 8:
9908 return DW_FORM_data8;
9909 default:
9910 gcc_unreachable ();
9911 }
9912
9913 case dw_val_class_discr_value:
9914 return (a->dw_attr_val.v.val_discr_value.pos
9915 ? DW_FORM_udata
9916 : DW_FORM_sdata);
9917 case dw_val_class_discr_list:
9918 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9919 {
9920 case 1:
9921 return DW_FORM_block1;
9922 case 2:
9923 return DW_FORM_block2;
9924 case 4:
9925 return DW_FORM_block4;
9926 default:
9927 gcc_unreachable ();
9928 }
9929
9930 default:
9931 gcc_unreachable ();
9932 }
9933 }
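/* Example of the DW_AT_data_member_location special case above: under
   -gdwarf-3 a member offset of 70000 (constant_size == 4) is encoded as
   DW_FORM_udata rather than DW_FORM_data4, because a data4/data8 value of
   that attribute would be read as a loclistptr section offset in DWARF 3,
   not as a constant.  */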
9934
9935 /* Output the encoding of an attribute value. */
9936
9937 static void
9938 output_value_format (dw_attr_node *a)
9939 {
9940 enum dwarf_form form = value_format (a);
9941
9942 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9943 }
9944
9945 /* Given a die and id, produce the appropriate abbreviations. */
9946
9947 static void
9948 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9949 {
9950 unsigned ix;
9951 dw_attr_node *a_attr;
9952
9953 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9954 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9955 dwarf_tag_name (abbrev->die_tag));
9956
9957 if (abbrev->die_child != NULL)
9958 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9959 else
9960 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9961
9962 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9963 {
9964 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9965 dwarf_attr_name (a_attr->dw_attr));
9966 output_value_format (a_attr);
9967 if (value_format (a_attr) == DW_FORM_implicit_const)
9968 {
9969 if (AT_class (a_attr) == dw_val_class_file_implicit)
9970 {
9971 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9972 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9973 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9974 }
9975 else
9976 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9977 }
9978 }
9979
9980 dw2_asm_output_data (1, 0, NULL);
9981 dw2_asm_output_data (1, 0, NULL);
9982 }
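/* For illustration, a hypothetical abbreviation for a DW_TAG_variable with
   DW_AT_name (DW_FORM_strp) and DW_AT_type (DW_FORM_ref4) and no children is
   emitted by the routine above roughly as:

       .uleb128 0x3      (abbrev code)
       .uleb128 0x34     (TAG: DW_TAG_variable)
       .byte    0        (DW_children_no)
       .uleb128 0x3      (DW_AT_name)
       .uleb128 0xe      (DW_FORM_strp)
       .uleb128 0x49     (DW_AT_type)
       .uleb128 0x13     (DW_FORM_ref4)
       .byte    0
       .byte    0        (attribute list terminator)  */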
9983
9984
9985 /* Output the .debug_abbrev section which defines the DIE abbreviation
9986 table. */
9987
9988 static void
9989 output_abbrev_section (void)
9990 {
9991 unsigned int abbrev_id;
9992 dw_die_ref abbrev;
9993
9994 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9995 if (abbrev_id != 0)
9996 output_die_abbrevs (abbrev_id, abbrev);
9997
9998 /* Terminate the table. */
9999 dw2_asm_output_data (1, 0, NULL);
10000 }
10001
10002 /* Return a new location list, given the begin and end range, and the
10003 expression. */
10004
10005 static inline dw_loc_list_ref
10006 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10007 const char *end, var_loc_view vend,
10008 const char *section)
10009 {
10010 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10011
10012 retlist->begin = begin;
10013 retlist->begin_entry = NULL;
10014 retlist->end = end;
10015 retlist->expr = expr;
10016 retlist->section = section;
10017 retlist->vbegin = vbegin;
10018 retlist->vend = vend;
10019
10020 return retlist;
10021 }
10022
10023 /* Return true iff there's any nonzero view number in the loc list.
10024
10025 ??? When views are not enabled, we'll often extend a single range
10026 to the entire function, so that we emit a single location
10027 expression rather than a location list. With views, even with a
10028 single range, we'll output a list if start or end have a nonzero
10029 view. If we change this, we may want to stop splitting a single
10030 range in dw_loc_list just because of a nonzero view, even if it
10031 straddles across hot/cold partitions. */
10032
10033 static bool
10034 loc_list_has_views (dw_loc_list_ref list)
10035 {
10036 if (!debug_variable_location_views)
10037 return false;
10038
10039 for (dw_loc_list_ref loc = list;
10040 loc != NULL; loc = loc->dw_loc_next)
10041 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10042 return true;
10043
10044 return false;
10045 }
10046
10047 /* Generate a new internal symbol for this location list node, if it
10048 hasn't got one yet. */
10049
10050 static inline void
10051 gen_llsym (dw_loc_list_ref list)
10052 {
10053 gcc_assert (!list->ll_symbol);
10054 list->ll_symbol = gen_internal_sym ("LLST");
10055
10056 if (!loc_list_has_views (list))
10057 return;
10058
10059 if (dwarf2out_locviews_in_attribute ())
10060 {
10061 /* Use the same label_num for the view list. */
10062 label_num--;
10063 list->vl_symbol = gen_internal_sym ("LVUS");
10064 }
10065 else
10066 list->vl_symbol = list->ll_symbol;
10067 }
10068
10069 /* Generate a symbol for the list, but only if we really want to emit
10070 it as a list. */
10071
10072 static inline void
10073 maybe_gen_llsym (dw_loc_list_ref list)
10074 {
10075 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10076 return;
10077
10078 gen_llsym (list);
10079 }
10080
10081 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10082 NULL, don't consider size of the location expression. If we're not
10083 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10084 representation in *SIZEP. */
10085
10086 static bool
10087 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10088 {
10089 /* Don't output an entry that starts and ends at the same address. */
10090 if (strcmp (curr->begin, curr->end) == 0
10091 && curr->vbegin == curr->vend && !curr->force)
10092 return true;
10093
10094 if (!sizep)
10095 return false;
10096
10097 unsigned long size = size_of_locs (curr->expr);
10098
10099 /* If the expression is too large, drop it on the floor. We could
10100 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10101 in the expression, but >= 64KB expressions for a single value
10102 in a single range are unlikely to be very useful. */
10103 if (dwarf_version < 5 && size > 0xffff)
10104 return true;
10105
10106 *sizep = size;
10107
10108 return false;
10109 }
10110
10111 /* Output a view pair loclist entry for CURR, if it requires one. */
10112
10113 static void
10114 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10115 {
10116 if (!dwarf2out_locviews_in_loclist ())
10117 return;
10118
10119 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10120 return;
10121
10122 #ifdef DW_LLE_view_pair
10123 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10124
10125 if (dwarf2out_as_locview_support)
10126 {
10127 if (ZERO_VIEW_P (curr->vbegin))
10128 dw2_asm_output_data_uleb128 (0, "Location view begin");
10129 else
10130 {
10131 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10132 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10133 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10134 }
10135
10136 if (ZERO_VIEW_P (curr->vend))
10137 dw2_asm_output_data_uleb128 (0, "Location view end");
10138 else
10139 {
10140 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10141 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10142 dw2_asm_output_symname_uleb128 (label, "Location view end");
10143 }
10144 }
10145 else
10146 {
10147 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10148 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10149 }
10150 #endif /* DW_LLE_view_pair */
10151
10152 return;
10153 }
10154
10155 /* Output the location list given to us. */
10156
10157 static void
10158 output_loc_list (dw_loc_list_ref list_head)
10159 {
10160 int vcount = 0, lcount = 0;
10161
10162 if (list_head->emitted)
10163 return;
10164 list_head->emitted = true;
10165
10166 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10167 {
10168 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10169
10170 for (dw_loc_list_ref curr = list_head; curr != NULL;
10171 curr = curr->dw_loc_next)
10172 {
10173 unsigned long size;
10174
10175 if (skip_loc_list_entry (curr, &size))
10176 continue;
10177
10178 vcount++;
10179
10180 /* ?? dwarf_split_debug_info? */
10181 if (dwarf2out_as_locview_support)
10182 {
10183 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10184
10185 if (!ZERO_VIEW_P (curr->vbegin))
10186 {
10187 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10188 dw2_asm_output_symname_uleb128 (label,
10189 "View list begin (%s)",
10190 list_head->vl_symbol);
10191 }
10192 else
10193 dw2_asm_output_data_uleb128 (0,
10194 "View list begin (%s)",
10195 list_head->vl_symbol);
10196
10197 if (!ZERO_VIEW_P (curr->vend))
10198 {
10199 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10200 dw2_asm_output_symname_uleb128 (label,
10201 "View list end (%s)",
10202 list_head->vl_symbol);
10203 }
10204 else
10205 dw2_asm_output_data_uleb128 (0,
10206 "View list end (%s)",
10207 list_head->vl_symbol);
10208 }
10209 else
10210 {
10211 dw2_asm_output_data_uleb128 (curr->vbegin,
10212 "View list begin (%s)",
10213 list_head->vl_symbol);
10214 dw2_asm_output_data_uleb128 (curr->vend,
10215 "View list end (%s)",
10216 list_head->vl_symbol);
10217 }
10218 }
10219 }
10220
10221 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10222
10223 const char *last_section = NULL;
10224 const char *base_label = NULL;
10225
10226 /* Walk the location list, and output each range + expression. */
10227 for (dw_loc_list_ref curr = list_head; curr != NULL;
10228 curr = curr->dw_loc_next)
10229 {
10230 unsigned long size;
10231
10232 /* Skip this entry? If we skip it here, we must skip it in the
10233 view list above as well. */
10234 if (skip_loc_list_entry (curr, &size))
10235 continue;
10236
10237 lcount++;
10238
10239 if (dwarf_version >= 5)
10240 {
10241 if (dwarf_split_debug_info)
10242 {
10243 dwarf2out_maybe_output_loclist_view_pair (curr);
10244 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10245 uleb128 index into .debug_addr and uleb128 length. */
10246 dw2_asm_output_data (1, DW_LLE_startx_length,
10247 "DW_LLE_startx_length (%s)",
10248 list_head->ll_symbol);
10249 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10250 "Location list range start index "
10251 "(%s)", curr->begin);
10252 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10253 For that case we probably need to emit DW_LLE_startx_endx,
10254 but we'd need 2 .debug_addr entries rather than just one. */
10255 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10256 "Location list length (%s)",
10257 list_head->ll_symbol);
10258 }
10259 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10260 {
10261 dwarf2out_maybe_output_loclist_view_pair (curr);
10262 /* If all code is in .text section, the base address is
10263 already provided by the CU attributes. Use
10264 DW_LLE_offset_pair where both addresses are uleb128 encoded
10265 offsets against that base. */
10266 dw2_asm_output_data (1, DW_LLE_offset_pair,
10267 "DW_LLE_offset_pair (%s)",
10268 list_head->ll_symbol);
10269 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10270 "Location list begin address (%s)",
10271 list_head->ll_symbol);
10272 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10273 "Location list end address (%s)",
10274 list_head->ll_symbol);
10275 }
10276 else if (HAVE_AS_LEB128)
10277 {
10278 /* Otherwise, find out how many consecutive entries could share
10279 the same base entry. If just one, emit DW_LLE_start_length,
10280 otherwise emit DW_LLE_base_address for the base address
10281 followed by a series of DW_LLE_offset_pair. */
10282 if (last_section == NULL || curr->section != last_section)
10283 {
10284 dw_loc_list_ref curr2;
10285 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10286 curr2 = curr2->dw_loc_next)
10287 {
10288 if (strcmp (curr2->begin, curr2->end) == 0
10289 && !curr2->force)
10290 continue;
10291 break;
10292 }
10293 if (curr2 == NULL || curr->section != curr2->section)
10294 last_section = NULL;
10295 else
10296 {
10297 last_section = curr->section;
10298 base_label = curr->begin;
10299 dw2_asm_output_data (1, DW_LLE_base_address,
10300 "DW_LLE_base_address (%s)",
10301 list_head->ll_symbol);
10302 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10303 "Base address (%s)",
10304 list_head->ll_symbol);
10305 }
10306 }
10307 /* Only one entry with the same base address. Use
10308 DW_LLE_start_length with absolute address and uleb128
10309 length. */
10310 if (last_section == NULL)
10311 {
10312 dwarf2out_maybe_output_loclist_view_pair (curr);
10313 dw2_asm_output_data (1, DW_LLE_start_length,
10314 "DW_LLE_start_length (%s)",
10315 list_head->ll_symbol);
10316 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10317 "Location list begin address (%s)",
10318 list_head->ll_symbol);
10319 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10320 "Location list length "
10321 "(%s)", list_head->ll_symbol);
10322 }
10323 /* Otherwise emit DW_LLE_offset_pair, relative to the above-emitted
10324 DW_LLE_base_address. */
10325 else
10326 {
10327 dwarf2out_maybe_output_loclist_view_pair (curr);
10328 dw2_asm_output_data (1, DW_LLE_offset_pair,
10329 "DW_LLE_offset_pair (%s)",
10330 list_head->ll_symbol);
10331 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10332 "Location list begin address "
10333 "(%s)", list_head->ll_symbol);
10334 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10335 "Location list end address "
10336 "(%s)", list_head->ll_symbol);
10337 }
10338 }
10339 /* The assembler does not support the .uleb128 directive. Emit
10340 DW_LLE_start_end with a pair of absolute addresses. */
10341 else
10342 {
10343 dwarf2out_maybe_output_loclist_view_pair (curr);
10344 dw2_asm_output_data (1, DW_LLE_start_end,
10345 "DW_LLE_start_end (%s)",
10346 list_head->ll_symbol);
10347 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10348 "Location list begin address (%s)",
10349 list_head->ll_symbol);
10350 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10351 "Location list end address (%s)",
10352 list_head->ll_symbol);
10353 }
10354 }
10355 else if (dwarf_split_debug_info)
10356 {
10357 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10358 and 4 byte length. */
10359 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10360 "Location list start/length entry (%s)",
10361 list_head->ll_symbol);
10362 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10363 "Location list range start index (%s)",
10364 curr->begin);
10365 /* The length field is 4 bytes. If we ever need to support
10366 an 8-byte length, we can add a new DW_LLE code or fall back
10367 to DW_LLE_GNU_start_end_entry. */
10368 dw2_asm_output_delta (4, curr->end, curr->begin,
10369 "Location list range length (%s)",
10370 list_head->ll_symbol);
10371 }
10372 else if (!have_multiple_function_sections)
10373 {
10374 /* Pair of relative addresses against start of text section. */
10375 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10376 "Location list begin address (%s)",
10377 list_head->ll_symbol);
10378 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10379 "Location list end address (%s)",
10380 list_head->ll_symbol);
10381 }
10382 else
10383 {
10384 /* Pair of absolute addresses. */
10385 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10386 "Location list begin address (%s)",
10387 list_head->ll_symbol);
10388 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10389 "Location list end address (%s)",
10390 list_head->ll_symbol);
10391 }
10392
10393 /* Output the block length for this list of location operations. */
10394 if (dwarf_version >= 5)
10395 dw2_asm_output_data_uleb128 (size, "Location expression size");
10396 else
10397 {
10398 gcc_assert (size <= 0xffff);
10399 dw2_asm_output_data (2, size, "Location expression size");
10400 }
10401
10402 output_loc_sequence (curr->expr, -1);
10403 }
10404
10405 /* And finally list termination. */
10406 if (dwarf_version >= 5)
10407 dw2_asm_output_data (1, DW_LLE_end_of_list,
10408 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10409 else if (dwarf_split_debug_info)
10410 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10411 "Location list terminator (%s)",
10412 list_head->ll_symbol);
10413 else
10414 {
10415 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10416 "Location list terminator begin (%s)",
10417 list_head->ll_symbol);
10418 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10419 "Location list terminator end (%s)",
10420 list_head->ll_symbol);
10421 }
10422
10423 gcc_assert (!list_head->vl_symbol
10424 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10425 }
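/* Illustrative DWARF 5 output of the routine above for a single-section,
   HAVE_AS_LEB128 build (labels are made up):

       .LLST0:
       .byte    0x4                    DW_LLE_offset_pair
       .uleb128 .LVL1 - .Ltext0        begin offset
       .uleb128 .LVL2 - .Ltext0        end offset
       .uleb128 0x1                    location expression size
       .byte    0x50                   DW_OP_reg0
       .byte    0                      DW_LLE_end_of_list  */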
10426
10427 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10428 section. Emit a relocated reference if val_entry is NULL, otherwise,
10429 emit an indirect reference. */
10430
10431 static void
10432 output_range_list_offset (dw_attr_node *a)
10433 {
10434 const char *name = dwarf_attr_name (a->dw_attr);
10435
10436 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10437 {
10438 if (dwarf_version >= 5)
10439 {
10440 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10441 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10442 debug_ranges_section, "%s", name);
10443 }
10444 else
10445 {
10446 char *p = strchr (ranges_section_label, '\0');
10447 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10448 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10449 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10450 debug_ranges_section, "%s", name);
10451 *p = '\0';
10452 }
10453 }
10454 else if (dwarf_version >= 5)
10455 {
10456 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10457 gcc_assert (rnglist_idx);
10458 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10459 }
10460 else
10461 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10462 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10463 "%s (offset from %s)", name, ranges_section_label);
10464 }
10465
10466 /* Output the offset into the debug_loc section. */
10467
10468 static void
10469 output_loc_list_offset (dw_attr_node *a)
10470 {
10471 char *sym = AT_loc_list (a)->ll_symbol;
10472
10473 gcc_assert (sym);
10474 if (!dwarf_split_debug_info)
10475 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10476 "%s", dwarf_attr_name (a->dw_attr));
10477 else if (dwarf_version >= 5)
10478 {
10479 gcc_assert (AT_loc_list (a)->num_assigned);
10480 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10481 dwarf_attr_name (a->dw_attr),
10482 sym);
10483 }
10484 else
10485 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10486 "%s", dwarf_attr_name (a->dw_attr));
10487 }
10488
10489 /* Output the view list offset into the debug_loc section. */
10490
10491 static void
10492 output_view_list_offset (dw_attr_node *a)
10493 {
10494 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10495
10496 gcc_assert (sym);
10497 if (dwarf_split_debug_info)
10498 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10499 "%s", dwarf_attr_name (a->dw_attr));
10500 else
10501 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10502 "%s", dwarf_attr_name (a->dw_attr));
10503 }
10504
10505 /* Output an attribute's index or value appropriately. */
10506
10507 static void
10508 output_attr_index_or_value (dw_attr_node *a)
10509 {
10510 const char *name = dwarf_attr_name (a->dw_attr);
10511
10512 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10513 {
10514 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10515 return;
10516 }
10517 switch (AT_class (a))
10518 {
10519 case dw_val_class_addr:
10520 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10521 break;
10522 case dw_val_class_high_pc:
10523 case dw_val_class_lbl_id:
10524 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10525 break;
10526 default:
10527 gcc_unreachable ();
10528 }
10529 }
10530
10531 /* Output a type signature. */
10532
10533 static inline void
10534 output_signature (const char *sig, const char *name)
10535 {
10536 int i;
10537
10538 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10539 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10540 }
10541
10542 /* Output a discriminant value. */
10543
10544 static inline void
10545 output_discr_value (dw_discr_value *discr_value, const char *name)
10546 {
10547 if (discr_value->pos)
10548 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10549 else
10550 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10551 }
10552
10553 /* Output the DIE and its attributes. Called recursively to generate
10554 the definitions of each child DIE. */
10555
10556 static void
10557 output_die (dw_die_ref die)
10558 {
10559 dw_attr_node *a;
10560 dw_die_ref c;
10561 unsigned long size;
10562 unsigned ix;
10563
10564 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10565 (unsigned long)die->die_offset,
10566 dwarf_tag_name (die->die_tag));
10567
10568 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10569 {
10570 const char *name = dwarf_attr_name (a->dw_attr);
10571
10572 switch (AT_class (a))
10573 {
10574 case dw_val_class_addr:
10575 output_attr_index_or_value (a);
10576 break;
10577
10578 case dw_val_class_offset:
10579 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10580 "%s", name);
10581 break;
10582
10583 case dw_val_class_range_list:
10584 output_range_list_offset (a);
10585 break;
10586
10587 case dw_val_class_loc:
10588 size = size_of_locs (AT_loc (a));
10589
10590 /* Output the block length for this list of location operations. */
10591 if (dwarf_version >= 4)
10592 dw2_asm_output_data_uleb128 (size, "%s", name);
10593 else
10594 dw2_asm_output_data (constant_size (size), size, "%s", name);
10595
10596 output_loc_sequence (AT_loc (a), -1);
10597 break;
10598
10599 case dw_val_class_const:
10600 /* ??? It would be slightly more efficient to use a scheme like the one
10601 used for unsigned constants below, but gdb 4.x does not sign
10602 extend. Gdb 5.x does sign extend. */
10603 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10604 break;
10605
10606 case dw_val_class_unsigned_const:
10607 {
10608 int csize = constant_size (AT_unsigned (a));
10609 if (dwarf_version == 3
10610 && a->dw_attr == DW_AT_data_member_location
10611 && csize >= 4)
10612 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10613 else
10614 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10615 }
10616 break;
10617
10618 case dw_val_class_symview:
10619 {
10620 int vsize;
10621 if (symview_upper_bound <= 0xff)
10622 vsize = 1;
10623 else if (symview_upper_bound <= 0xffff)
10624 vsize = 2;
10625 else if (symview_upper_bound <= 0xffffffff)
10626 vsize = 4;
10627 else
10628 vsize = 8;
10629 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10630 "%s", name);
10631 }
10632 break;
10633
10634 case dw_val_class_const_implicit:
10635 if (flag_debug_asm)
10636 fprintf (asm_out_file, "\t\t\t%s %s ("
10637 HOST_WIDE_INT_PRINT_DEC ")\n",
10638 ASM_COMMENT_START, name, AT_int (a));
10639 break;
10640
10641 case dw_val_class_unsigned_const_implicit:
10642 if (flag_debug_asm)
10643 fprintf (asm_out_file, "\t\t\t%s %s ("
10644 HOST_WIDE_INT_PRINT_HEX ")\n",
10645 ASM_COMMENT_START, name, AT_unsigned (a));
10646 break;
10647
10648 case dw_val_class_const_double:
10649 {
10650 unsigned HOST_WIDE_INT first, second;
10651
10652 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10653 dw2_asm_output_data (1,
10654 HOST_BITS_PER_DOUBLE_INT
10655 / HOST_BITS_PER_CHAR,
10656 NULL);
10657
10658 if (WORDS_BIG_ENDIAN)
10659 {
10660 first = a->dw_attr_val.v.val_double.high;
10661 second = a->dw_attr_val.v.val_double.low;
10662 }
10663 else
10664 {
10665 first = a->dw_attr_val.v.val_double.low;
10666 second = a->dw_attr_val.v.val_double.high;
10667 }
10668
10669 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10670 first, "%s", name);
10671 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10672 second, NULL);
10673 }
10674 break;
10675
10676 case dw_val_class_wide_int:
10677 {
10678 int i;
10679 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10680 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10681 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10682 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10683 * l, NULL);
10684
10685 if (WORDS_BIG_ENDIAN)
10686 for (i = len - 1; i >= 0; --i)
10687 {
10688 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10689 "%s", name);
10690 name = "";
10691 }
10692 else
10693 for (i = 0; i < len; ++i)
10694 {
10695 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10696 "%s", name);
10697 name = "";
10698 }
10699 }
10700 break;
10701
10702 case dw_val_class_vec:
10703 {
10704 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10705 unsigned int len = a->dw_attr_val.v.val_vec.length;
10706 unsigned int i;
10707 unsigned char *p;
10708
10709 dw2_asm_output_data (constant_size (len * elt_size),
10710 len * elt_size, "%s", name);
10711 if (elt_size > sizeof (HOST_WIDE_INT))
10712 {
10713 elt_size /= 2;
10714 len *= 2;
10715 }
10716 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10717 i < len;
10718 i++, p += elt_size)
10719 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10720 "fp or vector constant word %u", i);
10721 break;
10722 }
10723
10724 case dw_val_class_flag:
10725 if (dwarf_version >= 4)
10726 {
10727 /* Currently all add_AT_flag calls pass in 1 as last argument,
10728 so DW_FORM_flag_present can be used. If that ever changes,
10729 we'll need to use DW_FORM_flag and have some optimization
10730 in build_abbrev_table that will change those to
10731 DW_FORM_flag_present if it is set to 1 in all DIEs using
10732 the same abbrev entry. */
10733 gcc_assert (AT_flag (a) == 1);
10734 if (flag_debug_asm)
10735 fprintf (asm_out_file, "\t\t\t%s %s\n",
10736 ASM_COMMENT_START, name);
10737 break;
10738 }
10739 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10740 break;
10741
10742 case dw_val_class_loc_list:
10743 output_loc_list_offset (a);
10744 break;
10745
10746 case dw_val_class_view_list:
10747 output_view_list_offset (a);
10748 break;
10749
10750 case dw_val_class_die_ref:
10751 if (AT_ref_external (a))
10752 {
10753 if (AT_ref (a)->comdat_type_p)
10754 {
10755 comdat_type_node *type_node
10756 = AT_ref (a)->die_id.die_type_node;
10757
10758 gcc_assert (type_node);
10759 output_signature (type_node->signature, name);
10760 }
10761 else
10762 {
10763 const char *sym = AT_ref (a)->die_id.die_symbol;
10764 int size;
10765
10766 gcc_assert (sym);
10767 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10768 length, whereas in DWARF3 it's always sized as an
10769 offset. */
10770 if (dwarf_version == 2)
10771 size = DWARF2_ADDR_SIZE;
10772 else
10773 size = DWARF_OFFSET_SIZE;
10774 	    /* ??? We cannot unconditionally output die_offset if
10775 	       non-zero - others might create references to those
10776 	       DIEs via symbols.
10777 	       And we do not clear its DIE offset after outputting it
10778 	       (and the label refers to the actual DIEs, not to the
10779 	       DWARF CU header, which is the case in which label + offset
10780 	       would be the correct thing to output).
10781 	       ??? This is the reason for the with_offset flag.  */
10782 if (AT_ref (a)->with_offset)
10783 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10784 debug_info_section, "%s", name);
10785 else
10786 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10787 name);
10788 }
10789 }
10790 else
10791 {
10792 gcc_assert (AT_ref (a)->die_offset);
10793 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10794 "%s", name);
10795 }
10796 break;
10797
10798 case dw_val_class_fde_ref:
10799 {
10800 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10801
10802 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10803 a->dw_attr_val.v.val_fde_index * 2);
10804 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10805 "%s", name);
10806 }
10807 break;
10808
10809 case dw_val_class_vms_delta:
10810 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10811 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10812 AT_vms_delta2 (a), AT_vms_delta1 (a),
10813 "%s", name);
10814 #else
10815 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10816 AT_vms_delta2 (a), AT_vms_delta1 (a),
10817 "%s", name);
10818 #endif
10819 break;
10820
10821 case dw_val_class_lbl_id:
10822 output_attr_index_or_value (a);
10823 break;
10824
10825 case dw_val_class_lineptr:
10826 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10827 debug_line_section, "%s", name);
10828 break;
10829
10830 case dw_val_class_macptr:
10831 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10832 debug_macinfo_section, "%s", name);
10833 break;
10834
10835 case dw_val_class_loclistsptr:
10836 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10837 debug_loc_section, "%s", name);
10838 break;
10839
10840 case dw_val_class_str:
10841 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10842 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10843 a->dw_attr_val.v.val_str->label,
10844 debug_str_section,
10845 "%s: \"%s\"", name, AT_string (a));
10846 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10847 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10848 a->dw_attr_val.v.val_str->label,
10849 debug_line_str_section,
10850 "%s: \"%s\"", name, AT_string (a));
10851 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10852 dw2_asm_output_data_uleb128 (AT_index (a),
10853 "%s: \"%s\"", name, AT_string (a));
10854 else
10855 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10856 break;
10857
10858 case dw_val_class_file:
10859 {
10860 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10861
10862 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10863 a->dw_attr_val.v.val_file->filename);
10864 break;
10865 }
10866
10867 case dw_val_class_file_implicit:
10868 if (flag_debug_asm)
10869 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10870 ASM_COMMENT_START, name,
10871 maybe_emit_file (a->dw_attr_val.v.val_file),
10872 a->dw_attr_val.v.val_file->filename);
10873 break;
10874
10875 case dw_val_class_data8:
10876 {
10877 int i;
10878
10879 for (i = 0; i < 8; i++)
10880 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10881 i == 0 ? "%s" : NULL, name);
10882 break;
10883 }
10884
10885 case dw_val_class_high_pc:
10886 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10887 get_AT_low_pc (die), "DW_AT_high_pc");
10888 break;
10889
10890 case dw_val_class_discr_value:
10891 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10892 break;
10893
10894 case dw_val_class_discr_list:
10895 {
10896 dw_discr_list_ref list = AT_discr_list (a);
10897 const int size = size_of_discr_list (list);
10898
10899 /* This is a block, so output its length first. */
10900 dw2_asm_output_data (constant_size (size), size,
10901 "%s: block size", name);
10902
10903 for (; list != NULL; list = list->dw_discr_next)
10904 {
10905 /* One byte for the discriminant value descriptor, and then as
10906 many LEB128 numbers as required. */
10907 if (list->dw_discr_range)
10908 dw2_asm_output_data (1, DW_DSC_range,
10909 "%s: DW_DSC_range", name);
10910 else
10911 dw2_asm_output_data (1, DW_DSC_label,
10912 "%s: DW_DSC_label", name);
10913
10914 output_discr_value (&list->dw_discr_lower_bound, name);
10915 if (list->dw_discr_range)
10916 output_discr_value (&list->dw_discr_upper_bound, name);
10917 }
10918 break;
10919 }
10920
10921 default:
10922 gcc_unreachable ();
10923 }
10924 }
10925
10926 FOR_EACH_CHILD (die, c, output_die (c));
10927
10928 /* Add null byte to terminate sibling list. */
10929 if (die->die_child != NULL)
10930 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10931 (unsigned long) die->die_offset);
10932 }
10933
10934 /* Output the dwarf version number. */
10935
10936 static void
10937 output_dwarf_version ()
10938 {
10939 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10940 views in loclist. That will change eventually. */
10941 if (dwarf_version == 6)
10942 {
10943 static bool once;
10944 if (!once)
10945 {
10946 warning (0,
10947 "-gdwarf-6 is output as version 5 with incompatibilities");
10948 once = true;
10949 }
10950 dw2_asm_output_data (2, 5, "DWARF version number");
10951 }
10952 else
10953 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10954 }
10955
10956 /* Output the compilation unit that appears at the beginning of the
10957 .debug_info section, and precedes the DIE descriptions. */
10958
10959 static void
10960 output_compilation_unit_header (enum dwarf_unit_type ut)
10961 {
10962 if (!XCOFF_DEBUGGING_INFO)
10963 {
10964 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10965 dw2_asm_output_data (4, 0xffffffff,
10966 "Initial length escape value indicating 64-bit DWARF extension");
10967 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10968 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10969 "Length of Compilation Unit Info");
10970 }
10971
10972 output_dwarf_version ();
10973 if (dwarf_version >= 5)
10974 {
10975 const char *name;
10976 switch (ut)
10977 {
10978 case DW_UT_compile: name = "DW_UT_compile"; break;
10979 case DW_UT_type: name = "DW_UT_type"; break;
10980 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10981 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10982 default: gcc_unreachable ();
10983 }
10984 dw2_asm_output_data (1, ut, "%s", name);
10985 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10986 }
10987 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10988 debug_abbrev_section,
10989 "Offset Into Abbrev. Section");
10990 if (dwarf_version < 5)
10991 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10992 }
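
/* For reference, the header emitted above is laid out as follows (a sketch;
   the initial length escape appears only for 64-bit DWARF):
     DWARF 2-4: unit_length, version, debug_abbrev_offset, address_size
     DWARF 5:   unit_length, version, unit_type, address_size,
		debug_abbrev_offset
   matching the order of the dw2_asm_output_* calls above.  */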
10993
10994 /* Output the compilation unit DIE and its children. */
10995
10996 static void
10997 output_comp_unit (dw_die_ref die, int output_if_empty,
10998 const unsigned char *dwo_id)
10999 {
11000 const char *secname, *oldsym;
11001 char *tmp;
11002
11003   /* Unless we are outputting the main CU, we may throw away empty ones.  */
11004 if (!output_if_empty && die->die_child == NULL)
11005 return;
11006
11007 /* Even if there are no children of this DIE, we must output the information
11008 about the compilation unit. Otherwise, on an empty translation unit, we
11009 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11010 will then complain when examining the file. First mark all the DIEs in
11011 this CU so we know which get local refs. */
11012 mark_dies (die);
11013
11014 external_ref_hash_type *extern_map = optimize_external_refs (die);
11015
11016   /* For now, optimize only the main CU; to optimize the rest we'd need
11017      to see all of them earlier.  Leave the rest for post-linking tools
11018      like DWZ.  */
11019 if (die == comp_unit_die ())
11020 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11021
11022 build_abbrev_table (die, extern_map);
11023
11024 optimize_abbrev_table ();
11025
11026 delete extern_map;
11027
11028 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11029 next_die_offset = (dwo_id
11030 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11031 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11032 calc_die_sizes (die);
11033
11034 oldsym = die->die_id.die_symbol;
11035 if (oldsym && die->comdat_type_p)
11036 {
11037 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11038
11039 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11040 secname = tmp;
11041 die->die_id.die_symbol = NULL;
11042 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11043 }
11044 else
11045 {
11046 switch_to_section (debug_info_section);
11047 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11048 info_section_emitted = true;
11049 }
11050
11051   /* For LTO cross-unit DIE refs we want a symbol at the start of the
11052      debug info section, not on the CU DIE.  */
11053 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11054 {
11055 /* ??? No way to get visibility assembled without a decl. */
11056 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11057 get_identifier (oldsym), char_type_node);
11058 TREE_PUBLIC (decl) = true;
11059 TREE_STATIC (decl) = true;
11060 DECL_ARTIFICIAL (decl) = true;
11061 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11062 DECL_VISIBILITY_SPECIFIED (decl) = true;
11063 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11064 #ifdef ASM_WEAKEN_LABEL
11065 /* We prefer a .weak because that handles duplicates from duplicate
11066 archive members in a graceful way. */
11067 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11068 #else
11069 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11070 #endif
11071 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11072 }
11073
11074 /* Output debugging information. */
11075 output_compilation_unit_header (dwo_id
11076 ? DW_UT_split_compile : DW_UT_compile);
11077 if (dwarf_version >= 5)
11078 {
11079 if (dwo_id != NULL)
11080 for (int i = 0; i < 8; i++)
11081 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11082 }
11083 output_die (die);
11084
11085 /* Leave the marks on the main CU, so we can check them in
11086 output_pubnames. */
11087 if (oldsym)
11088 {
11089 unmark_dies (die);
11090 die->die_id.die_symbol = oldsym;
11091 }
11092 }
11093
11094 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11095 and .debug_pubtypes. This is configured per-target, but can be
11096 overridden by the -gpubnames or -gno-pubnames options. */
11097
11098 static inline bool
11099 want_pubnames (void)
11100 {
11101 if (debug_info_level <= DINFO_LEVEL_TERSE)
11102 return false;
11103 if (debug_generate_pub_sections != -1)
11104 return debug_generate_pub_sections;
11105 return targetm.want_debug_pub_sections;
11106 }
11107
11108 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11109
11110 static void
11111 add_AT_pubnames (dw_die_ref die)
11112 {
11113 if (want_pubnames ())
11114 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11115 }
11116
11117 /* Add a string attribute value to a skeleton DIE. */
11118
11119 static inline void
11120 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11121 const char *str)
11122 {
11123 dw_attr_node attr;
11124 struct indirect_string_node *node;
11125
11126 if (! skeleton_debug_str_hash)
11127 skeleton_debug_str_hash
11128 = hash_table<indirect_string_hasher>::create_ggc (10);
11129
11130 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11131 find_string_form (node);
11132 if (node->form == dwarf_FORM (DW_FORM_strx))
11133 node->form = DW_FORM_strp;
11134
11135 attr.dw_attr = attr_kind;
11136 attr.dw_attr_val.val_class = dw_val_class_str;
11137 attr.dw_attr_val.val_entry = NULL;
11138 attr.dw_attr_val.v.val_str = node;
11139 add_dwarf_attr (die, &attr);
11140 }
11141
11142 /* Helper function to generate top-level dies for skeleton debug_info and
11143 debug_types. */
11144
11145 static void
11146 add_top_level_skeleton_die_attrs (dw_die_ref die)
11147 {
11148 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11149 const char *comp_dir = comp_dir_string ();
11150
11151 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11152 if (comp_dir != NULL)
11153 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11154 add_AT_pubnames (die);
11155 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11156 }
11157
11158 /* Output skeleton debug sections that point to the dwo file. */
11159
11160 static void
11161 output_skeleton_debug_sections (dw_die_ref comp_unit,
11162 const unsigned char *dwo_id)
11163 {
11164 /* These attributes will be found in the full debug_info section. */
11165 remove_AT (comp_unit, DW_AT_producer);
11166 remove_AT (comp_unit, DW_AT_language);
11167
11168 switch_to_section (debug_skeleton_info_section);
11169 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11170
11171   /* Produce the skeleton compilation-unit header.  This one differs
11172      enough from a normal CU header that it's better not to call
11173      output_compilation_unit_header.  */
11174 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11175 dw2_asm_output_data (4, 0xffffffff,
11176 "Initial length escape value indicating 64-bit "
11177 "DWARF extension");
11178
11179 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11180 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11181 - DWARF_INITIAL_LENGTH_SIZE
11182 + size_of_die (comp_unit),
11183 "Length of Compilation Unit Info");
11184 output_dwarf_version ();
11185 if (dwarf_version >= 5)
11186 {
11187 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11188 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11189 }
11190 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11191 debug_skeleton_abbrev_section,
11192 "Offset Into Abbrev. Section");
11193 if (dwarf_version < 5)
11194 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11195 else
11196 for (int i = 0; i < 8; i++)
11197 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11198
11199 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11200 output_die (comp_unit);
11201
11202 /* Build the skeleton debug_abbrev section. */
11203 switch_to_section (debug_skeleton_abbrev_section);
11204 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11205
11206 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11207
11208 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11209 }
11210
11211 /* Output a comdat type unit DIE and its children. */
11212
11213 static void
11214 output_comdat_type_unit (comdat_type_node *node)
11215 {
11216 const char *secname;
11217 char *tmp;
11218 int i;
11219 #if defined (OBJECT_FORMAT_ELF)
11220 tree comdat_key;
11221 #endif
11222
11223 /* First mark all the DIEs in this CU so we know which get local refs. */
11224 mark_dies (node->root_die);
11225
11226 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11227
11228 build_abbrev_table (node->root_die, extern_map);
11229
11230 delete extern_map;
11231 extern_map = NULL;
11232
11233 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11234 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11235 calc_die_sizes (node->root_die);
11236
11237 #if defined (OBJECT_FORMAT_ELF)
11238 if (dwarf_version >= 5)
11239 {
11240 if (!dwarf_split_debug_info)
11241 secname = ".debug_info";
11242 else
11243 secname = ".debug_info.dwo";
11244 }
11245 else if (!dwarf_split_debug_info)
11246 secname = ".debug_types";
11247 else
11248 secname = ".debug_types.dwo";
11249
11250 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11251 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11252 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11253 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11254 comdat_key = get_identifier (tmp);
11255 targetm.asm_out.named_section (secname,
11256 SECTION_DEBUG | SECTION_LINKONCE,
11257 comdat_key);
11258 #else
11259 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11260 sprintf (tmp, (dwarf_version >= 5
11261 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11262 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11263 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11264 secname = tmp;
11265 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11266 #endif
11267
11268 /* Output debugging information. */
11269 output_compilation_unit_header (dwarf_split_debug_info
11270 ? DW_UT_split_type : DW_UT_type);
11271 output_signature (node->signature, "Type Signature");
11272 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11273 "Offset to Type DIE");
11274 output_die (node->root_die);
11275
11276 unmark_dies (node->root_die);
11277 }
11278
11279 /* Return the DWARF2/3 pubname associated with a decl. */
11280
11281 static const char *
11282 dwarf2_name (tree decl, int scope)
11283 {
11284 if (DECL_NAMELESS (decl))
11285 return NULL;
11286 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11287 }
11288
11289 /* Add a new entry to .debug_pubnames if appropriate. */
11290
11291 static void
11292 add_pubname_string (const char *str, dw_die_ref die)
11293 {
11294 pubname_entry e;
11295
11296 e.die = die;
11297 e.name = xstrdup (str);
11298 vec_safe_push (pubname_table, e);
11299 }
11300
11301 static void
11302 add_pubname (tree decl, dw_die_ref die)
11303 {
11304 if (!want_pubnames ())
11305 return;
11306
11307 /* Don't add items to the table when we expect that the consumer will have
11308 just read the enclosing die. For example, if the consumer is looking at a
11309 class_member, it will either be inside the class already, or will have just
11310 looked up the class to find the member. Either way, searching the class is
11311 faster than searching the index. */
11312 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11313 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11314 {
11315 const char *name = dwarf2_name (decl, 1);
11316
11317 if (name)
11318 add_pubname_string (name, die);
11319 }
11320 }
11321
11322 /* Add an enumerator to the pubnames section. */
11323
11324 static void
11325 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11326 {
11327 pubname_entry e;
11328
11329 gcc_assert (scope_name);
11330 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11331 e.die = die;
11332 vec_safe_push (pubname_table, e);
11333 }
11334
11335 /* Add a new entry to .debug_pubtypes if appropriate. */
11336
11337 static void
11338 add_pubtype (tree decl, dw_die_ref die)
11339 {
11340 pubname_entry e;
11341
11342 if (!want_pubnames ())
11343 return;
11344
11345 if ((TREE_PUBLIC (decl)
11346 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11347 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11348 {
11349 tree scope = NULL;
11350 const char *scope_name = "";
11351 const char *sep = is_cxx () ? "::" : ".";
11352 const char *name;
11353
11354 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11355 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11356 {
11357 scope_name = lang_hooks.dwarf_name (scope, 1);
11358 if (scope_name != NULL && scope_name[0] != '\0')
11359 scope_name = concat (scope_name, sep, NULL);
11360 else
11361 scope_name = "";
11362 }
11363
11364 if (TYPE_P (decl))
11365 name = type_tag (decl);
11366 else
11367 name = lang_hooks.dwarf_name (decl, 1);
11368
11369 /* If we don't have a name for the type, there's no point in adding
11370 it to the table. */
11371 if (name != NULL && name[0] != '\0')
11372 {
11373 e.die = die;
11374 e.name = concat (scope_name, name, NULL);
11375 vec_safe_push (pubtype_table, e);
11376 }
11377
11378 /* Although it might be more consistent to add the pubinfo for the
11379 enumerators as their dies are created, they should only be added if the
11380 enum type meets the criteria above. So rather than re-check the parent
11381 enum type whenever an enumerator die is created, just output them all
11382 here. This isn't protected by the name conditional because anonymous
11383 enums don't have names. */
11384 if (die->die_tag == DW_TAG_enumeration_type)
11385 {
11386 dw_die_ref c;
11387
11388 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11389 }
11390 }
11391 }
11392
11393 /* Output a single entry in the pubnames table. */
11394
11395 static void
11396 output_pubname (dw_offset die_offset, pubname_entry *entry)
11397 {
11398 dw_die_ref die = entry->die;
11399 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11400
11401 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11402
11403 if (debug_generate_pub_sections == 2)
11404 {
11405 /* This logic follows gdb's method for determining the value of the flag
11406 byte. */
11407 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11408 switch (die->die_tag)
11409 {
11410 case DW_TAG_typedef:
11411 case DW_TAG_base_type:
11412 case DW_TAG_subrange_type:
11413 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11414 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11415 break;
11416 case DW_TAG_enumerator:
11417 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11418 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11419 if (!is_cxx ())
11420 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11421 break;
11422 case DW_TAG_subprogram:
11423 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11424 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11425 if (!is_ada ())
11426 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11427 break;
11428 case DW_TAG_constant:
11429 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11430 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11431 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11432 break;
11433 case DW_TAG_variable:
11434 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11435 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11436 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11437 break;
11438 case DW_TAG_namespace:
11439 case DW_TAG_imported_declaration:
11440 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11441 break;
11442 case DW_TAG_class_type:
11443 case DW_TAG_interface_type:
11444 case DW_TAG_structure_type:
11445 case DW_TAG_union_type:
11446 case DW_TAG_enumeration_type:
11447 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11448 if (!is_cxx ())
11449 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11450 break;
11451 default:
11452 /* An unusual tag. Leave the flag-byte empty. */
11453 break;
11454 }
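      /* Only the bits above the CU-index field of FLAGS are emitted, as a
	 single byte: in the gdb_index encoding the low GDB_INDEX_CU_BITSIZE
	 bits hold the CU index, which is not needed in a pubnames entry.  */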
11455 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11456 "GDB-index flags");
11457 }
11458
11459 dw2_asm_output_nstring (entry->name, -1, "external name");
11460 }
11461
11462
11463 /* Output the public names table used to speed up access to externally
11464 visible names; or the public types table used to find type definitions. */
11465
11466 static void
11467 output_pubnames (vec<pubname_entry, va_gc> *names)
11468 {
11469 unsigned i;
11470 unsigned long pubnames_length = size_of_pubnames (names);
11471 pubname_entry *pub;
11472
11473 if (!XCOFF_DEBUGGING_INFO)
11474 {
11475 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11476 dw2_asm_output_data (4, 0xffffffff,
11477 "Initial length escape value indicating 64-bit DWARF extension");
11478 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11479 "Pub Info Length");
11480 }
11481
11482 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11483 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11484
11485 if (dwarf_split_debug_info)
11486 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11487 debug_skeleton_info_section,
11488 "Offset of Compilation Unit Info");
11489 else
11490 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11491 debug_info_section,
11492 "Offset of Compilation Unit Info");
11493 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11494 "Compilation Unit Length");
11495
11496 FOR_EACH_VEC_ELT (*names, i, pub)
11497 {
11498 if (include_pubname_in_output (names, pub))
11499 {
11500 dw_offset die_offset = pub->die->die_offset;
11501
11502 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11503 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11504 gcc_assert (pub->die->die_mark);
11505
11506 /* If we're putting types in their own .debug_types sections,
11507 the .debug_pubtypes table will still point to the compile
11508 unit (not the type unit), so we want to use the offset of
11509 the skeleton DIE (if there is one). */
11510 if (pub->die->comdat_type_p && names == pubtype_table)
11511 {
11512 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11513
11514 if (type_node != NULL)
11515 die_offset = (type_node->skeleton_die != NULL
11516 ? type_node->skeleton_die->die_offset
11517 : comp_unit_die ()->die_offset);
11518 }
11519
11520 output_pubname (die_offset, pub);
11521 }
11522 }
11523
11524 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11525 }
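
/* Each entry emitted above consists of a DIE offset (DWARF_OFFSET_SIZE
   bytes), optionally a one-byte GDB-index flag byte (only when
   debug_generate_pub_sections == 2), and a NUL-terminated name; the table
   is terminated by a zero offset.  */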
11526
11527 /* Output public names and types tables if necessary. */
11528
11529 static void
11530 output_pubtables (void)
11531 {
11532 if (!want_pubnames () || !info_section_emitted)
11533 return;
11534
11535 switch_to_section (debug_pubnames_section);
11536 output_pubnames (pubname_table);
11537 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11538 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11539 simply won't look for the section. */
11540 switch_to_section (debug_pubtypes_section);
11541 output_pubnames (pubtype_table);
11542 }
11543
11544
11545 /* Output the information that goes into the .debug_aranges table.
11546 Namely, define the beginning and ending address range of the
11547 text section generated for this compilation unit. */
11548
11549 static void
11550 output_aranges (void)
11551 {
11552 unsigned i;
11553 unsigned long aranges_length = size_of_aranges ();
11554
11555 if (!XCOFF_DEBUGGING_INFO)
11556 {
11557 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11558 dw2_asm_output_data (4, 0xffffffff,
11559 "Initial length escape value indicating 64-bit DWARF extension");
11560 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11561 "Length of Address Ranges Info");
11562 }
11563
11564 /* Version number for aranges is still 2, even up to DWARF5. */
11565 dw2_asm_output_data (2, 2, "DWARF aranges version");
11566 if (dwarf_split_debug_info)
11567 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11568 debug_skeleton_info_section,
11569 "Offset of Compilation Unit Info");
11570 else
11571 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11572 debug_info_section,
11573 "Offset of Compilation Unit Info");
11574 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11575 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11576
11577 /* We need to align to twice the pointer size here. */
11578 if (DWARF_ARANGES_PAD_SIZE)
11579 {
11580       /* Pad using 2-byte words so that padding is correct for any
11581 	 pointer size.  */
11582 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11583 2 * DWARF2_ADDR_SIZE);
11584 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11585 dw2_asm_output_data (2, 0, NULL);
11586 }
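  /* Illustrative example (not a requirement of the code above): with
     32-bit DWARF offsets and 8-byte addresses, the header so far is
     4 + 2 + 4 + 1 + 1 = 12 bytes, so DWARF_ARANGES_PAD_SIZE would be 4,
     padding the entries to a 16-byte (2 * DWARF2_ADDR_SIZE) boundary.  */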
11587
11588   /* It is necessary not to output these entries if the sections were
11589      not used; in that case the length will be 0 and the address may
11590      end up as 0 if the section is discarded by ld --gc-sections,
11591      leaving an invalid (0, 0) entry that can be confused with the
11592      terminator.  */
11593 if (text_section_used)
11594 {
11595 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11596 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11597 text_section_label, "Length");
11598 }
11599 if (cold_text_section_used)
11600 {
11601 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11602 "Address");
11603 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11604 cold_text_section_label, "Length");
11605 }
11606
11607 if (have_multiple_function_sections)
11608 {
11609 unsigned fde_idx;
11610 dw_fde_ref fde;
11611
11612 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11613 {
11614 if (DECL_IGNORED_P (fde->decl))
11615 continue;
11616 if (!fde->in_std_section)
11617 {
11618 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11619 "Address");
11620 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11621 fde->dw_fde_begin, "Length");
11622 }
11623 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11624 {
11625 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11626 "Address");
11627 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11628 fde->dw_fde_second_begin, "Length");
11629 }
11630 }
11631 }
11632
11633 /* Output the terminator words. */
11634 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11635 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11636 }
11637
11638 /* Add a new entry to .debug_ranges. Return its index into
11639 ranges_table vector. */
11640
11641 static unsigned int
11642 add_ranges_num (int num, bool maybe_new_sec)
11643 {
11644 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11645 vec_safe_push (ranges_table, r);
11646 return vec_safe_length (ranges_table) - 1;
11647 }
11648
11649 /* Add a new entry to .debug_ranges corresponding to a block, or a
11650 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11651 this entry might be in a different section from previous range. */
11652
11653 static unsigned int
11654 add_ranges (const_tree block, bool maybe_new_sec)
11655 {
11656 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11657 }
11658
11659 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11660    chain, or a middle entry of a chain that will be directly referred to.  */
11661
11662 static void
11663 note_rnglist_head (unsigned int offset)
11664 {
11665 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11666 return;
11667 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11668 }
11669
11670 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11671 When using dwarf_split_debug_info, address attributes in dies destined
11672 for the final executable should be direct references--setting the
11673 parameter force_direct ensures this behavior. */
11674
11675 static void
11676 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11677 bool *added, bool force_direct)
11678 {
11679 unsigned int in_use = vec_safe_length (ranges_by_label);
11680 unsigned int offset;
11681 dw_ranges_by_label rbl = { begin, end };
11682 vec_safe_push (ranges_by_label, rbl);
11683 offset = add_ranges_num (-(int)in_use - 1, true);
11684 if (!*added)
11685 {
11686 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11687 *added = true;
11688 note_rnglist_head (offset);
11689 }
11690 }
11691
11692 /* Emit .debug_ranges section. */
11693
11694 static void
11695 output_ranges (void)
11696 {
11697 unsigned i;
11698 static const char *const start_fmt = "Offset %#x";
11699 const char *fmt = start_fmt;
11700 dw_ranges *r;
11701
11702 switch_to_section (debug_ranges_section);
11703 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11704 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11705 {
11706 int block_num = r->num;
11707
11708 if (block_num > 0)
11709 {
11710 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11711 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11712
11713 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11714 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11715
11716 /* If all code is in the text section, then the compilation
11717 unit base address defaults to DW_AT_low_pc, which is the
11718 base of the text section. */
11719 if (!have_multiple_function_sections)
11720 {
11721 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11722 text_section_label,
11723 fmt, i * 2 * DWARF2_ADDR_SIZE);
11724 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11725 text_section_label, NULL);
11726 }
11727
11728 /* Otherwise, the compilation unit base address is zero,
11729 which allows us to use absolute addresses, and not worry
11730 about whether the target supports cross-section
11731 arithmetic. */
11732 else
11733 {
11734 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11735 fmt, i * 2 * DWARF2_ADDR_SIZE);
11736 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11737 }
11738
11739 fmt = NULL;
11740 }
11741
11742 /* Negative block_num stands for an index into ranges_by_label. */
11743 else if (block_num < 0)
11744 {
11745 int lab_idx = - block_num - 1;
11746
11747 if (!have_multiple_function_sections)
11748 {
11749 gcc_unreachable ();
11750 #if 0
11751 /* If we ever use add_ranges_by_labels () for a single
11752 function section, all we have to do is to take out
11753 the #if 0 above. */
11754 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11755 (*ranges_by_label)[lab_idx].begin,
11756 text_section_label,
11757 fmt, i * 2 * DWARF2_ADDR_SIZE);
11758 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11759 (*ranges_by_label)[lab_idx].end,
11760 text_section_label, NULL);
11761 #endif
11762 }
11763 else
11764 {
11765 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11766 (*ranges_by_label)[lab_idx].begin,
11767 fmt, i * 2 * DWARF2_ADDR_SIZE);
11768 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11769 (*ranges_by_label)[lab_idx].end,
11770 NULL);
11771 }
11772 }
11773 else
11774 {
11775 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11776 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11777 fmt = start_fmt;
11778 }
11779 }
11780 }
11781
11782 /* Non-zero if .debug_line_str should be used for .debug_line section
11783 strings or strings that are likely shareable with those. */
11784 #define DWARF5_USE_DEBUG_LINE_STR \
11785 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11786 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11787 /* FIXME: there is no .debug_line_str.dwo section, \
11788 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11789 && !dwarf_split_debug_info)
11790
11791 /* Assign .debug_rnglists indexes. */
11792
11793 static void
11794 index_rnglists (void)
11795 {
11796 unsigned i;
11797 dw_ranges *r;
11798
11799 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11800 if (r->label)
11801 r->idx = rnglist_idx++;
11802 }
11803
11804 /* Emit .debug_rnglists section. */
11805
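/* A rough sketch of the encodings chosen below: DW_RLE_offset_pair for
   begin/end offsets relative to a base address (the text section base, or
   one set with DW_RLE_base_address), DW_RLE_start_length or
   DW_RLE_start_end for absolute ranges, and DW_RLE_end_of_list as the
   terminator.  */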
11806 static void
11807 output_rnglists (unsigned generation)
11808 {
11809 unsigned i;
11810 dw_ranges *r;
11811 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11812 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11813 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11814
11815 switch_to_section (debug_ranges_section);
11816 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11817 /* There are up to 4 unique ranges labels per generation.
11818 See also init_sections_and_labels. */
11819 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11820 2 + generation * 4);
11821 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11822 3 + generation * 4);
11823 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11824 dw2_asm_output_data (4, 0xffffffff,
11825 "Initial length escape value indicating "
11826 "64-bit DWARF extension");
11827 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11828 "Length of Range Lists");
11829 ASM_OUTPUT_LABEL (asm_out_file, l1);
11830 output_dwarf_version ();
11831 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11832 dw2_asm_output_data (1, 0, "Segment Size");
11833 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11834 about relocation sizes and primarily care about the size of .debug*
11835 sections in linked shared libraries and executables, then
11836 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11837 into it are usually larger than just DW_FORM_sec_offset offsets
11838 into the .debug_rnglists section. */
11839 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11840 "Offset Entry Count");
11841 if (dwarf_split_debug_info)
11842 {
11843 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11844 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11845 if (r->label)
11846 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11847 ranges_base_label, NULL);
11848 }
11849
11850 const char *lab = "";
11851 unsigned int len = vec_safe_length (ranges_table);
11852 const char *base = NULL;
11853 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11854 {
11855 int block_num = r->num;
11856
11857 if (r->label)
11858 {
11859 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11860 lab = r->label;
11861 }
11862 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11863 base = NULL;
11864 if (block_num > 0)
11865 {
11866 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11867 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11868
11869 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11870 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11871
11872 if (HAVE_AS_LEB128)
11873 {
11874 /* If all code is in the text section, then the compilation
11875 unit base address defaults to DW_AT_low_pc, which is the
11876 base of the text section. */
11877 if (!have_multiple_function_sections)
11878 {
11879 dw2_asm_output_data (1, DW_RLE_offset_pair,
11880 "DW_RLE_offset_pair (%s)", lab);
11881 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11882 "Range begin address (%s)", lab);
11883 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11884 "Range end address (%s)", lab);
11885 continue;
11886 }
11887 if (base == NULL)
11888 {
11889 dw_ranges *r2 = NULL;
11890 if (i < len - 1)
11891 r2 = &(*ranges_table)[i + 1];
11892 if (r2
11893 && r2->num != 0
11894 && r2->label == NULL
11895 && !r2->maybe_new_sec)
11896 {
11897 dw2_asm_output_data (1, DW_RLE_base_address,
11898 "DW_RLE_base_address (%s)", lab);
11899 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11900 "Base address (%s)", lab);
11901 strcpy (basebuf, blabel);
11902 base = basebuf;
11903 }
11904 }
11905 if (base)
11906 {
11907 dw2_asm_output_data (1, DW_RLE_offset_pair,
11908 "DW_RLE_offset_pair (%s)", lab);
11909 dw2_asm_output_delta_uleb128 (blabel, base,
11910 "Range begin address (%s)", lab);
11911 dw2_asm_output_delta_uleb128 (elabel, base,
11912 "Range end address (%s)", lab);
11913 continue;
11914 }
11915 dw2_asm_output_data (1, DW_RLE_start_length,
11916 "DW_RLE_start_length (%s)", lab);
11917 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11918 "Range begin address (%s)", lab);
11919 dw2_asm_output_delta_uleb128 (elabel, blabel,
11920 "Range length (%s)", lab);
11921 }
11922 else
11923 {
11924 dw2_asm_output_data (1, DW_RLE_start_end,
11925 "DW_RLE_start_end (%s)", lab);
11926 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11927 "Range begin address (%s)", lab);
11928 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11929 "Range end address (%s)", lab);
11930 }
11931 }
11932
11933 /* Negative block_num stands for an index into ranges_by_label. */
11934 else if (block_num < 0)
11935 {
11936 int lab_idx = - block_num - 1;
11937 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11938 const char *elabel = (*ranges_by_label)[lab_idx].end;
11939
11940 if (!have_multiple_function_sections)
11941 gcc_unreachable ();
11942 if (HAVE_AS_LEB128)
11943 {
11944 dw2_asm_output_data (1, DW_RLE_start_length,
11945 "DW_RLE_start_length (%s)", lab);
11946 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11947 "Range begin address (%s)", lab);
11948 dw2_asm_output_delta_uleb128 (elabel, blabel,
11949 "Range length (%s)", lab);
11950 }
11951 else
11952 {
11953 dw2_asm_output_data (1, DW_RLE_start_end,
11954 "DW_RLE_start_end (%s)", lab);
11955 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11956 "Range begin address (%s)", lab);
11957 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11958 "Range end address (%s)", lab);
11959 }
11960 }
11961 else
11962 dw2_asm_output_data (1, DW_RLE_end_of_list,
11963 "DW_RLE_end_of_list (%s)", lab);
11964 }
11965 ASM_OUTPUT_LABEL (asm_out_file, l2);
11966 }
11967
11968 /* Data structure containing information about input files. */
11969 struct file_info
11970 {
11971 const char *path; /* Complete file name. */
11972 const char *fname; /* File name part. */
11973 int length; /* Length of entire string. */
11974 struct dwarf_file_data * file_idx; /* Index in input file table. */
11975 int dir_idx; /* Index in directory table. */
11976 };
11977
11978 /* Data structure containing information about directories with source
11979 files. */
11980 struct dir_info
11981 {
11982 const char *path; /* Path including directory name. */
11983 int length; /* Path length. */
11984 int prefix; /* Index of directory entry which is a prefix. */
11985 int count; /* Number of files in this directory. */
11986 int dir_idx; /* Index of directory used as base. */
11987 };
11988
11989 /* Callback function for file_info comparison. We sort by looking at
11990 the directories in the path. */
11991
11992 static int
11993 file_info_cmp (const void *p1, const void *p2)
11994 {
11995 const struct file_info *const s1 = (const struct file_info *) p1;
11996 const struct file_info *const s2 = (const struct file_info *) p2;
11997 const unsigned char *cp1;
11998 const unsigned char *cp2;
11999
12000   /* Take care of file names without directories.  We need to return
12001      consistent values to qsort; some implementations get confused if we
12002      return the same nonzero value when the same two operands are passed
12003      in opposite orders.  So if neither has a directory, return 0, and
12004      otherwise return 1 or -1 depending on which one has the directory.
12005      We want the one with the directory to sort after the one without, so
12006      files without a directory come first (normally only the compilation unit file).  */
12007 if ((s1->path == s1->fname || s2->path == s2->fname))
12008 return (s2->path == s2->fname) - (s1->path == s1->fname);
12009
12010 cp1 = (const unsigned char *) s1->path;
12011 cp2 = (const unsigned char *) s2->path;
12012
12013 while (1)
12014 {
12015 ++cp1;
12016 ++cp2;
12017       /* Reached the end of either directory prefix?  If so, handle as
12018 	 above, but now we want longer directory prefixes before shorter ones.  */
12019 if ((cp1 == (const unsigned char *) s1->fname)
12020 || (cp2 == (const unsigned char *) s2->fname))
12021 return ((cp1 == (const unsigned char *) s1->fname)
12022 - (cp2 == (const unsigned char *) s2->fname));
12023
12024       /* Characters of the current path component differ?  Order by them.  */
12025 else if (*cp1 != *cp2)
12026 return *cp1 - *cp2;
12027 }
12028 }
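
/* For example (illustrative paths only): "main.c" sorts before
   "lib/sub/b.c", which in turn sorts before "lib/a.c" - files without a
   directory come first, and longer directory prefixes sort before the
   shorter prefixes they extend.  */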
12029
12030 struct file_name_acquire_data
12031 {
12032 struct file_info *files;
12033 int used_files;
12034 int max_files;
12035 };
12036
12037 /* Traversal function for the hash table. */
12038
12039 int
12040 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12041 {
12042 struct dwarf_file_data *d = *slot;
12043 struct file_info *fi;
12044 const char *f;
12045
12046 gcc_assert (fnad->max_files >= d->emitted_number);
12047
12048 if (! d->emitted_number)
12049 return 1;
12050
12051 gcc_assert (fnad->max_files != fnad->used_files);
12052
12053 fi = fnad->files + fnad->used_files++;
12054
12055 /* Skip all leading "./". */
12056 f = d->filename;
12057 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12058 f += 2;
12059
12060 /* Create a new array entry. */
12061 fi->path = f;
12062 fi->length = strlen (f);
12063 fi->file_idx = d;
12064
12065 /* Search for the file name part. */
12066 f = strrchr (f, DIR_SEPARATOR);
12067 #if defined (DIR_SEPARATOR_2)
12068 {
12069 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12070
12071 if (g != NULL)
12072 {
12073 if (f == NULL || f < g)
12074 f = g;
12075 }
12076 }
12077 #endif
12078
12079 fi->fname = f == NULL ? fi->path : f + 1;
12080 return 1;
12081 }
12082
12083 /* Helper function for output_file_names.  Emit a FORM-encoded
12084    string STR, using ENTRY_KIND and index IDX in the assembly
12085    comment.  */
12086
12087 static void
12088 output_line_string (enum dwarf_form form, const char *str,
12089 const char *entry_kind, unsigned int idx)
12090 {
12091 switch (form)
12092 {
12093 case DW_FORM_string:
12094 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12095 break;
12096 case DW_FORM_line_strp:
12097 if (!debug_line_str_hash)
12098 debug_line_str_hash
12099 = hash_table<indirect_string_hasher>::create_ggc (10);
12100
12101 struct indirect_string_node *node;
12102 node = find_AT_string_in_table (str, debug_line_str_hash);
12103 set_indirect_string (node);
12104 node->form = form;
12105 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12106 debug_line_str_section, "%s: %#x: \"%s\"",
12107 entry_kind, 0, node->str);
12108 break;
12109 default:
12110 gcc_unreachable ();
12111 }
12112 }
12113
12114 /* Output the directory table and the file name table. We try to minimize
12115 the total amount of memory needed. A heuristic is used to avoid large
12116 slowdowns with many input files. */
12117
12118 static void
12119 output_file_names (void)
12120 {
12121 struct file_name_acquire_data fnad;
12122 int numfiles;
12123 struct file_info *files;
12124 struct dir_info *dirs;
12125 int *saved;
12126 int *savehere;
12127 int *backmap;
12128 int ndirs;
12129 int idx_offset;
12130 int i;
12131
12132 if (!last_emitted_file)
12133 {
12134 if (dwarf_version >= 5)
12135 {
12136 dw2_asm_output_data (1, 0, "Directory entry format count");
12137 dw2_asm_output_data_uleb128 (0, "Directories count");
12138 dw2_asm_output_data (1, 0, "File name entry format count");
12139 dw2_asm_output_data_uleb128 (0, "File names count");
12140 }
12141 else
12142 {
12143 dw2_asm_output_data (1, 0, "End directory table");
12144 dw2_asm_output_data (1, 0, "End file name table");
12145 }
12146 return;
12147 }
12148
12149 numfiles = last_emitted_file->emitted_number;
12150
12151 /* Allocate the various arrays we need. */
12152 files = XALLOCAVEC (struct file_info, numfiles);
12153 dirs = XALLOCAVEC (struct dir_info, numfiles);
12154
12155 fnad.files = files;
12156 fnad.used_files = 0;
12157 fnad.max_files = numfiles;
12158 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12159 gcc_assert (fnad.used_files == fnad.max_files);
12160
12161 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12162
12163 /* Find all the different directories used. */
12164 dirs[0].path = files[0].path;
12165 dirs[0].length = files[0].fname - files[0].path;
12166 dirs[0].prefix = -1;
12167 dirs[0].count = 1;
12168 dirs[0].dir_idx = 0;
12169 files[0].dir_idx = 0;
12170 ndirs = 1;
12171
12172 for (i = 1; i < numfiles; i++)
12173 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12174 && memcmp (dirs[ndirs - 1].path, files[i].path,
12175 dirs[ndirs - 1].length) == 0)
12176 {
12177 /* Same directory as last entry. */
12178 files[i].dir_idx = ndirs - 1;
12179 ++dirs[ndirs - 1].count;
12180 }
12181 else
12182 {
12183 int j;
12184
12185 /* This is a new directory. */
12186 dirs[ndirs].path = files[i].path;
12187 dirs[ndirs].length = files[i].fname - files[i].path;
12188 dirs[ndirs].count = 1;
12189 dirs[ndirs].dir_idx = ndirs;
12190 files[i].dir_idx = ndirs;
12191
12192 /* Search for a prefix. */
12193 dirs[ndirs].prefix = -1;
12194 for (j = 0; j < ndirs; j++)
12195 if (dirs[j].length < dirs[ndirs].length
12196 && dirs[j].length > 1
12197 && (dirs[ndirs].prefix == -1
12198 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12199 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12200 dirs[ndirs].prefix = j;
12201
12202 ++ndirs;
12203 }
12204
12205   /* Now to the actual work.  We have to find a subset of the directories
12206      that allows expressing the file names using references to the
12207      directory table with the fewest characters.  We do not do an
12208      exhaustive search where we would have to check every combination of
12209      every single possible prefix.  Instead we use a heuristic which
12210      provides nearly optimal results in most cases and is never far off.  */
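  /* A hypothetical example: if most files live under "/src/gcc/" and only
     a couple sit directly under "/src/", it pays to enter "/src/gcc/" in
     the directory table so that those file names can drop the longer
     prefix, even though "/src/" alone is also a prefix of both groups.  */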
12211 saved = XALLOCAVEC (int, ndirs);
12212 savehere = XALLOCAVEC (int, ndirs);
12213
12214 memset (saved, '\0', ndirs * sizeof (saved[0]));
12215 for (i = 0; i < ndirs; i++)
12216 {
12217 int j;
12218 int total;
12219
12220 /* We can always save some space for the current directory. But this
12221 does not mean it will be enough to justify adding the directory. */
12222 savehere[i] = dirs[i].length;
12223 total = (savehere[i] - saved[i]) * dirs[i].count;
12224
12225 for (j = i + 1; j < ndirs; j++)
12226 {
12227 savehere[j] = 0;
12228 if (saved[j] < dirs[i].length)
12229 {
12230 /* Determine whether the dirs[i] path is a prefix of the
12231 dirs[j] path. */
12232 int k;
12233
12234 k = dirs[j].prefix;
12235 while (k != -1 && k != (int) i)
12236 k = dirs[k].prefix;
12237
12238 if (k == (int) i)
12239 {
12240 /* Yes it is. We can possibly save some memory by
12241 writing the filenames in dirs[j] relative to
12242 dirs[i]. */
12243 savehere[j] = dirs[i].length;
12244 total += (savehere[j] - saved[j]) * dirs[j].count;
12245 }
12246 }
12247 }
12248
12249 /* Check whether we can save enough to justify adding the dirs[i]
12250 directory. */
12251 if (total > dirs[i].length + 1)
12252 {
12253 /* It's worthwhile adding. */
12254 for (j = i; j < ndirs; j++)
12255 if (savehere[j] > 0)
12256 {
12257 /* Remember how much we saved for this directory so far. */
12258 saved[j] = savehere[j];
12259
12260 /* Remember the prefix directory. */
12261 dirs[j].dir_idx = i;
12262 }
12263 }
12264 }
12265
12266 /* Emit the directory name table. */
12267 idx_offset = dirs[0].length > 0 ? 1 : 0;
12268 enum dwarf_form str_form = DW_FORM_string;
12269 enum dwarf_form idx_form = DW_FORM_udata;
12270 if (dwarf_version >= 5)
12271 {
12272 const char *comp_dir = comp_dir_string ();
12273 if (comp_dir == NULL)
12274 comp_dir = "";
12275 dw2_asm_output_data (1, 1, "Directory entry format count");
12276 if (DWARF5_USE_DEBUG_LINE_STR)
12277 str_form = DW_FORM_line_strp;
12278 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12279 dw2_asm_output_data_uleb128 (str_form, "%s",
12280 get_DW_FORM_name (str_form));
12281 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12282 if (str_form == DW_FORM_string)
12283 {
12284 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12285 for (i = 1 - idx_offset; i < ndirs; i++)
12286 dw2_asm_output_nstring (dirs[i].path,
12287 dirs[i].length
12288 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12289 "Directory Entry: %#x", i + idx_offset);
12290 }
12291 else
12292 {
12293 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12294 for (i = 1 - idx_offset; i < ndirs; i++)
12295 {
12296 const char *str
12297 = ggc_alloc_string (dirs[i].path,
12298 dirs[i].length
12299 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12300 output_line_string (str_form, str, "Directory Entry",
12301 (unsigned) i + idx_offset);
12302 }
12303 }
12304 }
12305 else
12306 {
12307 for (i = 1 - idx_offset; i < ndirs; i++)
12308 dw2_asm_output_nstring (dirs[i].path,
12309 dirs[i].length
12310 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12311 "Directory Entry: %#x", i + idx_offset);
12312
12313 dw2_asm_output_data (1, 0, "End directory table");
12314 }
12315
12316 /* We have to emit them in the order of emitted_number since that's
12317 used in the debug info generation. To do this efficiently we
12318 generate a back-mapping of the indices first. */
12319 backmap = XALLOCAVEC (int, numfiles);
12320 for (i = 0; i < numfiles; i++)
12321 backmap[files[i].file_idx->emitted_number - 1] = i;
12322
12323 if (dwarf_version >= 5)
12324 {
12325 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12326 if (filename0 == NULL)
12327 filename0 = "";
12328       /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12329 	 DW_FORM_data2.  Choose one based on the number of directories
12330 	 and how much space they would occupy in each encoding.
12331 	 If we have at most 256 directories, all indexes fit into
12332 	 a single byte, so DW_FORM_data1 is most compact (if there
12333 	 are at most 128 directories, DW_FORM_udata would be just as
12334 	 compact, but no shorter and slower to decode).  */
12335 if (ndirs + idx_offset <= 256)
12336 idx_form = DW_FORM_data1;
12337       /* If there are more than 65536 directories, we have to use
12338 	 DW_FORM_udata; DW_FORM_data2 can't refer to them.
12339 	 Otherwise, compute the space the indexes would occupy if they all
12340 	 used DW_FORM_udata - sum - and compare that to how large the
12341 	 DW_FORM_data2 encoding would be, and pick the more efficient one.  */
12342 else if (ndirs + idx_offset <= 65536)
12343 {
12344 unsigned HOST_WIDE_INT sum = 1;
12345 for (i = 0; i < numfiles; i++)
12346 {
12347 int file_idx = backmap[i];
12348 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12349 sum += size_of_uleb128 (dir_idx);
12350 }
12351 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12352 idx_form = DW_FORM_data2;
12353 }
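      /* Hypothetical numbers: if most of the directory indexes referenced
	 by file entries are below 128, each uleb128 takes a single byte,
	 SUM stays well below two bytes per file entry and DW_FORM_udata is
	 kept; only when enough indexes need multi-byte uleb128 encodings
	 does the fixed two-byte DW_FORM_data2 become the smaller (or equal
	 but faster to decode) choice.  */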
12354 #ifdef VMS_DEBUGGING_INFO
12355 dw2_asm_output_data (1, 4, "File name entry format count");
12356 #else
12357 dw2_asm_output_data (1, 2, "File name entry format count");
12358 #endif
12359 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12360 dw2_asm_output_data_uleb128 (str_form, "%s",
12361 get_DW_FORM_name (str_form));
12362 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12363 "DW_LNCT_directory_index");
12364 dw2_asm_output_data_uleb128 (idx_form, "%s",
12365 get_DW_FORM_name (idx_form));
12366 #ifdef VMS_DEBUGGING_INFO
12367 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12368 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12369 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12370 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12371 #endif
12372 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12373
12374 output_line_string (str_form, filename0, "File Entry", 0);
12375
12376 /* Include directory index. */
12377 if (idx_form != DW_FORM_udata)
12378 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12379 0, NULL);
12380 else
12381 dw2_asm_output_data_uleb128 (0, NULL);
12382
12383 #ifdef VMS_DEBUGGING_INFO
12384 dw2_asm_output_data_uleb128 (0, NULL);
12385 dw2_asm_output_data_uleb128 (0, NULL);
12386 #endif
12387 }
12388
12389 /* Now write all the file names. */
12390 for (i = 0; i < numfiles; i++)
12391 {
12392 int file_idx = backmap[i];
12393 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12394
12395 #ifdef VMS_DEBUGGING_INFO
12396 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12397
12398 /* Setting these fields can lead to debugger miscomparisons,
12399 but VMS Debug requires them to be set correctly. */
12400
12401 int ver;
12402 long long cdt;
12403 long siz;
12404 int maxfilelen = (strlen (files[file_idx].path)
12405 + dirs[dir_idx].length
12406 + MAX_VMS_VERSION_LEN + 1);
12407 char *filebuf = XALLOCAVEC (char, maxfilelen);
12408
12409 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12410 snprintf (filebuf, maxfilelen, "%s;%d",
12411 files[file_idx].path + dirs[dir_idx].length, ver);
12412
12413 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12414
12415 /* Include directory index. */
12416 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12417 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12418 dir_idx + idx_offset, NULL);
12419 else
12420 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12421
12422 /* Modification time. */
12423 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12424 &cdt, 0, 0, 0) == 0)
12425 ? cdt : 0, NULL);
12426
12427 /* File length in bytes. */
12428 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12429 0, &siz, 0, 0) == 0)
12430 ? siz : 0, NULL);
12431 #else
12432 output_line_string (str_form,
12433 files[file_idx].path + dirs[dir_idx].length,
12434 "File Entry", (unsigned) i + 1);
12435
12436 /* Include directory index. */
12437 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12438 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12439 dir_idx + idx_offset, NULL);
12440 else
12441 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12442
12443 if (dwarf_version >= 5)
12444 continue;
12445
12446 /* Modification time. */
12447 dw2_asm_output_data_uleb128 (0, NULL);
12448
12449 /* File length in bytes. */
12450 dw2_asm_output_data_uleb128 (0, NULL);
12451 #endif /* VMS_DEBUGGING_INFO */
12452 }
12453
12454 if (dwarf_version < 5)
12455 dw2_asm_output_data (1, 0, "End file name table");
12456 }
12457
12458
12459 /* Output one line number table into the .debug_line section. */
12460
12461 static void
12462 output_one_line_info_table (dw_line_info_table *table)
12463 {
12464 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12465 unsigned int current_line = 1;
12466 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12467 dw_line_info_entry *ent, *prev_addr;
12468 size_t i;
12469 unsigned int view;
12470
12471 view = 0;
12472
12473 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12474 {
12475 switch (ent->opcode)
12476 {
12477 case LI_set_address:
12478 /* ??? Unfortunately, we have little choice here currently, and
12479 must always use the most general form. GCC does not know the
12480 address delta itself, so we can't use DW_LNS_advance_pc. Many
12481 ports do have length attributes which will give an upper bound
12482 on the address range. We could perhaps use length attributes
12483 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12484 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12485
12486 view = 0;
12487
12488 /* This can handle any delta. This takes
12489 4+DWARF2_ADDR_SIZE bytes. */
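 /* Extended opcodes are emitted as a zero byte, a uleb128 length, the
 DW_LNE_* sub-opcode and then its operands, which is what the four
 directives below produce. */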
12490 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12491 debug_variable_location_views
12492 ? ", reset view to 0" : "");
12493 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12494 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12495 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12496
12497 prev_addr = ent;
12498 break;
12499
12500 case LI_adv_address:
12501 {
12502 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12503 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12504 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12505
12506 view++;
12507
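 /* DW_LNS_fixed_advance_pc takes a single uhalf operand, so the label
 delta computed by the assembler below must fit in 16 bits. */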
12508 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12509 dw2_asm_output_delta (2, line_label, prev_label,
12510 "from %s to %s", prev_label, line_label);
12511
12512 prev_addr = ent;
12513 break;
12514 }
12515
12516 case LI_set_line:
12517 if (ent->val == current_line)
12518 {
12519 /* We still need to start a new row, so output a copy insn. */
12520 dw2_asm_output_data (1, DW_LNS_copy,
12521 "copy line %u", current_line);
12522 }
12523 else
12524 {
12525 int line_offset = ent->val - current_line;
12526 int line_delta = line_offset - DWARF_LINE_BASE;
12527
12528 current_line = ent->val;
12529 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12530 {
12531 /* This can handle deltas from -10 to 234, using the current
12532 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12533 This takes 1 byte. */
12534 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12535 "line %u", current_line);
12536 }
12537 else
12538 {
12539 /* This can handle any delta. This takes at least 4 bytes,
12540 depending on the value being encoded. */
12541 dw2_asm_output_data (1, DW_LNS_advance_line,
12542 "advance to line %u", current_line);
12543 dw2_asm_output_data_sleb128 (line_offset, NULL);
12544 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12545 }
12546 }
12547 break;
12548
12549 case LI_set_file:
12550 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12551 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12552 break;
12553
12554 case LI_set_column:
12555 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12556 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12557 break;
12558
12559 case LI_negate_stmt:
12560 current_is_stmt = !current_is_stmt;
12561 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12562 "is_stmt %d", current_is_stmt);
12563 break;
12564
12565 case LI_set_prologue_end:
12566 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12567 "set prologue end");
12568 break;
12569
12570 case LI_set_epilogue_begin:
12571 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12572 "set epilogue begin");
12573 break;
12574
12575 case LI_set_discriminator:
12576 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12577 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12578 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12579 dw2_asm_output_data_uleb128 (ent->val, NULL);
12580 break;
12581 }
12582 }
12583
12584 /* Emit debug info for the address of the end of the table. */
12585 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12586 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12587 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12588 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12589
12590 dw2_asm_output_data (1, 0, "end sequence");
12591 dw2_asm_output_data_uleb128 (1, NULL);
12592 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12593 }
12594
12595 /* Output the source line number correspondence information. This
12596 information goes into the .debug_line section. */
12597
12598 static void
12599 output_line_info (bool prologue_only)
12600 {
12601 static unsigned int generation;
12602 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12603 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12604 bool saw_one = false;
12605 int opc;
12606
12607 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12608 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12609 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12610 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12611
12612 if (!XCOFF_DEBUGGING_INFO)
12613 {
12614 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12615 dw2_asm_output_data (4, 0xffffffff,
12616 "Initial length escape value indicating 64-bit DWARF extension");
12617 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12618 "Length of Source Line Info");
12619 }
12620
12621 ASM_OUTPUT_LABEL (asm_out_file, l1);
12622
12623 output_dwarf_version ();
12624 if (dwarf_version >= 5)
12625 {
12626 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12627 dw2_asm_output_data (1, 0, "Segment Size");
12628 }
12629 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12630 ASM_OUTPUT_LABEL (asm_out_file, p1);
12631
12632 /* Define the architecture-dependent minimum instruction length (in bytes).
12633 In this implementation of DWARF, this field is used for information
12634 purposes only. Since GCC generates assembly language, we have no
12635 a priori knowledge of how many instruction bytes are generated for each
12636 source line, and therefore can use only the DW_LNE_set_address and
12637 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12638 this as '1', which is "correct enough" for all architectures,
12639 and don't let the target override. */
12640 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12641
12642 if (dwarf_version >= 4)
12643 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12644 "Maximum Operations Per Instruction");
12645 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12646 "Default is_stmt_start flag");
12647 dw2_asm_output_data (1, DWARF_LINE_BASE,
12648 "Line Base Value (Special Opcodes)");
12649 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12650 "Line Range Value (Special Opcodes)");
12651 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12652 "Special Opcode Base");
12653
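 /* Emit the standard_opcode_lengths array: one byte per standard
 opcode giving its number of uleb128 operands. */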
12654 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12655 {
12656 int n_op_args;
12657 switch (opc)
12658 {
12659 case DW_LNS_advance_pc:
12660 case DW_LNS_advance_line:
12661 case DW_LNS_set_file:
12662 case DW_LNS_set_column:
12663 case DW_LNS_fixed_advance_pc:
12664 case DW_LNS_set_isa:
12665 n_op_args = 1;
12666 break;
12667 default:
12668 n_op_args = 0;
12669 break;
12670 }
12671
12672 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12673 opc, n_op_args);
12674 }
12675
12676 /* Write out the information about the files we use. */
12677 output_file_names ();
12678 ASM_OUTPUT_LABEL (asm_out_file, p2);
12679 if (prologue_only)
12680 {
12681 /* Output the marker for the end of the line number info. */
12682 ASM_OUTPUT_LABEL (asm_out_file, l2);
12683 return;
12684 }
12685
12686 if (separate_line_info)
12687 {
12688 dw_line_info_table *table;
12689 size_t i;
12690
12691 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12692 if (table->in_use)
12693 {
12694 output_one_line_info_table (table);
12695 saw_one = true;
12696 }
12697 }
12698 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12699 {
12700 output_one_line_info_table (cold_text_section_line_info);
12701 saw_one = true;
12702 }
12703
12704 /* ??? Some Darwin linkers crash on a .debug_line section with no
12705 sequences. Further, merely a DW_LNE_end_sequence entry is not
12706 sufficient -- the address column must also be initialized.
12707 Make sure to output at least one set_address/end_sequence pair,
12708 choosing .text since that section is always present. */
12709 if (text_section_line_info->in_use || !saw_one)
12710 output_one_line_info_table (text_section_line_info);
12711
12712 /* Output the marker for the end of the line number info. */
12713 ASM_OUTPUT_LABEL (asm_out_file, l2);
12714 }
12715 \f
12716 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12717
12718 static inline bool
12719 need_endianity_attribute_p (bool reverse)
12720 {
12721 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12722 }
12723
12724 /* Given a pointer to a tree node for some base type, return a pointer to
12725 a DIE that describes the given type. REVERSE is true if the type is
12726 to be interpreted in the reverse storage order wrt the target order.
12727
12728 This routine must only be called for GCC type nodes that correspond to
12729 Dwarf base (fundamental) types. */
12730
12731 static dw_die_ref
12732 base_type_die (tree type, bool reverse)
12733 {
12734 dw_die_ref base_type_result;
12735 enum dwarf_type encoding;
12736 bool fpt_used = false;
12737 struct fixed_point_type_info fpt_info;
12738 tree type_bias = NULL_TREE;
12739
12740 /* If this is a subtype that should not be emitted as a subrange type,
12741 use the base type. See subrange_type_for_debug_p. */
12742 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12743 type = TREE_TYPE (type);
12744
12745 switch (TREE_CODE (type))
12746 {
12747 case INTEGER_TYPE:
12748 if ((dwarf_version >= 4 || !dwarf_strict)
12749 && TYPE_NAME (type)
12750 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12751 && DECL_IS_BUILTIN (TYPE_NAME (type))
12752 && DECL_NAME (TYPE_NAME (type)))
12753 {
12754 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12755 if (strcmp (name, "char16_t") == 0
12756 || strcmp (name, "char32_t") == 0)
12757 {
12758 encoding = DW_ATE_UTF;
12759 break;
12760 }
12761 }
12762 if ((dwarf_version >= 3 || !dwarf_strict)
12763 && lang_hooks.types.get_fixed_point_type_info)
12764 {
12765 memset (&fpt_info, 0, sizeof (fpt_info));
12766 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12767 {
12768 fpt_used = true;
12769 encoding = ((TYPE_UNSIGNED (type))
12770 ? DW_ATE_unsigned_fixed
12771 : DW_ATE_signed_fixed);
12772 break;
12773 }
12774 }
12775 if (TYPE_STRING_FLAG (type))
12776 {
12777 if (TYPE_UNSIGNED (type))
12778 encoding = DW_ATE_unsigned_char;
12779 else
12780 encoding = DW_ATE_signed_char;
12781 }
12782 else if (TYPE_UNSIGNED (type))
12783 encoding = DW_ATE_unsigned;
12784 else
12785 encoding = DW_ATE_signed;
12786
12787 if (!dwarf_strict
12788 && lang_hooks.types.get_type_bias)
12789 type_bias = lang_hooks.types.get_type_bias (type);
12790 break;
12791
12792 case REAL_TYPE:
12793 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12794 {
12795 if (dwarf_version >= 3 || !dwarf_strict)
12796 encoding = DW_ATE_decimal_float;
12797 else
12798 encoding = DW_ATE_lo_user;
12799 }
12800 else
12801 encoding = DW_ATE_float;
12802 break;
12803
12804 case FIXED_POINT_TYPE:
12805 if (!(dwarf_version >= 3 || !dwarf_strict))
12806 encoding = DW_ATE_lo_user;
12807 else if (TYPE_UNSIGNED (type))
12808 encoding = DW_ATE_unsigned_fixed;
12809 else
12810 encoding = DW_ATE_signed_fixed;
12811 break;
12812
12813 /* Dwarf2 doesn't know anything about complex ints, so use
 12814 a user-defined type for them. */
12815 case COMPLEX_TYPE:
12816 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12817 encoding = DW_ATE_complex_float;
12818 else
12819 encoding = DW_ATE_lo_user;
12820 break;
12821
12822 case BOOLEAN_TYPE:
12823 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12824 encoding = DW_ATE_boolean;
12825 break;
12826
12827 default:
12828 /* No other TREE_CODEs are Dwarf fundamental types. */
12829 gcc_unreachable ();
12830 }
12831
12832 base_type_result = new_die_raw (DW_TAG_base_type);
12833
12834 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12835 int_size_in_bytes (type));
12836 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12837
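 /* REVERSE means the scalar is stored in the reverse of the target's
 storage order, hence the opposite DW_END_* value. */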
12838 if (need_endianity_attribute_p (reverse))
12839 add_AT_unsigned (base_type_result, DW_AT_endianity,
12840 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12841
12842 add_alignment_attribute (base_type_result, type);
12843
12844 if (fpt_used)
12845 {
12846 switch (fpt_info.scale_factor_kind)
12847 {
12848 case fixed_point_scale_factor_binary:
12849 add_AT_int (base_type_result, DW_AT_binary_scale,
12850 fpt_info.scale_factor.binary);
12851 break;
12852
12853 case fixed_point_scale_factor_decimal:
12854 add_AT_int (base_type_result, DW_AT_decimal_scale,
12855 fpt_info.scale_factor.decimal);
12856 break;
12857
12858 case fixed_point_scale_factor_arbitrary:
12859 /* Arbitrary scale factors cannot be described in standard DWARF,
12860 yet. */
12861 if (!dwarf_strict)
12862 {
12863 /* Describe the scale factor as a rational constant. */
12864 const dw_die_ref scale_factor
12865 = new_die (DW_TAG_constant, comp_unit_die (), type);
12866
12867 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12868 fpt_info.scale_factor.arbitrary.numerator);
12869 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12870 fpt_info.scale_factor.arbitrary.denominator);
12871
12872 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12873 }
12874 break;
12875
12876 default:
12877 gcc_unreachable ();
12878 }
12879 }
12880
12881 if (type_bias)
12882 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12883 dw_scalar_form_constant
12884 | dw_scalar_form_exprloc
12885 | dw_scalar_form_reference,
12886 NULL);
12887
12888 return base_type_result;
12889 }
12890
12891 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12892 named 'auto' in its type: return true for it, false otherwise. */
12893
12894 static inline bool
12895 is_cxx_auto (tree type)
12896 {
12897 if (is_cxx ())
12898 {
12899 tree name = TYPE_IDENTIFIER (type);
12900 if (name == get_identifier ("auto")
12901 || name == get_identifier ("decltype(auto)"))
12902 return true;
12903 }
12904 return false;
12905 }
12906
12907 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12908 given input type is a Dwarf "fundamental" type. Otherwise return null. */
12909
12910 static inline int
12911 is_base_type (tree type)
12912 {
12913 switch (TREE_CODE (type))
12914 {
12915 case INTEGER_TYPE:
12916 case REAL_TYPE:
12917 case FIXED_POINT_TYPE:
12918 case COMPLEX_TYPE:
12919 case BOOLEAN_TYPE:
12920 return 1;
12921
12922 case VOID_TYPE:
12923 case ARRAY_TYPE:
12924 case RECORD_TYPE:
12925 case UNION_TYPE:
12926 case QUAL_UNION_TYPE:
12927 case ENUMERAL_TYPE:
12928 case FUNCTION_TYPE:
12929 case METHOD_TYPE:
12930 case POINTER_TYPE:
12931 case REFERENCE_TYPE:
12932 case NULLPTR_TYPE:
12933 case OFFSET_TYPE:
12934 case LANG_TYPE:
12935 case VECTOR_TYPE:
12936 return 0;
12937
12938 default:
12939 if (is_cxx_auto (type))
12940 return 0;
12941 gcc_unreachable ();
12942 }
12943
12944 return 0;
12945 }
12946
12947 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12948 node, return the size in bits for the type if it is a constant, or else
12949 return the alignment for the type if the type's size is not constant, or
12950 else return BITS_PER_WORD if the type actually turns out to be an
12951 ERROR_MARK node. */
12952
12953 static inline unsigned HOST_WIDE_INT
12954 simple_type_size_in_bits (const_tree type)
12955 {
12956 if (TREE_CODE (type) == ERROR_MARK)
12957 return BITS_PER_WORD;
12958 else if (TYPE_SIZE (type) == NULL_TREE)
12959 return 0;
12960 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12961 return tree_to_uhwi (TYPE_SIZE (type));
12962 else
12963 return TYPE_ALIGN (type);
12964 }
12965
12966 /* Similarly, but return an offset_int instead of UHWI. */
12967
12968 static inline offset_int
12969 offset_int_type_size_in_bits (const_tree type)
12970 {
12971 if (TREE_CODE (type) == ERROR_MARK)
12972 return BITS_PER_WORD;
12973 else if (TYPE_SIZE (type) == NULL_TREE)
12974 return 0;
12975 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12976 return wi::to_offset (TYPE_SIZE (type));
12977 else
12978 return TYPE_ALIGN (type);
12979 }
12980
12981 /* Given a pointer to a tree node for a subrange type, return a pointer
12982 to a DIE that describes the given type. */
12983
12984 static dw_die_ref
12985 subrange_type_die (tree type, tree low, tree high, tree bias,
12986 dw_die_ref context_die)
12987 {
12988 dw_die_ref subrange_die;
12989 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12990
12991 if (context_die == NULL)
12992 context_die = comp_unit_die ();
12993
12994 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12995
12996 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12997 {
12998 /* The size of the subrange type and its base type do not match,
12999 so we need to generate a size attribute for the subrange type. */
13000 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13001 }
13002
13003 add_alignment_attribute (subrange_die, type);
13004
13005 if (low)
13006 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13007 if (high)
13008 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13009 if (bias && !dwarf_strict)
13010 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13011 dw_scalar_form_constant
13012 | dw_scalar_form_exprloc
13013 | dw_scalar_form_reference,
13014 NULL);
13015
13016 return subrange_die;
13017 }
13018
13019 /* Returns the (const and/or volatile) cv_qualifiers associated with
13020 the decl node. This will normally be augmented with the
13021 cv_qualifiers of the underlying type in add_type_attribute. */
13022
13023 static int
13024 decl_quals (const_tree decl)
13025 {
13026 return ((TREE_READONLY (decl)
13027 /* The C++ front-end correctly marks reference-typed
13028 variables as readonly, but from a language (and debug
13029 info) standpoint they are not const-qualified. */
13030 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13031 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13032 | (TREE_THIS_VOLATILE (decl)
13033 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13034 }
13035
13036 /* Determine the TYPE whose qualifiers match the largest strict subset
13037 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13038 qualifiers outside QUAL_MASK. */
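 For instance, if TYPE is const volatile and a const variant of TYPE
 already exists, this returns TYPE_QUAL_CONST, so the caller only needs
 to wrap a DW_TAG_volatile_type around the existing DIE. */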
13039
13040 static int
13041 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13042 {
13043 tree t;
13044 int best_rank = 0, best_qual = 0, max_rank;
13045
13046 type_quals &= qual_mask;
13047 max_rank = popcount_hwi (type_quals) - 1;
13048
13049 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13050 t = TYPE_NEXT_VARIANT (t))
13051 {
13052 int q = TYPE_QUALS (t) & qual_mask;
13053
13054 if ((q & type_quals) == q && q != type_quals
13055 && check_base_type (t, type))
13056 {
13057 int rank = popcount_hwi (q);
13058
13059 if (rank > best_rank)
13060 {
13061 best_rank = rank;
13062 best_qual = q;
13063 }
13064 }
13065 }
13066
13067 return best_qual;
13068 }
13069
13070 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13071 static const dwarf_qual_info_t dwarf_qual_info[] =
13072 {
13073 { TYPE_QUAL_CONST, DW_TAG_const_type },
13074 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13075 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13076 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13077 };
13078 static const unsigned int dwarf_qual_info_size
13079 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13080
13081 /* If DIE is a qualified DIE of some base DIE with the same parent,
13082 return the base DIE, otherwise return NULL. Set MASK to the
13083 qualifiers added compared to the returned DIE. */
13084
13085 static dw_die_ref
13086 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13087 {
13088 unsigned int i;
13089 for (i = 0; i < dwarf_qual_info_size; i++)
13090 if (die->die_tag == dwarf_qual_info[i].t)
13091 break;
13092 if (i == dwarf_qual_info_size)
13093 return NULL;
13094 if (vec_safe_length (die->die_attr) != 1)
13095 return NULL;
13096 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13097 if (type == NULL || type->die_parent != die->die_parent)
13098 return NULL;
13099 *mask |= dwarf_qual_info[i].q;
13100 if (depth)
13101 {
13102 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13103 if (ret)
13104 return ret;
13105 }
13106 return type;
13107 }
13108
13109 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13110 entry that chains the modifiers specified by CV_QUALS in front of the
13111 given type. REVERSE is true if the type is to be interpreted in the
13112 reverse storage order wrt the target order. */
13113
13114 static dw_die_ref
13115 modified_type_die (tree type, int cv_quals, bool reverse,
13116 dw_die_ref context_die)
13117 {
13118 enum tree_code code = TREE_CODE (type);
13119 dw_die_ref mod_type_die;
13120 dw_die_ref sub_die = NULL;
13121 tree item_type = NULL;
13122 tree qualified_type;
13123 tree name, low, high;
13124 dw_die_ref mod_scope;
13125 /* Only these cv-qualifiers are currently handled. */
13126 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13127 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13128 ENCODE_QUAL_ADDR_SPACE(~0U));
13129 const bool reverse_base_type
13130 = need_endianity_attribute_p (reverse) && is_base_type (type);
13131
13132 if (code == ERROR_MARK)
13133 return NULL;
13134
13135 if (lang_hooks.types.get_debug_type)
13136 {
13137 tree debug_type = lang_hooks.types.get_debug_type (type);
13138
13139 if (debug_type != NULL_TREE && debug_type != type)
13140 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13141 }
13142
13143 cv_quals &= cv_qual_mask;
13144
13145 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
 13146 tag modifier (and not an attribute), old consumers won't be able
13147 to handle it. */
13148 if (dwarf_version < 3)
13149 cv_quals &= ~TYPE_QUAL_RESTRICT;
13150
13151 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13152 if (dwarf_version < 5)
13153 cv_quals &= ~TYPE_QUAL_ATOMIC;
13154
13155 /* See if we already have the appropriately qualified variant of
13156 this type. */
13157 qualified_type = get_qualified_type (type, cv_quals);
13158
13159 if (qualified_type == sizetype)
13160 {
13161 /* Try not to expose the internal sizetype type's name. */
13162 if (TYPE_NAME (qualified_type)
13163 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13164 {
13165 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13166
13167 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13168 && (TYPE_PRECISION (t)
13169 == TYPE_PRECISION (qualified_type))
13170 && (TYPE_UNSIGNED (t)
13171 == TYPE_UNSIGNED (qualified_type)));
13172 qualified_type = t;
13173 }
13174 else if (qualified_type == sizetype
13175 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13176 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13177 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13178 qualified_type = size_type_node;
13179 }
13180
13181 /* If we do, then we can just use its DIE, if it exists. */
13182 if (qualified_type)
13183 {
13184 mod_type_die = lookup_type_die (qualified_type);
13185
13186 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13187 dealt with specially: the DIE with the attribute, if it exists, is
13188 placed immediately after the regular DIE for the same base type. */
13189 if (mod_type_die
13190 && (!reverse_base_type
13191 || ((mod_type_die = mod_type_die->die_sib) != NULL
13192 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13193 return mod_type_die;
13194 }
13195
13196 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13197
13198 /* Handle C typedef types. */
13199 if (name
13200 && TREE_CODE (name) == TYPE_DECL
13201 && DECL_ORIGINAL_TYPE (name)
13202 && !DECL_ARTIFICIAL (name))
13203 {
13204 tree dtype = TREE_TYPE (name);
13205
13206 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13207 if (qualified_type == dtype && !reverse_base_type)
13208 {
13209 tree origin = decl_ultimate_origin (name);
13210
13211 /* Typedef variants that have an abstract origin don't get their own
13212 type DIE (see gen_typedef_die), so fall back on the ultimate
13213 abstract origin instead. */
13214 if (origin != NULL && origin != name)
13215 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13216 context_die);
13217
13218 /* For a named type, use the typedef. */
13219 gen_type_die (qualified_type, context_die);
13220 return lookup_type_die (qualified_type);
13221 }
13222 else
13223 {
13224 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13225 dquals &= cv_qual_mask;
13226 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13227 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13228 /* cv-unqualified version of named type. Just use
13229 the unnamed type to which it refers. */
13230 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13231 reverse, context_die);
13232 /* Else cv-qualified version of named type; fall through. */
13233 }
13234 }
13235
13236 mod_scope = scope_die_for (type, context_die);
13237
13238 if (cv_quals)
13239 {
13240 int sub_quals = 0, first_quals = 0;
13241 unsigned i;
13242 dw_die_ref first = NULL, last = NULL;
13243
13244 /* Determine a lesser qualified type that most closely matches
13245 this one. Then generate DW_TAG_* entries for the remaining
13246 qualifiers. */
13247 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13248 cv_qual_mask);
13249 if (sub_quals && use_debug_types)
13250 {
13251 bool needed = false;
13252 /* If emitting type units, make sure the order of qualifiers
13253 is canonical. Thus, start from unqualified type if
13254 an earlier qualifier is missing in sub_quals, but some later
13255 one is present there. */
13256 for (i = 0; i < dwarf_qual_info_size; i++)
13257 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13258 needed = true;
13259 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13260 {
13261 sub_quals = 0;
13262 break;
13263 }
13264 }
13265 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13266 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13267 {
13268 /* As not all intermediate qualified DIEs have corresponding
13269 tree types, ensure that qualified DIEs in the same scope
13270 as their DW_AT_type are emitted after their DW_AT_type,
13271 only with other qualified DIEs for the same type possibly
13272 in between them. Determine the range of such qualified
13273 DIEs now (first being the base type, last being corresponding
13274 last qualified DIE for it). */
13275 unsigned int count = 0;
13276 first = qualified_die_p (mod_type_die, &first_quals,
13277 dwarf_qual_info_size);
13278 if (first == NULL)
13279 first = mod_type_die;
13280 gcc_assert ((first_quals & ~sub_quals) == 0);
13281 for (count = 0, last = first;
13282 count < (1U << dwarf_qual_info_size);
13283 count++, last = last->die_sib)
13284 {
13285 int quals = 0;
13286 if (last == mod_scope->die_child)
13287 break;
13288 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13289 != first)
13290 break;
13291 }
13292 }
13293
13294 for (i = 0; i < dwarf_qual_info_size; i++)
13295 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13296 {
13297 dw_die_ref d;
13298 if (first && first != last)
13299 {
13300 for (d = first->die_sib; ; d = d->die_sib)
13301 {
13302 int quals = 0;
13303 qualified_die_p (d, &quals, dwarf_qual_info_size);
13304 if (quals == (first_quals | dwarf_qual_info[i].q))
13305 break;
13306 if (d == last)
13307 {
13308 d = NULL;
13309 break;
13310 }
13311 }
13312 if (d)
13313 {
13314 mod_type_die = d;
13315 continue;
13316 }
13317 }
13318 if (first)
13319 {
13320 d = new_die_raw (dwarf_qual_info[i].t);
13321 add_child_die_after (mod_scope, d, last);
13322 last = d;
13323 }
13324 else
13325 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13326 if (mod_type_die)
13327 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13328 mod_type_die = d;
13329 first_quals |= dwarf_qual_info[i].q;
13330 }
13331 }
13332 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13333 {
13334 dwarf_tag tag = DW_TAG_pointer_type;
13335 if (code == REFERENCE_TYPE)
13336 {
13337 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13338 tag = DW_TAG_rvalue_reference_type;
13339 else
13340 tag = DW_TAG_reference_type;
13341 }
13342 mod_type_die = new_die (tag, mod_scope, type);
13343
13344 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13345 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13346 add_alignment_attribute (mod_type_die, type);
13347 item_type = TREE_TYPE (type);
13348
13349 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13350 if (!ADDR_SPACE_GENERIC_P (as))
13351 {
13352 int action = targetm.addr_space.debug (as);
13353 if (action >= 0)
13354 {
13355 /* Positive values indicate an address_class. */
13356 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13357 }
13358 else
13359 {
13360 /* Negative values indicate an (inverted) segment base reg. */
13361 dw_loc_descr_ref d
13362 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13363 add_AT_loc (mod_type_die, DW_AT_segment, d);
13364 }
13365 }
13366 }
13367 else if (code == INTEGER_TYPE
13368 && TREE_TYPE (type) != NULL_TREE
13369 && subrange_type_for_debug_p (type, &low, &high))
13370 {
13371 tree bias = NULL_TREE;
13372 if (lang_hooks.types.get_type_bias)
13373 bias = lang_hooks.types.get_type_bias (type);
13374 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13375 item_type = TREE_TYPE (type);
13376 }
13377 else if (is_base_type (type))
13378 {
13379 mod_type_die = base_type_die (type, reverse);
13380
13381 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13382 if (reverse_base_type)
13383 {
13384 dw_die_ref after_die
13385 = modified_type_die (type, cv_quals, false, context_die);
13386 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13387 }
13388 else
13389 add_child_die (comp_unit_die (), mod_type_die);
13390
13391 add_pubtype (type, mod_type_die);
13392 }
13393 else
13394 {
13395 gen_type_die (type, context_die);
13396
13397 /* We have to get the type_main_variant here (and pass that to the
13398 `lookup_type_die' routine) because the ..._TYPE node we have
13399 might simply be a *copy* of some original type node (where the
13400 copy was created to help us keep track of typedef names) and
13401 that copy might have a different TYPE_UID from the original
13402 ..._TYPE node. */
13403 if (TREE_CODE (type) == FUNCTION_TYPE
13404 || TREE_CODE (type) == METHOD_TYPE)
13405 {
13406 /* For function/method types, can't just use type_main_variant here,
13407 because that can have different ref-qualifiers for C++,
13408 but try to canonicalize. */
13409 tree main = TYPE_MAIN_VARIANT (type);
13410 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13411 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13412 && check_base_type (t, main)
13413 && check_lang_type (t, type))
13414 return lookup_type_die (t);
13415 return lookup_type_die (type);
13416 }
13417 else if (TREE_CODE (type) != VECTOR_TYPE
13418 && TREE_CODE (type) != ARRAY_TYPE)
13419 return lookup_type_die (type_main_variant (type));
13420 else
13421 /* Vectors have the debugging information in the type,
13422 not the main variant. */
13423 return lookup_type_die (type);
13424 }
13425
13426 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13427 don't output a DW_TAG_typedef, since there isn't one in the
13428 user's program; just attach a DW_AT_name to the type.
13429 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13430 if the base type already has the same name. */
13431 if (name
13432 && ((TREE_CODE (name) != TYPE_DECL
13433 && (qualified_type == TYPE_MAIN_VARIANT (type)
13434 || (cv_quals == TYPE_UNQUALIFIED)))
13435 || (TREE_CODE (name) == TYPE_DECL
13436 && TREE_TYPE (name) == qualified_type
13437 && DECL_NAME (name))))
13438 {
13439 if (TREE_CODE (name) == TYPE_DECL)
13440 /* Could just call add_name_and_src_coords_attributes here,
13441 but since this is a builtin type it doesn't have any
13442 useful source coordinates anyway. */
13443 name = DECL_NAME (name);
13444 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13445 }
13446 /* This probably indicates a bug. */
13447 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13448 {
13449 name = TYPE_IDENTIFIER (type);
13450 add_name_attribute (mod_type_die,
13451 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13452 }
13453
13454 if (qualified_type && !reverse_base_type)
13455 equate_type_number_to_die (qualified_type, mod_type_die);
13456
13457 if (item_type)
13458 /* We must do this after the equate_type_number_to_die call, in case
13459 this is a recursive type. This ensures that the modified_type_die
13460 recursion will terminate even if the type is recursive. Recursive
13461 types are possible in Ada. */
13462 sub_die = modified_type_die (item_type,
13463 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13464 reverse,
13465 context_die);
13466
13467 if (sub_die != NULL)
13468 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13469
13470 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13471 if (TYPE_ARTIFICIAL (type))
13472 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13473
13474 return mod_type_die;
13475 }
13476
13477 /* Generate DIEs for the generic parameters of T.
13478 T must be either a generic type or a generic function.
13479 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13480
13481 static void
13482 gen_generic_params_dies (tree t)
13483 {
13484 tree parms, args;
13485 int parms_num, i;
13486 dw_die_ref die = NULL;
13487 int non_default;
13488
13489 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13490 return;
13491
13492 if (TYPE_P (t))
13493 die = lookup_type_die (t);
13494 else if (DECL_P (t))
13495 die = lookup_decl_die (t);
13496
13497 gcc_assert (die);
13498
13499 parms = lang_hooks.get_innermost_generic_parms (t);
13500 if (!parms)
 13501 /* T has no generic parameter. It means T is neither a generic type
 13502 nor a generic function. End of story. */
13503 return;
13504
13505 parms_num = TREE_VEC_LENGTH (parms);
13506 args = lang_hooks.get_innermost_generic_args (t);
13507 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13508 non_default = int_cst_value (TREE_CHAIN (args));
13509 else
13510 non_default = TREE_VEC_LENGTH (args);
13511 for (i = 0; i < parms_num; i++)
13512 {
13513 tree parm, arg, arg_pack_elems;
13514 dw_die_ref parm_die;
13515
13516 parm = TREE_VEC_ELT (parms, i);
13517 arg = TREE_VEC_ELT (args, i);
13518 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13519 gcc_assert (parm && TREE_VALUE (parm) && arg);
13520
13521 if (parm && TREE_VALUE (parm) && arg)
13522 {
13523 /* If PARM represents a template parameter pack,
13524 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13525 by DW_TAG_template_*_parameter DIEs for the argument
13526 pack elements of ARG. Note that ARG would then be
13527 an argument pack. */
13528 if (arg_pack_elems)
13529 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13530 arg_pack_elems,
13531 die);
13532 else
13533 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13534 true /* emit name */, die);
13535 if (i >= non_default)
13536 add_AT_flag (parm_die, DW_AT_default_value, 1);
13537 }
13538 }
13539 }
13540
13541 /* Create and return a DIE for PARM which should be
13542 the representation of a generic type parameter.
13543 For instance, in the C++ front end, PARM would be a template parameter.
13544 ARG is the argument to PARM.
 13545 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute
 13546 set to the name of PARM.
13547 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13548 as a child node. */
13549
13550 static dw_die_ref
13551 generic_parameter_die (tree parm, tree arg,
13552 bool emit_name_p,
13553 dw_die_ref parent_die)
13554 {
13555 dw_die_ref tmpl_die = NULL;
13556 const char *name = NULL;
13557
13558 if (!parm || !DECL_NAME (parm) || !arg)
13559 return NULL;
13560
13561 /* We support non-type generic parameters and arguments,
13562 type generic parameters and arguments, as well as
13563 generic generic parameters (a.k.a. template template parameters in C++)
13564 and arguments. */
13565 if (TREE_CODE (parm) == PARM_DECL)
 13566 /* PARM is a non-type generic parameter. */
13567 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13568 else if (TREE_CODE (parm) == TYPE_DECL)
13569 /* PARM is a type generic parameter. */
13570 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13571 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13572 /* PARM is a generic generic parameter.
13573 Its DIE is a GNU extension. It shall have a
13574 DW_AT_name attribute to represent the name of the template template
13575 parameter, and a DW_AT_GNU_template_name attribute to represent the
13576 name of the template template argument. */
13577 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13578 parent_die, parm);
13579 else
13580 gcc_unreachable ();
13581
13582 if (tmpl_die)
13583 {
13584 tree tmpl_type;
13585
13586 /* If PARM is a generic parameter pack, it means we are
13587 emitting debug info for a template argument pack element.
13588 In other terms, ARG is a template argument pack element.
13589 In that case, we don't emit any DW_AT_name attribute for
13590 the die. */
13591 if (emit_name_p)
13592 {
13593 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13594 gcc_assert (name);
13595 add_AT_string (tmpl_die, DW_AT_name, name);
13596 }
13597
13598 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13599 {
13600 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13601 TMPL_DIE should have a child DW_AT_type attribute that is set
13602 to the type of the argument to PARM, which is ARG.
13603 If PARM is a type generic parameter, TMPL_DIE should have a
13604 child DW_AT_type that is set to ARG. */
13605 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13606 add_type_attribute (tmpl_die, tmpl_type,
13607 (TREE_THIS_VOLATILE (tmpl_type)
13608 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13609 false, parent_die);
13610 }
13611 else
13612 {
 13613 /* So TMPL_DIE is a DIE representing a generic generic template
 13614 parameter, a.k.a. a template template parameter in C++, and ARG
 13615 is a template. */
13616
13617 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13618 to the name of the argument. */
13619 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13620 if (name)
13621 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13622 }
13623
13624 if (TREE_CODE (parm) == PARM_DECL)
 13625 /* So PARM is a non-type generic parameter.
 13626 DWARF3 5.6.8 says we must set a DW_AT_const_value child
 13627 attribute of TMPL_DIE whose value represents the value
 13628 of ARG.
 13629 We must be careful here:
 13630 the value of ARG might reference some function decls.
 13631 We might currently be emitting debug info for a generic
 13632 type and types are emitted before function decls, so we don't
 13633 know whether the function decls referenced by ARG will actually
 13634 be emitted after the cgraph computations.
 13635 So we must defer the generation of the DW_AT_const_value until
 13636 after cgraph is ready. */
13637 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13638 }
13639
13640 return tmpl_die;
13641 }
13642
 13643 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
 13644 PARM_PACK, a template parameter pack, with arguments given by
 13645 PARM_PACK_ARGS. The returned DIE will be a child DIE of PARENT_DIE. */
13646
13647 static dw_die_ref
13648 template_parameter_pack_die (tree parm_pack,
13649 tree parm_pack_args,
13650 dw_die_ref parent_die)
13651 {
13652 dw_die_ref die;
13653 int j;
13654
13655 gcc_assert (parent_die && parm_pack);
13656
13657 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13658 add_name_and_src_coords_attributes (die, parm_pack);
13659 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13660 generic_parameter_die (parm_pack,
13661 TREE_VEC_ELT (parm_pack_args, j),
13662 false /* Don't emit DW_AT_name */,
13663 die);
13664 return die;
13665 }
13666
13667 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13668 an enumerated type. */
13669
13670 static inline int
13671 type_is_enum (const_tree type)
13672 {
13673 return TREE_CODE (type) == ENUMERAL_TYPE;
13674 }
13675
13676 /* Return the DBX register number described by a given RTL node. */
13677
13678 static unsigned int
13679 dbx_reg_number (const_rtx rtl)
13680 {
13681 unsigned regno = REGNO (rtl);
13682
13683 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13684
13685 #ifdef LEAF_REG_REMAP
13686 if (crtl->uses_only_leaf_regs)
13687 {
13688 int leaf_reg = LEAF_REG_REMAP (regno);
13689 if (leaf_reg != -1)
13690 regno = (unsigned) leaf_reg;
13691 }
13692 #endif
13693
13694 regno = DBX_REGISTER_NUMBER (regno);
13695 gcc_assert (regno != INVALID_REGNUM);
13696 return regno;
13697 }
13698
13699 /* Optionally add a DW_OP_piece term to a location description expression.
 13700 DW_OP_piece is only added if the location description expression does
 13701 not already end with DW_OP_piece. */
13702
13703 static void
13704 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13705 {
13706 dw_loc_descr_ref loc;
13707
13708 if (*list_head != NULL)
13709 {
13710 /* Find the end of the chain. */
13711 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13712 ;
13713
13714 if (loc->dw_loc_opc != DW_OP_piece)
13715 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13716 }
13717 }
13718
13719 /* Return a location descriptor that designates a machine register or
13720 zero if there is none. */
13721
13722 static dw_loc_descr_ref
13723 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13724 {
13725 rtx regs;
13726
13727 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13728 return 0;
13729
13730 /* We only use "frame base" when we're sure we're talking about the
13731 post-prologue local stack frame. We do this by *not* running
13732 register elimination until this point, and recognizing the special
13733 argument pointer and soft frame pointer rtx's.
13734 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13735 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13736 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13737 {
13738 dw_loc_descr_ref result = NULL;
13739
13740 if (dwarf_version >= 4 || !dwarf_strict)
13741 {
13742 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13743 initialized);
13744 if (result)
13745 add_loc_descr (&result,
13746 new_loc_descr (DW_OP_stack_value, 0, 0));
13747 }
13748 return result;
13749 }
13750
13751 regs = targetm.dwarf_register_span (rtl);
13752
13753 if (REG_NREGS (rtl) > 1 || regs)
13754 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13755 else
13756 {
13757 unsigned int dbx_regnum = dbx_reg_number (rtl);
13758 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13759 return 0;
13760 return one_reg_loc_descriptor (dbx_regnum, initialized);
13761 }
13762 }
13763
13764 /* Return a location descriptor that designates a machine register for
13765 a given hard register number. */
13766
13767 static dw_loc_descr_ref
13768 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13769 {
13770 dw_loc_descr_ref reg_loc_descr;
13771
13772 if (regno <= 31)
13773 reg_loc_descr
13774 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13775 else
13776 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13777
13778 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13779 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13780
13781 return reg_loc_descr;
13782 }
13783
13784 /* Given an RTL of a register, return a location descriptor that
13785 designates a value that spans more than one register. */
13786
13787 static dw_loc_descr_ref
13788 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13789 enum var_init_status initialized)
13790 {
13791 int size, i;
13792 dw_loc_descr_ref loc_result = NULL;
13793
13794 /* Simple, contiguous registers. */
13795 if (regs == NULL_RTX)
13796 {
13797 unsigned reg = REGNO (rtl);
13798 int nregs;
13799
13800 #ifdef LEAF_REG_REMAP
13801 if (crtl->uses_only_leaf_regs)
13802 {
13803 int leaf_reg = LEAF_REG_REMAP (reg);
13804 if (leaf_reg != -1)
13805 reg = (unsigned) leaf_reg;
13806 }
13807 #endif
13808
13809 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13810 nregs = REG_NREGS (rtl);
13811
13812 /* At present we only track constant-sized pieces. */
13813 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13814 return NULL;
13815 size /= nregs;
13816
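 /* Emit one register/DW_OP_piece pair per hard register; e.g. an
 8-byte value living in two 4-byte registers becomes
 DW_OP_regX DW_OP_piece 4 DW_OP_regY DW_OP_piece 4. */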
13817 loc_result = NULL;
13818 while (nregs--)
13819 {
13820 dw_loc_descr_ref t;
13821
13822 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13823 VAR_INIT_STATUS_INITIALIZED);
13824 add_loc_descr (&loc_result, t);
13825 add_loc_descr_op_piece (&loc_result, size);
13826 ++reg;
13827 }
13828 return loc_result;
13829 }
13830
 13831 /* Now onto stupid register sets in non-contiguous locations. */
13832
13833 gcc_assert (GET_CODE (regs) == PARALLEL);
13834
13835 /* At present we only track constant-sized pieces. */
13836 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13837 return NULL;
13838 loc_result = NULL;
13839
13840 for (i = 0; i < XVECLEN (regs, 0); ++i)
13841 {
13842 dw_loc_descr_ref t;
13843
13844 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13845 VAR_INIT_STATUS_INITIALIZED);
13846 add_loc_descr (&loc_result, t);
13847 add_loc_descr_op_piece (&loc_result, size);
13848 }
13849
13850 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13851 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13852 return loc_result;
13853 }
13854
13855 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13856
13857 /* Return a location descriptor that designates a constant i,
13858 as a compound operation from constant (i >> shift), constant shift
13859 and DW_OP_shl. */
13860
13861 static dw_loc_descr_ref
13862 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13863 {
13864 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13865 add_loc_descr (&ret, int_loc_descriptor (shift));
13866 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13867 return ret;
13868 }
13869
13870 /* Return a location descriptor that designates constant POLY_I. */
13871
13872 static dw_loc_descr_ref
13873 int_loc_descriptor (poly_int64 poly_i)
13874 {
13875 enum dwarf_location_atom op;
13876
13877 HOST_WIDE_INT i;
13878 if (!poly_i.is_constant (&i))
13879 {
13880 /* Create location descriptions for the non-constant part and
13881 add any constant offset at the end. */
13882 dw_loc_descr_ref ret = NULL;
13883 HOST_WIDE_INT constant = poly_i.coeffs[0];
13884 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13885 {
13886 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13887 if (coeff != 0)
13888 {
13889 dw_loc_descr_ref start = ret;
13890 unsigned int factor;
13891 int bias;
13892 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13893 (j, &factor, &bias);
13894
13895 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13896 add COEFF * (REGNO / FACTOR) now and subtract
13897 COEFF * BIAS from the final constant part. */
13898 constant -= coeff * bias;
13899 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13900 if (coeff % factor == 0)
13901 coeff /= factor;
13902 else
13903 {
13904 int amount = exact_log2 (factor);
13905 gcc_assert (amount >= 0);
13906 add_loc_descr (&ret, int_loc_descriptor (amount));
13907 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13908 }
13909 if (coeff != 1)
13910 {
13911 add_loc_descr (&ret, int_loc_descriptor (coeff));
13912 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13913 }
13914 if (start)
13915 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13916 }
13917 }
13918 loc_descr_plus_const (&ret, constant);
13919 return ret;
13920 }
13921
13922 /* Pick the smallest representation of a constant, rather than just
13923 defaulting to the LEB encoding. */
13924 if (i >= 0)
13925 {
13926 int clz = clz_hwi (i);
13927 int ctz = ctz_hwi (i);
13928 if (i <= 31)
13929 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13930 else if (i <= 0xff)
13931 op = DW_OP_const1u;
13932 else if (i <= 0xffff)
13933 op = DW_OP_const2u;
13934 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13935 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13936 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13937 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13938 while DW_OP_const4u is 5 bytes. */
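 For instance, 0x48000000 == 18 << 26 is emitted as
 DW_OP_lit18 DW_OP_lit26 DW_OP_shl (3 bytes) rather than
 DW_OP_const4u 0x48000000 (5 bytes). */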
13939 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13940 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13941 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13942 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13943 while DW_OP_const4u is 5 bytes. */
13944 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13945
13946 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13947 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13948 <= 4)
13949 {
13950 /* As i >= 2**31, the double cast above will yield a negative number.
13951 Since wrapping is defined in DWARF expressions we can output big
13952 positive integers as small negative ones, regardless of the size
13953 of host wide ints.
13954
13955 Here, since the evaluator will handle 32-bit values and since i >=
13956 2**31, we know it's going to be interpreted as a negative literal:
13957 store it this way if we can do better than 5 bytes this way. */
13958 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13959 }
13960 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13961 op = DW_OP_const4u;
13962
13963 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13964 least 6 bytes: see if we can do better before falling back to it. */
13965 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13966 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13967 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13968 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13969 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13970 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13971 >= HOST_BITS_PER_WIDE_INT)
13972 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13973 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13974 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13975 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13976 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13977 && size_of_uleb128 (i) > 6)
13978 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13979 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13980 else
13981 op = DW_OP_constu;
13982 }
13983 else
13984 {
13985 if (i >= -0x80)
13986 op = DW_OP_const1s;
13987 else if (i >= -0x8000)
13988 op = DW_OP_const2s;
13989 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13990 {
13991 if (size_of_int_loc_descriptor (i) < 5)
13992 {
13993 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13994 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13995 return ret;
13996 }
13997 op = DW_OP_const4s;
13998 }
13999 else
14000 {
14001 if (size_of_int_loc_descriptor (i)
14002 < (unsigned long) 1 + size_of_sleb128 (i))
14003 {
14004 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14005 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14006 return ret;
14007 }
14008 op = DW_OP_consts;
14009 }
14010 }
14011
14012 return new_loc_descr (op, i, 0);
14013 }
14014
14015 /* Likewise, for unsigned constants. */
14016
14017 static dw_loc_descr_ref
14018 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14019 {
14020 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14021 const unsigned HOST_WIDE_INT max_uint
14022 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14023
14024 /* If possible, use the clever signed constants handling. */
14025 if (i <= max_int)
14026 return int_loc_descriptor ((HOST_WIDE_INT) i);
14027
14028 /* Here, we are left with positive numbers that cannot be represented as
14029 HOST_WIDE_INT, i.e.:
14030 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14031
 14032 Using a DW_OP_const4u/const8u operation to encode them consumes a lot
 14033 of bytes, whereas it may be better to output a negative integer: thanks
 14034 to integer wrapping, we know that:
 14035 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
 14036 = x - 2 * (max (HOST_WIDE_INT) + 1)
 14037 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
 14038 small negative integers. Let's try that in cases where it will clearly
 14039 improve the encoding: there is no gain in turning DW_OP_const4u into
 14040 DW_OP_const4s. */
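 /* For instance, with 64-bit addresses and a 64-bit HOST_WIDE_INT,
 ~(unsigned HOST_WIDE_INT) 0 yields second_shift == -1 below and is
 emitted as DW_OP_const1s -1 (2 bytes) instead of DW_OP_const8u
 (9 bytes). */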
14041 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14042 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14043 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14044 {
14045 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14046
14047 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14048 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14049 const HOST_WIDE_INT second_shift
14050 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14051
14052 /* So we finally have:
14053 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14054 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14055 return int_loc_descriptor (second_shift);
14056 }
14057
 14058 /* Last chance: fall back to a simple constant operation. */
14059 return new_loc_descr
14060 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14061 ? DW_OP_const4u
14062 : DW_OP_const8u,
14063 i, 0);
14064 }
14065
14066 /* Generate and return a location description that computes the unsigned
14067 comparison of the two stack top entries (a OP b where b is the top-most
14068 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14069 LE_EXPR, GT_EXPR or GE_EXPR. */
14070
14071 static dw_loc_descr_ref
14072 uint_comparison_loc_list (enum tree_code kind)
14073 {
14074 enum dwarf_location_atom op, flip_op;
14075 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14076
14077 switch (kind)
14078 {
14079 case LT_EXPR:
14080 op = DW_OP_lt;
14081 break;
14082 case LE_EXPR:
14083 op = DW_OP_le;
14084 break;
14085 case GT_EXPR:
14086 op = DW_OP_gt;
14087 break;
14088 case GE_EXPR:
14089 op = DW_OP_ge;
14090 break;
14091 default:
14092 gcc_unreachable ();
14093 }
14094
14095 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14096 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14097
14098 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14099 possible to perform unsigned comparisons: we just have to distinguish
 14100 two cases:
14101
14102 1. when a and b have the same sign (as signed integers); then we should
14103 return: a OP(signed) b;
14104
14105 2. when a is a negative signed integer while b is a positive one, then a
14106 is a greater unsigned integer than b; likewise when a and b's roles
14107 are flipped.
14108
14109 So first, compare the sign of the two operands. */
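 /* The expression built below is therefore:
 DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
 <signed comparison> DW_OP_skip <L2> L1: <flipped comparison>
 L2: DW_OP_nop */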
14110 ret = new_loc_descr (DW_OP_over, 0, 0);
14111 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14112 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14113 /* If they have different signs (i.e. they have different sign bits), then
14114 the stack top value now has the sign bit set and is thus smaller than
14115 zero. */
14116 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14117 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14118 add_loc_descr (&ret, bra_node);
14119
14120 /* We are in case 1. At this point, we know both operands have the same
14121 sign, so it's safe to use the built-in signed comparison. */
14122 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14123 add_loc_descr (&ret, jmp_node);
14124
14125 /* We are in case 2. Here, we know both operands do not have the same sign,
14126 so we have to flip the signed comparison. */
14127 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14128 tmp = new_loc_descr (flip_op, 0, 0);
14129 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14130 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14131 add_loc_descr (&ret, tmp);
14132
14133 /* This dummy operation is necessary to make the two branches join. */
14134 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14135 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14136 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14137 add_loc_descr (&ret, tmp);
14138
14139 return ret;
14140 }
14141
14142 /* Likewise, but takes the location description lists (might be destructive on
14143 them). Return NULL if either is NULL or if concatenation fails. */
14144
14145 static dw_loc_list_ref
14146 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14147 enum tree_code kind)
14148 {
14149 if (left == NULL || right == NULL)
14150 return NULL;
14151
14152 add_loc_list (&left, right);
14153 if (left == NULL)
14154 return NULL;
14155
14156 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14157 return left;
14158 }
14159
14160 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14161 without actually allocating it. */
14162
14163 static unsigned long
14164 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14165 {
14166 return size_of_int_loc_descriptor (i >> shift)
14167 + size_of_int_loc_descriptor (shift)
14168 + 1;
14169 }
14170
14171 /* Return size_of_locs (int_loc_descriptor (i)) without
14172 actually allocating it. */
14173
14174 static unsigned long
14175 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14176 {
14177 unsigned long s;
14178
14179 if (i >= 0)
14180 {
14181 int clz, ctz;
14182 if (i <= 31)
14183 return 1;
14184 else if (i <= 0xff)
14185 return 2;
14186 else if (i <= 0xffff)
14187 return 3;
14188 clz = clz_hwi (i);
14189 ctz = ctz_hwi (i);
14190 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14191 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14192 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14193 - clz - 5);
14194 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14195 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14196 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14197 - clz - 8);
14198 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14199 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14200 <= 4)
14201 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14202 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14203 return 5;
14204 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14205 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14206 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14207 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14208 - clz - 8);
14209 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14210 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14211 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14212 - clz - 16);
14213 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14214 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14215 && s > 6)
14216 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14217 - clz - 32);
14218 else
14219 return 1 + s;
14220 }
14221 else
14222 {
14223 if (i >= -0x80)
14224 return 2;
14225 else if (i >= -0x8000)
14226 return 3;
14227 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14228 {
14229 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14230 {
14231 s = size_of_int_loc_descriptor (-i) + 1;
14232 if (s < 5)
14233 return s;
14234 }
14235 return 5;
14236 }
14237 else
14238 {
14239 unsigned long r = 1 + size_of_sleb128 (i);
14240 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14241 {
14242 s = size_of_int_loc_descriptor (-i) + 1;
14243 if (s < r)
14244 return s;
14245 }
14246 return r;
14247 }
14248 }
14249 }
14250
14251 /* Return a location description representing the "address" of an integer
14252 value. This can appear only as a toplevel expression. */
14253
14254 static dw_loc_descr_ref
14255 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14256 {
14257 int litsize;
14258 dw_loc_descr_ref loc_result = NULL;
14259
14260 if (!(dwarf_version >= 4 || !dwarf_strict))
14261 return NULL;
14262
14263 litsize = size_of_int_loc_descriptor (i);
14264 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14265 is more compact. For DW_OP_stack_value we need:
14266 litsize + 1 (DW_OP_stack_value)
14267 and for DW_OP_implicit_value:
14268 1 (DW_OP_implicit_value) + 1 (length) + size. */
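/* For example (assuming DWARF2_ADDR_SIZE == 4): with size == 4 and i == 1000,
   litsize is 3 (DW_OP_const2u), so DW_OP_const2u <1000>; DW_OP_stack_value
   (4 bytes) wins over DW_OP_implicit_value 4 <1000> (6 bytes); for size == 8
   the first test fails and DW_OP_implicit_value is used instead. */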
14269 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14270 {
14271 loc_result = int_loc_descriptor (i);
14272 add_loc_descr (&loc_result,
14273 new_loc_descr (DW_OP_stack_value, 0, 0));
14274 return loc_result;
14275 }
14276
14277 loc_result = new_loc_descr (DW_OP_implicit_value,
14278 size, 0);
14279 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14280 loc_result->dw_loc_oprnd2.v.val_int = i;
14281 return loc_result;
14282 }
14283
14284 /* Return a location descriptor that designates a base+offset location. */
14285
14286 static dw_loc_descr_ref
14287 based_loc_descr (rtx reg, poly_int64 offset,
14288 enum var_init_status initialized)
14289 {
14290 unsigned int regno;
14291 dw_loc_descr_ref result;
14292 dw_fde_ref fde = cfun->fde;
14293
14294 /* We only use "frame base" when we're sure we're talking about the
14295 post-prologue local stack frame. We do this by *not* running
14296 register elimination until this point, and recognizing the special
14297 argument pointer and soft frame pointer rtx's. */
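/* For instance, a local variable at a constant offset of -16 from the frame
   base typically ends up as DW_OP_fbreg <-16> below (illustrative only; the
   exact operand also folds in frame_pointer_fb_offset). */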
14298 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14299 {
14300 rtx elim = (ira_use_lra_p
14301 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14302 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14303
14304 if (elim != reg)
14305 {
14306 elim = strip_offset_and_add (elim, &offset);
14307 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14308 && (elim == hard_frame_pointer_rtx
14309 || elim == stack_pointer_rtx))
14310 || elim == (frame_pointer_needed
14311 ? hard_frame_pointer_rtx
14312 : stack_pointer_rtx));
14313
14314 /* If the drap register is used to align the stack, use frame
14315 pointer + offset to access stack variables. If the stack
14316 is aligned without drap, use stack pointer + offset to
14317 access stack variables. */
14318 if (crtl->stack_realign_tried
14319 && reg == frame_pointer_rtx)
14320 {
14321 int base_reg
14322 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14323 ? HARD_FRAME_POINTER_REGNUM
14324 : REGNO (elim));
14325 return new_reg_loc_descr (base_reg, offset);
14326 }
14327
14328 gcc_assert (frame_pointer_fb_offset_valid);
14329 offset += frame_pointer_fb_offset;
14330 HOST_WIDE_INT const_offset;
14331 if (offset.is_constant (&const_offset))
14332 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14333 else
14334 {
14335 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14336 loc_descr_plus_const (&ret, offset);
14337 return ret;
14338 }
14339 }
14340 }
14341
14342 regno = REGNO (reg);
14343 #ifdef LEAF_REG_REMAP
14344 if (crtl->uses_only_leaf_regs)
14345 {
14346 int leaf_reg = LEAF_REG_REMAP (regno);
14347 if (leaf_reg != -1)
14348 regno = (unsigned) leaf_reg;
14349 }
14350 #endif
14351 regno = DWARF_FRAME_REGNUM (regno);
14352
14353 HOST_WIDE_INT const_offset;
14354 if (!optimize && fde
14355 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14356 && offset.is_constant (&const_offset))
14357 {
14358 /* Use cfa+offset to represent the location of arguments passed
14359 on the stack when drap is used to align the stack.
14360 Only do this when not optimizing; for optimized code var-tracking
14361 is supposed to track where the arguments live, and the register
14362 used as vdrap or drap in some spot might be used for something
14363 else in another part of the routine. */
14364 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14365 }
14366
14367 result = new_reg_loc_descr (regno, offset);
14368
14369 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14370 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14371
14372 return result;
14373 }
14374
14375 /* Return true if this RTL expression describes a base+offset calculation. */
14376
14377 static inline int
14378 is_based_loc (const_rtx rtl)
14379 {
14380 return (GET_CODE (rtl) == PLUS
14381 && ((REG_P (XEXP (rtl, 0))
14382 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14383 && CONST_INT_P (XEXP (rtl, 1)))));
14384 }
14385
14386 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14387 failed. */
14388
14389 static dw_loc_descr_ref
14390 tls_mem_loc_descriptor (rtx mem)
14391 {
14392 tree base;
14393 dw_loc_descr_ref loc_result;
14394
14395 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14396 return NULL;
14397
14398 base = get_base_address (MEM_EXPR (mem));
14399 if (base == NULL
14400 || !VAR_P (base)
14401 || !DECL_THREAD_LOCAL_P (base))
14402 return NULL;
14403
14404 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14405 if (loc_result == NULL)
14406 return NULL;
14407
14408 if (maybe_ne (MEM_OFFSET (mem), 0))
14409 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14410
14411 return loc_result;
14412 }
14413
14414 /* Output debug info about the reason why we failed to expand an expression
14415 as a DWARF expression. */
14416
14417 static void
14418 expansion_failed (tree expr, rtx rtl, char const *reason)
14419 {
14420 if (dump_file && (dump_flags & TDF_DETAILS))
14421 {
14422 fprintf (dump_file, "Failed to expand as dwarf: ");
14423 if (expr)
14424 print_generic_expr (dump_file, expr, dump_flags);
14425 if (rtl)
14426 {
14427 fprintf (dump_file, "\n");
14428 print_rtl (dump_file, rtl);
14429 }
14430 fprintf (dump_file, "\nReason: %s\n", reason);
14431 }
14432 }
14433
14434 /* Helper function for const_ok_for_output. */
14435
14436 static bool
14437 const_ok_for_output_1 (rtx rtl)
14438 {
14439 if (targetm.const_not_ok_for_debug_p (rtl))
14440 {
14441 if (GET_CODE (rtl) != UNSPEC)
14442 {
14443 expansion_failed (NULL_TREE, rtl,
14444 "Expression rejected for debug by the backend.\n");
14445 return false;
14446 }
14447
14448 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14449 the target hook doesn't explicitly allow it in debug info, assume
14450 we can't express it in the debug info. */
14451 /* Don't complain about TLS UNSPECs, those are just too hard to
14452 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14453 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14454 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14455 if (flag_checking
14456 && (XVECLEN (rtl, 0) == 0
14457 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14458 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14459 inform (current_function_decl
14460 ? DECL_SOURCE_LOCATION (current_function_decl)
14461 : UNKNOWN_LOCATION,
14462 #if NUM_UNSPEC_VALUES > 0
14463 "non-delegitimized UNSPEC %s (%d) found in variable location",
14464 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14465 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14466 XINT (rtl, 1));
14467 #else
14468 "non-delegitimized UNSPEC %d found in variable location",
14469 XINT (rtl, 1));
14470 #endif
14471 expansion_failed (NULL_TREE, rtl,
14472 "UNSPEC hasn't been delegitimized.\n");
14473 return false;
14474 }
14475
14476 if (CONST_POLY_INT_P (rtl))
14477 return false;
14478
14479 if (targetm.const_not_ok_for_debug_p (rtl))
14480 {
14481 expansion_failed (NULL_TREE, rtl,
14482 "Expression rejected for debug by the backend.\n");
14483 return false;
14484 }
14485
14486 /* FIXME: Refer to PR60655. It is possible for simplification
14487 of rtl expressions in var tracking to produce such expressions.
14488 We should really identify / validate expressions
14489 enclosed in CONST that can be handled by assemblers on various
14490 targets and only handle legitimate cases here. */
14491 switch (GET_CODE (rtl))
14492 {
14493 case SYMBOL_REF:
14494 break;
14495 case NOT:
14496 case NEG:
14497 return false;
14498 default:
14499 return true;
14500 }
14501
14502 if (CONSTANT_POOL_ADDRESS_P (rtl))
14503 {
14504 bool marked;
14505 get_pool_constant_mark (rtl, &marked);
14506 /* If all references to this pool constant were optimized away,
14507 it was not output and thus we can't represent it. */
14508 if (!marked)
14509 {
14510 expansion_failed (NULL_TREE, rtl,
14511 "Constant was removed from constant pool.\n");
14512 return false;
14513 }
14514 }
14515
14516 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14517 return false;
14518
14519 /* Avoid references to external symbols in debug info: on several targets
14520 the linker might even refuse to link when linking a shared library,
14521 and in many other cases the relocations for .debug_info/.debug_loc are
14522 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14523 to be defined within the same shared library or executable, are fine. */
14524 if (SYMBOL_REF_EXTERNAL_P (rtl))
14525 {
14526 tree decl = SYMBOL_REF_DECL (rtl);
14527
14528 if (decl == NULL || !targetm.binds_local_p (decl))
14529 {
14530 expansion_failed (NULL_TREE, rtl,
14531 "Symbol not defined in current TU.\n");
14532 return false;
14533 }
14534 }
14535
14536 return true;
14537 }
14538
14539 /* Return true if constant RTL can be emitted in DW_OP_addr or
14540 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14541 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14542
14543 static bool
14544 const_ok_for_output (rtx rtl)
14545 {
14546 if (GET_CODE (rtl) == SYMBOL_REF)
14547 return const_ok_for_output_1 (rtl);
14548
14549 if (GET_CODE (rtl) == CONST)
14550 {
14551 subrtx_var_iterator::array_type array;
14552 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14553 if (!const_ok_for_output_1 (*iter))
14554 return false;
14555 return true;
14556 }
14557
14558 return true;
14559 }
14560
14561 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14562 if possible, NULL otherwise. */
14563
14564 static dw_die_ref
14565 base_type_for_mode (machine_mode mode, bool unsignedp)
14566 {
14567 dw_die_ref type_die;
14568 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14569
14570 if (type == NULL)
14571 return NULL;
14572 switch (TREE_CODE (type))
14573 {
14574 case INTEGER_TYPE:
14575 case REAL_TYPE:
14576 break;
14577 default:
14578 return NULL;
14579 }
14580 type_die = lookup_type_die (type);
14581 if (!type_die)
14582 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14583 comp_unit_die ());
14584 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14585 return NULL;
14586 return type_die;
14587 }
14588
14589 /* For OP descriptor assumed to be in unsigned MODE, convert it to an unsigned
14590 type matching MODE, or, if MODE is narrower than or as wide as
14591 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14592 possible. */
14593
14594 static dw_loc_descr_ref
14595 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14596 {
14597 machine_mode outer_mode = mode;
14598 dw_die_ref type_die;
14599 dw_loc_descr_ref cvt;
14600
14601 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14602 {
14603 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14604 return op;
14605 }
14606 type_die = base_type_for_mode (outer_mode, 1);
14607 if (type_die == NULL)
14608 return NULL;
14609 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14610 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14611 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14612 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14613 add_loc_descr (&op, cvt);
14614 return op;
14615 }
14616
14617 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14618
14619 static dw_loc_descr_ref
14620 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14621 dw_loc_descr_ref op1)
14622 {
14623 dw_loc_descr_ref ret = op0;
14624 add_loc_descr (&ret, op1);
14625 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14626 if (STORE_FLAG_VALUE != 1)
14627 {
14628 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14629 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14630 }
14631 return ret;
14632 }
14633
14634 /* Subroutine of scompare_loc_descriptor for the case in which we're
14635 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14636 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14637
14638 static dw_loc_descr_ref
14639 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14640 scalar_int_mode op_mode,
14641 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14642 {
14643 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14644 dw_loc_descr_ref cvt;
14645
14646 if (type_die == NULL)
14647 return NULL;
14648 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14649 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14650 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14651 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14652 add_loc_descr (&op0, cvt);
14653 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14654 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14655 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14656 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14657 add_loc_descr (&op1, cvt);
14658 return compare_loc_descriptor (op, op0, op1);
14659 }
14660
14661 /* Subroutine of scompare_loc_descriptor for the case in which we're
14662 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14663 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14664
14665 static dw_loc_descr_ref
14666 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14667 scalar_int_mode op_mode,
14668 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14669 {
14670 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14671 /* For eq/ne, if the operands are known to be zero-extended,
14672 there is no need to do the fancy shifting up. */
14673 if (op == DW_OP_eq || op == DW_OP_ne)
14674 {
14675 dw_loc_descr_ref last0, last1;
14676 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14677 ;
14678 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14679 ;
14680 /* deref_size zero extends, and for constants we can check
14681 whether they are zero extended or not. */
14682 if (((last0->dw_loc_opc == DW_OP_deref_size
14683 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14684 || (CONST_INT_P (XEXP (rtl, 0))
14685 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14686 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14687 && ((last1->dw_loc_opc == DW_OP_deref_size
14688 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14689 || (CONST_INT_P (XEXP (rtl, 1))
14690 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14691 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14692 return compare_loc_descriptor (op, op0, op1);
14693
14694 /* EQ/NE comparison against constant in narrower type than
14695 DWARF2_ADDR_SIZE can be performed either as
14696 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14697 DW_OP_{eq,ne}
14698 or
14699 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14700 DW_OP_{eq,ne}. Pick whatever is shorter. */
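/* Illustrative example, assuming DWARF2_ADDR_SIZE == 4 and a QImode compare
   against 5: masking as DW_OP_const1u <0xff>; DW_OP_and; DW_OP_lit5 is
   typically no longer than shifting both sides left by 24 bits, so the
   masking form is picked. */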
14701 if (CONST_INT_P (XEXP (rtl, 1))
14702 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14703 && (size_of_int_loc_descriptor (shift) + 1
14704 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14705 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14706 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14707 & GET_MODE_MASK (op_mode))))
14708 {
14709 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14710 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14711 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14712 & GET_MODE_MASK (op_mode));
14713 return compare_loc_descriptor (op, op0, op1);
14714 }
14715 }
14716 add_loc_descr (&op0, int_loc_descriptor (shift));
14717 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14718 if (CONST_INT_P (XEXP (rtl, 1)))
14719 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14720 else
14721 {
14722 add_loc_descr (&op1, int_loc_descriptor (shift));
14723 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14724 }
14725 return compare_loc_descriptor (op, op0, op1);
14726 }
14727
14728 /* Return location descriptor for signed comparison OP RTL. */
14729
14730 static dw_loc_descr_ref
14731 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14732 machine_mode mem_mode)
14733 {
14734 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14735 dw_loc_descr_ref op0, op1;
14736
14737 if (op_mode == VOIDmode)
14738 op_mode = GET_MODE (XEXP (rtl, 1));
14739 if (op_mode == VOIDmode)
14740 return NULL;
14741
14742 scalar_int_mode int_op_mode;
14743 if (dwarf_strict
14744 && dwarf_version < 5
14745 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14746 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14747 return NULL;
14748
14749 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14750 VAR_INIT_STATUS_INITIALIZED);
14751 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14752 VAR_INIT_STATUS_INITIALIZED);
14753
14754 if (op0 == NULL || op1 == NULL)
14755 return NULL;
14756
14757 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14758 {
14759 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14760 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14761
14762 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14763 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14764 }
14765 return compare_loc_descriptor (op, op0, op1);
14766 }
14767
14768 /* Return location descriptor for unsigned comparison OP RTL. */
14769
14770 static dw_loc_descr_ref
14771 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14772 machine_mode mem_mode)
14773 {
14774 dw_loc_descr_ref op0, op1;
14775
14776 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14777 if (test_op_mode == VOIDmode)
14778 test_op_mode = GET_MODE (XEXP (rtl, 1));
14779
14780 scalar_int_mode op_mode;
14781 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14782 return NULL;
14783
14784 if (dwarf_strict
14785 && dwarf_version < 5
14786 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14787 return NULL;
14788
14789 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14790 VAR_INIT_STATUS_INITIALIZED);
14791 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14792 VAR_INIT_STATUS_INITIALIZED);
14793
14794 if (op0 == NULL || op1 == NULL)
14795 return NULL;
14796
14797 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14798 {
14799 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14800 dw_loc_descr_ref last0, last1;
14801 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14802 ;
14803 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14804 ;
14805 if (CONST_INT_P (XEXP (rtl, 0)))
14806 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14807 /* deref_size zero extends, so no need to mask it again. */
14808 else if (last0->dw_loc_opc != DW_OP_deref_size
14809 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14810 {
14811 add_loc_descr (&op0, int_loc_descriptor (mask));
14812 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14813 }
14814 if (CONST_INT_P (XEXP (rtl, 1)))
14815 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14816 /* deref_size zero extends, so no need to mask it again. */
14817 else if (last1->dw_loc_opc != DW_OP_deref_size
14818 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14819 {
14820 add_loc_descr (&op1, int_loc_descriptor (mask));
14821 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14822 }
14823 }
14824 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14825 {
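/* Bias both operands by 2 ** (DWARF2_ADDR_SIZE * 8 - 1). This maps the
   unsigned ordering onto the signed one, so the signed DW_OP comparison
   emitted below yields the unsigned result. */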
14826 HOST_WIDE_INT bias = 1;
14827 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14828 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14829 if (CONST_INT_P (XEXP (rtl, 1)))
14830 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14831 + INTVAL (XEXP (rtl, 1)));
14832 else
14833 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14834 bias, 0));
14835 }
14836 return compare_loc_descriptor (op, op0, op1);
14837 }
14838
14839 /* Return location descriptor for {U,S}{MIN,MAX}. */
14840
14841 static dw_loc_descr_ref
14842 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14843 machine_mode mem_mode)
14844 {
14845 enum dwarf_location_atom op;
14846 dw_loc_descr_ref op0, op1, ret;
14847 dw_loc_descr_ref bra_node, drop_node;
14848
14849 scalar_int_mode int_mode;
14850 if (dwarf_strict
14851 && dwarf_version < 5
14852 && (!is_a <scalar_int_mode> (mode, &int_mode)
14853 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14854 return NULL;
14855
14856 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14857 VAR_INIT_STATUS_INITIALIZED);
14858 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14859 VAR_INIT_STATUS_INITIALIZED);
14860
14861 if (op0 == NULL || op1 == NULL)
14862 return NULL;
14863
14864 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14865 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14866 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14867 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14868 {
14869 /* Checked by the caller. */
14870 int_mode = as_a <scalar_int_mode> (mode);
14871 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14872 {
14873 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14874 add_loc_descr (&op0, int_loc_descriptor (mask));
14875 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14876 add_loc_descr (&op1, int_loc_descriptor (mask));
14877 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14878 }
14879 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14880 {
14881 HOST_WIDE_INT bias = 1;
14882 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14883 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14884 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14885 }
14886 }
14887 else if (is_a <scalar_int_mode> (mode, &int_mode)
14888 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14889 {
14890 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14891 add_loc_descr (&op0, int_loc_descriptor (shift));
14892 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14893 add_loc_descr (&op1, int_loc_descriptor (shift));
14894 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14895 }
14896 else if (is_a <scalar_int_mode> (mode, &int_mode)
14897 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14898 {
14899 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14900 dw_loc_descr_ref cvt;
14901 if (type_die == NULL)
14902 return NULL;
14903 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14904 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14905 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14906 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14907 add_loc_descr (&op0, cvt);
14908 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14909 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14910 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14911 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14912 add_loc_descr (&op1, cvt);
14913 }
14914
14915 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14916 op = DW_OP_lt;
14917 else
14918 op = DW_OP_gt;
14919 ret = op0;
14920 add_loc_descr (&ret, op1);
14921 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14922 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14923 add_loc_descr (&ret, bra_node);
14924 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14925 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14926 add_loc_descr (&ret, drop_node);
14927 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14928 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14929 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14930 && is_a <scalar_int_mode> (mode, &int_mode)
14931 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14932 ret = convert_descriptor_to_mode (int_mode, ret);
14933 return ret;
14934 }
14935
14936 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
14937 after converting both arguments to TYPE_DIE, then convert the result
14938 back to the unsigned type matching MODE (or to untyped). */
14939
14940 static dw_loc_descr_ref
14941 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14942 scalar_int_mode mode, machine_mode mem_mode)
14943 {
14944 dw_loc_descr_ref cvt, op0, op1;
14945
14946 if (type_die == NULL)
14947 return NULL;
14948 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14949 VAR_INIT_STATUS_INITIALIZED);
14950 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14951 VAR_INIT_STATUS_INITIALIZED);
14952 if (op0 == NULL || op1 == NULL)
14953 return NULL;
14954 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14955 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14956 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14957 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14958 add_loc_descr (&op0, cvt);
14959 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14960 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14961 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14962 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14963 add_loc_descr (&op1, cvt);
14964 add_loc_descr (&op0, op1);
14965 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14966 return convert_descriptor_to_mode (mode, op0);
14967 }
14968
14969 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14970 const0 is DW_OP_lit0 or corresponding typed constant,
14971 const1 is DW_OP_lit1 or corresponding typed constant
14972 and constMSB is constant with just the MSB bit set
14973 for the mode):
14974 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14975 L1: const0 DW_OP_swap
14976 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14977 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14978 L3: DW_OP_drop
14979 L4: DW_OP_nop
14980
14981 CTZ is similar:
14982 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14983 L1: const0 DW_OP_swap
14984 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14985 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14986 L3: DW_OP_drop
14987 L4: DW_OP_nop
14988
14989 FFS is similar:
14990 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14991 L1: const1 DW_OP_swap
14992 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14993 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14994 L3: DW_OP_drop
14995 L4: DW_OP_nop */
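/* Illustrative trace (not emitted verbatim): for a 32-bit CLZ of 0x00f00000
   the loop at L2 shifts the value left eight times before the constMSB test
   succeeds, so eight DW_OP_plus_uconst <1> increments leave 8, the expected
   leading-zero count, on the stack. */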
14996
14997 static dw_loc_descr_ref
14998 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14999 machine_mode mem_mode)
15000 {
15001 dw_loc_descr_ref op0, ret, tmp;
15002 HOST_WIDE_INT valv;
15003 dw_loc_descr_ref l1jump, l1label;
15004 dw_loc_descr_ref l2jump, l2label;
15005 dw_loc_descr_ref l3jump, l3label;
15006 dw_loc_descr_ref l4jump, l4label;
15007 rtx msb;
15008
15009 if (GET_MODE (XEXP (rtl, 0)) != mode)
15010 return NULL;
15011
15012 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15013 VAR_INIT_STATUS_INITIALIZED);
15014 if (op0 == NULL)
15015 return NULL;
15016 ret = op0;
15017 if (GET_CODE (rtl) == CLZ)
15018 {
15019 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15020 valv = GET_MODE_BITSIZE (mode);
15021 }
15022 else if (GET_CODE (rtl) == FFS)
15023 valv = 0;
15024 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15025 valv = GET_MODE_BITSIZE (mode);
15026 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15027 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15028 add_loc_descr (&ret, l1jump);
15029 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15030 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15031 VAR_INIT_STATUS_INITIALIZED);
15032 if (tmp == NULL)
15033 return NULL;
15034 add_loc_descr (&ret, tmp);
15035 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15036 add_loc_descr (&ret, l4jump);
15037 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15038 ? const1_rtx : const0_rtx,
15039 mode, mem_mode,
15040 VAR_INIT_STATUS_INITIALIZED);
15041 if (l1label == NULL)
15042 return NULL;
15043 add_loc_descr (&ret, l1label);
15044 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15045 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15046 add_loc_descr (&ret, l2label);
15047 if (GET_CODE (rtl) != CLZ)
15048 msb = const1_rtx;
15049 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15050 msb = GEN_INT (HOST_WIDE_INT_1U
15051 << (GET_MODE_BITSIZE (mode) - 1));
15052 else
15053 msb = immed_wide_int_const
15054 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15055 GET_MODE_PRECISION (mode)), mode);
15056 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15057 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15058 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15059 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15060 else
15061 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15062 VAR_INIT_STATUS_INITIALIZED);
15063 if (tmp == NULL)
15064 return NULL;
15065 add_loc_descr (&ret, tmp);
15066 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15067 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15068 add_loc_descr (&ret, l3jump);
15069 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15070 VAR_INIT_STATUS_INITIALIZED);
15071 if (tmp == NULL)
15072 return NULL;
15073 add_loc_descr (&ret, tmp);
15074 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15075 ? DW_OP_shl : DW_OP_shr, 0, 0));
15076 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15077 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15078 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15079 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15080 add_loc_descr (&ret, l2jump);
15081 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15082 add_loc_descr (&ret, l3label);
15083 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15084 add_loc_descr (&ret, l4label);
15085 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15086 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15087 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15088 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15089 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15090 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15091 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15092 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15093 return ret;
15094 }
15095
15096 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15097 const1 is DW_OP_lit1 or corresponding typed constant):
15098 const0 DW_OP_swap
15099 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15100 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15101 L2: DW_OP_drop
15102
15103 PARITY is similar:
15104 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15105 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15106 L2: DW_OP_drop */
15107
15108 static dw_loc_descr_ref
15109 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15110 machine_mode mem_mode)
15111 {
15112 dw_loc_descr_ref op0, ret, tmp;
15113 dw_loc_descr_ref l1jump, l1label;
15114 dw_loc_descr_ref l2jump, l2label;
15115
15116 if (GET_MODE (XEXP (rtl, 0)) != mode)
15117 return NULL;
15118
15119 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15120 VAR_INIT_STATUS_INITIALIZED);
15121 if (op0 == NULL)
15122 return NULL;
15123 ret = op0;
15124 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15125 VAR_INIT_STATUS_INITIALIZED);
15126 if (tmp == NULL)
15127 return NULL;
15128 add_loc_descr (&ret, tmp);
15129 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15130 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15131 add_loc_descr (&ret, l1label);
15132 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15133 add_loc_descr (&ret, l2jump);
15134 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15135 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15136 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15137 VAR_INIT_STATUS_INITIALIZED);
15138 if (tmp == NULL)
15139 return NULL;
15140 add_loc_descr (&ret, tmp);
15141 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15142 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15143 ? DW_OP_plus : DW_OP_xor, 0, 0));
15144 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15145 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15146 VAR_INIT_STATUS_INITIALIZED);
15147 add_loc_descr (&ret, tmp);
15148 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15149 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15150 add_loc_descr (&ret, l1jump);
15151 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15152 add_loc_descr (&ret, l2label);
15153 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15154 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15155 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15156 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15157 return ret;
15158 }
15159
15160 /* BSWAP (constS is initial shift count, either 56 or 24):
15161 constS const0
15162 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15163 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15164 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15165 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15166 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
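/* Illustrative trace: for a 32-bit input 0x11223344 the loop runs with shift
   counts 24, 16, 8 and 0, extracting the bytes 0x44, 0x33, 0x22 and 0x11 from
   the low end up and accumulating 0x44332211 before the final cleanup. */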
15167
15168 static dw_loc_descr_ref
15169 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15170 machine_mode mem_mode)
15171 {
15172 dw_loc_descr_ref op0, ret, tmp;
15173 dw_loc_descr_ref l1jump, l1label;
15174 dw_loc_descr_ref l2jump, l2label;
15175
15176 if (BITS_PER_UNIT != 8
15177 || (GET_MODE_BITSIZE (mode) != 32
15178 && GET_MODE_BITSIZE (mode) != 64))
15179 return NULL;
15180
15181 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15182 VAR_INIT_STATUS_INITIALIZED);
15183 if (op0 == NULL)
15184 return NULL;
15185
15186 ret = op0;
15187 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15188 mode, mem_mode,
15189 VAR_INIT_STATUS_INITIALIZED);
15190 if (tmp == NULL)
15191 return NULL;
15192 add_loc_descr (&ret, tmp);
15193 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15194 VAR_INIT_STATUS_INITIALIZED);
15195 if (tmp == NULL)
15196 return NULL;
15197 add_loc_descr (&ret, tmp);
15198 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15199 add_loc_descr (&ret, l1label);
15200 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15201 mode, mem_mode,
15202 VAR_INIT_STATUS_INITIALIZED);
15203 add_loc_descr (&ret, tmp);
15204 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15205 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15206 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15207 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15208 VAR_INIT_STATUS_INITIALIZED);
15209 if (tmp == NULL)
15210 return NULL;
15211 add_loc_descr (&ret, tmp);
15212 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15213 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15214 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15216 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15217 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15218 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15219 VAR_INIT_STATUS_INITIALIZED);
15220 add_loc_descr (&ret, tmp);
15221 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15222 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15223 add_loc_descr (&ret, l2jump);
15224 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15225 VAR_INIT_STATUS_INITIALIZED);
15226 add_loc_descr (&ret, tmp);
15227 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15228 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15229 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15230 add_loc_descr (&ret, l1jump);
15231 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15232 add_loc_descr (&ret, l2label);
15233 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15234 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15235 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15236 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15237 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15238 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15239 return ret;
15240 }
15241
15242 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15243 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15244 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15245 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15246
15247 ROTATERT is similar:
15248 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15249 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15250 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
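/* For example, assuming DWARF2_ADDR_SIZE == 4 and a 32-bit mode (so no masks
   are needed), ROTATE of x by n expands to
     DW_OP_over DW_OP_over DW_OP_shl DW_OP_rot DW_OP_neg
     DW_OP_plus_uconst <32> DW_OP_shr DW_OP_or
   which computes (x << n) | (x >> (32 - n)). */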
15251
15252 static dw_loc_descr_ref
15253 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15254 machine_mode mem_mode)
15255 {
15256 rtx rtlop1 = XEXP (rtl, 1);
15257 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15258 int i;
15259
15260 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15261 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15262 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15263 VAR_INIT_STATUS_INITIALIZED);
15264 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15265 VAR_INIT_STATUS_INITIALIZED);
15266 if (op0 == NULL || op1 == NULL)
15267 return NULL;
15268 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15269 for (i = 0; i < 2; i++)
15270 {
15271 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15272 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15273 mode, mem_mode,
15274 VAR_INIT_STATUS_INITIALIZED);
15275 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15276 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15277 ? DW_OP_const4u
15278 : HOST_BITS_PER_WIDE_INT == 64
15279 ? DW_OP_const8u : DW_OP_constu,
15280 GET_MODE_MASK (mode), 0);
15281 else
15282 mask[i] = NULL;
15283 if (mask[i] == NULL)
15284 return NULL;
15285 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15286 }
15287 ret = op0;
15288 add_loc_descr (&ret, op1);
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15291 if (GET_CODE (rtl) == ROTATERT)
15292 {
15293 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15294 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15295 GET_MODE_BITSIZE (mode), 0));
15296 }
15297 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15298 if (mask[0] != NULL)
15299 add_loc_descr (&ret, mask[0]);
15300 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15301 if (mask[1] != NULL)
15302 {
15303 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15304 add_loc_descr (&ret, mask[1]);
15305 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15306 }
15307 if (GET_CODE (rtl) == ROTATE)
15308 {
15309 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15310 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15311 GET_MODE_BITSIZE (mode), 0));
15312 }
15313 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15314 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15315 return ret;
15316 }
15317
15318 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15319 for DEBUG_PARAMETER_REF RTL. */
15320
15321 static dw_loc_descr_ref
15322 parameter_ref_descriptor (rtx rtl)
15323 {
15324 dw_loc_descr_ref ret;
15325 dw_die_ref ref;
15326
15327 if (dwarf_strict)
15328 return NULL;
15329 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15330 /* With LTO during LTRANS we get the late DIE that refers to the early
15331 DIE, thus we add another indirection here. This seems to confuse
15332 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15333 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15334 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15335 if (ref)
15336 {
15337 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15338 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15339 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15340 }
15341 else
15342 {
15343 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15344 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15345 }
15346 return ret;
15347 }
15348
15349 /* The following routine converts the RTL for a variable or parameter
15350 (resident in memory) into an equivalent Dwarf representation of a
15351 mechanism for getting the address of that same variable onto the top of a
15352 hypothetical "address evaluation" stack.
15353
15354 When creating memory location descriptors, we are effectively transforming
15355 the RTL for a memory-resident object into its Dwarf postfix expression
15356 equivalent. This routine recursively descends an RTL tree, turning
15357 it into Dwarf postfix code as it goes.
15358
15359 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15360
15361 MEM_MODE is the mode of the memory reference, needed to handle some
15362 autoincrement addressing modes.
15363
15364 Return 0 if we can't represent the location. */
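/* A hypothetical example: for the address RTL (plus (reg fp) (const_int -16))
   this routine would typically produce DW_OP_fbreg <-16> (via based_loc_descr)
   or DW_OP_breg<n> <-16>, leaving the address on top of the stack. */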
15365
15366 dw_loc_descr_ref
15367 mem_loc_descriptor (rtx rtl, machine_mode mode,
15368 machine_mode mem_mode,
15369 enum var_init_status initialized)
15370 {
15371 dw_loc_descr_ref mem_loc_result = NULL;
15372 enum dwarf_location_atom op;
15373 dw_loc_descr_ref op0, op1;
15374 rtx inner = NULL_RTX;
15375 poly_int64 offset;
15376
15377 if (mode == VOIDmode)
15378 mode = GET_MODE (rtl);
15379
15380 /* Note that for a dynamically sized array, the location we will generate a
15381 description of here will be the lowest numbered location which is
15382 actually within the array. That's *not* necessarily the same as the
15383 zeroth element of the array. */
15384
15385 rtl = targetm.delegitimize_address (rtl);
15386
15387 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15388 return NULL;
15389
15390 scalar_int_mode int_mode, inner_mode, op1_mode;
15391 switch (GET_CODE (rtl))
15392 {
15393 case POST_INC:
15394 case POST_DEC:
15395 case POST_MODIFY:
15396 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15397
15398 case SUBREG:
15399 /* The case of a subreg may arise when we have a local (register)
15400 variable or a formal (register) parameter which doesn't quite fill
15401 up an entire register. For now, just assume that it is
15402 legitimate to make the Dwarf info refer to the whole register which
15403 contains the given subreg. */
15404 if (!subreg_lowpart_p (rtl))
15405 break;
15406 inner = SUBREG_REG (rtl);
15407 /* FALLTHRU */
15408 case TRUNCATE:
15409 if (inner == NULL_RTX)
15410 inner = XEXP (rtl, 0);
15411 if (is_a <scalar_int_mode> (mode, &int_mode)
15412 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15413 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15414 #ifdef POINTERS_EXTEND_UNSIGNED
15415 || (int_mode == Pmode && mem_mode != VOIDmode)
15416 #endif
15417 )
15418 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15419 {
15420 mem_loc_result = mem_loc_descriptor (inner,
15421 inner_mode,
15422 mem_mode, initialized);
15423 break;
15424 }
15425 if (dwarf_strict && dwarf_version < 5)
15426 break;
15427 if (is_a <scalar_int_mode> (mode, &int_mode)
15428 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15429 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15430 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15431 {
15432 dw_die_ref type_die;
15433 dw_loc_descr_ref cvt;
15434
15435 mem_loc_result = mem_loc_descriptor (inner,
15436 GET_MODE (inner),
15437 mem_mode, initialized);
15438 if (mem_loc_result == NULL)
15439 break;
15440 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15441 if (type_die == NULL)
15442 {
15443 mem_loc_result = NULL;
15444 break;
15445 }
15446 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15447 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15448 else
15449 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15450 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15451 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15452 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15453 add_loc_descr (&mem_loc_result, cvt);
15454 if (is_a <scalar_int_mode> (mode, &int_mode)
15455 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15456 {
15457 /* Convert it to untyped afterwards. */
15458 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15459 add_loc_descr (&mem_loc_result, cvt);
15460 }
15461 }
15462 break;
15463
15464 case REG:
15465 if (!is_a <scalar_int_mode> (mode, &int_mode)
15466 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15467 && rtl != arg_pointer_rtx
15468 && rtl != frame_pointer_rtx
15469 #ifdef POINTERS_EXTEND_UNSIGNED
15470 && (int_mode != Pmode || mem_mode == VOIDmode)
15471 #endif
15472 ))
15473 {
15474 dw_die_ref type_die;
15475 unsigned int dbx_regnum;
15476
15477 if (dwarf_strict && dwarf_version < 5)
15478 break;
15479 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15480 break;
15481 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15482 if (type_die == NULL)
15483 break;
15484
15485 dbx_regnum = dbx_reg_number (rtl);
15486 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15487 break;
15488 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15489 dbx_regnum, 0);
15490 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15491 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15492 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15493 break;
15494 }
15495 /* Whenever a register number forms a part of the description of the
15496 method for calculating the (dynamic) address of a memory resident
15497 object, DWARF rules require the register number be referred to as
15498 a "base register". This distinction is not based in any way upon
15499 what category of register the hardware believes the given register
15500 belongs to. This is strictly DWARF terminology we're dealing with
15501 here. Note that in cases where the location of a memory-resident
15502 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15503 OP_CONST (0)) the actual DWARF location descriptor that we generate
15504 may just be OP_BASEREG (basereg). This may look deceptively like
15505 the object in question was allocated to a register (rather than in
15506 memory) so DWARF consumers need to be aware of the subtle
15507 distinction between OP_REG and OP_BASEREG. */
15508 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15509 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15510 else if (stack_realign_drap
15511 && crtl->drap_reg
15512 && crtl->args.internal_arg_pointer == rtl
15513 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15514 {
15515 /* If RTL is internal_arg_pointer, which has been optimized
15516 out, use DRAP instead. */
15517 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15518 VAR_INIT_STATUS_INITIALIZED);
15519 }
15520 break;
15521
15522 case SIGN_EXTEND:
15523 case ZERO_EXTEND:
15524 if (!is_a <scalar_int_mode> (mode, &int_mode)
15525 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15526 break;
15527 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15528 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15529 if (op0 == 0)
15530 break;
15531 else if (GET_CODE (rtl) == ZERO_EXTEND
15532 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15533 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15534 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15535 to expand zero extend as two shifts instead of
15536 masking. */
15537 && GET_MODE_SIZE (inner_mode) <= 4)
15538 {
15539 mem_loc_result = op0;
15540 add_loc_descr (&mem_loc_result,
15541 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15542 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15543 }
15544 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15545 {
15546 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15547 shift *= BITS_PER_UNIT;
15548 if (GET_CODE (rtl) == SIGN_EXTEND)
15549 op = DW_OP_shra;
15550 else
15551 op = DW_OP_shr;
15552 mem_loc_result = op0;
15553 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15554 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15555 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15556 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15557 }
15558 else if (!dwarf_strict || dwarf_version >= 5)
15559 {
15560 dw_die_ref type_die1, type_die2;
15561 dw_loc_descr_ref cvt;
15562
15563 type_die1 = base_type_for_mode (inner_mode,
15564 GET_CODE (rtl) == ZERO_EXTEND);
15565 if (type_die1 == NULL)
15566 break;
15567 type_die2 = base_type_for_mode (int_mode, 1);
15568 if (type_die2 == NULL)
15569 break;
15570 mem_loc_result = op0;
15571 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15572 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15573 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15574 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15575 add_loc_descr (&mem_loc_result, cvt);
15576 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15577 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15578 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15579 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15580 add_loc_descr (&mem_loc_result, cvt);
15581 }
15582 break;
15583
15584 case MEM:
15585 {
15586 rtx new_rtl = avoid_constant_pool_reference (rtl);
15587 if (new_rtl != rtl)
15588 {
15589 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15590 initialized);
15591 if (mem_loc_result != NULL)
15592 return mem_loc_result;
15593 }
15594 }
15595 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15596 get_address_mode (rtl), mode,
15597 VAR_INIT_STATUS_INITIALIZED);
15598 if (mem_loc_result == NULL)
15599 mem_loc_result = tls_mem_loc_descriptor (rtl);
15600 if (mem_loc_result != NULL)
15601 {
15602 if (!is_a <scalar_int_mode> (mode, &int_mode)
15603 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15604 {
15605 dw_die_ref type_die;
15606 dw_loc_descr_ref deref;
15607 HOST_WIDE_INT size;
15608
15609 if (dwarf_strict && dwarf_version < 5)
15610 return NULL;
15611 if (!GET_MODE_SIZE (mode).is_constant (&size))
15612 return NULL;
15613 type_die
15614 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15615 if (type_die == NULL)
15616 return NULL;
15617 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15618 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15619 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15620 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15621 add_loc_descr (&mem_loc_result, deref);
15622 }
15623 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15624 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15625 else
15626 add_loc_descr (&mem_loc_result,
15627 new_loc_descr (DW_OP_deref_size,
15628 GET_MODE_SIZE (int_mode), 0));
15629 }
15630 break;
15631
15632 case LO_SUM:
15633 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15634
15635 case LABEL_REF:
15636 /* Some ports can transform a symbol ref into a label ref, because
15637 the symbol ref is too far away and has to be dumped into a constant
15638 pool. */
15639 case CONST:
15640 case SYMBOL_REF:
15641 if (!is_a <scalar_int_mode> (mode, &int_mode)
15642 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15643 #ifdef POINTERS_EXTEND_UNSIGNED
15644 && (int_mode != Pmode || mem_mode == VOIDmode)
15645 #endif
15646 ))
15647 break;
15648 if (GET_CODE (rtl) == SYMBOL_REF
15649 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15650 {
15651 dw_loc_descr_ref temp;
15652
15653 /* If this is not defined, we have no way to emit the data. */
15654 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15655 break;
15656
15657 temp = new_addr_loc_descr (rtl, dtprel_true);
15658
15659 /* We check for DWARF 5 here because gdb did not implement
15660 DW_OP_form_tls_address until after 7.12. */
15661 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15662 ? DW_OP_form_tls_address
15663 : DW_OP_GNU_push_tls_address),
15664 0, 0);
15665 add_loc_descr (&mem_loc_result, temp);
15666
15667 break;
15668 }
15669
15670 if (!const_ok_for_output (rtl))
15671 {
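/* The constant cannot be referenced directly from the debug info.
For a CONST wrapping a NOT or NEG, describe the operand and apply the
matching DWARF operator; for any other CONST, descend into its
operand and try again. */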
15672 if (GET_CODE (rtl) == CONST)
15673 switch (GET_CODE (XEXP (rtl, 0)))
15674 {
15675 case NOT:
15676 op = DW_OP_not;
15677 goto try_const_unop;
15678 case NEG:
15679 op = DW_OP_neg;
15680 goto try_const_unop;
15681 try_const_unop:
15682 rtx arg;
15683 arg = XEXP (XEXP (rtl, 0), 0);
15684 if (!CONSTANT_P (arg))
15685 arg = gen_rtx_CONST (int_mode, arg);
15686 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15687 initialized);
15688 if (op0)
15689 {
15690 mem_loc_result = op0;
15691 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15692 }
15693 break;
15694 default:
15695 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15696 mem_mode, initialized);
15697 break;
15698 }
15699 break;
15700 }
15701
15702 symref:
15703 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15704 vec_safe_push (used_rtx_array, rtl);
15705 break;
15706
15707 case CONCAT:
15708 case CONCATN:
15709 case VAR_LOCATION:
15710 case DEBUG_IMPLICIT_PTR:
15711 expansion_failed (NULL_TREE, rtl,
15712 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15713 return 0;
15714
15715 case ENTRY_VALUE:
15716 if (dwarf_strict && dwarf_version < 5)
15717 return NULL;
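/* DW_OP_entry_value (DW_OP_GNU_entry_value before DWARF 5) wraps a
sub-expression whose value is taken as of entry to the current
function; here the wrapped expression describes either a register or
a register-based memory reference. */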
15718 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15719 {
15720 if (!is_a <scalar_int_mode> (mode, &int_mode)
15721 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15722 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15723 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15724 else
15725 {
15726 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15727 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15728 return NULL;
15729 op0 = one_reg_loc_descriptor (dbx_regnum,
15730 VAR_INIT_STATUS_INITIALIZED);
15731 }
15732 }
15733 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15734 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15735 {
15736 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15737 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15738 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15739 return NULL;
15740 }
15741 else
15742 gcc_unreachable ();
15743 if (op0 == NULL)
15744 return NULL;
15745 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15746 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15747 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15748 break;
15749
15750 case DEBUG_PARAMETER_REF:
15751 mem_loc_result = parameter_ref_descriptor (rtl);
15752 break;
15753
15754 case PRE_MODIFY:
15755 /* Extract the PLUS expression nested inside and fall into
15756 PLUS code below. */
15757 rtl = XEXP (rtl, 1);
15758 goto plus;
15759
15760 case PRE_INC:
15761 case PRE_DEC:
15762 /* Turn these into a PLUS expression and fall into the PLUS code
15763 below. */
15764 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15765 gen_int_mode (GET_CODE (rtl) == PRE_INC
15766 ? GET_MODE_UNIT_SIZE (mem_mode)
15767 : -GET_MODE_UNIT_SIZE (mem_mode),
15768 mode));
15769
15770 /* fall through */
15771
15772 case PLUS:
15773 plus:
15774 if (is_based_loc (rtl)
15775 && is_a <scalar_int_mode> (mode, &int_mode)
15776 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15777 || XEXP (rtl, 0) == arg_pointer_rtx
15778 || XEXP (rtl, 0) == frame_pointer_rtx))
15779 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15780 INTVAL (XEXP (rtl, 1)),
15781 VAR_INIT_STATUS_INITIALIZED);
15782 else
15783 {
15784 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15785 VAR_INIT_STATUS_INITIALIZED);
15786 if (mem_loc_result == 0)
15787 break;
15788
15789 if (CONST_INT_P (XEXP (rtl, 1))
15790 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15791 <= DWARF2_ADDR_SIZE))
15792 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15793 else
15794 {
15795 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15796 VAR_INIT_STATUS_INITIALIZED);
15797 if (op1 == 0)
15798 return NULL;
15799 add_loc_descr (&mem_loc_result, op1);
15800 add_loc_descr (&mem_loc_result,
15801 new_loc_descr (DW_OP_plus, 0, 0));
15802 }
15803 }
15804 break;
15805
15806 /* If a pseudo-reg is optimized away, it is possible for it to
15807 be replaced with a MEM containing a multiply or shift. */
15808 case MINUS:
15809 op = DW_OP_minus;
15810 goto do_binop;
15811
15812 case MULT:
15813 op = DW_OP_mul;
15814 goto do_binop;
15815
15816 case DIV:
15817 if ((!dwarf_strict || dwarf_version >= 5)
15818 && is_a <scalar_int_mode> (mode, &int_mode)
15819 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15820 {
15821 mem_loc_result = typed_binop (DW_OP_div, rtl,
15822 base_type_for_mode (mode, 0),
15823 int_mode, mem_mode);
15824 break;
15825 }
15826 op = DW_OP_div;
15827 goto do_binop;
15828
15829 case UMOD:
15830 op = DW_OP_mod;
15831 goto do_binop;
15832
15833 case ASHIFT:
15834 op = DW_OP_shl;
15835 goto do_shift;
15836
15837 case ASHIFTRT:
15838 op = DW_OP_shra;
15839 goto do_shift;
15840
15841 case LSHIFTRT:
15842 op = DW_OP_shr;
15843 goto do_shift;
15844
15845 do_shift:
15846 if (!is_a <scalar_int_mode> (mode, &int_mode))
15847 break;
15848 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15849 VAR_INIT_STATUS_INITIALIZED);
15850 {
15851 rtx rtlop1 = XEXP (rtl, 1);
15852 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15853 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15854 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15855 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15856 VAR_INIT_STATUS_INITIALIZED);
15857 }
15858
15859 if (op0 == 0 || op1 == 0)
15860 break;
15861
15862 mem_loc_result = op0;
15863 add_loc_descr (&mem_loc_result, op1);
15864 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15865 break;
15866
15867 case AND:
15868 op = DW_OP_and;
15869 goto do_binop;
15870
15871 case IOR:
15872 op = DW_OP_or;
15873 goto do_binop;
15874
15875 case XOR:
15876 op = DW_OP_xor;
15877 goto do_binop;
15878
15879 do_binop:
15880 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15881 VAR_INIT_STATUS_INITIALIZED);
15882 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15883 VAR_INIT_STATUS_INITIALIZED);
15884
15885 if (op0 == 0 || op1 == 0)
15886 break;
15887
15888 mem_loc_result = op0;
15889 add_loc_descr (&mem_loc_result, op1);
15890 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15891 break;
15892
15893 case MOD:
15894 if ((!dwarf_strict || dwarf_version >= 5)
15895 && is_a <scalar_int_mode> (mode, &int_mode)
15896 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15897 {
15898 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15899 base_type_for_mode (mode, 0),
15900 int_mode, mem_mode);
15901 break;
15902 }
15903
15904 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15905 VAR_INIT_STATUS_INITIALIZED);
15906 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15907 VAR_INIT_STATUS_INITIALIZED);
15908
15909 if (op0 == 0 || op1 == 0)
15910 break;
15911
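/* With both operands on the stack, compute a modulus as
a - (a / b) * b: duplicate both operands with two DW_OP_over
operations, divide, multiply the quotient by the divisor and subtract
the product from the original dividend. */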
15912 mem_loc_result = op0;
15913 add_loc_descr (&mem_loc_result, op1);
15914 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15915 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15916 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15917 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15918 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15919 break;
15920
15921 case UDIV:
15922 if ((!dwarf_strict || dwarf_version >= 5)
15923 && is_a <scalar_int_mode> (mode, &int_mode))
15924 {
15925 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15926 {
15927 op = DW_OP_div;
15928 goto do_binop;
15929 }
15930 mem_loc_result = typed_binop (DW_OP_div, rtl,
15931 base_type_for_mode (int_mode, 1),
15932 int_mode, mem_mode);
15933 }
15934 break;
15935
15936 case NOT:
15937 op = DW_OP_not;
15938 goto do_unop;
15939
15940 case ABS:
15941 op = DW_OP_abs;
15942 goto do_unop;
15943
15944 case NEG:
15945 op = DW_OP_neg;
15946 goto do_unop;
15947
15948 do_unop:
15949 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15950 VAR_INIT_STATUS_INITIALIZED);
15951
15952 if (op0 == 0)
15953 break;
15954
15955 mem_loc_result = op0;
15956 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15957 break;
15958
15959 case CONST_INT:
15960 if (!is_a <scalar_int_mode> (mode, &int_mode)
15961 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15962 #ifdef POINTERS_EXTEND_UNSIGNED
15963 || (int_mode == Pmode
15964 && mem_mode != VOIDmode
15965 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15966 #endif
15967 )
15968 {
15969 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15970 break;
15971 }
15972 if ((!dwarf_strict || dwarf_version >= 5)
15973 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15974 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15975 {
15976 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15977 scalar_int_mode amode;
15978 if (type_die == NULL)
15979 return NULL;
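/* A non-negative value that fits into an address-sized mode can be
emitted either as an untyped constant followed by DW_OP_convert to
the wider base type, or directly as DW_OP_const_type with the
full-width constant inline; prefer the former when its encoding is
smaller. */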
15980 if (INTVAL (rtl) >= 0
15981 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15982 .exists (&amode))
15983 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15984 /* const DW_OP_convert <XXX> vs.
15985 DW_OP_const_type <XXX, 1, const>. */
15986 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15987 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15988 {
15989 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15990 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15991 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15992 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15993 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15994 add_loc_descr (&mem_loc_result, op0);
15995 return mem_loc_result;
15996 }
15997 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15998 INTVAL (rtl));
15999 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16000 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16001 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16002 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16003 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16004 else
16005 {
16006 mem_loc_result->dw_loc_oprnd2.val_class
16007 = dw_val_class_const_double;
16008 mem_loc_result->dw_loc_oprnd2.v.val_double
16009 = double_int::from_shwi (INTVAL (rtl));
16010 }
16011 }
16012 break;
16013
16014 case CONST_DOUBLE:
16015 if (!dwarf_strict || dwarf_version >= 5)
16016 {
16017 dw_die_ref type_die;
16018
16019 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16020 CONST_DOUBLE rtx could represent either a large integer
16021 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16022 the value is always a floating point constant.
16023
16024 When it is an integer, a CONST_DOUBLE is used whenever
16025 the constant requires 2 HWIs to be adequately represented.
16026 We output CONST_DOUBLEs as blocks. */
16027 if (mode == VOIDmode
16028 || (GET_MODE (rtl) == VOIDmode
16029 && maybe_ne (GET_MODE_BITSIZE (mode),
16030 HOST_BITS_PER_DOUBLE_INT)))
16031 break;
16032 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16033 if (type_die == NULL)
16034 return NULL;
16035 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16036 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16037 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16038 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16039 #if TARGET_SUPPORTS_WIDE_INT == 0
16040 if (!SCALAR_FLOAT_MODE_P (mode))
16041 {
16042 mem_loc_result->dw_loc_oprnd2.val_class
16043 = dw_val_class_const_double;
16044 mem_loc_result->dw_loc_oprnd2.v.val_double
16045 = rtx_to_double_int (rtl);
16046 }
16047 else
16048 #endif
16049 {
16050 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16051 unsigned int length = GET_MODE_SIZE (float_mode);
16052 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16053
16054 insert_float (rtl, array);
16055 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16056 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16057 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16058 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16059 }
16060 }
16061 break;
16062
16063 case CONST_WIDE_INT:
16064 if (!dwarf_strict || dwarf_version >= 5)
16065 {
16066 dw_die_ref type_die;
16067
16068 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16069 if (type_die == NULL)
16070 return NULL;
16071 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16072 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16073 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16074 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16075 mem_loc_result->dw_loc_oprnd2.val_class
16076 = dw_val_class_wide_int;
16077 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16078 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16079 }
16080 break;
16081
16082 case CONST_POLY_INT:
16083 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16084 break;
16085
16086 case EQ:
16087 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16088 break;
16089
16090 case GE:
16091 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16092 break;
16093
16094 case GT:
16095 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16096 break;
16097
16098 case LE:
16099 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16100 break;
16101
16102 case LT:
16103 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16104 break;
16105
16106 case NE:
16107 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16108 break;
16109
16110 case GEU:
16111 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16112 break;
16113
16114 case GTU:
16115 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16116 break;
16117
16118 case LEU:
16119 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16120 break;
16121
16122 case LTU:
16123 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16124 break;
16125
16126 case UMIN:
16127 case UMAX:
16128 if (!SCALAR_INT_MODE_P (mode))
16129 break;
16130 /* FALLTHRU */
16131 case SMIN:
16132 case SMAX:
16133 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16134 break;
16135
16136 case ZERO_EXTRACT:
16137 case SIGN_EXTRACT:
16138 if (CONST_INT_P (XEXP (rtl, 1))
16139 && CONST_INT_P (XEXP (rtl, 2))
16140 && is_a <scalar_int_mode> (mode, &int_mode)
16141 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16142 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16143 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16144 && ((unsigned) INTVAL (XEXP (rtl, 1))
16145 + (unsigned) INTVAL (XEXP (rtl, 2))
16146 <= GET_MODE_BITSIZE (int_mode)))
16147 {
16148 int shift, size;
16149 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16150 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16151 if (op0 == 0)
16152 break;
16153 if (GET_CODE (rtl) == SIGN_EXTRACT)
16154 op = DW_OP_shra;
16155 else
16156 op = DW_OP_shr;
16157 mem_loc_result = op0;
16158 size = INTVAL (XEXP (rtl, 1));
16159 shift = INTVAL (XEXP (rtl, 2));
16160 if (BITS_BIG_ENDIAN)
16161 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
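/* Extract the field by shifting it up to the most significant end of
the value with DW_OP_shl and then back down by the field width:
DW_OP_shr zero-fills for ZERO_EXTRACT, DW_OP_shra sign-extends for
SIGN_EXTRACT. */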
16162 if (shift + size != (int) DWARF2_ADDR_SIZE)
16163 {
16164 add_loc_descr (&mem_loc_result,
16165 int_loc_descriptor (DWARF2_ADDR_SIZE
16166 - shift - size));
16167 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16168 }
16169 if (size != (int) DWARF2_ADDR_SIZE)
16170 {
16171 add_loc_descr (&mem_loc_result,
16172 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16173 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16174 }
16175 }
16176 break;
16177
16178 case IF_THEN_ELSE:
16179 {
16180 dw_loc_descr_ref op2, bra_node, drop_node;
16181 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16182 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16183 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16184 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16185 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16186 VAR_INIT_STATUS_INITIALIZED);
16187 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16188 VAR_INIT_STATUS_INITIALIZED);
16189 if (op0 == NULL || op1 == NULL || op2 == NULL)
16190 break;
16191
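/* Push the two possible results and then the condition; DW_OP_bra
jumps over the DW_OP_swap when the condition is non-zero, so the
final DW_OP_drop leaves the "then" value on the stack in that case
and the "else" value otherwise. */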
16192 mem_loc_result = op1;
16193 add_loc_descr (&mem_loc_result, op2);
16194 add_loc_descr (&mem_loc_result, op0);
16195 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16196 add_loc_descr (&mem_loc_result, bra_node);
16197 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16198 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16199 add_loc_descr (&mem_loc_result, drop_node);
16200 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16201 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16202 }
16203 break;
16204
16205 case FLOAT_EXTEND:
16206 case FLOAT_TRUNCATE:
16207 case FLOAT:
16208 case UNSIGNED_FLOAT:
16209 case FIX:
16210 case UNSIGNED_FIX:
16211 if (!dwarf_strict || dwarf_version >= 5)
16212 {
16213 dw_die_ref type_die;
16214 dw_loc_descr_ref cvt;
16215
16216 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16217 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16218 if (op0 == NULL)
16219 break;
16220 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16221 && (GET_CODE (rtl) == FLOAT
16222 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16223 {
16224 type_die = base_type_for_mode (int_mode,
16225 GET_CODE (rtl) == UNSIGNED_FLOAT);
16226 if (type_die == NULL)
16227 break;
16228 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16229 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16230 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16231 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16232 add_loc_descr (&op0, cvt);
16233 }
16234 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16235 if (type_die == NULL)
16236 break;
16237 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16238 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16239 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16240 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16241 add_loc_descr (&op0, cvt);
16242 if (is_a <scalar_int_mode> (mode, &int_mode)
16243 && (GET_CODE (rtl) == FIX
16244 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16245 {
16246 op0 = convert_descriptor_to_mode (int_mode, op0);
16247 if (op0 == NULL)
16248 break;
16249 }
16250 mem_loc_result = op0;
16251 }
16252 break;
16253
16254 case CLZ:
16255 case CTZ:
16256 case FFS:
16257 if (is_a <scalar_int_mode> (mode, &int_mode))
16258 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16259 break;
16260
16261 case POPCOUNT:
16262 case PARITY:
16263 if (is_a <scalar_int_mode> (mode, &int_mode))
16264 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16265 break;
16266
16267 case BSWAP:
16268 if (is_a <scalar_int_mode> (mode, &int_mode))
16269 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16270 break;
16271
16272 case ROTATE:
16273 case ROTATERT:
16274 if (is_a <scalar_int_mode> (mode, &int_mode))
16275 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16276 break;
16277
16278 case COMPARE:
16279 /* In theory, we could implement the above. */
16280 /* DWARF cannot represent the unsigned compare operations
16281 natively. */
16282 case SS_MULT:
16283 case US_MULT:
16284 case SS_DIV:
16285 case US_DIV:
16286 case SS_PLUS:
16287 case US_PLUS:
16288 case SS_MINUS:
16289 case US_MINUS:
16290 case SS_NEG:
16291 case US_NEG:
16292 case SS_ABS:
16293 case SS_ASHIFT:
16294 case US_ASHIFT:
16295 case SS_TRUNCATE:
16296 case US_TRUNCATE:
16297 case UNORDERED:
16298 case ORDERED:
16299 case UNEQ:
16300 case UNGE:
16301 case UNGT:
16302 case UNLE:
16303 case UNLT:
16304 case LTGT:
16305 case FRACT_CONVERT:
16306 case UNSIGNED_FRACT_CONVERT:
16307 case SAT_FRACT:
16308 case UNSIGNED_SAT_FRACT:
16309 case SQRT:
16310 case ASM_OPERANDS:
16311 case VEC_MERGE:
16312 case VEC_SELECT:
16313 case VEC_CONCAT:
16314 case VEC_DUPLICATE:
16315 case VEC_SERIES:
16316 case UNSPEC:
16317 case HIGH:
16318 case FMA:
16319 case STRICT_LOW_PART:
16320 case CONST_VECTOR:
16321 case CONST_FIXED:
16322 case CLRSB:
16323 case CLOBBER:
16324 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16325 can't express it in the debug info. This can happen e.g. with some
16326 TLS UNSPECs. */
16327 break;
16328
16329 case CONST_STRING:
16330 resolve_one_addr (&rtl);
16331 goto symref;
16332
16333 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16334 the expression. An UNSPEC rtx represents a raw DWARF operation;
16335 new_loc_descr is called for it to build the operation directly.
16336 Otherwise mem_loc_descriptor is called recursively. */
16337 case PARALLEL:
16338 {
16339 int index = 0;
16340 dw_loc_descr_ref exp_result = NULL;
16341
16342 for (; index < XVECLEN (rtl, 0); index++)
16343 {
16344 rtx elem = XVECEXP (rtl, 0, index);
16345 if (GET_CODE (elem) == UNSPEC)
16346 {
16347 /* Each DWARF operation UNSPEC contains two operands; if
16348 an operand is not used for the operation, const0_rtx is
16349 passed. */
16350 gcc_assert (XVECLEN (elem, 0) == 2);
16351
16352 HOST_WIDE_INT dw_op = XINT (elem, 1);
16353 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16354 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16355 exp_result
16356 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16357 oprnd2);
16358 }
16359 else
16360 exp_result
16361 = mem_loc_descriptor (elem, mode, mem_mode,
16362 VAR_INIT_STATUS_INITIALIZED);
16363
16364 if (!mem_loc_result)
16365 mem_loc_result = exp_result;
16366 else
16367 add_loc_descr (&mem_loc_result, exp_result);
16368 }
16369
16370 break;
16371 }
16372
16373 default:
16374 if (flag_checking)
16375 {
16376 print_rtl (stderr, rtl);
16377 gcc_unreachable ();
16378 }
16379 break;
16380 }
16381
16382 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16383 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16384
16385 return mem_loc_result;
16386 }
16387
16388 /* Return a descriptor that describes the concatenation of two locations.
16389 This is typically a complex variable. */
16390
16391 static dw_loc_descr_ref
16392 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16393 {
16394 /* At present we only track constant-sized pieces. */
16395 unsigned int size0, size1;
16396 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16397 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16398 return 0;
16399
16400 dw_loc_descr_ref cc_loc_result = NULL;
16401 dw_loc_descr_ref x0_ref
16402 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16403 dw_loc_descr_ref x1_ref
16404 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16405
16406 if (x0_ref == 0 || x1_ref == 0)
16407 return 0;
16408
16409 cc_loc_result = x0_ref;
16410 add_loc_descr_op_piece (&cc_loc_result, size0);
16411
16412 add_loc_descr (&cc_loc_result, x1_ref);
16413 add_loc_descr_op_piece (&cc_loc_result, size1);
16414
16415 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16416 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16417
16418 return cc_loc_result;
16419 }
16420
16421 /* Return a descriptor that describes the concatenation of N
16422 locations. */
16423
16424 static dw_loc_descr_ref
16425 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16426 {
16427 unsigned int i;
16428 dw_loc_descr_ref cc_loc_result = NULL;
16429 unsigned int n = XVECLEN (concatn, 0);
16430 unsigned int size;
16431
16432 for (i = 0; i < n; ++i)
16433 {
16434 dw_loc_descr_ref ref;
16435 rtx x = XVECEXP (concatn, 0, i);
16436
16437 /* At present we only track constant-sized pieces. */
16438 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16439 return NULL;
16440
16441 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16442 if (ref == NULL)
16443 return NULL;
16444
16445 add_loc_descr (&cc_loc_result, ref);
16446 add_loc_descr_op_piece (&cc_loc_result, size);
16447 }
16448
16449 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16450 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16451
16452 return cc_loc_result;
16453 }
16454
16455 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16456 for DEBUG_IMPLICIT_PTR RTL. */
16457
16458 static dw_loc_descr_ref
16459 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16460 {
16461 dw_loc_descr_ref ret;
16462 dw_die_ref ref;
16463
16464 if (dwarf_strict && dwarf_version < 5)
16465 return NULL;
16466 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16467 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16468 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16469 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16470 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16471 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16472 if (ref)
16473 {
16474 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16475 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16476 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16477 }
16478 else
16479 {
16480 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16481 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16482 }
16483 return ret;
16484 }
16485
16486 /* Output a proper Dwarf location descriptor for a variable or parameter
16487 which is either allocated in a register or in a memory location. For a
16488 register, we just generate an OP_REG and the register number. For a
16489 memory location we provide a Dwarf postfix expression describing how to
16490 generate the (dynamic) address of the object onto the address stack.
16491
16492 MODE is mode of the decl if this loc_descriptor is going to be used in
16493 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16494 allowed, VOIDmode otherwise.
16495
16496 If we don't know how to describe it, return 0. */
16497
16498 static dw_loc_descr_ref
16499 loc_descriptor (rtx rtl, machine_mode mode,
16500 enum var_init_status initialized)
16501 {
16502 dw_loc_descr_ref loc_result = NULL;
16503 scalar_int_mode int_mode;
16504
16505 switch (GET_CODE (rtl))
16506 {
16507 case SUBREG:
16508 /* The case of a subreg may arise when we have a local (register)
16509 variable or a formal (register) parameter which doesn't quite fill
16510 up an entire register. For now, just assume that it is
16511 legitimate to make the Dwarf info refer to the whole register which
16512 contains the given subreg. */
16513 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16514 loc_result = loc_descriptor (SUBREG_REG (rtl),
16515 GET_MODE (SUBREG_REG (rtl)), initialized);
16516 else
16517 goto do_default;
16518 break;
16519
16520 case REG:
16521 loc_result = reg_loc_descriptor (rtl, initialized);
16522 break;
16523
16524 case MEM:
16525 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16526 GET_MODE (rtl), initialized);
16527 if (loc_result == NULL)
16528 loc_result = tls_mem_loc_descriptor (rtl);
16529 if (loc_result == NULL)
16530 {
16531 rtx new_rtl = avoid_constant_pool_reference (rtl);
16532 if (new_rtl != rtl)
16533 loc_result = loc_descriptor (new_rtl, mode, initialized);
16534 }
16535 break;
16536
16537 case CONCAT:
16538 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16539 initialized);
16540 break;
16541
16542 case CONCATN:
16543 loc_result = concatn_loc_descriptor (rtl, initialized);
16544 break;
16545
16546 case VAR_LOCATION:
16547 /* Single part. */
16548 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16549 {
16550 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16551 if (GET_CODE (loc) == EXPR_LIST)
16552 loc = XEXP (loc, 0);
16553 loc_result = loc_descriptor (loc, mode, initialized);
16554 break;
16555 }
16556
16557 rtl = XEXP (rtl, 1);
16558 /* FALLTHRU */
16559
16560 case PARALLEL:
16561 {
16562 rtvec par_elems = XVEC (rtl, 0);
16563 int num_elem = GET_NUM_ELEM (par_elems);
16564 machine_mode mode;
16565 int i, size;
16566
16567 /* Create the first one, so we have something to add to. */
16568 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16569 VOIDmode, initialized);
16570 if (loc_result == NULL)
16571 return NULL;
16572 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16573 /* At present we only track constant-sized pieces. */
16574 if (!GET_MODE_SIZE (mode).is_constant (&size))
16575 return NULL;
16576 add_loc_descr_op_piece (&loc_result, size);
16577 for (i = 1; i < num_elem; i++)
16578 {
16579 dw_loc_descr_ref temp;
16580
16581 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16582 VOIDmode, initialized);
16583 if (temp == NULL)
16584 return NULL;
16585 add_loc_descr (&loc_result, temp);
16586 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16587 /* At present we only track constant-sized pieces. */
16588 if (!GET_MODE_SIZE (mode).is_constant (&size))
16589 return NULL;
16590 add_loc_descr_op_piece (&loc_result, size);
16591 }
16592 }
16593 break;
16594
16595 case CONST_INT:
16596 if (mode != VOIDmode && mode != BLKmode)
16597 {
16598 int_mode = as_a <scalar_int_mode> (mode);
16599 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16600 INTVAL (rtl));
16601 }
16602 break;
16603
16604 case CONST_DOUBLE:
16605 if (mode == VOIDmode)
16606 mode = GET_MODE (rtl);
16607
16608 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16609 {
16610 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16611
16612 /* Note that a CONST_DOUBLE rtx could represent either an integer
16613 or a floating-point constant. A CONST_DOUBLE is used whenever
16614 the constant requires more than one word in order to be
16615 adequately represented. We output CONST_DOUBLEs as blocks. */
16616 scalar_mode smode = as_a <scalar_mode> (mode);
16617 loc_result = new_loc_descr (DW_OP_implicit_value,
16618 GET_MODE_SIZE (smode), 0);
16619 #if TARGET_SUPPORTS_WIDE_INT == 0
16620 if (!SCALAR_FLOAT_MODE_P (smode))
16621 {
16622 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16623 loc_result->dw_loc_oprnd2.v.val_double
16624 = rtx_to_double_int (rtl);
16625 }
16626 else
16627 #endif
16628 {
16629 unsigned int length = GET_MODE_SIZE (smode);
16630 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16631
16632 insert_float (rtl, array);
16633 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16634 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16635 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16636 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16637 }
16638 }
16639 break;
16640
16641 case CONST_WIDE_INT:
16642 if (mode == VOIDmode)
16643 mode = GET_MODE (rtl);
16644
16645 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16646 {
16647 int_mode = as_a <scalar_int_mode> (mode);
16648 loc_result = new_loc_descr (DW_OP_implicit_value,
16649 GET_MODE_SIZE (int_mode), 0);
16650 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16651 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16652 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16653 }
16654 break;
16655
16656 case CONST_VECTOR:
16657 if (mode == VOIDmode)
16658 mode = GET_MODE (rtl);
16659
16660 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16661 {
16662 unsigned int length;
16663 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16664 return NULL;
16665
16666 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16667 unsigned char *array
16668 = ggc_vec_alloc<unsigned char> (length * elt_size);
16669 unsigned int i;
16670 unsigned char *p;
16671 machine_mode imode = GET_MODE_INNER (mode);
16672
16673 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16674 switch (GET_MODE_CLASS (mode))
16675 {
16676 case MODE_VECTOR_INT:
16677 for (i = 0, p = array; i < length; i++, p += elt_size)
16678 {
16679 rtx elt = CONST_VECTOR_ELT (rtl, i);
16680 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16681 }
16682 break;
16683
16684 case MODE_VECTOR_FLOAT:
16685 for (i = 0, p = array; i < length; i++, p += elt_size)
16686 {
16687 rtx elt = CONST_VECTOR_ELT (rtl, i);
16688 insert_float (elt, p);
16689 }
16690 break;
16691
16692 default:
16693 gcc_unreachable ();
16694 }
16695
16696 loc_result = new_loc_descr (DW_OP_implicit_value,
16697 length * elt_size, 0);
16698 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16699 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16700 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16701 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16702 }
16703 break;
16704
16705 case CONST:
16706 if (mode == VOIDmode
16707 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16708 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16709 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16710 {
16711 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16712 break;
16713 }
16714 /* FALLTHROUGH */
16715 case SYMBOL_REF:
16716 if (!const_ok_for_output (rtl))
16717 break;
16718 /* FALLTHROUGH */
16719 case LABEL_REF:
16720 if (is_a <scalar_int_mode> (mode, &int_mode)
16721 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16722 && (dwarf_version >= 4 || !dwarf_strict))
16723 {
16724 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16725 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16726 vec_safe_push (used_rtx_array, rtl);
16727 }
16728 break;
16729
16730 case DEBUG_IMPLICIT_PTR:
16731 loc_result = implicit_ptr_descriptor (rtl, 0);
16732 break;
16733
16734 case PLUS:
16735 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16736 && CONST_INT_P (XEXP (rtl, 1)))
16737 {
16738 loc_result
16739 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16740 break;
16741 }
16742 /* FALLTHRU */
16743 do_default:
16744 default:
16745 if ((is_a <scalar_int_mode> (mode, &int_mode)
16746 && GET_MODE (rtl) == int_mode
16747 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16748 && dwarf_version >= 4)
16749 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16750 {
16751 /* Value expression. */
16752 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16753 if (loc_result)
16754 add_loc_descr (&loc_result,
16755 new_loc_descr (DW_OP_stack_value, 0, 0));
16756 }
16757 break;
16758 }
16759
16760 return loc_result;
16761 }
16762
16763 /* We need to figure out what section we should use as the base for the
16764 address ranges where a given location is valid.
16765 1. If this particular DECL has a section associated with it, use that.
16766 2. If this function has a section associated with it, use that.
16767 3. Otherwise, use the text section.
16768 XXX: If you split a variable across multiple sections, we won't notice. */
16769
16770 static const char *
16771 secname_for_decl (const_tree decl)
16772 {
16773 const char *secname;
16774
16775 if (VAR_OR_FUNCTION_DECL_P (decl)
16776 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16777 && DECL_SECTION_NAME (decl))
16778 secname = DECL_SECTION_NAME (decl);
16779 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16780 secname = DECL_SECTION_NAME (current_function_decl);
16781 else if (cfun && in_cold_section_p)
16782 secname = crtl->subsections.cold_section_label;
16783 else
16784 secname = text_section_label;
16785
16786 return secname;
16787 }
16788
16789 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16790
16791 static bool
16792 decl_by_reference_p (tree decl)
16793 {
16794 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16795 || VAR_P (decl))
16796 && DECL_BY_REFERENCE (decl));
16797 }
16798
16799 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16800 for VARLOC. */
16801
16802 static dw_loc_descr_ref
16803 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16804 enum var_init_status initialized)
16805 {
16806 int have_address = 0;
16807 dw_loc_descr_ref descr;
16808 machine_mode mode;
16809
16810 if (want_address != 2)
16811 {
16812 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16813 /* Single part. */
16814 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16815 {
16816 varloc = PAT_VAR_LOCATION_LOC (varloc);
16817 if (GET_CODE (varloc) == EXPR_LIST)
16818 varloc = XEXP (varloc, 0);
16819 mode = GET_MODE (varloc);
16820 if (MEM_P (varloc))
16821 {
16822 rtx addr = XEXP (varloc, 0);
16823 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16824 mode, initialized);
16825 if (descr)
16826 have_address = 1;
16827 else
16828 {
16829 rtx x = avoid_constant_pool_reference (varloc);
16830 if (x != varloc)
16831 descr = mem_loc_descriptor (x, mode, VOIDmode,
16832 initialized);
16833 }
16834 }
16835 else
16836 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16837 }
16838 else
16839 return 0;
16840 }
16841 else
16842 {
16843 if (GET_CODE (varloc) == VAR_LOCATION)
16844 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16845 else
16846 mode = DECL_MODE (loc);
16847 descr = loc_descriptor (varloc, mode, initialized);
16848 have_address = 1;
16849 }
16850
16851 if (!descr)
16852 return 0;
16853
16854 if (want_address == 2 && !have_address
16855 && (dwarf_version >= 4 || !dwarf_strict))
16856 {
16857 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16858 {
16859 expansion_failed (loc, NULL_RTX,
16860 "DWARF address size mismatch");
16861 return 0;
16862 }
16863 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16864 have_address = 1;
16865 }
16866 /* Show if we can't fill the request for an address. */
16867 if (want_address && !have_address)
16868 {
16869 expansion_failed (loc, NULL_RTX,
16870 "Want address and only have value");
16871 return 0;
16872 }
16873
16874 /* If we've got an address and don't want one, dereference. */
16875 if (!want_address && have_address)
16876 {
16877 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16878 enum dwarf_location_atom op;
16879
16880 if (size > DWARF2_ADDR_SIZE || size == -1)
16881 {
16882 expansion_failed (loc, NULL_RTX,
16883 "DWARF address size mismatch");
16884 return 0;
16885 }
16886 else if (size == DWARF2_ADDR_SIZE)
16887 op = DW_OP_deref;
16888 else
16889 op = DW_OP_deref_size;
16890
16891 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16892 }
16893
16894 return descr;
16895 }
16896
16897 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16898 if it is not possible. */
16899
16900 static dw_loc_descr_ref
16901 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16902 {
16903 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16904 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16905 else if (dwarf_version >= 3 || !dwarf_strict)
16906 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16907 else
16908 return NULL;
16909 }
16910
16911 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16912 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16913
16914 static dw_loc_descr_ref
16915 dw_sra_loc_expr (tree decl, rtx loc)
16916 {
16917 rtx p;
16918 unsigned HOST_WIDE_INT padsize = 0;
16919 dw_loc_descr_ref descr, *descr_tail;
16920 unsigned HOST_WIDE_INT decl_size;
16921 rtx varloc;
16922 enum var_init_status initialized;
16923
16924 if (DECL_SIZE (decl) == NULL
16925 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16926 return NULL;
16927
16928 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16929 descr = NULL;
16930 descr_tail = &descr;
16931
16932 for (p = loc; p; p = XEXP (p, 1))
16933 {
16934 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16935 rtx loc_note = *decl_piece_varloc_ptr (p);
16936 dw_loc_descr_ref cur_descr;
16937 dw_loc_descr_ref *tail, last = NULL;
16938 unsigned HOST_WIDE_INT opsize = 0;
16939
16940 if (loc_note == NULL_RTX
16941 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16942 {
16943 padsize += bitsize;
16944 continue;
16945 }
16946 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16947 varloc = NOTE_VAR_LOCATION (loc_note);
16948 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16949 if (cur_descr == NULL)
16950 {
16951 padsize += bitsize;
16952 continue;
16953 }
16954
16955 /* Check that cur_descr either doesn't use
16956 DW_OP_*piece operations, or their sum is equal
16957 to bitsize. Otherwise we can't embed it. */
16958 for (tail = &cur_descr; *tail != NULL;
16959 tail = &(*tail)->dw_loc_next)
16960 if ((*tail)->dw_loc_opc == DW_OP_piece)
16961 {
16962 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16963 * BITS_PER_UNIT;
16964 last = *tail;
16965 }
16966 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16967 {
16968 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16969 last = *tail;
16970 }
16971
16972 if (last != NULL && opsize != bitsize)
16973 {
16974 padsize += bitsize;
16975 /* Discard the current piece of the descriptor and release any
16976 addr_table entries it uses. */
16977 remove_loc_list_addr_table_entries (cur_descr);
16978 continue;
16979 }
16980
16981 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16982 expression, which means that those bits are optimized out. */
16983 if (padsize)
16984 {
16985 if (padsize > decl_size)
16986 {
16987 remove_loc_list_addr_table_entries (cur_descr);
16988 goto discard_descr;
16989 }
16990 decl_size -= padsize;
16991 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16992 if (*descr_tail == NULL)
16993 {
16994 remove_loc_list_addr_table_entries (cur_descr);
16995 goto discard_descr;
16996 }
16997 descr_tail = &(*descr_tail)->dw_loc_next;
16998 padsize = 0;
16999 }
17000 *descr_tail = cur_descr;
17001 descr_tail = tail;
17002 if (bitsize > decl_size)
17003 goto discard_descr;
17004 decl_size -= bitsize;
17005 if (last == NULL)
17006 {
17007 HOST_WIDE_INT offset = 0;
17008 if (GET_CODE (varloc) == VAR_LOCATION
17009 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17010 {
17011 varloc = PAT_VAR_LOCATION_LOC (varloc);
17012 if (GET_CODE (varloc) == EXPR_LIST)
17013 varloc = XEXP (varloc, 0);
17014 }
17015 do
17016 {
17017 if (GET_CODE (varloc) == CONST
17018 || GET_CODE (varloc) == SIGN_EXTEND
17019 || GET_CODE (varloc) == ZERO_EXTEND)
17020 varloc = XEXP (varloc, 0);
17021 else if (GET_CODE (varloc) == SUBREG)
17022 varloc = SUBREG_REG (varloc);
17023 else
17024 break;
17025 }
17026 while (1);
17027 /* The DW_OP_bit_piece offset should be zero for register
17028 or implicit location descriptions and for empty location
17029 descriptions, but memory addresses need a big-endian
17030 adjustment. */
17031 if (MEM_P (varloc))
17032 {
17033 unsigned HOST_WIDE_INT memsize;
17034 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17035 goto discard_descr;
17036 memsize *= BITS_PER_UNIT;
17037 if (memsize != bitsize)
17038 {
17039 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17040 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17041 goto discard_descr;
17042 if (memsize < bitsize)
17043 goto discard_descr;
17044 if (BITS_BIG_ENDIAN)
17045 offset = memsize - bitsize;
17046 }
17047 }
17048
17049 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17050 if (*descr_tail == NULL)
17051 goto discard_descr;
17052 descr_tail = &(*descr_tail)->dw_loc_next;
17053 }
17054 }
17055
17056 /* If there were any non-empty expressions, add padding up to the end of
17057 the decl. */
17058 if (descr != NULL && decl_size != 0)
17059 {
17060 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17061 if (*descr_tail == NULL)
17062 goto discard_descr;
17063 }
17064 return descr;
17065
17066 discard_descr:
17067 /* Discard the descriptor and release any addr_table entries it uses. */
17068 remove_loc_list_addr_table_entries (descr);
17069 return NULL;
17070 }
17071
17072 /* Return the dwarf representation of the location list LOC_LIST of
17073 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17074 function. */
17075
17076 static dw_loc_list_ref
17077 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17078 {
17079 const char *endname, *secname;
17080 var_loc_view endview;
17081 rtx varloc;
17082 enum var_init_status initialized;
17083 struct var_loc_node *node;
17084 dw_loc_descr_ref descr;
17085 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17086 dw_loc_list_ref list = NULL;
17087 dw_loc_list_ref *listp = &list;
17088
17089 /* Now that we know what section we are using for a base,
17090 actually construct the list of locations.
17091 The first location information is what is passed to the
17092 function that creates the location list, and the remaining
17093 locations just get added on to that list.
17094 Note that we only know the start address for a location
17095 (i.e. location changes), so to build the range, we use
17096 the range [current location start, next location start].
17097 This means we have to special case the last node, and generate
17098 a range of [last location start, end of function label]. */
17099
17100 if (cfun && crtl->has_bb_partition)
17101 {
17102 bool save_in_cold_section_p = in_cold_section_p;
17103 in_cold_section_p = first_function_block_is_cold;
17104 if (loc_list->last_before_switch == NULL)
17105 in_cold_section_p = !in_cold_section_p;
17106 secname = secname_for_decl (decl);
17107 in_cold_section_p = save_in_cold_section_p;
17108 }
17109 else
17110 secname = secname_for_decl (decl);
17111
17112 for (node = loc_list->first; node; node = node->next)
17113 {
17114 bool range_across_switch = false;
17115 if (GET_CODE (node->loc) == EXPR_LIST
17116 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17117 {
17118 if (GET_CODE (node->loc) == EXPR_LIST)
17119 {
17120 descr = NULL;
17121 /* This requires DW_OP_{,bit_}piece, which is not usable
17122 inside DWARF expressions. */
17123 if (want_address == 2)
17124 descr = dw_sra_loc_expr (decl, node->loc);
17125 }
17126 else
17127 {
17128 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17129 varloc = NOTE_VAR_LOCATION (node->loc);
17130 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17131 }
17132 if (descr)
17133 {
17134 /* If a section switch happens between node->label
17135 and node->next->label (or the end of the function) and
17136 we can't emit it as a single entry list,
17137 emit two ranges: the first one ending at the end
17138 of the first partition and the second one starting at the
17139 beginning of the second partition. */
17140 if (node == loc_list->last_before_switch
17141 && (node != loc_list->first || loc_list->first->next
17142 /* If we are to emit a view number, we will emit
17143 a loclist rather than a single location
17144 expression for the entire function (see
17145 loc_list_has_views), so we have to split the
17146 range that straddles across partitions. */
17147 || !ZERO_VIEW_P (node->view))
17148 && current_function_decl)
17149 {
17150 endname = cfun->fde->dw_fde_end;
17151 endview = 0;
17152 range_across_switch = true;
17153 }
17154 /* The variable has a location between NODE->LABEL and
17155 NODE->NEXT->LABEL. */
17156 else if (node->next)
17157 endname = node->next->label, endview = node->next->view;
17158 /* If the variable has a location at the last label
17159 it keeps its location until the end of function. */
17160 else if (!current_function_decl)
17161 endname = text_end_label, endview = 0;
17162 else
17163 {
17164 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17165 current_function_funcdef_no);
17166 endname = ggc_strdup (label_id);
17167 endview = 0;
17168 }
17169
17170 *listp = new_loc_list (descr, node->label, node->view,
17171 endname, endview, secname);
17172 if (TREE_CODE (decl) == PARM_DECL
17173 && node == loc_list->first
17174 && NOTE_P (node->loc)
17175 && strcmp (node->label, endname) == 0)
17176 (*listp)->force = true;
17177 listp = &(*listp)->dw_loc_next;
17178 }
17179 }
17180
17181 if (cfun
17182 && crtl->has_bb_partition
17183 && node == loc_list->last_before_switch)
17184 {
17185 bool save_in_cold_section_p = in_cold_section_p;
17186 in_cold_section_p = !first_function_block_is_cold;
17187 secname = secname_for_decl (decl);
17188 in_cold_section_p = save_in_cold_section_p;
17189 }
17190
17191 if (range_across_switch)
17192 {
17193 if (GET_CODE (node->loc) == EXPR_LIST)
17194 descr = dw_sra_loc_expr (decl, node->loc);
17195 else
17196 {
17197 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17198 varloc = NOTE_VAR_LOCATION (node->loc);
17199 descr = dw_loc_list_1 (decl, varloc, want_address,
17200 initialized);
17201 }
17202 gcc_assert (descr);
17203 /* The variable has a location between NODE->LABEL and
17204 NODE->NEXT->LABEL. */
17205 if (node->next)
17206 endname = node->next->label, endview = node->next->view;
17207 else
17208 endname = cfun->fde->dw_fde_second_end, endview = 0;
17209 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17210 endname, endview, secname);
17211 listp = &(*listp)->dw_loc_next;
17212 }
17213 }
17214
17215 /* Try to avoid the overhead of a location list by emitting a location
17216 expression instead, but only if we didn't have more than one
17217 location entry in the first place. If some entries were not
17218 representable, we don't want to pretend that a single entry that was
17219 representable applies to the entire scope in which the variable is
17220 available. */
17221 if (list && loc_list->first->next)
17222 gen_llsym (list);
17223 else
17224 maybe_gen_llsym (list);
17225
17226 return list;
17227 }
17228
17229 /* Return true if the loc_list has only a single element and thus can be
17230 represented as a location description. */
17231
17232 static bool
17233 single_element_loc_list_p (dw_loc_list_ref list)
17234 {
17235 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17236 return !list->ll_symbol;
17237 }
17238
17239 /* Duplicate a single element of location list. */
17240
17241 static inline dw_loc_descr_ref
17242 copy_loc_descr (dw_loc_descr_ref ref)
17243 {
17244 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17245 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17246 return copy;
17247 }
17248
17249 /* To each location in list LIST append loc descr REF. */
17250
17251 static void
17252 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17253 {
17254 dw_loc_descr_ref copy;
17255 add_loc_descr (&list->expr, ref);
17256 list = list->dw_loc_next;
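/* The first location gets REF itself; every following location gets
its own deep copy of the whole REF chain so that no descriptor nodes
are shared between expressions. */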
17257 while (list)
17258 {
17259 copy = copy_loc_descr (ref);
17260 add_loc_descr (&list->expr, copy);
17261 while (copy->dw_loc_next)
17262 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17263 list = list->dw_loc_next;
17264 }
17265 }
17266
17267 /* To each location in list LIST prepend loc descr REF. */
17268
17269 static void
17270 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17271 {
17272 dw_loc_descr_ref copy;
17273 dw_loc_descr_ref ref_end = list->expr;
17274 add_loc_descr (&ref, list->expr);
17275 list->expr = ref;
17276 list = list->dw_loc_next;
17277 while (list)
17278 {
17279 dw_loc_descr_ref end = list->expr;
17280 list->expr = copy = copy_loc_descr (ref);
17281 while (copy->dw_loc_next != ref_end)
17282 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17283 copy->dw_loc_next = end;
17284 list = list->dw_loc_next;
17285 }
17286 }
17287
17288 /* Given two lists RET and LIST
17289 produce location list that is result of adding expression in LIST
17290 to expression in RET on each position in program.
17291 Might be destructive on both RET and LIST.
17292
17293 TODO: We handle only the simple cases of RET or LIST having at most one
17294 element. The general case would involve sorting the lists in program order
17295 and merging them, which will need some additional work.
17296 Adding that will improve the quality of debug info, especially for SRA-ed
17297 structures. */
17298
17299 static void
17300 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17301 {
17302 if (!list)
17303 return;
17304 if (!*ret)
17305 {
17306 *ret = list;
17307 return;
17308 }
17309 if (!list->dw_loc_next)
17310 {
17311 add_loc_descr_to_each (*ret, list->expr);
17312 return;
17313 }
17314 if (!(*ret)->dw_loc_next)
17315 {
17316 prepend_loc_descr_to_each (list, (*ret)->expr);
17317 *ret = list;
17318 return;
17319 }
17320 expansion_failed (NULL_TREE, NULL_RTX,
17321 "Don't know how to merge two non-trivial"
17322 " location lists.\n");
17323 *ret = NULL;
17324 return;
17325 }
17326
17327 /* LOC is a constant expression. Try our luck: look it up in the constant
17328 pool and return a loc_descr of its address. */
17329
17330 static dw_loc_descr_ref
17331 cst_pool_loc_descr (tree loc)
17332 {
17333 /* Get an RTL for this, if something has been emitted. */
17334 rtx rtl = lookup_constant_def (loc);
17335
17336 if (!rtl || !MEM_P (rtl))
17337 {
17338 gcc_assert (!rtl);
17339 return 0;
17340 }
17341 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17342
17343 /* TODO: We might get more coverage if we were actually delaying expansion
17344 of all expressions until the end of compilation, when constant pools are
17345 fully populated. */
17346 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17347 {
17348 expansion_failed (loc, NULL_RTX,
17349 "CST value in contant pool but not marked.");
17350 return 0;
17351 }
17352 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17353 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17354 }
17355
17356 /* Return dw_loc_list representing address of addr_expr LOC
17357 by looking for inner INDIRECT_REF expression and turning
17358 it into simple arithmetic.
17359
17360 See loc_list_from_tree for the meaning of CONTEXT. */
17361
17362 static dw_loc_list_ref
17363 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17364 loc_descr_context *context)
17365 {
17366 tree obj, offset;
17367 poly_int64 bitsize, bitpos, bytepos;
17368 machine_mode mode;
17369 int unsignedp, reversep, volatilep = 0;
17370 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17371
17372 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17373 &bitsize, &bitpos, &offset, &mode,
17374 &unsignedp, &reversep, &volatilep);
17375 STRIP_NOPS (obj);
17376 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17377 {
17378 expansion_failed (loc, NULL_RTX, "bitfield access");
17379 return 0;
17380 }
17381 if (!INDIRECT_REF_P (obj))
17382 {
17383 expansion_failed (obj,
17384 NULL_RTX, "no indirect ref in inner reference");
17385 return 0;
17386 }
17387 if (!offset && known_eq (bitpos, 0))
17388 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17389 context);
17390 else if (toplev
17391 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17392 && (dwarf_version >= 4 || !dwarf_strict))
17393 {
17394 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17395 if (!list_ret)
17396 return 0;
17397 if (offset)
17398 {
17399 /* Variable offset. */
17400 list_ret1 = loc_list_from_tree (offset, 0, context);
17401 if (list_ret1 == 0)
17402 return 0;
17403 add_loc_list (&list_ret, list_ret1);
17404 if (!list_ret)
17405 return 0;
17406 add_loc_descr_to_each (list_ret,
17407 new_loc_descr (DW_OP_plus, 0, 0));
17408 }
17409 HOST_WIDE_INT value;
17410 if (bytepos.is_constant (&value) && value > 0)
17411 add_loc_descr_to_each (list_ret,
17412 new_loc_descr (DW_OP_plus_uconst, value, 0));
17413 else if (maybe_ne (bytepos, 0))
17414 loc_list_plus_const (list_ret, bytepos);
17415 add_loc_descr_to_each (list_ret,
17416 new_loc_descr (DW_OP_stack_value, 0, 0));
17417 }
17418 return list_ret;
17419 }
17420
17421 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17422 operations from LOC are nops, move to the last one. Insert in NOPS all
17423 operations that are skipped. */
17424
17425 static void
17426 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17427 hash_set<dw_loc_descr_ref> &nops)
17428 {
17429 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17430 {
17431 nops.add (loc);
17432 loc = loc->dw_loc_next;
17433 }
17434 }
17435
17436 /* Helper for loc_descr_without_nops: free the location description operation
17437 P. */
17438
17439 bool
17440 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17441 {
17442 ggc_free (loc);
17443 return true;
17444 }
17445
17446 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17447 finishes LOC. */
17448
17449 static void
17450 loc_descr_without_nops (dw_loc_descr_ref &loc)
17451 {
17452 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17453 return;
17454
17455 /* Set of all DW_OP_nop operations we remove. */
17456 hash_set<dw_loc_descr_ref> nops;
17457
17458 /* First, strip all prefix NOP operations in order to keep the head of the
17459 operations list. */
17460 loc_descr_to_next_no_nop (loc, nops);
17461
17462 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17463 {
17464 /* For control flow operations: strip "prefix" nops in destination
17465 labels. */
17466 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17467 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17468 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17469 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17470
17471 /* Do the same for the operations that follow, then move to the next
17472 iteration. */
17473 if (cur->dw_loc_next != NULL)
17474 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17475 cur = cur->dw_loc_next;
17476 }
17477
17478 nops.traverse<void *, free_loc_descr> (NULL);
17479 }
17480
17481
17482 struct dwarf_procedure_info;
17483
17484 /* Helper structure for location descriptions generation. */
17485 struct loc_descr_context
17486 {
17487 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17488 NULL_TREE if DW_OP_push_object_address is invalid for this location
17489 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17490 tree context_type;
17491 /* The ..._DECL node that should be translated as a
17492 DW_OP_push_object_address operation. */
17493 tree base_decl;
17494 /* Information about the DWARF procedure we are currently generating. NULL if
17495 we are not generating a DWARF procedure. */
17496 struct dwarf_procedure_info *dpi;
17497 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17498 by consumer. Used for DW_TAG_generic_subrange attributes. */
17499 bool placeholder_arg;
17500 /* True if PLACEHOLDER_EXPR has been seen. */
17501 bool placeholder_seen;
17502 };
17503
17504 /* DWARF procedures generation
17505
17506 DWARF expressions (aka. location descriptions) are used to encode variable
17507 things such as sizes or offsets. Such computations can have redundant parts
17508 that can be factorized in order to reduce the size of the output debug
17509 information. This is the whole point of DWARF procedures.
17510
17511 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17512 already factorized into functions ("size functions") in order to handle very
17513 big and complex types. Such functions are quite simple: they have integral
17514 arguments, they return an integral result and their body contains only a
17515 return statement with arithmetic expressions. This is the only kind of
17516 function we are interested in translating into DWARF procedures, here.
17517
17518 DWARF expressions and DWARF procedures are executed using a stack, so we have
17519 to define some calling convention for them to interact. Let's say that:
17520
17521 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17522 all arguments in reverse order (right-to-left) so that when the DWARF
17523 procedure execution starts, the first argument is the top of the stack.
17524
17525 - Then, when returning, the DWARF procedure must have consumed all arguments
17526 on the stack, must have pushed the result and touched nothing else.
17527
17528 - Each integral argument and the integral result can be held in a
17529 single stack slot.
17530
17531 - We call "frame offset" the number of stack slots that are "under DWARF
17532 procedure control": it includes the arguments slots, the temporaries and
17533 the result slot. Thus, it is equal to the number of arguments when the
17534 procedure execution starts and must be equal to one (the result) when it
17535 returns. */
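/* As a rough illustration of this convention (an editor's sketch, not part
   of the DWARF standard; F, A and B are hypothetical names): calling a
   two-argument size function F (A, B) from a location expression would
   look like

       <push B> <push A> DW_OP_call4 <DIE of F's DWARF procedure>

   On entry the stack holds [... B A] (A on top) and the frame offset is 2;
   on return it holds [... result] and the frame offset is 1.  The net
   effect on the caller's stack is thus 1 - args_count slots (here -1),
   which is what dwarf_proc_stack_usage_map records; see
   function_to_dwarf_procedure below.  */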
17536
17537 /* Helper structure used when generating operations for a DWARF procedure. */
17538 struct dwarf_procedure_info
17539 {
17540 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17541 currently translated. */
17542 tree fndecl;
17543 /* The number of arguments FNDECL takes. */
17544 unsigned args_count;
17545 };
17546
17547 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17548 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17549 equate it to this DIE. */
17550
17551 static dw_die_ref
17552 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17553 dw_die_ref parent_die)
17554 {
17555 dw_die_ref dwarf_proc_die;
17556
17557 if ((dwarf_version < 3 && dwarf_strict)
17558 || location == NULL)
17559 return NULL;
17560
17561 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17562 if (fndecl)
17563 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17564 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17565 return dwarf_proc_die;
17566 }
17567
17568 /* Return whether TYPE is a supported type as a DWARF procedure argument
17569 type or return type (we handle only scalar types and pointer types that
17570 aren't wider than the DWARF expression evaluation stack).
17571
17572 static bool
17573 is_handled_procedure_type (tree type)
17574 {
17575 return ((INTEGRAL_TYPE_P (type)
17576 || TREE_CODE (type) == OFFSET_TYPE
17577 || TREE_CODE (type) == POINTER_TYPE)
17578 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17579 }
17580
17581 /* Helper for resolve_args_picking: do the same but stop when coming across
17582 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17583 offset *before* evaluating the corresponding operation. */
17584
17585 static bool
17586 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17587 struct dwarf_procedure_info *dpi,
17588 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17589 {
17590 /* The "frame_offset" identifier is already used to name a macro... */
17591 unsigned frame_offset_ = initial_frame_offset;
17592 dw_loc_descr_ref l;
17593
17594 for (l = loc; l != NULL;)
17595 {
17596 bool existed;
17597 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17598
17599 /* If we already met this node, there is nothing to compute anymore. */
17600 if (existed)
17601 {
17602 /* Make sure that the stack size is consistent wherever the execution
17603 flow comes from. */
17604 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17605 break;
17606 }
17607 l_frame_offset = frame_offset_;
17608
17609 /* If needed, relocate the picking offset with respect to the frame
17610 offset. */
17611 if (l->frame_offset_rel)
17612 {
17613 unsigned HOST_WIDE_INT off;
17614 switch (l->dw_loc_opc)
17615 {
17616 case DW_OP_pick:
17617 off = l->dw_loc_oprnd1.v.val_unsigned;
17618 break;
17619 case DW_OP_dup:
17620 off = 0;
17621 break;
17622 case DW_OP_over:
17623 off = 1;
17624 break;
17625 default:
17626 gcc_unreachable ();
17627 }
17628 /* frame_offset_ is the size of the current stack frame, including
17629 incoming arguments. Besides, the arguments are pushed
17630 right-to-left. Thus, in order to access the Nth argument from
17631 this operation node, the picking has to skip temporaries *plus*
17632 one stack slot per argument (0 for the first one, 1 for the second
17633 one, etc.).
17634
17635 The targeted argument number (N) is already set as the operand,
17636 and the number of temporaries can be computed with:
17637 frame_offset_ - dpi->args_count */
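	      /* For instance (sketch): with dpi->args_count == 2 and one
		 temporary already pushed (frame_offset_ == 3), accessing
		 argument N == 1 gives off = 1 + 3 - 2 = 2, i.e. the
		 operation is rewritten as DW_OP_pick 2.  */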
17638 off += frame_offset_ - dpi->args_count;
17639
17640 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17641 if (off > 255)
17642 return false;
17643
17644 if (off == 0)
17645 {
17646 l->dw_loc_opc = DW_OP_dup;
17647 l->dw_loc_oprnd1.v.val_unsigned = 0;
17648 }
17649 else if (off == 1)
17650 {
17651 l->dw_loc_opc = DW_OP_over;
17652 l->dw_loc_oprnd1.v.val_unsigned = 0;
17653 }
17654 else
17655 {
17656 l->dw_loc_opc = DW_OP_pick;
17657 l->dw_loc_oprnd1.v.val_unsigned = off;
17658 }
17659 }
17660
17661 /* Update frame_offset according to the effect the current operation has
17662 on the stack. */
17663 switch (l->dw_loc_opc)
17664 {
17665 case DW_OP_deref:
17666 case DW_OP_swap:
17667 case DW_OP_rot:
17668 case DW_OP_abs:
17669 case DW_OP_neg:
17670 case DW_OP_not:
17671 case DW_OP_plus_uconst:
17672 case DW_OP_skip:
17673 case DW_OP_reg0:
17674 case DW_OP_reg1:
17675 case DW_OP_reg2:
17676 case DW_OP_reg3:
17677 case DW_OP_reg4:
17678 case DW_OP_reg5:
17679 case DW_OP_reg6:
17680 case DW_OP_reg7:
17681 case DW_OP_reg8:
17682 case DW_OP_reg9:
17683 case DW_OP_reg10:
17684 case DW_OP_reg11:
17685 case DW_OP_reg12:
17686 case DW_OP_reg13:
17687 case DW_OP_reg14:
17688 case DW_OP_reg15:
17689 case DW_OP_reg16:
17690 case DW_OP_reg17:
17691 case DW_OP_reg18:
17692 case DW_OP_reg19:
17693 case DW_OP_reg20:
17694 case DW_OP_reg21:
17695 case DW_OP_reg22:
17696 case DW_OP_reg23:
17697 case DW_OP_reg24:
17698 case DW_OP_reg25:
17699 case DW_OP_reg26:
17700 case DW_OP_reg27:
17701 case DW_OP_reg28:
17702 case DW_OP_reg29:
17703 case DW_OP_reg30:
17704 case DW_OP_reg31:
17705 case DW_OP_bregx:
17706 case DW_OP_piece:
17707 case DW_OP_deref_size:
17708 case DW_OP_nop:
17709 case DW_OP_bit_piece:
17710 case DW_OP_implicit_value:
17711 case DW_OP_stack_value:
17712 break;
17713
17714 case DW_OP_addr:
17715 case DW_OP_const1u:
17716 case DW_OP_const1s:
17717 case DW_OP_const2u:
17718 case DW_OP_const2s:
17719 case DW_OP_const4u:
17720 case DW_OP_const4s:
17721 case DW_OP_const8u:
17722 case DW_OP_const8s:
17723 case DW_OP_constu:
17724 case DW_OP_consts:
17725 case DW_OP_dup:
17726 case DW_OP_over:
17727 case DW_OP_pick:
17728 case DW_OP_lit0:
17729 case DW_OP_lit1:
17730 case DW_OP_lit2:
17731 case DW_OP_lit3:
17732 case DW_OP_lit4:
17733 case DW_OP_lit5:
17734 case DW_OP_lit6:
17735 case DW_OP_lit7:
17736 case DW_OP_lit8:
17737 case DW_OP_lit9:
17738 case DW_OP_lit10:
17739 case DW_OP_lit11:
17740 case DW_OP_lit12:
17741 case DW_OP_lit13:
17742 case DW_OP_lit14:
17743 case DW_OP_lit15:
17744 case DW_OP_lit16:
17745 case DW_OP_lit17:
17746 case DW_OP_lit18:
17747 case DW_OP_lit19:
17748 case DW_OP_lit20:
17749 case DW_OP_lit21:
17750 case DW_OP_lit22:
17751 case DW_OP_lit23:
17752 case DW_OP_lit24:
17753 case DW_OP_lit25:
17754 case DW_OP_lit26:
17755 case DW_OP_lit27:
17756 case DW_OP_lit28:
17757 case DW_OP_lit29:
17758 case DW_OP_lit30:
17759 case DW_OP_lit31:
17760 case DW_OP_breg0:
17761 case DW_OP_breg1:
17762 case DW_OP_breg2:
17763 case DW_OP_breg3:
17764 case DW_OP_breg4:
17765 case DW_OP_breg5:
17766 case DW_OP_breg6:
17767 case DW_OP_breg7:
17768 case DW_OP_breg8:
17769 case DW_OP_breg9:
17770 case DW_OP_breg10:
17771 case DW_OP_breg11:
17772 case DW_OP_breg12:
17773 case DW_OP_breg13:
17774 case DW_OP_breg14:
17775 case DW_OP_breg15:
17776 case DW_OP_breg16:
17777 case DW_OP_breg17:
17778 case DW_OP_breg18:
17779 case DW_OP_breg19:
17780 case DW_OP_breg20:
17781 case DW_OP_breg21:
17782 case DW_OP_breg22:
17783 case DW_OP_breg23:
17784 case DW_OP_breg24:
17785 case DW_OP_breg25:
17786 case DW_OP_breg26:
17787 case DW_OP_breg27:
17788 case DW_OP_breg28:
17789 case DW_OP_breg29:
17790 case DW_OP_breg30:
17791 case DW_OP_breg31:
17792 case DW_OP_fbreg:
17793 case DW_OP_push_object_address:
17794 case DW_OP_call_frame_cfa:
17795 case DW_OP_GNU_variable_value:
17796 ++frame_offset_;
17797 break;
17798
17799 case DW_OP_drop:
17800 case DW_OP_xderef:
17801 case DW_OP_and:
17802 case DW_OP_div:
17803 case DW_OP_minus:
17804 case DW_OP_mod:
17805 case DW_OP_mul:
17806 case DW_OP_or:
17807 case DW_OP_plus:
17808 case DW_OP_shl:
17809 case DW_OP_shr:
17810 case DW_OP_shra:
17811 case DW_OP_xor:
17812 case DW_OP_bra:
17813 case DW_OP_eq:
17814 case DW_OP_ge:
17815 case DW_OP_gt:
17816 case DW_OP_le:
17817 case DW_OP_lt:
17818 case DW_OP_ne:
17819 case DW_OP_regx:
17820 case DW_OP_xderef_size:
17821 --frame_offset_;
17822 break;
17823
17824 case DW_OP_call2:
17825 case DW_OP_call4:
17826 case DW_OP_call_ref:
17827 {
17828 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17829 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17830
17831 if (stack_usage == NULL)
17832 return false;
17833 frame_offset_ += *stack_usage;
17834 break;
17835 }
17836
17837 case DW_OP_implicit_pointer:
17838 case DW_OP_entry_value:
17839 case DW_OP_const_type:
17840 case DW_OP_regval_type:
17841 case DW_OP_deref_type:
17842 case DW_OP_convert:
17843 case DW_OP_reinterpret:
17844 case DW_OP_form_tls_address:
17845 case DW_OP_GNU_push_tls_address:
17846 case DW_OP_GNU_uninit:
17847 case DW_OP_GNU_encoded_addr:
17848 case DW_OP_GNU_implicit_pointer:
17849 case DW_OP_GNU_entry_value:
17850 case DW_OP_GNU_const_type:
17851 case DW_OP_GNU_regval_type:
17852 case DW_OP_GNU_deref_type:
17853 case DW_OP_GNU_convert:
17854 case DW_OP_GNU_reinterpret:
17855 case DW_OP_GNU_parameter_ref:
17856 /* loc_list_from_tree will probably not output these operations for
17857 size functions, so assume they will not appear here. */
17858 /* Fall through... */
17859
17860 default:
17861 gcc_unreachable ();
17862 }
17863
17864 /* Now, follow the control flow (except subroutine calls). */
17865 switch (l->dw_loc_opc)
17866 {
17867 case DW_OP_bra:
17868 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17869 frame_offsets))
17870 return false;
17871 /* Fall through. */
17872
17873 case DW_OP_skip:
17874 l = l->dw_loc_oprnd1.v.val_loc;
17875 break;
17876
17877 case DW_OP_stack_value:
17878 return true;
17879
17880 default:
17881 l = l->dw_loc_next;
17882 break;
17883 }
17884 }
17885
17886 return true;
17887 }
17888
17889 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17890 operations) in order to resolve the operand of DW_OP_pick operations that
17891 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17892 offset *before* LOC is executed. Return whether all relocations were
17893 successful. */
17894
17895 static bool
17896 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17897 struct dwarf_procedure_info *dpi)
17898 {
17899 /* Associate to all visited operations the frame offset *before* evaluating
17900 this operation. */
17901 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17902
17903 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17904 frame_offsets);
17905 }
17906
17907 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17908 Return NULL if it is not possible. */
17909
17910 static dw_die_ref
17911 function_to_dwarf_procedure (tree fndecl)
17912 {
17913 struct loc_descr_context ctx;
17914 struct dwarf_procedure_info dpi;
17915 dw_die_ref dwarf_proc_die;
17916 tree tree_body = DECL_SAVED_TREE (fndecl);
17917 dw_loc_descr_ref loc_body, epilogue;
17918
17919 tree cursor;
17920 unsigned i;
17921
17922 /* Do not generate multiple DWARF procedures for the same function
17923 declaration. */
17924 dwarf_proc_die = lookup_decl_die (fndecl);
17925 if (dwarf_proc_die != NULL)
17926 return dwarf_proc_die;
17927
17928 /* DWARF procedures are available starting with the DWARFv3 standard. */
17929 if (dwarf_version < 3 && dwarf_strict)
17930 return NULL;
17931
17932 /* We handle only functions for which we still have a body, that return a
17933 supported type and that take arguments with supported types. Note that
17934 there is no point translating functions that return nothing. */
17935 if (tree_body == NULL_TREE
17936 || DECL_RESULT (fndecl) == NULL_TREE
17937 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17938 return NULL;
17939
17940 for (cursor = DECL_ARGUMENTS (fndecl);
17941 cursor != NULL_TREE;
17942 cursor = TREE_CHAIN (cursor))
17943 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17944 return NULL;
17945
17946 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17947 if (TREE_CODE (tree_body) != RETURN_EXPR)
17948 return NULL;
17949 tree_body = TREE_OPERAND (tree_body, 0);
17950 if (TREE_CODE (tree_body) != MODIFY_EXPR
17951 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17952 return NULL;
17953 tree_body = TREE_OPERAND (tree_body, 1);
17954
17955 /* Try to translate the body expression itself. Note that this will probably
17956 cause an infinite recursion if its call graph has a cycle. This is very
17957 unlikely for size functions, however, so don't bother with such things at
17958 the moment. */
17959 ctx.context_type = NULL_TREE;
17960 ctx.base_decl = NULL_TREE;
17961 ctx.dpi = &dpi;
17962 ctx.placeholder_arg = false;
17963 ctx.placeholder_seen = false;
17964 dpi.fndecl = fndecl;
17965 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17966 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17967 if (!loc_body)
17968 return NULL;
17969
17970 /* After evaluating all operands in "loc_body", we should still have on the
17971 stack all arguments plus the desired function result (top of the stack).
17972 Generate code in order to keep only the result in our stack frame. */
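  /* For two arguments, the epilogue generated below is (sketch):
     DW_OP_swap DW_OP_drop DW_OP_swap DW_OP_drop, i.e. each swap/drop pair
     discards the argument currently sitting just below the result, leaving
     only the result on the stack.  */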
17973 epilogue = NULL;
17974 for (i = 0; i < dpi.args_count; ++i)
17975 {
17976 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17977 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17978 op_couple->dw_loc_next->dw_loc_next = epilogue;
17979 epilogue = op_couple;
17980 }
17981 add_loc_descr (&loc_body, epilogue);
17982 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17983 return NULL;
17984
17985 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17986 because they are considered useful. Now that there is an epilogue, they
17987 are no longer needed, so give it another try.
17988 loc_descr_without_nops (loc_body);
17989
17990 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17991 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17992 though, given that size functions do not come from source, so they should
17993 not have a dedicated DW_TAG_subprogram DIE. */
17994 dwarf_proc_die
17995 = new_dwarf_proc_die (loc_body, fndecl,
17996 get_context_die (DECL_CONTEXT (fndecl)));
17997
17998 /* The called DWARF procedure consumes one stack slot per argument and
17999 returns one stack slot. */
18000 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18001
18002 return dwarf_proc_die;
18003 }
18004
18005
18006 /* Generate Dwarf location list representing LOC.
18007 If WANT_ADDRESS is false, an expression computing the value of LOC will be returned.
18008 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
18009 If WANT_ADDRESS is 2, an expression computing an address usable in a location
18010 description will be returned (i.e. DW_OP_reg can be used
18011 to refer to register values).
18012
18013 CONTEXT provides information to customize the location descriptions
18014 generation. Its context_type field specifies what type is implicitly
18015 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18016 will not be generated.
18017
18018 Its DPI field determines whether we are generating a DWARF expression for a
18019 DWARF procedure, so PARM_DECL references are processed specifically.
18020
18021 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18022 and dpi fields were null. */
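/* A rough example (editor's sketch, not exhaustive; V is a hypothetical
   variable): for a global V in static storage, WANT_ADDRESS == 1 would
   typically yield DW_OP_addr <V>, while WANT_ADDRESS == 0 would typically
   yield DW_OP_addr <V>; DW_OP_deref, since the tail of this function
   dereferences when it has an address but only the value was requested.  */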
18023
18024 static dw_loc_list_ref
18025 loc_list_from_tree_1 (tree loc, int want_address,
18026 struct loc_descr_context *context)
18027 {
18028 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18029 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18030 int have_address = 0;
18031 enum dwarf_location_atom op;
18032
18033 /* ??? Most of the time we do not take proper care of sign/zero
18034 extending the values. Hopefully this won't be a real
18035 problem... */
18036
18037 if (context != NULL
18038 && context->base_decl == loc
18039 && want_address == 0)
18040 {
18041 if (dwarf_version >= 3 || !dwarf_strict)
18042 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18043 NULL, 0, NULL, 0, NULL);
18044 else
18045 return NULL;
18046 }
18047
18048 switch (TREE_CODE (loc))
18049 {
18050 case ERROR_MARK:
18051 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18052 return 0;
18053
18054 case PLACEHOLDER_EXPR:
18055 /* This case involves extracting fields from an object to determine the
18056 position of other fields. It is supposed to appear only as the first
18057 operand of COMPONENT_REF nodes and to reference precisely the type
18058 that the context allows. */
18059 if (context != NULL
18060 && TREE_TYPE (loc) == context->context_type
18061 && want_address >= 1)
18062 {
18063 if (dwarf_version >= 3 || !dwarf_strict)
18064 {
18065 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18066 have_address = 1;
18067 break;
18068 }
18069 else
18070 return NULL;
18071 }
18072 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18073 the single argument passed by consumer. */
18074 else if (context != NULL
18075 && context->placeholder_arg
18076 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18077 && want_address == 0)
18078 {
18079 ret = new_loc_descr (DW_OP_pick, 0, 0);
18080 ret->frame_offset_rel = 1;
18081 context->placeholder_seen = true;
18082 break;
18083 }
18084 else
18085 expansion_failed (loc, NULL_RTX,
18086 "PLACEHOLDER_EXPR for an unexpected type");
18087 break;
18088
18089 case CALL_EXPR:
18090 {
18091 const int nargs = call_expr_nargs (loc);
18092 tree callee = get_callee_fndecl (loc);
18093 int i;
18094 dw_die_ref dwarf_proc;
18095
18096 if (callee == NULL_TREE)
18097 goto call_expansion_failed;
18098
18099 /* We handle only functions that return an integer. */
18100 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18101 goto call_expansion_failed;
18102
18103 dwarf_proc = function_to_dwarf_procedure (callee);
18104 if (dwarf_proc == NULL)
18105 goto call_expansion_failed;
18106
18107 /* Evaluate arguments right-to-left so that the first argument will
18108 be the top-most one on the stack. */
18109 for (i = nargs - 1; i >= 0; --i)
18110 {
18111 dw_loc_descr_ref loc_descr
18112 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18113 context);
18114
18115 if (loc_descr == NULL)
18116 goto call_expansion_failed;
18117
18118 add_loc_descr (&ret, loc_descr);
18119 }
18120
18121 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18122 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18123 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18124 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18125 add_loc_descr (&ret, ret1);
18126 break;
18127
18128 call_expansion_failed:
18129 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18130 /* There are no opcodes for these operations. */
18131 return 0;
18132 }
18133
18134 case PREINCREMENT_EXPR:
18135 case PREDECREMENT_EXPR:
18136 case POSTINCREMENT_EXPR:
18137 case POSTDECREMENT_EXPR:
18138 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18139 /* There are no opcodes for these operations. */
18140 return 0;
18141
18142 case ADDR_EXPR:
18143 /* If we already want an address, see if there is INDIRECT_REF inside
18144 e.g. for &this->field. */
18145 if (want_address)
18146 {
18147 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18148 (loc, want_address == 2, context);
18149 if (list_ret)
18150 have_address = 1;
18151 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18152 && (ret = cst_pool_loc_descr (loc)))
18153 have_address = 1;
18154 }
18155 /* Otherwise, process the argument and look for the address. */
18156 if (!list_ret && !ret)
18157 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18158 else
18159 {
18160 if (want_address)
18161 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18162 return NULL;
18163 }
18164 break;
18165
18166 case VAR_DECL:
18167 if (DECL_THREAD_LOCAL_P (loc))
18168 {
18169 rtx rtl;
18170 enum dwarf_location_atom tls_op;
18171 enum dtprel_bool dtprel = dtprel_false;
18172
18173 if (targetm.have_tls)
18174 {
18175 /* If this is not defined, we have no way to emit the
18176 data. */
18177 if (!targetm.asm_out.output_dwarf_dtprel)
18178 return 0;
18179
18180 /* The way DW_OP_GNU_push_tls_address is specified, we
18181 can only look up addresses of objects in the current
18182 module. We used DW_OP_addr as first op, but that's
18183 wrong, because DW_OP_addr is relocated by the debug
18184 info consumer, while DW_OP_GNU_push_tls_address
18185 operand shouldn't be. */
18186 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18187 return 0;
18188 dtprel = dtprel_true;
18189 /* We check for DWARF 5 here because gdb did not implement
18190 DW_OP_form_tls_address until after 7.12. */
18191 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18192 : DW_OP_GNU_push_tls_address);
18193 }
18194 else
18195 {
18196 if (!targetm.emutls.debug_form_tls_address
18197 || !(dwarf_version >= 3 || !dwarf_strict))
18198 return 0;
18199 /* We stuffed the control variable into the DECL_VALUE_EXPR
18200 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18201 no longer appear in gimple code. We used the control
18202 variable specifically so that we could pick it up here. */
18203 loc = DECL_VALUE_EXPR (loc);
18204 tls_op = DW_OP_form_tls_address;
18205 }
18206
18207 rtl = rtl_for_decl_location (loc);
18208 if (rtl == NULL_RTX)
18209 return 0;
18210
18211 if (!MEM_P (rtl))
18212 return 0;
18213 rtl = XEXP (rtl, 0);
18214 if (! CONSTANT_P (rtl))
18215 return 0;
18216
18217 ret = new_addr_loc_descr (rtl, dtprel);
18218 ret1 = new_loc_descr (tls_op, 0, 0);
18219 add_loc_descr (&ret, ret1);
18220
18221 have_address = 1;
18222 break;
18223 }
18224 /* FALLTHRU */
18225
18226 case PARM_DECL:
18227 if (context != NULL && context->dpi != NULL
18228 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18229 {
18230 /* We are generating code for a DWARF procedure and we want to access
18231 one of its arguments: find the appropriate argument offset and let
18232 the resolve_args_picking pass compute the offset that complies
18233 with the stack frame size. */
18234 unsigned i = 0;
18235 tree cursor;
18236
18237 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18238 cursor != NULL_TREE && cursor != loc;
18239 cursor = TREE_CHAIN (cursor), ++i)
18240 ;
18241 /* If we are translating a DWARF procedure, all referenced parameters
18242 must belong to the current function. */
18243 gcc_assert (cursor != NULL_TREE);
18244
18245 ret = new_loc_descr (DW_OP_pick, i, 0);
18246 ret->frame_offset_rel = 1;
18247 break;
18248 }
18249 /* FALLTHRU */
18250
18251 case RESULT_DECL:
18252 if (DECL_HAS_VALUE_EXPR_P (loc))
18253 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18254 want_address, context);
18255 /* FALLTHRU */
18256
18257 case FUNCTION_DECL:
18258 {
18259 rtx rtl;
18260 var_loc_list *loc_list = lookup_decl_loc (loc);
18261
18262 if (loc_list && loc_list->first)
18263 {
18264 list_ret = dw_loc_list (loc_list, loc, want_address);
18265 have_address = want_address != 0;
18266 break;
18267 }
18268 rtl = rtl_for_decl_location (loc);
18269 if (rtl == NULL_RTX)
18270 {
18271 if (TREE_CODE (loc) != FUNCTION_DECL
18272 && early_dwarf
18273 && current_function_decl
18274 && want_address != 1
18275 && ! DECL_IGNORED_P (loc)
18276 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18277 || POINTER_TYPE_P (TREE_TYPE (loc)))
18278 && DECL_CONTEXT (loc) == current_function_decl
18279 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18280 <= DWARF2_ADDR_SIZE))
18281 {
18282 dw_die_ref ref = lookup_decl_die (loc);
18283 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18284 if (ref)
18285 {
18286 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18287 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18288 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18289 }
18290 else
18291 {
18292 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18293 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18294 }
18295 break;
18296 }
18297 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18298 return 0;
18299 }
18300 else if (CONST_INT_P (rtl))
18301 {
18302 HOST_WIDE_INT val = INTVAL (rtl);
18303 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18304 val &= GET_MODE_MASK (DECL_MODE (loc));
18305 ret = int_loc_descriptor (val);
18306 }
18307 else if (GET_CODE (rtl) == CONST_STRING)
18308 {
18309 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18310 return 0;
18311 }
18312 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18313 ret = new_addr_loc_descr (rtl, dtprel_false);
18314 else
18315 {
18316 machine_mode mode, mem_mode;
18317
18318 /* Certain constructs can only be represented at top-level. */
18319 if (want_address == 2)
18320 {
18321 ret = loc_descriptor (rtl, VOIDmode,
18322 VAR_INIT_STATUS_INITIALIZED);
18323 have_address = 1;
18324 }
18325 else
18326 {
18327 mode = GET_MODE (rtl);
18328 mem_mode = VOIDmode;
18329 if (MEM_P (rtl))
18330 {
18331 mem_mode = mode;
18332 mode = get_address_mode (rtl);
18333 rtl = XEXP (rtl, 0);
18334 have_address = 1;
18335 }
18336 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18337 VAR_INIT_STATUS_INITIALIZED);
18338 }
18339 if (!ret)
18340 expansion_failed (loc, rtl,
18341 "failed to produce loc descriptor for rtl");
18342 }
18343 }
18344 break;
18345
18346 case MEM_REF:
18347 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18348 {
18349 have_address = 1;
18350 goto do_plus;
18351 }
18352 /* Fallthru. */
18353 case INDIRECT_REF:
18354 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18355 have_address = 1;
18356 break;
18357
18358 case TARGET_MEM_REF:
18359 case SSA_NAME:
18360 case DEBUG_EXPR_DECL:
18361 return NULL;
18362
18363 case COMPOUND_EXPR:
18364 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18365 context);
18366
18367 CASE_CONVERT:
18368 case VIEW_CONVERT_EXPR:
18369 case SAVE_EXPR:
18370 case MODIFY_EXPR:
18371 case NON_LVALUE_EXPR:
18372 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18373 context);
18374
18375 case COMPONENT_REF:
18376 case BIT_FIELD_REF:
18377 case ARRAY_REF:
18378 case ARRAY_RANGE_REF:
18379 case REALPART_EXPR:
18380 case IMAGPART_EXPR:
18381 {
18382 tree obj, offset;
18383 poly_int64 bitsize, bitpos, bytepos;
18384 machine_mode mode;
18385 int unsignedp, reversep, volatilep = 0;
18386
18387 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18388 &unsignedp, &reversep, &volatilep);
18389
18390 gcc_assert (obj != loc);
18391
18392 list_ret = loc_list_from_tree_1 (obj,
18393 want_address == 2
18394 && known_eq (bitpos, 0)
18395 && !offset ? 2 : 1,
18396 context);
18397 /* TODO: We can extract the value of a small expression via shifting even
18398 for nonzero bitpos. */
18399 if (list_ret == 0)
18400 return 0;
18401 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18402 || !multiple_p (bitsize, BITS_PER_UNIT))
18403 {
18404 expansion_failed (loc, NULL_RTX,
18405 "bitfield access");
18406 return 0;
18407 }
18408
18409 if (offset != NULL_TREE)
18410 {
18411 /* Variable offset. */
18412 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18413 if (list_ret1 == 0)
18414 return 0;
18415 add_loc_list (&list_ret, list_ret1);
18416 if (!list_ret)
18417 return 0;
18418 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18419 }
18420
18421 HOST_WIDE_INT value;
18422 if (bytepos.is_constant (&value) && value > 0)
18423 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18424 value, 0));
18425 else if (maybe_ne (bytepos, 0))
18426 loc_list_plus_const (list_ret, bytepos);
18427
18428 have_address = 1;
18429 break;
18430 }
18431
18432 case INTEGER_CST:
18433 if ((want_address || !tree_fits_shwi_p (loc))
18434 && (ret = cst_pool_loc_descr (loc)))
18435 have_address = 1;
18436 else if (want_address == 2
18437 && tree_fits_shwi_p (loc)
18438 && (ret = address_of_int_loc_descriptor
18439 (int_size_in_bytes (TREE_TYPE (loc)),
18440 tree_to_shwi (loc))))
18441 have_address = 1;
18442 else if (tree_fits_shwi_p (loc))
18443 ret = int_loc_descriptor (tree_to_shwi (loc));
18444 else if (tree_fits_uhwi_p (loc))
18445 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18446 else
18447 {
18448 expansion_failed (loc, NULL_RTX,
18449 "Integer operand is not host integer");
18450 return 0;
18451 }
18452 break;
18453
18454 case CONSTRUCTOR:
18455 case REAL_CST:
18456 case STRING_CST:
18457 case COMPLEX_CST:
18458 if ((ret = cst_pool_loc_descr (loc)))
18459 have_address = 1;
18460 else if (TREE_CODE (loc) == CONSTRUCTOR)
18461 {
18462 tree type = TREE_TYPE (loc);
18463 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18464 unsigned HOST_WIDE_INT offset = 0;
18465 unsigned HOST_WIDE_INT cnt;
18466 constructor_elt *ce;
18467
18468 if (TREE_CODE (type) == RECORD_TYPE)
18469 {
18470 /* This is very limited, but it's enough to output
18471 pointers to member functions, as long as the
18472 referenced function is defined in the current
18473 translation unit. */
18474 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18475 {
18476 tree val = ce->value;
18477
18478 tree field = ce->index;
18479
18480 if (val)
18481 STRIP_NOPS (val);
18482
18483 if (!field || DECL_BIT_FIELD (field))
18484 {
18485 expansion_failed (loc, NULL_RTX,
18486 "bitfield in record type constructor");
18487 size = offset = (unsigned HOST_WIDE_INT)-1;
18488 ret = NULL;
18489 break;
18490 }
18491
18492 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18493 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18494 gcc_assert (pos + fieldsize <= size);
18495 if (pos < offset)
18496 {
18497 expansion_failed (loc, NULL_RTX,
18498 "out-of-order fields in record constructor");
18499 size = offset = (unsigned HOST_WIDE_INT)-1;
18500 ret = NULL;
18501 break;
18502 }
18503 if (pos > offset)
18504 {
18505 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18506 add_loc_descr (&ret, ret1);
18507 offset = pos;
18508 }
18509 if (val && fieldsize != 0)
18510 {
18511 ret1 = loc_descriptor_from_tree (val, want_address, context);
18512 if (!ret1)
18513 {
18514 expansion_failed (loc, NULL_RTX,
18515 "unsupported expression in field");
18516 size = offset = (unsigned HOST_WIDE_INT)-1;
18517 ret = NULL;
18518 break;
18519 }
18520 add_loc_descr (&ret, ret1);
18521 }
18522 if (fieldsize)
18523 {
18524 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18525 add_loc_descr (&ret, ret1);
18526 offset = pos + fieldsize;
18527 }
18528 }
18529
18530 if (offset != size)
18531 {
18532 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18533 add_loc_descr (&ret, ret1);
18534 offset = size;
18535 }
18536
18537 have_address = !!want_address;
18538 }
18539 else
18540 expansion_failed (loc, NULL_RTX,
18541 "constructor of non-record type");
18542 }
18543 else
18544 /* We can construct small constants here using int_loc_descriptor. */
18545 expansion_failed (loc, NULL_RTX,
18546 "constructor or constant not in constant pool");
18547 break;
18548
18549 case TRUTH_AND_EXPR:
18550 case TRUTH_ANDIF_EXPR:
18551 case BIT_AND_EXPR:
18552 op = DW_OP_and;
18553 goto do_binop;
18554
18555 case TRUTH_XOR_EXPR:
18556 case BIT_XOR_EXPR:
18557 op = DW_OP_xor;
18558 goto do_binop;
18559
18560 case TRUTH_OR_EXPR:
18561 case TRUTH_ORIF_EXPR:
18562 case BIT_IOR_EXPR:
18563 op = DW_OP_or;
18564 goto do_binop;
18565
18566 case FLOOR_DIV_EXPR:
18567 case CEIL_DIV_EXPR:
18568 case ROUND_DIV_EXPR:
18569 case TRUNC_DIV_EXPR:
18570 case EXACT_DIV_EXPR:
18571 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18572 return 0;
18573 op = DW_OP_div;
18574 goto do_binop;
18575
18576 case MINUS_EXPR:
18577 op = DW_OP_minus;
18578 goto do_binop;
18579
18580 case FLOOR_MOD_EXPR:
18581 case CEIL_MOD_EXPR:
18582 case ROUND_MOD_EXPR:
18583 case TRUNC_MOD_EXPR:
18584 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18585 {
18586 op = DW_OP_mod;
18587 goto do_binop;
18588 }
18589 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18590 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18591 if (list_ret == 0 || list_ret1 == 0)
18592 return 0;
18593
18594 add_loc_list (&list_ret, list_ret1);
18595 if (list_ret == 0)
18596 return 0;
18597 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18598 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18599 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18600 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18601 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18602 break;
18603
18604 case MULT_EXPR:
18605 op = DW_OP_mul;
18606 goto do_binop;
18607
18608 case LSHIFT_EXPR:
18609 op = DW_OP_shl;
18610 goto do_binop;
18611
18612 case RSHIFT_EXPR:
18613 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18614 goto do_binop;
18615
18616 case POINTER_PLUS_EXPR:
18617 case PLUS_EXPR:
18618 do_plus:
18619 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18620 {
18621 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18622 smarter to encode their opposite. The DW_OP_plus_uconst operation
18623 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18624 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18625 bytes, Y being the size of the operation that pushes the opposite
18626 of the addend. So let's choose the smallest representation. */
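	  /* Concretely (sketch): an addend of -1 would need a maximal-length
	     ULEB128 operand for DW_OP_plus_uconst, whereas pushing its
	     opposite is just DW_OP_lit1; DW_OP_minus, i.e. two bytes, so the
	     second form wins.  For an addend of 1, DW_OP_plus_uconst 1 (two
	     bytes) is at least as small and is kept.  */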
18627 const tree tree_addend = TREE_OPERAND (loc, 1);
18628 offset_int wi_addend;
18629 HOST_WIDE_INT shwi_addend;
18630 dw_loc_descr_ref loc_naddend;
18631
18632 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18633 if (list_ret == 0)
18634 return 0;
18635
18636 /* Try to get the literal to push. It is the opposite of the addend,
18637 so as we rely on wrapping during DWARF evaluation, first decode
18638 the literal as a "DWARF-sized" signed number. */
18639 wi_addend = wi::to_offset (tree_addend);
18640 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18641 shwi_addend = wi_addend.to_shwi ();
18642 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18643 ? int_loc_descriptor (-shwi_addend)
18644 : NULL;
18645
18646 if (loc_naddend != NULL
18647 && ((unsigned) size_of_uleb128 (shwi_addend)
18648 > size_of_loc_descr (loc_naddend)))
18649 {
18650 add_loc_descr_to_each (list_ret, loc_naddend);
18651 add_loc_descr_to_each (list_ret,
18652 new_loc_descr (DW_OP_minus, 0, 0));
18653 }
18654 else
18655 {
18656 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18657 {
18658 loc_naddend = loc_cur;
18659 loc_cur = loc_cur->dw_loc_next;
18660 ggc_free (loc_naddend);
18661 }
18662 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18663 }
18664 break;
18665 }
18666
18667 op = DW_OP_plus;
18668 goto do_binop;
18669
18670 case LE_EXPR:
18671 op = DW_OP_le;
18672 goto do_comp_binop;
18673
18674 case GE_EXPR:
18675 op = DW_OP_ge;
18676 goto do_comp_binop;
18677
18678 case LT_EXPR:
18679 op = DW_OP_lt;
18680 goto do_comp_binop;
18681
18682 case GT_EXPR:
18683 op = DW_OP_gt;
18684 goto do_comp_binop;
18685
18686 do_comp_binop:
18687 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18688 {
18689 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18690 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18691 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18692 TREE_CODE (loc));
18693 break;
18694 }
18695 else
18696 goto do_binop;
18697
18698 case EQ_EXPR:
18699 op = DW_OP_eq;
18700 goto do_binop;
18701
18702 case NE_EXPR:
18703 op = DW_OP_ne;
18704 goto do_binop;
18705
18706 do_binop:
18707 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18708 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18709 if (list_ret == 0 || list_ret1 == 0)
18710 return 0;
18711
18712 add_loc_list (&list_ret, list_ret1);
18713 if (list_ret == 0)
18714 return 0;
18715 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18716 break;
18717
18718 case TRUTH_NOT_EXPR:
18719 case BIT_NOT_EXPR:
18720 op = DW_OP_not;
18721 goto do_unop;
18722
18723 case ABS_EXPR:
18724 op = DW_OP_abs;
18725 goto do_unop;
18726
18727 case NEGATE_EXPR:
18728 op = DW_OP_neg;
18729 goto do_unop;
18730
18731 do_unop:
18732 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18733 if (list_ret == 0)
18734 return 0;
18735
18736 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18737 break;
18738
18739 case MIN_EXPR:
18740 case MAX_EXPR:
18741 {
18742 const enum tree_code code =
18743 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18744
18745 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18746 build2 (code, integer_type_node,
18747 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18748 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18749 }
18750
18751 /* fall through */
18752
18753 case COND_EXPR:
18754 {
18755 dw_loc_descr_ref lhs
18756 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18757 dw_loc_list_ref rhs
18758 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18759 dw_loc_descr_ref bra_node, jump_node, tmp;
18760
18761 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18762 if (list_ret == 0 || lhs == 0 || rhs == 0)
18763 return 0;
18764
18765 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18766 add_loc_descr_to_each (list_ret, bra_node);
18767
18768 add_loc_list (&list_ret, rhs);
18769 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18770 add_loc_descr_to_each (list_ret, jump_node);
18771
18772 add_loc_descr_to_each (list_ret, lhs);
18773 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18774 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18775
18776 /* ??? Need a node to point the skip at. Use a nop. */
18777 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18778 add_loc_descr_to_each (list_ret, tmp);
18779 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18780 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18781 }
18782 break;
18783
18784 case FIX_TRUNC_EXPR:
18785 return 0;
18786
18787 default:
18788 /* Leave front-end specific codes as simply unknown. This comes
18789 up, for instance, with the C STMT_EXPR. */
18790 if ((unsigned int) TREE_CODE (loc)
18791 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18792 {
18793 expansion_failed (loc, NULL_RTX,
18794 "language specific tree node");
18795 return 0;
18796 }
18797
18798 /* Otherwise this is a generic code; we should just list all of
18799 these explicitly. We forgot one. */
18800 if (flag_checking)
18801 gcc_unreachable ();
18802
18803 /* In a release build, we want to degrade gracefully: better to
18804 generate incomplete debugging information than to crash. */
18805 return NULL;
18806 }
18807
18808 if (!ret && !list_ret)
18809 return 0;
18810
18811 if (want_address == 2 && !have_address
18812 && (dwarf_version >= 4 || !dwarf_strict))
18813 {
18814 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18815 {
18816 expansion_failed (loc, NULL_RTX,
18817 "DWARF address size mismatch");
18818 return 0;
18819 }
18820 if (ret)
18821 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18822 else
18823 add_loc_descr_to_each (list_ret,
18824 new_loc_descr (DW_OP_stack_value, 0, 0));
18825 have_address = 1;
18826 }
18827 /* Show if we can't fill the request for an address. */
18828 if (want_address && !have_address)
18829 {
18830 expansion_failed (loc, NULL_RTX,
18831 "Want address and only have value");
18832 return 0;
18833 }
18834
18835 gcc_assert (!ret || !list_ret);
18836
18837 /* If we've got an address and don't want one, dereference. */
18838 if (!want_address && have_address)
18839 {
18840 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18841
18842 if (size > DWARF2_ADDR_SIZE || size == -1)
18843 {
18844 expansion_failed (loc, NULL_RTX,
18845 "DWARF address size mismatch");
18846 return 0;
18847 }
18848 else if (size == DWARF2_ADDR_SIZE)
18849 op = DW_OP_deref;
18850 else
18851 op = DW_OP_deref_size;
18852
18853 if (ret)
18854 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18855 else
18856 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18857 }
18858 if (ret)
18859 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18860
18861 return list_ret;
18862 }
18863
18864 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18865 expressions. */
18866
18867 static dw_loc_list_ref
18868 loc_list_from_tree (tree loc, int want_address,
18869 struct loc_descr_context *context)
18870 {
18871 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18872
18873 for (dw_loc_list_ref loc_cur = result;
18874 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18875 loc_descr_without_nops (loc_cur->expr);
18876 return result;
18877 }
18878
18879 /* Same as above but return only single location expression. */
18880 static dw_loc_descr_ref
18881 loc_descriptor_from_tree (tree loc, int want_address,
18882 struct loc_descr_context *context)
18883 {
18884 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18885 if (!ret)
18886 return NULL;
18887 if (ret->dw_loc_next)
18888 {
18889 expansion_failed (loc, NULL_RTX,
18890 "Location list where only loc descriptor needed");
18891 return NULL;
18892 }
18893 return ret->expr;
18894 }
18895
18896 /* Given a value, round it up to the lowest multiple of `boundary'
18897 which is not less than the value itself. */
18898
18899 static inline HOST_WIDE_INT
18900 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18901 {
18902 return (((value + boundary - 1) / boundary) * boundary);
18903 }
18904
18905 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18906 pointer to the declared type for the relevant field variable, or return
18907 `integer_type_node' if the given node turns out to be an
18908 ERROR_MARK node. */
18909
18910 static inline tree
18911 field_type (const_tree decl)
18912 {
18913 tree type;
18914
18915 if (TREE_CODE (decl) == ERROR_MARK)
18916 return integer_type_node;
18917
18918 type = DECL_BIT_FIELD_TYPE (decl);
18919 if (type == NULL_TREE)
18920 type = TREE_TYPE (decl);
18921
18922 return type;
18923 }
18924
18925 /* Given a pointer to a tree node, return the alignment in bits for
18926 it, or else return BITS_PER_WORD if the node actually turns out to
18927 be an ERROR_MARK node. */
18928
18929 static inline unsigned
18930 simple_type_align_in_bits (const_tree type)
18931 {
18932 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18933 }
18934
18935 static inline unsigned
18936 simple_decl_align_in_bits (const_tree decl)
18937 {
18938 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18939 }
18940
18941 /* Return the result of rounding T up to ALIGN. */
18942
18943 static inline offset_int
18944 round_up_to_align (const offset_int &t, unsigned int align)
18945 {
18946 return wi::udiv_trunc (t + align - 1, align) * align;
18947 }
18948
18949 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18950 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18951 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18952 if we fail to return the size in one of these two forms. */
18953
18954 static dw_loc_descr_ref
18955 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18956 {
18957 tree tree_size;
18958 struct loc_descr_context ctx;
18959
18960 /* Return a constant integer in priority, if possible. */
18961 *cst_size = int_size_in_bytes (type);
18962 if (*cst_size != -1)
18963 return NULL;
18964
18965 ctx.context_type = const_cast<tree> (type);
18966 ctx.base_decl = NULL_TREE;
18967 ctx.dpi = NULL;
18968 ctx.placeholder_arg = false;
18969 ctx.placeholder_seen = false;
18970
18971 type = TYPE_MAIN_VARIANT (type);
18972 tree_size = TYPE_SIZE_UNIT (type);
18973 return ((tree_size != NULL_TREE)
18974 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18975 : NULL);
18976 }
18977
18978 /* Helper structure for RECORD_TYPE processing. */
18979 struct vlr_context
18980 {
18981 /* Root RECORD_TYPE. It is needed to generate data member location
18982 descriptions in variable-length records (VLR), but also to cope with
18983 variants, which are composed of nested structures multiplexed with
18984 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18985 function processing a FIELD_DECL, it is required to be non-null. */
18986 tree struct_type;
18987 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18988 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18989 this variant part as part of the root record (in storage units). For
18990 regular records, it must be NULL_TREE. */
18991 tree variant_part_offset;
18992 };
18993
18994 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18995 addressed byte of the "containing object" for the given FIELD_DECL. If
18996 possible, return a native constant through CST_OFFSET (in which case NULL is
18997 returned); otherwise return a DWARF expression that computes the offset.
18998
18999 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19000 that offset is, either because the argument turns out to be a pointer to an
19001 ERROR_MARK node, or because the offset expression is too complex for us.
19002
19003 CTX is required: see the comment for VLR_CONTEXT. */
19004
19005 static dw_loc_descr_ref
19006 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19007 HOST_WIDE_INT *cst_offset)
19008 {
19009 tree tree_result;
19010 dw_loc_list_ref loc_result;
19011
19012 *cst_offset = 0;
19013
19014 if (TREE_CODE (decl) == ERROR_MARK)
19015 return NULL;
19016 else
19017 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19018
19019 /* We cannot handle variable bit offsets at the moment, so give up if
19020 that is the case. */
19021 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19022 return NULL;
19023
19024 #ifdef PCC_BITFIELD_TYPE_MATTERS
19025 /* We used to handle only constant offsets in all cases. Now, we handle
19026 dynamic byte offsets properly only when the PCC bitfield type layout
19027 doesn't matter. */
19028 if (PCC_BITFIELD_TYPE_MATTERS
19029 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19030 {
19031 offset_int object_offset_in_bits;
19032 offset_int object_offset_in_bytes;
19033 offset_int bitpos_int;
19034 tree type;
19035 tree field_size_tree;
19036 offset_int deepest_bitpos;
19037 offset_int field_size_in_bits;
19038 unsigned int type_align_in_bits;
19039 unsigned int decl_align_in_bits;
19040 offset_int type_size_in_bits;
19041
19042 bitpos_int = wi::to_offset (bit_position (decl));
19043 type = field_type (decl);
19044 type_size_in_bits = offset_int_type_size_in_bits (type);
19045 type_align_in_bits = simple_type_align_in_bits (type);
19046
19047 field_size_tree = DECL_SIZE (decl);
19048
19049 /* The size could be unspecified if there was an error, or for
19050 a flexible array member. */
19051 if (!field_size_tree)
19052 field_size_tree = bitsize_zero_node;
19053
19054 /* If the size of the field is not constant, use the type size. */
19055 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19056 field_size_in_bits = wi::to_offset (field_size_tree);
19057 else
19058 field_size_in_bits = type_size_in_bits;
19059
19060 decl_align_in_bits = simple_decl_align_in_bits (decl);
19061
19062 /* The GCC front-end doesn't make any attempt to keep track of the
19063 starting bit offset (relative to the start of the containing
19064 structure type) of the hypothetical "containing object" for a
19065 bit-field. Thus, when computing the byte offset value for the
19066 start of the "containing object" of a bit-field, we must deduce
19067 this information on our own. This can be rather tricky to do in
19068 some cases. For example, handling the following structure type
19069 definition when compiling for an i386/i486 target (which only
19070 aligns long long's to 32-bit boundaries) can be very tricky:
19071
19072 struct S { int field1; long long field2:31; };
19073
19074 Fortunately, there is a simple rule-of-thumb which can be used
19075 in such cases. When compiling for an i386/i486, GCC will
19076 allocate 8 bytes for the structure shown above. It decides to
19077 do this based upon one simple rule for bit-field allocation.
19078 GCC allocates each "containing object" for each bit-field at
19079 the first (i.e. lowest addressed) legitimate alignment boundary
19080 (based upon the required minimum alignment for the declared
19081 type of the field) which it can possibly use, subject to the
19082 condition that there is still enough available space remaining
19083 in the containing object (when allocated at the selected point)
19084 to fully accommodate all of the bits of the bit-field itself.
19085
19086 This simple rule makes it obvious why GCC allocates 8 bytes for
19087 each object of the structure type shown above. When looking
19088 for a place to allocate the "containing object" for `field2',
19089 the compiler simply tries to allocate a 64-bit "containing
19090 object" at each successive 32-bit boundary (starting at zero)
19091 until it finds a place to allocate that 64-bit field such that
19092 at least 31 contiguous (and previously unallocated) bits remain
19093 within that selected 64-bit field. (As it turns out, for the
19094 example above, the compiler finds it is OK to allocate the
19095 "containing object" 64-bit field at bit-offset zero within the
19096 structure type.)
19097
19098 Here we attempt to work backwards from the limited set of facts
19099 we're given, and we try to deduce from those facts, where GCC
19100 must have believed that the containing object started (within
19101 the structure type). The value we deduce is then used (by the
19102 callers of this routine) to generate DW_AT_location and
19103 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19104 the case of DW_AT_location, regular fields as well). */
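      /* A rough walk-through of the struct S example above (an editor's
	 sketch; the exact numbers depend on the target): for `field2',
	 bitpos_int == 32 and field_size_in_bits == 31, so in the steps
	 below deepest_bitpos == 63; 63 - 64 (the type size) gives -1, which
	 rounds up to the 32-bit type alignment boundary 0; since 0 does not
	 exceed bitpos 32, the containing object is deduced to start at bit 0,
	 i.e. byte offset 0, matching the 8-byte layout described above.  */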
19105
19106 /* Figure out the bit-distance from the start of the structure to
19107 the "deepest" bit of the bit-field. */
19108 deepest_bitpos = bitpos_int + field_size_in_bits;
19109
19110 /* This is the tricky part. Use some fancy footwork to deduce
19111 where the lowest addressed bit of the containing object must
19112 be. */
19113 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19114
19115 /* Round up to type_align by default. This works best for
19116 bitfields. */
19117 object_offset_in_bits
19118 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19119
19120 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19121 {
19122 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19123
19124 /* Round up to decl_align instead. */
19125 object_offset_in_bits
19126 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19127 }
19128
19129 object_offset_in_bytes
19130 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19131 if (ctx->variant_part_offset == NULL_TREE)
19132 {
19133 *cst_offset = object_offset_in_bytes.to_shwi ();
19134 return NULL;
19135 }
19136 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19137 }
19138 else
19139 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19140 tree_result = byte_position (decl);
19141
19142 if (ctx->variant_part_offset != NULL_TREE)
19143 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19144 ctx->variant_part_offset, tree_result);
19145
19146 /* If the byte offset is a constant, it's simpler to handle a native
19147 constant rather than a DWARF expression. */
19148 if (TREE_CODE (tree_result) == INTEGER_CST)
19149 {
19150 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19151 return NULL;
19152 }
19153 struct loc_descr_context loc_ctx = {
19154 ctx->struct_type, /* context_type */
19155 NULL_TREE, /* base_decl */
19156 NULL, /* dpi */
19157 false, /* placeholder_arg */
19158 false /* placeholder_seen */
19159 };
19160 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19161
19162 /* We want a DWARF expression: give up if we only have a location list
19163 with multiple elements. */
19164 if (!loc_result || !single_element_loc_list_p (loc_result))
19165 return NULL;
19166 else
19167 return loc_result->expr;
19168 }
19169 \f
19170 /* The following routines define various Dwarf attributes and any data
19171 associated with them. */
19172
19173 /* Add a location description attribute value to a DIE.
19174
19175 This emits location attributes suitable for whole variables and
19176 whole parameters. Note that the location attributes for struct fields are
19177 generated by the routine `data_member_location_attribute' below. */
19178
19179 static inline void
19180 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19181 dw_loc_list_ref descr)
19182 {
19183 bool check_no_locviews = true;
19184 if (descr == 0)
19185 return;
19186 if (single_element_loc_list_p (descr))
19187 add_AT_loc (die, attr_kind, descr->expr);
19188 else
19189 {
19190 add_AT_loc_list (die, attr_kind, descr);
19191 gcc_assert (descr->ll_symbol);
19192 if (attr_kind == DW_AT_location && descr->vl_symbol
19193 && dwarf2out_locviews_in_attribute ())
19194 {
19195 add_AT_view_list (die, DW_AT_GNU_locviews);
19196 check_no_locviews = false;
19197 }
19198 }
19199
19200 if (check_no_locviews)
19201 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19202 }
19203
19204 /* Add DW_AT_accessibility attribute to DIE if needed. */
19205
19206 static void
19207 add_accessibility_attribute (dw_die_ref die, tree decl)
19208 {
19209 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19210 children, otherwise the default is DW_ACCESS_public. In DWARF2
19211 the default has always been DW_ACCESS_public. */
19212 if (TREE_PROTECTED (decl))
19213 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19214 else if (TREE_PRIVATE (decl))
19215 {
19216 if (dwarf_version == 2
19217 || die->die_parent == NULL
19218 || die->die_parent->die_tag != DW_TAG_class_type)
19219 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19220 }
19221 else if (dwarf_version > 2
19222 && die->die_parent
19223 && die->die_parent->die_tag == DW_TAG_class_type)
19224 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19225 }
19226
19227 /* Attach the specialized form of location attribute used for data members of
19228 struct and union types. In the special case of a FIELD_DECL node which
19229 represents a bit-field, the "offset" part of this special location
19230 descriptor must indicate the distance in bytes from the lowest-addressed
19231 byte of the containing struct or union type to the lowest-addressed byte of
19232 the "containing object" for the bit-field. (See the `field_byte_offset'
19233 function above).
19234
19235 For any given bit-field, the "containing object" is a hypothetical object
19236 (of some integral or enum type) within which the given bit-field lives. The
19237 type of this hypothetical "containing object" is always the same as the
19238 declared type of the individual bit-field itself (for GCC anyway... the
19239 DWARF spec doesn't actually mandate this). Note that it is the size (in
19240 bytes) of the hypothetical "containing object" which will be given in the
19241 DW_AT_byte_size attribute for this bit-field. (See the
19242 `byte_size_attribute' function below.) It is also used when calculating the
19243 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19244 function below.)
19245
19246 CTX is required: see the comment for VLR_CONTEXT. */
19247
19248 static void
19249 add_data_member_location_attribute (dw_die_ref die,
19250 tree decl,
19251 struct vlr_context *ctx)
19252 {
19253 HOST_WIDE_INT offset;
19254 dw_loc_descr_ref loc_descr = 0;
19255
19256 if (TREE_CODE (decl) == TREE_BINFO)
19257 {
19258 /* We're working on the TAG_inheritance for a base class. */
19259 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19260 {
19261 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19262 aren't at a fixed offset from all (sub)objects of the same
19263 type. We need to extract the appropriate offset from our
19264 vtable. The following dwarf expression means
19265
19266 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19267
19268 This is specific to the V3 ABI, of course. */
19269
19270 dw_loc_descr_ref tmp;
19271
19272 /* Make a copy of the object address. */
19273 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19274 add_loc_descr (&loc_descr, tmp);
19275
19276 /* Extract the vtable address. */
19277 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19278 add_loc_descr (&loc_descr, tmp);
19279
19280 /* Calculate the address of the offset. */
19281 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19282 gcc_assert (offset < 0);
19283
19284 tmp = int_loc_descriptor (-offset);
19285 add_loc_descr (&loc_descr, tmp);
19286 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19287 add_loc_descr (&loc_descr, tmp);
19288
19289 /* Extract the offset. */
19290 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19291 add_loc_descr (&loc_descr, tmp);
19292
19293 /* Add it to the object address. */
19294 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19295 add_loc_descr (&loc_descr, tmp);
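/* Putting it together, for an illustrative vtable offset of, say, -24,
   the expression built above is roughly
     DW_OP_dup; DW_OP_deref; DW_OP_lit24; DW_OP_minus; DW_OP_deref;
     DW_OP_plus
   i.e. dereference the vptr, fetch the virtual base offset stored 24
   bytes before the address the vptr points to, and add that offset to
   the object address already on the stack.  */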
19296 }
19297 else
19298 offset = tree_to_shwi (BINFO_OFFSET (decl));
19299 }
19300 else
19301 {
19302 loc_descr = field_byte_offset (decl, ctx, &offset);
19303
19304 /* If loc_descr is available then we know the field offset is dynamic.
19305 However, GDB does not handle dynamic field offsets very well at the
19306 moment. */
19307 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19308 {
19309 loc_descr = NULL;
19310 offset = 0;
19311 }
19312
19313 /* Data member location evaluation starts with the base address on the
19314 stack. Compute the field offset and add it to this base address. */
19315 else if (loc_descr != NULL)
19316 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19317 }
19318
19319 if (! loc_descr)
19320 {
19321 /* Although DW_AT_data_bit_offset was already added in DWARF4, consumers
19322 were slow to support it; e.g. GDB only added support in November 2016.
19323 For DWARF5 we need newer debug info consumers anyway. We might change
19324 this to dwarf_version >= 4 once most consumers have caught up. */
19325 if (dwarf_version >= 5
19326 && TREE_CODE (decl) == FIELD_DECL
19327 && DECL_BIT_FIELD_TYPE (decl))
19328 {
19329 tree off = bit_position (decl);
19330 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19331 {
19332 remove_AT (die, DW_AT_byte_size);
19333 remove_AT (die, DW_AT_bit_offset);
19334 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19335 return;
19336 }
19337 }
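/* For example (a sketch, not tied to any particular source), a 3-bit
   field starting at bit 10 of its structure would then be described
   with just DW_AT_data_bit_offset 10 alongside DW_AT_bit_size 3,
   instead of the DW_AT_byte_size / DW_AT_bit_offset pair used for
   older DWARF versions.  */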
19338 if (dwarf_version > 2)
19339 {
19340 /* Don't need to output a location expression, just the constant. */
19341 if (offset < 0)
19342 add_AT_int (die, DW_AT_data_member_location, offset);
19343 else
19344 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19345 return;
19346 }
19347 else
19348 {
19349 enum dwarf_location_atom op;
19350
19351 /* The DWARF2 standard says that we should assume that the structure
19352 address is already on the stack, so we can specify a structure
19353 field address by using DW_OP_plus_uconst. */
19354 op = DW_OP_plus_uconst;
19355 loc_descr = new_loc_descr (op, offset, 0);
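/* So, as an illustration, a member at byte offset 8 is described simply
   as "DW_OP_plus_uconst 8", applied to the structure address the
   consumer has already pushed.  */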
19356 }
19357 }
19358
19359 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19360 }
19361
19362 /* Writes integer values to dw_vec_const array. */
19363
19364 static void
19365 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19366 {
19367 while (size != 0)
19368 {
19369 *dest++ = val & 0xff;
19370 val >>= 8;
19371 --size;
19372 }
19373 }
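/* For instance, insert_int (0x1234, 2, dest) stores dest[0] = 0x34 and
   dest[1] = 0x12: bytes always go out least-significant first here,
   independently of host or target byte order.  */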
19374
19375 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19376
19377 static HOST_WIDE_INT
19378 extract_int (const unsigned char *src, unsigned int size)
19379 {
19380 HOST_WIDE_INT val = 0;
19381
19382 src += size;
19383 while (size != 0)
19384 {
19385 val <<= 8;
19386 val |= *--src & 0xff;
19387 --size;
19388 }
19389 return val;
19390 }
19391
19392 /* Writes wide_int values to dw_vec_const array. */
19393
19394 static void
19395 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19396 {
19397 int i;
19398
19399 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19400 {
19401 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19402 return;
19403 }
19404
19405 /* We'd have to extend this code to support odd sizes. */
19406 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19407
19408 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19409
19410 if (WORDS_BIG_ENDIAN)
19411 for (i = n - 1; i >= 0; i--)
19412 {
19413 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19414 dest += sizeof (HOST_WIDE_INT);
19415 }
19416 else
19417 for (i = 0; i < n; i++)
19418 {
19419 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19420 dest += sizeof (HOST_WIDE_INT);
19421 }
19422 }
19423
19424 /* Writes floating point values to dw_vec_const array. */
19425
19426 static void
19427 insert_float (const_rtx rtl, unsigned char *array)
19428 {
19429 long val[4];
19430 int i;
19431 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19432
19433 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19434
19435 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19436 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19437 {
19438 insert_int (val[i], 4, array);
19439 array += 4;
19440 }
19441 }
19442
19443 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19444 does not have a "location" either in memory or in a register. These
19445 things can arise in GNU C when a constant is passed as an actual parameter
19446 to an inlined function. They can also arise in C++ where declared
19447 constants do not necessarily get memory "homes". */
19448
19449 static bool
19450 add_const_value_attribute (dw_die_ref die, rtx rtl)
19451 {
19452 switch (GET_CODE (rtl))
19453 {
19454 case CONST_INT:
19455 {
19456 HOST_WIDE_INT val = INTVAL (rtl);
19457
19458 if (val < 0)
19459 add_AT_int (die, DW_AT_const_value, val);
19460 else
19461 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19462 }
19463 return true;
19464
19465 case CONST_WIDE_INT:
19466 {
19467 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19468 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19469 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19470 wide_int w = wi::zext (w1, prec);
19471 add_AT_wide (die, DW_AT_const_value, w);
19472 }
19473 return true;
19474
19475 case CONST_DOUBLE:
19476 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19477 floating-point constant. A CONST_DOUBLE is used whenever the
19478 constant requires more than one word in order to be adequately
19479 represented. */
19480 if (TARGET_SUPPORTS_WIDE_INT == 0
19481 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19482 add_AT_double (die, DW_AT_const_value,
19483 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19484 else
19485 {
19486 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19487 unsigned int length = GET_MODE_SIZE (mode);
19488 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19489
19490 insert_float (rtl, array);
19491 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19492 }
19493 return true;
19494
19495 case CONST_VECTOR:
19496 {
19497 unsigned int length;
19498 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19499 return false;
19500
19501 machine_mode mode = GET_MODE (rtl);
19502 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19503 unsigned char *array
19504 = ggc_vec_alloc<unsigned char> (length * elt_size);
19505 unsigned int i;
19506 unsigned char *p;
19507 machine_mode imode = GET_MODE_INNER (mode);
19508
19509 switch (GET_MODE_CLASS (mode))
19510 {
19511 case MODE_VECTOR_INT:
19512 for (i = 0, p = array; i < length; i++, p += elt_size)
19513 {
19514 rtx elt = CONST_VECTOR_ELT (rtl, i);
19515 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19516 }
19517 break;
19518
19519 case MODE_VECTOR_FLOAT:
19520 for (i = 0, p = array; i < length; i++, p += elt_size)
19521 {
19522 rtx elt = CONST_VECTOR_ELT (rtl, i);
19523 insert_float (elt, p);
19524 }
19525 break;
19526
19527 default:
19528 gcc_unreachable ();
19529 }
19530
19531 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19532 }
19533 return true;
19534
19535 case CONST_STRING:
19536 if (dwarf_version >= 4 || !dwarf_strict)
19537 {
19538 dw_loc_descr_ref loc_result;
19539 resolve_one_addr (&rtl);
19540 rtl_addr:
19541 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19542 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19543 add_AT_loc (die, DW_AT_location, loc_result);
19544 vec_safe_push (used_rtx_array, rtl);
19545 return true;
19546 }
19547 return false;
19548
19549 case CONST:
19550 if (CONSTANT_P (XEXP (rtl, 0)))
19551 return add_const_value_attribute (die, XEXP (rtl, 0));
19552 /* FALLTHROUGH */
19553 case SYMBOL_REF:
19554 if (!const_ok_for_output (rtl))
19555 return false;
19556 /* FALLTHROUGH */
19557 case LABEL_REF:
19558 if (dwarf_version >= 4 || !dwarf_strict)
19559 goto rtl_addr;
19560 return false;
19561
19562 case PLUS:
19563 /* In cases where an inlined instance of an inline function is passed
19564 the address of an `auto' variable (which is local to the caller) we
19565 can get a situation where the DECL_RTL of the artificial local
19566 variable (for the inlining) which acts as a stand-in for the
19567 corresponding formal parameter (of the inline function) will look
19568 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19569 exactly a compile-time constant expression, but it isn't the address
19570 of the (artificial) local variable either. Rather, it represents the
19571 *value* which the artificial local variable always has during its
19572 lifetime. We currently have no way to represent such quasi-constant
19573 values in Dwarf, so for now we just punt and generate nothing. */
19574 return false;
19575
19576 case HIGH:
19577 case CONST_FIXED:
19578 return false;
19579
19580 case MEM:
19581 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19582 && MEM_READONLY_P (rtl)
19583 && GET_MODE (rtl) == BLKmode)
19584 {
19585 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19586 return true;
19587 }
19588 return false;
19589
19590 default:
19591 /* No other kinds of rtx should be possible here. */
19592 gcc_unreachable ();
19593 }
19594 return false;
19595 }
19596
19597 /* Determine whether the evaluation of EXPR references any variables
19598 or functions which aren't otherwise used (and therefore may not be
19599 output). */
19600 static tree
19601 reference_to_unused (tree * tp, int * walk_subtrees,
19602 void * data ATTRIBUTE_UNUSED)
19603 {
19604 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19605 *walk_subtrees = 0;
19606
19607 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19608 && ! TREE_ASM_WRITTEN (*tp))
19609 return *tp;
19610 /* ??? The C++ FE emits debug information for using decls, so
19611 putting gcc_unreachable here falls over. See PR31899. For now
19612 be conservative. */
19613 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19614 return *tp;
19615 else if (VAR_P (*tp))
19616 {
19617 varpool_node *node = varpool_node::get (*tp);
19618 if (!node || !node->definition)
19619 return *tp;
19620 }
19621 else if (TREE_CODE (*tp) == FUNCTION_DECL
19622 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19623 {
19624 /* The call graph machinery must have finished analyzing,
19625 optimizing and gimplifying the CU by now.
19626 So if *TP has no call graph node associated
19627 to it, it means *TP will not be emitted. */
19628 if (!cgraph_node::get (*tp))
19629 return *tp;
19630 }
19631 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19632 return *tp;
19633
19634 return NULL_TREE;
19635 }
19636
19637 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19638 for use in a later add_const_value_attribute call. */
19639
19640 static rtx
19641 rtl_for_decl_init (tree init, tree type)
19642 {
19643 rtx rtl = NULL_RTX;
19644
19645 STRIP_NOPS (init);
19646
19647 /* If a variable is initialized with a string constant without embedded
19648 zeros, build CONST_STRING. */
19649 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19650 {
19651 tree enttype = TREE_TYPE (type);
19652 tree domain = TYPE_DOMAIN (type);
19653 scalar_int_mode mode;
19654
19655 if (is_int_mode (TYPE_MODE (enttype), &mode)
19656 && GET_MODE_SIZE (mode) == 1
19657 && domain
19658 && TYPE_MAX_VALUE (domain)
19659 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19660 && integer_zerop (TYPE_MIN_VALUE (domain))
19661 && compare_tree_int (TYPE_MAX_VALUE (domain),
19662 TREE_STRING_LENGTH (init) - 1) == 0
19663 && ((size_t) TREE_STRING_LENGTH (init)
19664 == strlen (TREE_STRING_POINTER (init)) + 1))
19665 {
19666 rtl = gen_rtx_CONST_STRING (VOIDmode,
19667 ggc_strdup (TREE_STRING_POINTER (init)));
19668 rtl = gen_rtx_MEM (BLKmode, rtl);
19669 MEM_READONLY_P (rtl) = 1;
19670 }
19671 }
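/* As a concrete (hypothetical) example, something like
     static const char greeting[6] = "hello";
   satisfies all of the checks above: the element mode is a one-byte
   integer mode, the domain runs from 0 to 5, and the string has no
   embedded zeros, so it becomes a read-only BLKmode MEM of a
   CONST_STRING.  */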
19672 /* Other aggregates, and complex values, could be represented using
19673 CONCAT: FIXME! */
19674 else if (AGGREGATE_TYPE_P (type)
19675 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19676 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19677 || TREE_CODE (type) == COMPLEX_TYPE)
19678 ;
19679 /* Vectors only work if their mode is supported by the target.
19680 FIXME: generic vectors ought to work too. */
19681 else if (TREE_CODE (type) == VECTOR_TYPE
19682 && !VECTOR_MODE_P (TYPE_MODE (type)))
19683 ;
19684 /* If the initializer is something that we know will expand into an
19685 immediate RTL constant, expand it now. We must be careful not to
19686 reference variables which won't be output. */
19687 else if (initializer_constant_valid_p (init, type)
19688 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19689 {
19690 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19691 possible. */
19692 if (TREE_CODE (type) == VECTOR_TYPE)
19693 switch (TREE_CODE (init))
19694 {
19695 case VECTOR_CST:
19696 break;
19697 case CONSTRUCTOR:
19698 if (TREE_CONSTANT (init))
19699 {
19700 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19701 bool constant_p = true;
19702 tree value;
19703 unsigned HOST_WIDE_INT ix;
19704
19705 /* Even when ctor is constant, it might contain non-*_CST
19706 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19707 belong into VECTOR_CST nodes. */
19708 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19709 if (!CONSTANT_CLASS_P (value))
19710 {
19711 constant_p = false;
19712 break;
19713 }
19714
19715 if (constant_p)
19716 {
19717 init = build_vector_from_ctor (type, elts);
19718 break;
19719 }
19720 }
19721 /* FALLTHRU */
19722
19723 default:
19724 return NULL;
19725 }
19726
19727 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19728
19729 /* If expand_expr returns a MEM, it wasn't immediate. */
19730 gcc_assert (!rtl || !MEM_P (rtl));
19731 }
19732
19733 return rtl;
19734 }
19735
19736 /* Generate RTL for the variable DECL to represent its location. */
19737
19738 static rtx
19739 rtl_for_decl_location (tree decl)
19740 {
19741 rtx rtl;
19742
19743 /* Here we have to decide where we are going to say the parameter "lives"
19744 (as far as the debugger is concerned). We only have a couple of
19745 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19746
19747 DECL_RTL normally indicates where the parameter lives during most of the
19748 activation of the function. If optimization is enabled however, this
19749 could be either NULL or else a pseudo-reg. Both of those cases indicate
19750 that the parameter doesn't really live anywhere (as far as the code
19751 generation parts of GCC are concerned) during most of the function's
19752 activation. That will happen (for example) if the parameter is never
19753 referenced within the function.
19754
19755 We could just generate a location descriptor here for all non-NULL
19756 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19757 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19758 where DECL_RTL is NULL or is a pseudo-reg.
19759
19760 Note however that we can only get away with using DECL_INCOMING_RTL as
19761 a backup substitute for DECL_RTL in certain limited cases. In cases
19762 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19763 we can be sure that the parameter was passed using the same type as it is
19764 declared to have within the function, and that its DECL_INCOMING_RTL
19765 points us to a place where a value of that type is passed.
19766
19767 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19768 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19769 because in these cases DECL_INCOMING_RTL points us to a value of some
19770 type which is *different* from the type of the parameter itself. Thus,
19771 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19772 such cases, the debugger would end up (for example) trying to fetch a
19773 `float' from a place which actually contains the first part of a
19774 `double'. That would lead to really incorrect and confusing
19775 output at debug-time.
19776
19777 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19778 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19779 are a couple of exceptions however. On little-endian machines we can
19780 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19781 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19782 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19783 when (on a little-endian machine) a non-prototyped function has a
19784 parameter declared to be of type `short' or `char'. In such cases,
19785 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19786 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19787 passed `int' value. If the debugger then uses that address to fetch
19788 a `short' or a `char' (on a little-endian machine) the result will be
19789 the correct data, so we allow for such exceptional cases below.
19790
19791 Note that our goal here is to describe the place where the given formal
19792 parameter lives during most of the function's activation (i.e. between the
19793 end of the prologue and the start of the epilogue). We'll do that as best
19794 as we can. Note however that if the given formal parameter is modified
19795 sometime during the execution of the function, then a stack backtrace (at
19796 debug-time) will show the function as having been called with the *new*
19797 value rather than the value which was originally passed in. This happens
19798 rarely enough that it is not a major problem, but it *is* a problem, and
19799 I'd like to fix it.
19800
19801 A future version of dwarf2out.c may generate two additional attributes for
19802 any given DW_TAG_formal_parameter DIE which will describe the "passed
19803 type" and the "passed location" for the given formal parameter in addition
19804 to the attributes we now generate to indicate the "declared type" and the
19805 "active location" for each parameter. This additional set of attributes
19806 could be used by debuggers for stack backtraces. Separately, note that
19807 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19808 This happens (for example) for inlined instances of inline function formal
19809 parameters which are never referenced. This really shouldn't be
19810 happening. All PARM_DECL nodes should get valid non-NULL
19811 DECL_INCOMING_RTL values. FIXME. */
19812
19813 /* Use DECL_RTL as the "location" unless we find something better. */
19814 rtl = DECL_RTL_IF_SET (decl);
19815
19816 /* When generating abstract instances, ignore everything except
19817 constants, symbols living in memory, and symbols living in
19818 fixed registers. */
19819 if (! reload_completed)
19820 {
19821 if (rtl
19822 && (CONSTANT_P (rtl)
19823 || (MEM_P (rtl)
19824 && CONSTANT_P (XEXP (rtl, 0)))
19825 || (REG_P (rtl)
19826 && VAR_P (decl)
19827 && TREE_STATIC (decl))))
19828 {
19829 rtl = targetm.delegitimize_address (rtl);
19830 return rtl;
19831 }
19832 rtl = NULL_RTX;
19833 }
19834 else if (TREE_CODE (decl) == PARM_DECL)
19835 {
19836 if (rtl == NULL_RTX
19837 || is_pseudo_reg (rtl)
19838 || (MEM_P (rtl)
19839 && is_pseudo_reg (XEXP (rtl, 0))
19840 && DECL_INCOMING_RTL (decl)
19841 && MEM_P (DECL_INCOMING_RTL (decl))
19842 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19843 {
19844 tree declared_type = TREE_TYPE (decl);
19845 tree passed_type = DECL_ARG_TYPE (decl);
19846 machine_mode dmode = TYPE_MODE (declared_type);
19847 machine_mode pmode = TYPE_MODE (passed_type);
19848
19849 /* This decl represents a formal parameter which was optimized out.
19850 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19851 all cases where (rtl == NULL_RTX) just below. */
19852 if (dmode == pmode)
19853 rtl = DECL_INCOMING_RTL (decl);
19854 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19855 && SCALAR_INT_MODE_P (dmode)
19856 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19857 && DECL_INCOMING_RTL (decl))
19858 {
19859 rtx inc = DECL_INCOMING_RTL (decl);
19860 if (REG_P (inc))
19861 rtl = inc;
19862 else if (MEM_P (inc))
19863 {
19864 if (BYTES_BIG_ENDIAN)
19865 rtl = adjust_address_nv (inc, dmode,
19866 GET_MODE_SIZE (pmode)
19867 - GET_MODE_SIZE (dmode));
19868 else
19869 rtl = inc;
19870 }
19871 }
19872 }
19873
19874 /* If the parm was passed in registers, but lives on the stack, then
19875 make a big endian correction if the mode of the type of the
19876 parameter is not the same as the mode of the rtl. */
19877 /* ??? This is the same series of checks that are made in dbxout.c before
19878 we reach the big endian correction code there. It isn't clear if all
19879 of these checks are necessary here, but keeping them all is the safe
19880 thing to do. */
19881 else if (MEM_P (rtl)
19882 && XEXP (rtl, 0) != const0_rtx
19883 && ! CONSTANT_P (XEXP (rtl, 0))
19884 /* Not passed in memory. */
19885 && !MEM_P (DECL_INCOMING_RTL (decl))
19886 /* Not passed by invisible reference. */
19887 && (!REG_P (XEXP (rtl, 0))
19888 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19889 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19890 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19891 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19892 #endif
19893 )
19894 /* Big endian correction check. */
19895 && BYTES_BIG_ENDIAN
19896 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19897 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19898 UNITS_PER_WORD))
19899 {
19900 machine_mode addr_mode = get_address_mode (rtl);
19901 poly_int64 offset = (UNITS_PER_WORD
19902 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19903
19904 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19905 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19906 }
19907 }
19908 else if (VAR_P (decl)
19909 && rtl
19910 && MEM_P (rtl)
19911 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19912 {
19913 machine_mode addr_mode = get_address_mode (rtl);
19914 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19915 GET_MODE (rtl));
19916
19917 /* If a variable is declared "register" yet is smaller than
19918 a register, then if we store the variable to memory, it
19919 looks like we're storing a register-sized value, when in
19920 fact we are not. We need to adjust the offset of the
19921 storage location to reflect the actual value's bytes,
19922 else gdb will not be able to display it. */
19923 if (maybe_ne (offset, 0))
19924 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19925 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19926 }
19927
19928 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19929 and will have been substituted directly into all expressions that use it.
19930 C does not have such a concept, but C++ and other languages do. */
19931 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19932 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19933
19934 if (rtl)
19935 rtl = targetm.delegitimize_address (rtl);
19936
19937 /* If we don't look past the constant pool, we risk emitting a
19938 reference to a constant pool entry that isn't referenced from
19939 code, and thus is not emitted. */
19940 if (rtl)
19941 rtl = avoid_constant_pool_reference (rtl);
19942
19943 /* Try harder to get a rtl. If this symbol ends up not being emitted
19944 in the current CU, resolve_addr will remove the expression referencing
19945 it. */
19946 if (rtl == NULL_RTX
19947 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19948 && VAR_P (decl)
19949 && !DECL_EXTERNAL (decl)
19950 && TREE_STATIC (decl)
19951 && DECL_NAME (decl)
19952 && !DECL_HARD_REGISTER (decl)
19953 && DECL_MODE (decl) != VOIDmode)
19954 {
19955 rtl = make_decl_rtl_for_debug (decl);
19956 if (!MEM_P (rtl)
19957 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19958 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19959 rtl = NULL_RTX;
19960 }
19961
19962 return rtl;
19963 }
19964
19965 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19966 returned. If so, the decl for the COMMON block is returned, and
19967 *VALUE is set to the symbol's offset into the common block. */
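/* As an illustrative sketch: for Fortran source along the lines of
     COMMON /WORK/ I, X
   the decl for X typically carries a DECL_VALUE_EXPR that is a
   COMPONENT_REF into the artificial variable representing /WORK/; we
   would then return that variable's decl and set *VALUE to X's byte
   offset within it.  */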
19968
19969 static tree
19970 fortran_common (tree decl, HOST_WIDE_INT *value)
19971 {
19972 tree val_expr, cvar;
19973 machine_mode mode;
19974 poly_int64 bitsize, bitpos;
19975 tree offset;
19976 HOST_WIDE_INT cbitpos;
19977 int unsignedp, reversep, volatilep = 0;
19978
19979 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19980 it does not have a value (the offset into the common area), or if it
19981 is thread local (as opposed to global) then it isn't common, and shouldn't
19982 be handled as such. */
19983 if (!VAR_P (decl)
19984 || !TREE_STATIC (decl)
19985 || !DECL_HAS_VALUE_EXPR_P (decl)
19986 || !is_fortran ())
19987 return NULL_TREE;
19988
19989 val_expr = DECL_VALUE_EXPR (decl);
19990 if (TREE_CODE (val_expr) != COMPONENT_REF)
19991 return NULL_TREE;
19992
19993 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19994 &unsignedp, &reversep, &volatilep);
19995
19996 if (cvar == NULL_TREE
19997 || !VAR_P (cvar)
19998 || DECL_ARTIFICIAL (cvar)
19999 || !TREE_PUBLIC (cvar)
20000 /* We don't expect to have to cope with variable offsets,
20001 since at present all static data must have a constant size. */
20002 || !bitpos.is_constant (&cbitpos))
20003 return NULL_TREE;
20004
20005 *value = 0;
20006 if (offset != NULL)
20007 {
20008 if (!tree_fits_shwi_p (offset))
20009 return NULL_TREE;
20010 *value = tree_to_shwi (offset);
20011 }
20012 if (cbitpos != 0)
20013 *value += cbitpos / BITS_PER_UNIT;
20014
20015 return cvar;
20016 }
20017
20018 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20019 data attribute for a variable or a parameter. We generate the
20020 DW_AT_const_value attribute only in those cases where the given variable
20021 or parameter does not have a true "location" either in memory or in a
20022 register. This can happen (for example) when a constant is passed as an
20023 actual argument in a call to an inline function. (It's possible that
20024 these things can crop up in other ways also.) Note that one type of
20025 constant value which can be passed into an inlined function is a constant
20026 pointer. This can happen for example if an actual argument in an inlined
20027 function call evaluates to a compile-time constant address.
20028
20029 CACHE_P is true if it is worth caching the location list for DECL,
20030 so that future calls can reuse it rather than regenerate it from scratch.
20031 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20032 since we will need to refer to them each time the function is inlined. */
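/* A small motivating sketch (hypothetical code): if
     static inline int sq (int x) { return x * x; }
   is called as sq (3) and the call is inlined, the formal parameter X
   may have no runtime location at all; its DIE would then typically
   just get DW_AT_const_value 3 instead of a DW_AT_location.  */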
20033
20034 static bool
20035 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20036 {
20037 rtx rtl;
20038 dw_loc_list_ref list;
20039 var_loc_list *loc_list;
20040 cached_dw_loc_list *cache;
20041
20042 if (early_dwarf)
20043 return false;
20044
20045 if (TREE_CODE (decl) == ERROR_MARK)
20046 return false;
20047
20048 if (get_AT (die, DW_AT_location)
20049 || get_AT (die, DW_AT_const_value))
20050 return true;
20051
20052 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20053 || TREE_CODE (decl) == RESULT_DECL);
20054
20055 /* Try to get some constant RTL for this decl, and use that as the value of
20056 the location. */
20057
20058 rtl = rtl_for_decl_location (decl);
20059 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20060 && add_const_value_attribute (die, rtl))
20061 return true;
20062
20063 /* See if we have a single-element location list that is equivalent to
20064 a constant value. In that case it is better to use add_const_value_attribute
20065 rather than expanding the constant value's equivalent. */
20066 loc_list = lookup_decl_loc (decl);
20067 if (loc_list
20068 && loc_list->first
20069 && loc_list->first->next == NULL
20070 && NOTE_P (loc_list->first->loc)
20071 && NOTE_VAR_LOCATION (loc_list->first->loc)
20072 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20073 {
20074 struct var_loc_node *node;
20075
20076 node = loc_list->first;
20077 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20078 if (GET_CODE (rtl) == EXPR_LIST)
20079 rtl = XEXP (rtl, 0);
20080 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20081 && add_const_value_attribute (die, rtl))
20082 return true;
20083 }
20084 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20085 list several times. See if we've already cached the contents. */
20086 list = NULL;
20087 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20088 cache_p = false;
20089 if (cache_p)
20090 {
20091 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20092 if (cache)
20093 list = cache->loc_list;
20094 }
20095 if (list == NULL)
20096 {
20097 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20098 NULL);
20099 /* It is usually worth caching this result if the decl is from
20100 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20101 if (cache_p && list && list->dw_loc_next)
20102 {
20103 cached_dw_loc_list **slot
20104 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20105 DECL_UID (decl),
20106 INSERT);
20107 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20108 cache->decl_id = DECL_UID (decl);
20109 cache->loc_list = list;
20110 *slot = cache;
20111 }
20112 }
20113 if (list)
20114 {
20115 add_AT_location_description (die, DW_AT_location, list);
20116 return true;
20117 }
20118 /* None of that worked, so it must not really have a location;
20119 try adding a constant value attribute from the DECL_INITIAL. */
20120 return tree_add_const_value_attribute_for_decl (die, decl);
20121 }
20122
20123 /* Helper function for tree_add_const_value_attribute. Natively encode
20124 initializer INIT into an array. Return true if successful. */
20125
20126 static bool
20127 native_encode_initializer (tree init, unsigned char *array, int size)
20128 {
20129 tree type;
20130
20131 if (init == NULL_TREE)
20132 return false;
20133
20134 STRIP_NOPS (init);
20135 switch (TREE_CODE (init))
20136 {
20137 case STRING_CST:
20138 type = TREE_TYPE (init);
20139 if (TREE_CODE (type) == ARRAY_TYPE)
20140 {
20141 tree enttype = TREE_TYPE (type);
20142 scalar_int_mode mode;
20143
20144 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20145 || GET_MODE_SIZE (mode) != 1)
20146 return false;
20147 if (int_size_in_bytes (type) != size)
20148 return false;
20149 if (size > TREE_STRING_LENGTH (init))
20150 {
20151 memcpy (array, TREE_STRING_POINTER (init),
20152 TREE_STRING_LENGTH (init));
20153 memset (array + TREE_STRING_LENGTH (init),
20154 '\0', size - TREE_STRING_LENGTH (init));
20155 }
20156 else
20157 memcpy (array, TREE_STRING_POINTER (init), size);
20158 return true;
20159 }
20160 return false;
20161 case CONSTRUCTOR:
20162 type = TREE_TYPE (init);
20163 if (int_size_in_bytes (type) != size)
20164 return false;
20165 if (TREE_CODE (type) == ARRAY_TYPE)
20166 {
20167 HOST_WIDE_INT min_index;
20168 unsigned HOST_WIDE_INT cnt;
20169 int curpos = 0, fieldsize;
20170 constructor_elt *ce;
20171
20172 if (TYPE_DOMAIN (type) == NULL_TREE
20173 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20174 return false;
20175
20176 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20177 if (fieldsize <= 0)
20178 return false;
20179
20180 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20181 memset (array, '\0', size);
20182 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20183 {
20184 tree val = ce->value;
20185 tree index = ce->index;
20186 int pos = curpos;
20187 if (index && TREE_CODE (index) == RANGE_EXPR)
20188 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20189 * fieldsize;
20190 else if (index)
20191 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20192
20193 if (val)
20194 {
20195 STRIP_NOPS (val);
20196 if (!native_encode_initializer (val, array + pos, fieldsize))
20197 return false;
20198 }
20199 curpos = pos + fieldsize;
20200 if (index && TREE_CODE (index) == RANGE_EXPR)
20201 {
20202 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20203 - tree_to_shwi (TREE_OPERAND (index, 0));
20204 while (count-- > 0)
20205 {
20206 if (val)
20207 memcpy (array + curpos, array + pos, fieldsize);
20208 curpos += fieldsize;
20209 }
20210 }
20211 gcc_assert (curpos <= size);
20212 }
20213 return true;
20214 }
20215 else if (TREE_CODE (type) == RECORD_TYPE
20216 || TREE_CODE (type) == UNION_TYPE)
20217 {
20218 tree field = NULL_TREE;
20219 unsigned HOST_WIDE_INT cnt;
20220 constructor_elt *ce;
20221
20222 if (int_size_in_bytes (type) != size)
20223 return false;
20224
20225 if (TREE_CODE (type) == RECORD_TYPE)
20226 field = TYPE_FIELDS (type);
20227
20228 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20229 {
20230 tree val = ce->value;
20231 int pos, fieldsize;
20232
20233 if (ce->index != 0)
20234 field = ce->index;
20235
20236 if (val)
20237 STRIP_NOPS (val);
20238
20239 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20240 return false;
20241
20242 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20243 && TYPE_DOMAIN (TREE_TYPE (field))
20244 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20245 return false;
20246 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20247 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20248 return false;
20249 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20250 pos = int_byte_position (field);
20251 gcc_assert (pos + fieldsize <= size);
20252 if (val && fieldsize != 0
20253 && !native_encode_initializer (val, array + pos, fieldsize))
20254 return false;
20255 }
20256 return true;
20257 }
20258 return false;
20259 case VIEW_CONVERT_EXPR:
20260 case NON_LVALUE_EXPR:
20261 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20262 default:
20263 return native_encode_expr (init, array, size) == size;
20264 }
20265 }
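/* To illustrate (hypothetical initializer, little-endian target with
   4-byte int): encoding int v[3] = {1, 2, 3} into a 12-byte array
   yields 01 00 00 00 02 00 00 00 03 00 00 00, each element placed at
   index * fieldsize exactly as the CONSTRUCTOR loop above does.  */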
20266
20267 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20268 attribute is the const value T. */
20269
20270 static bool
20271 tree_add_const_value_attribute (dw_die_ref die, tree t)
20272 {
20273 tree init;
20274 tree type = TREE_TYPE (t);
20275 rtx rtl;
20276
20277 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20278 return false;
20279
20280 init = t;
20281 gcc_assert (!DECL_P (init));
20282
20283 if (TREE_CODE (init) == INTEGER_CST)
20284 {
20285 if (tree_fits_uhwi_p (init))
20286 {
20287 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20288 return true;
20289 }
20290 if (tree_fits_shwi_p (init))
20291 {
20292 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20293 return true;
20294 }
20295 }
20296 if (! early_dwarf)
20297 {
20298 rtl = rtl_for_decl_init (init, type);
20299 if (rtl)
20300 return add_const_value_attribute (die, rtl);
20301 }
20302 /* If the host and target are sane, try harder. */
20303 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20304 && initializer_constant_valid_p (init, type))
20305 {
20306 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20307 if (size > 0 && (int) size == size)
20308 {
20309 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20310
20311 if (native_encode_initializer (init, array, size))
20312 {
20313 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20314 return true;
20315 }
20316 ggc_free (array);
20317 }
20318 }
20319 return false;
20320 }
20321
20322 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20323 attribute is the const value of T, where T is an integral constant
20324 variable with static storage duration
20325 (so it can't be a PARM_DECL or a RESULT_DECL). */
20326
20327 static bool
20328 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20329 {
20330
20331 if (!decl
20332 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20333 || (VAR_P (decl) && !TREE_STATIC (decl)))
20334 return false;
20335
20336 if (TREE_READONLY (decl)
20337 && ! TREE_THIS_VOLATILE (decl)
20338 && DECL_INITIAL (decl))
20339 /* OK */;
20340 else
20341 return false;
20342
20343 /* Don't add DW_AT_const_value if abstract origin already has one. */
20344 if (get_AT (var_die, DW_AT_const_value))
20345 return false;
20346
20347 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20348 }
20349
20350 /* Convert the CFI instructions for the current function into a
20351 location list. This is used for DW_AT_frame_base when we are targeting
20352 a dwarf2 consumer that does not support the dwarf3
20353 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20354 expressions. */
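/* As a rough illustration: if the CFA is sp+16 over the early part of
   the function and fp+8 once the frame pointer is set up, the result is
   a two-element location list whose entries cover those label-delimited
   ranges with the corresponding CFA expressions (each adjusted by
   OFFSET).  */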
20355
20356 static dw_loc_list_ref
20357 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20358 {
20359 int ix;
20360 dw_fde_ref fde;
20361 dw_loc_list_ref list, *list_tail;
20362 dw_cfi_ref cfi;
20363 dw_cfa_location last_cfa, next_cfa;
20364 const char *start_label, *last_label, *section;
20365 dw_cfa_location remember;
20366
20367 fde = cfun->fde;
20368 gcc_assert (fde != NULL);
20369
20370 section = secname_for_decl (current_function_decl);
20371 list_tail = &list;
20372 list = NULL;
20373
20374 memset (&next_cfa, 0, sizeof (next_cfa));
20375 next_cfa.reg = INVALID_REGNUM;
20376 remember = next_cfa;
20377
20378 start_label = fde->dw_fde_begin;
20379
20380 /* ??? Bald assumption that the CIE opcode list does not contain
20381 advance opcodes. */
20382 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20383 lookup_cfa_1 (cfi, &next_cfa, &remember);
20384
20385 last_cfa = next_cfa;
20386 last_label = start_label;
20387
20388 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20389 {
20390 /* If the first partition contained no CFI adjustments, the
20391 CIE opcodes apply to the whole first partition. */
20392 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20393 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20394 list_tail = &(*list_tail)->dw_loc_next;
20395 start_label = last_label = fde->dw_fde_second_begin;
20396 }
20397
20398 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20399 {
20400 switch (cfi->dw_cfi_opc)
20401 {
20402 case DW_CFA_set_loc:
20403 case DW_CFA_advance_loc1:
20404 case DW_CFA_advance_loc2:
20405 case DW_CFA_advance_loc4:
20406 if (!cfa_equal_p (&last_cfa, &next_cfa))
20407 {
20408 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20409 start_label, 0, last_label, 0, section);
20410
20411 list_tail = &(*list_tail)->dw_loc_next;
20412 last_cfa = next_cfa;
20413 start_label = last_label;
20414 }
20415 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20416 break;
20417
20418 case DW_CFA_advance_loc:
20419 /* The encoding is complex enough that we should never emit this. */
20420 gcc_unreachable ();
20421
20422 default:
20423 lookup_cfa_1 (cfi, &next_cfa, &remember);
20424 break;
20425 }
20426 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20427 {
20428 if (!cfa_equal_p (&last_cfa, &next_cfa))
20429 {
20430 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20431 start_label, 0, last_label, 0, section);
20432
20433 list_tail = &(*list_tail)->dw_loc_next;
20434 last_cfa = next_cfa;
20435 start_label = last_label;
20436 }
20437 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20438 start_label, 0, fde->dw_fde_end, 0, section);
20439 list_tail = &(*list_tail)->dw_loc_next;
20440 start_label = last_label = fde->dw_fde_second_begin;
20441 }
20442 }
20443
20444 if (!cfa_equal_p (&last_cfa, &next_cfa))
20445 {
20446 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20447 start_label, 0, last_label, 0, section);
20448 list_tail = &(*list_tail)->dw_loc_next;
20449 start_label = last_label;
20450 }
20451
20452 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20453 start_label, 0,
20454 fde->dw_fde_second_begin
20455 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20456 section);
20457
20458 maybe_gen_llsym (list);
20459
20460 return list;
20461 }
20462
20463 /* Compute a displacement from the "steady-state frame pointer" to the
20464 frame base (often the same as the CFA), and store it in
20465 frame_pointer_fb_offset. OFFSET is added to the displacement
20466 before the latter is negated. */
20467
20468 static void
20469 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20470 {
20471 rtx reg, elim;
20472
20473 #ifdef FRAME_POINTER_CFA_OFFSET
20474 reg = frame_pointer_rtx;
20475 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20476 #else
20477 reg = arg_pointer_rtx;
20478 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20479 #endif
20480
20481 elim = (ira_use_lra_p
20482 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20483 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20484 elim = strip_offset_and_add (elim, &offset);
20485
20486 frame_pointer_fb_offset = -offset;
20487
20488 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20489 in which to eliminate. This is because its stack pointer isn't
20490 directly accessible as a register within the ISA. To work around
20491 this, assume that while we cannot provide a proper value for
20492 frame_pointer_fb_offset, we won't need one either. */
20493 frame_pointer_fb_offset_valid
20494 = ((SUPPORTS_STACK_ALIGNMENT
20495 && (elim == hard_frame_pointer_rtx
20496 || elim == stack_pointer_rtx))
20497 || elim == (frame_pointer_needed
20498 ? hard_frame_pointer_rtx
20499 : stack_pointer_rtx));
20500 }
20501
20502 /* Generate a DW_AT_name attribute given some string value to be included as
20503 the value of the attribute. */
20504
20505 static void
20506 add_name_attribute (dw_die_ref die, const char *name_string)
20507 {
20508 if (name_string != NULL && *name_string != 0)
20509 {
20510 if (demangle_name_func)
20511 name_string = (*demangle_name_func) (name_string);
20512
20513 add_AT_string (die, DW_AT_name, name_string);
20514 }
20515 }
20516
20517 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20518 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20519 of TYPE accordingly.
20520
20521 ??? This is a temporary measure until after we're able to generate
20522 regular DWARF for the complex Ada type system. */
20523
20524 static void
20525 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20526 dw_die_ref context_die)
20527 {
20528 tree dtype;
20529 dw_die_ref dtype_die;
20530
20531 if (!lang_hooks.types.descriptive_type)
20532 return;
20533
20534 dtype = lang_hooks.types.descriptive_type (type);
20535 if (!dtype)
20536 return;
20537
20538 dtype_die = lookup_type_die (dtype);
20539 if (!dtype_die)
20540 {
20541 gen_type_die (dtype, context_die);
20542 dtype_die = lookup_type_die (dtype);
20543 gcc_assert (dtype_die);
20544 }
20545
20546 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20547 }
20548
20549 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20550
20551 static const char *
20552 comp_dir_string (void)
20553 {
20554 const char *wd;
20555 char *wd1;
20556 static const char *cached_wd = NULL;
20557
20558 if (cached_wd != NULL)
20559 return cached_wd;
20560
20561 wd = get_src_pwd ();
20562 if (wd == NULL)
20563 return NULL;
20564
20565 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20566 {
20567 int wdlen;
20568
20569 wdlen = strlen (wd);
20570 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20571 strcpy (wd1, wd);
20572 wd1 [wdlen] = DIR_SEPARATOR;
20573 wd1 [wdlen + 1] = 0;
20574 wd = wd1;
20575 }
20576
20577 cached_wd = remap_debug_filename (wd);
20578 return cached_wd;
20579 }
20580
20581 /* Generate a DW_AT_comp_dir attribute for DIE. */
20582
20583 static void
20584 add_comp_dir_attribute (dw_die_ref die)
20585 {
20586 const char * wd = comp_dir_string ();
20587 if (wd != NULL)
20588 add_AT_string (die, DW_AT_comp_dir, wd);
20589 }
20590
20591 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20592 pointer computation, ...), output a representation for that value according
20593 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20594 loc_list_from_tree for the meaning of CONTEXT. */
20595
20596 static void
20597 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20598 int forms, struct loc_descr_context *context)
20599 {
20600 dw_die_ref context_die, decl_die;
20601 dw_loc_list_ref list;
20602 bool strip_conversions = true;
20603 bool placeholder_seen = false;
20604
20605 while (strip_conversions)
20606 switch (TREE_CODE (value))
20607 {
20608 case ERROR_MARK:
20609 case SAVE_EXPR:
20610 return;
20611
20612 CASE_CONVERT:
20613 case VIEW_CONVERT_EXPR:
20614 value = TREE_OPERAND (value, 0);
20615 break;
20616
20617 default:
20618 strip_conversions = false;
20619 break;
20620 }
20621
20622 /* If possible and permitted, output the attribute as a constant. */
20623 if ((forms & dw_scalar_form_constant) != 0
20624 && TREE_CODE (value) == INTEGER_CST)
20625 {
20626 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20627
20628 /* If HOST_WIDE_INT is big enough then represent the bound as
20629 a constant value. We need to choose a form based on
20630 whether the type is signed or unsigned. We cannot just
20631 call add_AT_unsigned if the value itself is positive
20632 (add_AT_unsigned might add the unsigned value encoded as
20633 DW_FORM_data[1248]). Some DWARF consumers will look up the
20634 bounds type and then sign extend any unsigned values found
20635 for signed types. This is needed only for
20636 DW_AT_{lower,upper}_bound, since for most other attributes,
20637 consumers will treat DW_FORM_data[1248] as unsigned values,
20638 regardless of the underlying type. */
20639 if (prec <= HOST_BITS_PER_WIDE_INT
20640 || tree_fits_uhwi_p (value))
20641 {
20642 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20643 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20644 else
20645 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20646 }
20647 else
20648 /* Otherwise represent the bound as an unsigned value with
20649 the precision of its type. The precision and signedness
20650 of the type will be necessary to re-interpret it
20651 unambiguously. */
20652 add_AT_wide (die, attr, wi::to_wide (value));
20653 return;
20654 }
20655
20656 /* Otherwise, if it's possible and permitted too, output a reference to
20657 another DIE. */
20658 if ((forms & dw_scalar_form_reference) != 0)
20659 {
20660 tree decl = NULL_TREE;
20661
20662 /* Some type attributes reference an outer type. For instance, the upper
20663 bound of an array may reference an embedding record (this happens in
20664 Ada). */
20665 if (TREE_CODE (value) == COMPONENT_REF
20666 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20667 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20668 decl = TREE_OPERAND (value, 1);
20669
20670 else if (VAR_P (value)
20671 || TREE_CODE (value) == PARM_DECL
20672 || TREE_CODE (value) == RESULT_DECL)
20673 decl = value;
20674
20675 if (decl != NULL_TREE)
20676 {
20677 dw_die_ref decl_die = lookup_decl_die (decl);
20678
20679 /* ??? Can this happen, or should the variable have been bound
20680 first? Probably it can, since I imagine that we try to create
20681 the types of parameters in the order in which they exist in
20682 the list, and won't have created a forward reference to a
20683 later parameter. */
20684 if (decl_die != NULL)
20685 {
20686 add_AT_die_ref (die, attr, decl_die);
20687 return;
20688 }
20689 }
20690 }
20691
20692 /* Last chance: try to create a stack operation procedure to evaluate the
20693 value. Do nothing if even that is not possible or permitted. */
20694 if ((forms & dw_scalar_form_exprloc) == 0)
20695 return;
20696
20697 list = loc_list_from_tree (value, 2, context);
20698 if (context && context->placeholder_arg)
20699 {
20700 placeholder_seen = context->placeholder_seen;
20701 context->placeholder_seen = false;
20702 }
20703 if (list == NULL || single_element_loc_list_p (list))
20704 {
20705 /* If this attribute is not a reference nor constant, it is
20706 a DWARF expression rather than location description. For that
20707 loc_list_from_tree (value, 0, &context) is needed. */
20708 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20709 if (list2 && single_element_loc_list_p (list2))
20710 {
20711 if (placeholder_seen)
20712 {
20713 struct dwarf_procedure_info dpi;
20714 dpi.fndecl = NULL_TREE;
20715 dpi.args_count = 1;
20716 if (!resolve_args_picking (list2->expr, 1, &dpi))
20717 return;
20718 }
20719 add_AT_loc (die, attr, list2->expr);
20720 return;
20721 }
20722 }
20723
20724 /* If that failed to give a single element location list, fall back to
20725 outputting this as a reference... still if permitted. */
20726 if (list == NULL
20727 || (forms & dw_scalar_form_reference) == 0
20728 || placeholder_seen)
20729 return;
20730
20731 if (current_function_decl == 0)
20732 context_die = comp_unit_die ();
20733 else
20734 context_die = lookup_decl_die (current_function_decl);
20735
20736 decl_die = new_die (DW_TAG_variable, context_die, value);
20737 add_AT_flag (decl_die, DW_AT_artificial, 1);
20738 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20739 context_die);
20740 add_AT_location_description (decl_die, DW_AT_location, list);
20741 add_AT_die_ref (die, attr, decl_die);
20742 }
20743
20744 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20745 default. */
20746
20747 static int
20748 lower_bound_default (void)
20749 {
20750 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20751 {
20752 case DW_LANG_C:
20753 case DW_LANG_C89:
20754 case DW_LANG_C99:
20755 case DW_LANG_C11:
20756 case DW_LANG_C_plus_plus:
20757 case DW_LANG_C_plus_plus_11:
20758 case DW_LANG_C_plus_plus_14:
20759 case DW_LANG_ObjC:
20760 case DW_LANG_ObjC_plus_plus:
20761 return 0;
20762 case DW_LANG_Fortran77:
20763 case DW_LANG_Fortran90:
20764 case DW_LANG_Fortran95:
20765 case DW_LANG_Fortran03:
20766 case DW_LANG_Fortran08:
20767 return 1;
20768 case DW_LANG_UPC:
20769 case DW_LANG_D:
20770 case DW_LANG_Python:
20771 return dwarf_version >= 4 ? 0 : -1;
20772 case DW_LANG_Ada95:
20773 case DW_LANG_Ada83:
20774 case DW_LANG_Cobol74:
20775 case DW_LANG_Cobol85:
20776 case DW_LANG_Modula2:
20777 case DW_LANG_PLI:
20778 return dwarf_version >= 4 ? 1 : -1;
20779 default:
20780 return -1;
20781 }
20782 }
20783
20784 /* Given a tree node describing an array bound (either lower or upper) output
20785 a representation for that bound. */
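/* For instance, for a C declaration such as int a[10], the lower bound 0
   matches the language default (see lower_bound_default) and is simply
   omitted, while DW_AT_upper_bound 9 is emitted.  */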
20786
20787 static void
20788 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20789 tree bound, struct loc_descr_context *context)
20790 {
20791 int dflt;
20792
20793 while (1)
20794 switch (TREE_CODE (bound))
20795 {
20796 /* Strip all conversions. */
20797 CASE_CONVERT:
20798 case VIEW_CONVERT_EXPR:
20799 bound = TREE_OPERAND (bound, 0);
20800 break;
20801
20802 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20803 are even omitted when they are the default. */
20804 case INTEGER_CST:
20805 /* If the value for this bound is the default one, we can even omit the
20806 attribute. */
20807 if (bound_attr == DW_AT_lower_bound
20808 && tree_fits_shwi_p (bound)
20809 && (dflt = lower_bound_default ()) != -1
20810 && tree_to_shwi (bound) == dflt)
20811 return;
20812
20813 /* FALLTHRU */
20814
20815 default:
20816 /* Because of the complex interactions there can be with other GNAT
20817 encodings, GDB isn't ready yet to handle a proper DWARF description
20818 of self-referential subrange bounds: let GNAT encodings do the
20819 magic in such a case. */
20820 if (is_ada ()
20821 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20822 && contains_placeholder_p (bound))
20823 return;
20824
20825 add_scalar_info (subrange_die, bound_attr, bound,
20826 dw_scalar_form_constant
20827 | dw_scalar_form_exprloc
20828 | dw_scalar_form_reference,
20829 context);
20830 return;
20831 }
20832 }
20833
20834 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20835 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20836 Note that the block of subscript information for an array type also
20837 includes information about the element type of the given array type.
20838
20839 This function reuses previously set type and bound information if
20840 available. */
20841
20842 static void
20843 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20844 {
20845 unsigned dimension_number;
20846 tree lower, upper;
20847 dw_die_ref child = type_die->die_child;
20848
20849 for (dimension_number = 0;
20850 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20851 type = TREE_TYPE (type), dimension_number++)
20852 {
20853 tree domain = TYPE_DOMAIN (type);
20854
20855 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20856 break;
20857
20858 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20859 and (in GNU C only) variable bounds. Handle all three forms
20860 here. */
20861
20862 /* Find and reuse a previously generated DW_TAG_subrange_type if
20863 available.
20864
20865 For multi-dimensional arrays, as we iterate through the
20866 various dimensions in the enclosing for loop above, we also
20867 iterate through the DIE children and pick at each
20868 DW_TAG_subrange_type previously generated (if available).
20869 Each child DW_TAG_subrange_type DIE describes the range of
20870 the current dimension. At this point we should have as many
20871 DW_TAG_subrange_type's as we have dimensions in the
20872 array. */
20873 dw_die_ref subrange_die = NULL;
20874 if (child)
20875 while (1)
20876 {
20877 child = child->die_sib;
20878 if (child->die_tag == DW_TAG_subrange_type)
20879 subrange_die = child;
20880 if (child == type_die->die_child)
20881 {
20882 /* If we wrapped around, stop looking next time. */
20883 child = NULL;
20884 break;
20885 }
20886 if (child->die_tag == DW_TAG_subrange_type)
20887 break;
20888 }
20889 if (!subrange_die)
20890 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20891
20892 if (domain)
20893 {
20894 /* We have an array type with specified bounds. */
20895 lower = TYPE_MIN_VALUE (domain);
20896 upper = TYPE_MAX_VALUE (domain);
20897
20898 /* Define the index type. */
20899 if (TREE_TYPE (domain)
20900 && !get_AT (subrange_die, DW_AT_type))
20901 {
20902 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20903 TREE_TYPE field. We can't emit debug info for this
20904 because it is an unnamed integral type. */
20905 if (TREE_CODE (domain) == INTEGER_TYPE
20906 && TYPE_NAME (domain) == NULL_TREE
20907 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20908 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20909 ;
20910 else
20911 add_type_attribute (subrange_die, TREE_TYPE (domain),
20912 TYPE_UNQUALIFIED, false, type_die);
20913 }
20914
20915 /* ??? If upper is NULL, the array has unspecified length,
20916 but it does have a lower bound. This happens with Fortran
20917 dimension arr(N:*)
20918 Since the debugger is definitely going to need to know N
20919 to produce useful results, go ahead and output the lower
20920 bound solo, and hope the debugger can cope. */
20921
20922 if (!get_AT (subrange_die, DW_AT_lower_bound))
20923 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20924 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20925 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20926 }
20927
20928 /* Otherwise we have an array type with an unspecified length. The
20929 DWARF-2 spec does not say how to handle this; let's just leave out the
20930 bounds. */
20931 }
20932 }
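
/* Illustrative sketch, not from the original source: with COLLAPSE_P true,
   a C declaration like

       int m[2][3];

   yields a single DW_TAG_array_type carrying two DW_TAG_subrange_type
   children whose upper bounds are 1 and 2.  With COLLAPSE_P false (Ada),
   each dimension keeps its own array type DIE instead.  */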
20933
20934 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20935
20936 static void
20937 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20938 {
20939 dw_die_ref decl_die;
20940 HOST_WIDE_INT size;
20941 dw_loc_descr_ref size_expr = NULL;
20942
20943 switch (TREE_CODE (tree_node))
20944 {
20945 case ERROR_MARK:
20946 size = 0;
20947 break;
20948 case ENUMERAL_TYPE:
20949 case RECORD_TYPE:
20950 case UNION_TYPE:
20951 case QUAL_UNION_TYPE:
20952 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20953 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20954 {
20955 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20956 return;
20957 }
20958 size_expr = type_byte_size (tree_node, &size);
20959 break;
20960 case FIELD_DECL:
20961 /* For a data member of a struct or union, the DW_AT_byte_size is
20962 generally given as the number of bytes normally allocated for an
20963 object of the *declared* type of the member itself. This is true
20964 even for bit-fields. */
20965 size = int_size_in_bytes (field_type (tree_node));
20966 break;
20967 default:
20968 gcc_unreachable ();
20969 }
20970
20971 /* Support for dynamically-sized objects was introduced by DWARFv3.
20972 At the moment, GDB does not handle variable byte sizes very well,
20973 though. */
20974 if ((dwarf_version >= 3 || !dwarf_strict)
20975 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20976 && size_expr != NULL)
20977 add_AT_loc (die, DW_AT_byte_size, size_expr);
20978
20979 /* Note that `size' might be -1 when we get to this point. If it is, that
20980 indicates that the byte size of the entity in question is variable and
20981 that we could not generate a DWARF expression that computes it. */
20982 if (size >= 0)
20983 add_AT_unsigned (die, DW_AT_byte_size, size);
20984 }
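
/* Hedged example for illustration, using a hypothetical declaration:

       struct s { int i; char c; };

   on a typical target with 4-byte int and 4-byte struct alignment,
   type_byte_size returns 8 (including tail padding), so the structure DIE
   gets DW_AT_byte_size 8.  When the size is dynamic and a DWARF expression
   could be built, that expression may be emitted instead via add_AT_loc,
   subject to the DWARF version and GNAT encoding checks above.  */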
20985
20986 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20987 alignment. */
20988
20989 static void
20990 add_alignment_attribute (dw_die_ref die, tree tree_node)
20991 {
20992 if (dwarf_version < 5 && dwarf_strict)
20993 return;
20994
20995 unsigned align;
20996
20997 if (DECL_P (tree_node))
20998 {
20999 if (!DECL_USER_ALIGN (tree_node))
21000 return;
21001
21002 align = DECL_ALIGN_UNIT (tree_node);
21003 }
21004 else if (TYPE_P (tree_node))
21005 {
21006 if (!TYPE_USER_ALIGN (tree_node))
21007 return;
21008
21009 align = TYPE_ALIGN_UNIT (tree_node);
21010 }
21011 else
21012 gcc_unreachable ();
21013
21014 add_AT_unsigned (die, DW_AT_alignment, align);
21015 }
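
/* Illustrative example (hypothetical declaration): only user-specified
   alignment is reported, so

       _Alignas (16) int buf[4];

   gets DW_AT_alignment 16 (DECL_ALIGN_UNIT is in bytes), while a plain
   "int x;" with only its natural alignment gets no DW_AT_alignment.
   Nothing is emitted at all for strict DWARF below version 5.  */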
21016
21017 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21018 which specifies the distance in bits from the highest order bit of the
21019 "containing object" for the bit-field to the highest order bit of the
21020 bit-field itself.
21021
21022 For any given bit-field, the "containing object" is a hypothetical object
21023 (of some integral or enum type) within which the given bit-field lives. The
21024 type of this hypothetical "containing object" is always the same as the
21025 declared type of the individual bit-field itself. The determination of the
21026 exact location of the "containing object" for a bit-field is rather
21027 complicated. It's handled by the `field_byte_offset' function (above).
21028
21029 CTX is required: see the comment for VLR_CONTEXT.
21030
21031 Note that it is the size (in bytes) of the hypothetical "containing object"
21032 which will be given in the DW_AT_byte_size attribute for this bit-field.
21033 (See `add_byte_size_attribute' above.) */
21034
21035 static inline void
21036 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21037 {
21038 HOST_WIDE_INT object_offset_in_bytes;
21039 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21040 HOST_WIDE_INT bitpos_int;
21041 HOST_WIDE_INT highest_order_object_bit_offset;
21042 HOST_WIDE_INT highest_order_field_bit_offset;
21043 HOST_WIDE_INT bit_offset;
21044
21045 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21046
21047 /* Must be a field and a bit field. */
21048 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21049
21050 /* We can't yet handle bit-fields whose offsets are variable, so if we
21051 encounter such things, just return without generating any attribute
21052 whatsoever. Likewise for variable or too large size. */
21053 if (! tree_fits_shwi_p (bit_position (decl))
21054 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21055 return;
21056
21057 bitpos_int = int_bit_position (decl);
21058
21059 /* Note that the bit offset is always the distance (in bits) from the
21060 highest-order bit of the "containing object" to the highest-order bit of
21061 the bit-field itself. Since the "high-order end" of any object or field
21062 is different on big-endian and little-endian machines, the computation
21063 below must take account of these differences. */
21064 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21065 highest_order_field_bit_offset = bitpos_int;
21066
21067 if (! BYTES_BIG_ENDIAN)
21068 {
21069 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21070 highest_order_object_bit_offset +=
21071 simple_type_size_in_bits (original_type);
21072 }
21073
21074 bit_offset
21075 = (! BYTES_BIG_ENDIAN
21076 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21077 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21078
21079 if (bit_offset < 0)
21080 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21081 else
21082 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21083 }
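
/* Worked example, purely illustrative: for

       struct s { unsigned int f : 3; };

   on a little-endian target with 32-bit int and the containing object at
   byte offset 0, bitpos_int is 0, the field end becomes 0 + 3 and the
   object end becomes 0 + 32, so DW_AT_bit_offset is 32 - 3 = 29.  On a
   big-endian target the same field would get DW_AT_bit_offset 0.  */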
21084
21085 /* For a FIELD_DECL node which represents a bit field, output an attribute
21086 which specifies the length in bits of the given field. */
21087
21088 static inline void
21089 add_bit_size_attribute (dw_die_ref die, tree decl)
21090 {
21091 /* Must be a field and a bit field. */
21092 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21093 && DECL_BIT_FIELD_TYPE (decl));
21094
21095 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21096 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21097 }
21098
21099 /* If the language being compiled is ANSI C, add a DW_AT_prototyped
21100 attribute when argument types are given for the function's parameters. */
21101
21102 static inline void
21103 add_prototyped_attribute (dw_die_ref die, tree func_type)
21104 {
21105 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21106 {
21107 case DW_LANG_C:
21108 case DW_LANG_C89:
21109 case DW_LANG_C99:
21110 case DW_LANG_C11:
21111 case DW_LANG_ObjC:
21112 if (prototype_p (func_type))
21113 add_AT_flag (die, DW_AT_prototyped, 1);
21114 break;
21115 default:
21116 break;
21117 }
21118 }
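
/* Example for illustration: in C, the declaration "int f (void);" satisfies
   prototype_p and therefore gets DW_AT_prototyped 1, whereas an old-style
   declaration "int g ();" gets no attribute.  Languages outside the switch
   above (C++, for instance, where every function is prototyped) are left
   alone here.  */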
21119
21120 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21121 by looking in the type declaration, the object declaration equate table or
21122 the block mapping. */
21123
21124 static inline dw_die_ref
21125 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21126 {
21127 dw_die_ref origin_die = NULL;
21128
21129 if (DECL_P (origin))
21130 {
21131 dw_die_ref c;
21132 origin_die = lookup_decl_die (origin);
21133 /* "Unwrap" the decls DIE which we put in the imported unit context.
21134 We are looking for the abstract copy here. */
21135 if (in_lto_p
21136 && origin_die
21137 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21138 /* ??? Identify this better. */
21139 && c->with_offset)
21140 origin_die = c;
21141 }
21142 else if (TYPE_P (origin))
21143 origin_die = lookup_type_die (origin);
21144 else if (TREE_CODE (origin) == BLOCK)
21145 origin_die = BLOCK_DIE (origin);
21146
21147 /* XXX: Functions that are never lowered don't always have correct block
21148 trees (in the case of Java, they simply have no block tree; similarly in some other
21149 languages). For these functions, there is nothing we can really do to
21150 output correct debug info for inlined functions in all cases. Rather
21151 than die, we'll just produce deficient debug info now, in that we will
21152 have variables without a proper abstract origin. In the future, when all
21153 functions are lowered, we should re-add a gcc_assert (origin_die)
21154 here. */
21155
21156 if (origin_die)
21157 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21158 return origin_die;
21159 }
21160
21161 /* We do not currently support the pure_virtual attribute. */
21162
21163 static inline void
21164 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21165 {
21166 if (DECL_VINDEX (func_decl))
21167 {
21168 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21169
21170 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21171 add_AT_loc (die, DW_AT_vtable_elem_location,
21172 new_loc_descr (DW_OP_constu,
21173 tree_to_shwi (DECL_VINDEX (func_decl)),
21174 0));
21175
21176 /* GNU extension: Record what type this method came from originally. */
21177 if (debug_info_level > DINFO_LEVEL_TERSE
21178 && DECL_CONTEXT (func_decl))
21179 add_AT_die_ref (die, DW_AT_containing_type,
21180 lookup_type_die (DECL_CONTEXT (func_decl)));
21181 }
21182 }
21183 \f
21184 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21185 given decl. This was a vendor extension until DWARF 4
21186 standardized it. */
21187
21188 static void
21189 add_linkage_attr (dw_die_ref die, tree decl)
21190 {
21191 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21192
21193 /* Mimic what assemble_name_raw does with a leading '*'. */
21194 if (name[0] == '*')
21195 name = &name[1];
21196
21197 if (dwarf_version >= 4)
21198 add_AT_string (die, DW_AT_linkage_name, name);
21199 else
21200 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21201 }
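
/* Illustrative example, assuming an Itanium C++ ABI target: the C++
   function "void ns::f (int)" has the assembler name "_ZN2ns1fEi", which
   is emitted as DW_AT_linkage_name for DWARF 4 and later, or as the
   DW_AT_MIPS_linkage_name vendor attribute for older DWARF versions.  */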
21202
21203 /* Add source coordinate attributes for the given decl. */
21204
21205 static void
21206 add_src_coords_attributes (dw_die_ref die, tree decl)
21207 {
21208 expanded_location s;
21209
21210 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21211 return;
21212 s = expand_location (DECL_SOURCE_LOCATION (decl));
21213 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21214 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21215 if (debug_column_info && s.column)
21216 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21217 }
21218
21219 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21220
21221 static void
21222 add_linkage_name_raw (dw_die_ref die, tree decl)
21223 {
21224 /* Defer until we have an assembler name set. */
21225 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21226 {
21227 limbo_die_node *asm_name;
21228
21229 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21230 asm_name->die = die;
21231 asm_name->created_for = decl;
21232 asm_name->next = deferred_asm_name;
21233 deferred_asm_name = asm_name;
21234 }
21235 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21236 add_linkage_attr (die, decl);
21237 }
21238
21239 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21240
21241 static void
21242 add_linkage_name (dw_die_ref die, tree decl)
21243 {
21244 if (debug_info_level > DINFO_LEVEL_NONE
21245 && VAR_OR_FUNCTION_DECL_P (decl)
21246 && TREE_PUBLIC (decl)
21247 && !(VAR_P (decl) && DECL_REGISTER (decl))
21248 && die->die_tag != DW_TAG_member)
21249 add_linkage_name_raw (die, decl);
21250 }
21251
21252 /* Add a DW_AT_name attribute and source coordinate attribute for the
21253 given decl, but only if it actually has a name. */
21254
21255 static void
21256 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21257 bool no_linkage_name)
21258 {
21259 tree decl_name;
21260
21261 decl_name = DECL_NAME (decl);
21262 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21263 {
21264 const char *name = dwarf2_name (decl, 0);
21265 if (name)
21266 add_name_attribute (die, name);
21267 if (! DECL_ARTIFICIAL (decl))
21268 add_src_coords_attributes (die, decl);
21269
21270 if (!no_linkage_name)
21271 add_linkage_name (die, decl);
21272 }
21273
21274 #ifdef VMS_DEBUGGING_INFO
21275 /* Get the function's name, as described by its RTL. This may be different
21276 from the DECL_NAME name used in the source file. */
21277 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21278 {
21279 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21280 XEXP (DECL_RTL (decl), 0), false);
21281 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21282 }
21283 #endif /* VMS_DEBUGGING_INFO */
21284 }
21285
21286 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21287
21288 static void
21289 add_discr_value (dw_die_ref die, dw_discr_value *value)
21290 {
21291 dw_attr_node attr;
21292
21293 attr.dw_attr = DW_AT_discr_value;
21294 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21295 attr.dw_attr_val.val_entry = NULL;
21296 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21297 if (value->pos)
21298 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21299 else
21300 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21301 add_dwarf_attr (die, &attr);
21302 }
21303
21304 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21305
21306 static void
21307 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21308 {
21309 dw_attr_node attr;
21310
21311 attr.dw_attr = DW_AT_discr_list;
21312 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21313 attr.dw_attr_val.val_entry = NULL;
21314 attr.dw_attr_val.v.val_discr_list = discr_list;
21315 add_dwarf_attr (die, &attr);
21316 }
21317
21318 static inline dw_discr_list_ref
21319 AT_discr_list (dw_attr_node *attr)
21320 {
21321 return attr->dw_attr_val.v.val_discr_list;
21322 }
21323
21324 #ifdef VMS_DEBUGGING_INFO
21325 /* Output the debug main pointer die for VMS.  */
21326
21327 void
21328 dwarf2out_vms_debug_main_pointer (void)
21329 {
21330 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21331 dw_die_ref die;
21332
21333 /* Allocate the VMS debug main subprogram die. */
21334 die = new_die_raw (DW_TAG_subprogram);
21335 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21336 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21337 current_function_funcdef_no);
21338 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21339
21340 /* Make it the first child of comp_unit_die (). */
21341 die->die_parent = comp_unit_die ();
21342 if (comp_unit_die ()->die_child)
21343 {
21344 die->die_sib = comp_unit_die ()->die_child->die_sib;
21345 comp_unit_die ()->die_child->die_sib = die;
21346 }
21347 else
21348 {
21349 die->die_sib = die;
21350 comp_unit_die ()->die_child = die;
21351 }
21352 }
21353 #endif /* VMS_DEBUGGING_INFO */
21354
21355 /* walk_tree helper function for uses_local_type, below. */
21356
21357 static tree
21358 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21359 {
21360 if (!TYPE_P (*tp))
21361 *walk_subtrees = 0;
21362 else
21363 {
21364 tree name = TYPE_NAME (*tp);
21365 if (name && DECL_P (name) && decl_function_context (name))
21366 return *tp;
21367 }
21368 return NULL_TREE;
21369 }
21370
21371 /* If TYPE involves a function-local type (including a local typedef to a
21372 non-local type), returns that type; otherwise returns NULL_TREE. */
21373
21374 static tree
21375 uses_local_type (tree type)
21376 {
21377 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21378 return used;
21379 }
21380
21381 /* Return the DIE for the scope that immediately contains this type.
21382 Non-named types that do not involve a function-local type get global
21383 scope. Named types nested in namespaces or other types get their
21384 containing scope. All other types (i.e. function-local named types) get
21385 the current active scope. */
21386
21387 static dw_die_ref
21388 scope_die_for (tree t, dw_die_ref context_die)
21389 {
21390 dw_die_ref scope_die = NULL;
21391 tree containing_scope;
21392
21393 /* Non-types always go in the current scope. */
21394 gcc_assert (TYPE_P (t));
21395
21396 /* Use the scope of the typedef, rather than the scope of the type
21397 it refers to. */
21398 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21399 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21400 else
21401 containing_scope = TYPE_CONTEXT (t);
21402
21403 /* Use the containing namespace if there is one. */
21404 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21405 {
21406 if (context_die == lookup_decl_die (containing_scope))
21407 /* OK */;
21408 else if (debug_info_level > DINFO_LEVEL_TERSE)
21409 context_die = get_context_die (containing_scope);
21410 else
21411 containing_scope = NULL_TREE;
21412 }
21413
21414 /* Ignore function type "scopes" from the C frontend. They mean that
21415 a tagged type is local to a parmlist of a function declarator, but
21416 that isn't useful to DWARF. */
21417 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21418 containing_scope = NULL_TREE;
21419
21420 if (SCOPE_FILE_SCOPE_P (containing_scope))
21421 {
21422 /* If T uses a local type keep it local as well, to avoid references
21423 to function-local DIEs from outside the function. */
21424 if (current_function_decl && uses_local_type (t))
21425 scope_die = context_die;
21426 else
21427 scope_die = comp_unit_die ();
21428 }
21429 else if (TYPE_P (containing_scope))
21430 {
21431 /* For types, we can just look up the appropriate DIE. */
21432 if (debug_info_level > DINFO_LEVEL_TERSE)
21433 scope_die = get_context_die (containing_scope);
21434 else
21435 {
21436 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21437 if (scope_die == NULL)
21438 scope_die = comp_unit_die ();
21439 }
21440 }
21441 else
21442 scope_die = context_die;
21443
21444 return scope_die;
21445 }
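
/* Illustrative example (hypothetical C++ input): for

       namespace ns { struct S { int i; }; }

   the containing scope of S is the NAMESPACE_DECL for ns, so with debug
   info above the terse level the DW_TAG_structure_type for S is placed
   under the DW_TAG_namespace DIE.  A file-scope struct that involves no
   function-local type goes directly under comp_unit_die ().  */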
21446
21447 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21448
21449 static inline int
21450 local_scope_p (dw_die_ref context_die)
21451 {
21452 for (; context_die; context_die = context_die->die_parent)
21453 if (context_die->die_tag == DW_TAG_inlined_subroutine
21454 || context_die->die_tag == DW_TAG_subprogram)
21455 return 1;
21456
21457 return 0;
21458 }
21459
21460 /* Returns nonzero if CONTEXT_DIE is a class. */
21461
21462 static inline int
21463 class_scope_p (dw_die_ref context_die)
21464 {
21465 return (context_die
21466 && (context_die->die_tag == DW_TAG_structure_type
21467 || context_die->die_tag == DW_TAG_class_type
21468 || context_die->die_tag == DW_TAG_interface_type
21469 || context_die->die_tag == DW_TAG_union_type));
21470 }
21471
21472 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21473 whether or not to treat a DIE in this context as a declaration. */
21474
21475 static inline int
21476 class_or_namespace_scope_p (dw_die_ref context_die)
21477 {
21478 return (class_scope_p (context_die)
21479 || (context_die && context_die->die_tag == DW_TAG_namespace));
21480 }
21481
21482 /* Many forms of DIEs require a "type description" attribute. This
21483 routine locates the proper "type descriptor" die for the type given
21484 by 'type' plus any additional qualifiers given by 'cv_quals', and
21485 adds a DW_AT_type attribute below the given die. */
21486
21487 static void
21488 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21489 bool reverse, dw_die_ref context_die)
21490 {
21491 enum tree_code code = TREE_CODE (type);
21492 dw_die_ref type_die = NULL;
21493
21494 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21495 or fixed-point type, use the inner type. This is because we have no
21496 support for unnamed types in base_type_die. This can happen if this is
21497 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21498 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21499 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21500 type = TREE_TYPE (type), code = TREE_CODE (type);
21501
21502 if (code == ERROR_MARK
21503 /* Handle a special case. For functions whose return type is void, we
21504 generate *no* type attribute. (Note that no object may have type
21505 `void', so this only applies to function return types). */
21506 || code == VOID_TYPE)
21507 return;
21508
21509 type_die = modified_type_die (type,
21510 cv_quals | TYPE_QUALS (type),
21511 reverse,
21512 context_die);
21513
21514 if (type_die != NULL)
21515 add_AT_die_ref (object_die, DW_AT_type, type_die);
21516 }
21517
21518 /* Given an object die, add the calling convention attribute for the
21519 function call type. */
21520 static void
21521 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21522 {
21523 enum dwarf_calling_convention value = DW_CC_normal;
21524
21525 value = ((enum dwarf_calling_convention)
21526 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21527
21528 if (is_fortran ()
21529 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21530 {
21531 /* DWARF 2 doesn't provide a way to identify a program's source-level
21532 entry point. DW_AT_calling_convention attributes are only meant
21533 to describe functions' calling conventions. However, lacking a
21534 better way to signal the Fortran main program, we used this for
21535 a long time, following existing custom. Now, DWARF 4 has
21536 DW_AT_main_subprogram, which we add below, but some tools still
21537 rely on the old way, which we thus keep. */
21538 value = DW_CC_program;
21539
21540 if (dwarf_version >= 4 || !dwarf_strict)
21541 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21542 }
21543
21544 /* Only add the attribute if the backend requests it and the value
21545 is not DW_CC_normal. */
21546 if (value && (value != DW_CC_normal))
21547 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21548 }
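
/* Example, illustrative only: gfortran gives the Fortran main program the
   assembler name MAIN__, so its DW_TAG_subprogram DIE receives
   DW_AT_calling_convention DW_CC_program and, for DWARF 4 or non-strict
   output, DW_AT_main_subprogram as well.  */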
21549
21550 /* Given a tree pointer to a struct, class, union, or enum type node, return
21551 a pointer to the (string) tag name for the given type, or zero if the type
21552 was declared without a tag. */
21553
21554 static const char *
21555 type_tag (const_tree type)
21556 {
21557 const char *name = 0;
21558
21559 if (TYPE_NAME (type) != 0)
21560 {
21561 tree t = 0;
21562
21563 /* Find the IDENTIFIER_NODE for the type name. */
21564 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21565 && !TYPE_NAMELESS (type))
21566 t = TYPE_NAME (type);
21567
21568 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21569 a TYPE_DECL node, regardless of whether or not a `typedef' was
21570 involved. */
21571 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21572 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21573 {
21574 /* We want to be extra verbose. Don't call dwarf_name if
21575 DECL_NAME isn't set. The default hook for decl_printable_name
21576 doesn't like that, and in this context it's correct to return
21577 0, instead of "<anonymous>" or the like. */
21578 if (DECL_NAME (TYPE_NAME (type))
21579 && !DECL_NAMELESS (TYPE_NAME (type)))
21580 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21581 }
21582
21583 /* Now get the name as a string, or invent one. */
21584 if (!name && t != 0)
21585 name = IDENTIFIER_POINTER (t);
21586 }
21587
21588 return (name == 0 || *name == '\0') ? 0 : name;
21589 }
21590
21591 /* Return the type associated with a data member, making a special check
21592 for bit-field types. */
21593
21594 static inline tree
21595 member_declared_type (const_tree member)
21596 {
21597 return (DECL_BIT_FIELD_TYPE (member)
21598 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21599 }
21600
21601 /* Get the decl's label, as described by its RTL. This may be different
21602 from the DECL_NAME name used in the source file. */
21603
21604 #if 0
21605 static const char *
21606 decl_start_label (tree decl)
21607 {
21608 rtx x;
21609 const char *fnname;
21610
21611 x = DECL_RTL (decl);
21612 gcc_assert (MEM_P (x));
21613
21614 x = XEXP (x, 0);
21615 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21616
21617 fnname = XSTR (x, 0);
21618 return fnname;
21619 }
21620 #endif
21621 \f
21622 /* For variable-length arrays that have been previously generated, but
21623 may be incomplete due to missing subscript info, fill the subscript
21624 info. Return TRUE if this is one of those cases. */
21625 static bool
21626 fill_variable_array_bounds (tree type)
21627 {
21628 if (TREE_ASM_WRITTEN (type)
21629 && TREE_CODE (type) == ARRAY_TYPE
21630 && variably_modified_type_p (type, NULL))
21631 {
21632 dw_die_ref array_die = lookup_type_die (type);
21633 if (!array_die)
21634 return false;
21635 add_subscript_info (array_die, type, !is_ada ());
21636 return true;
21637 }
21638 return false;
21639 }
21640
21641 /* These routines generate the internal representation of the DIE's for
21642 the compilation unit. Debugging information is collected by walking
21643 the declaration trees passed in from dwarf2out_decl(). */
21644
21645 static void
21646 gen_array_type_die (tree type, dw_die_ref context_die)
21647 {
21648 dw_die_ref array_die;
21649
21650 /* GNU compilers represent multidimensional array types as sequences of one
21651 dimensional array types whose element types are themselves array types.
21652 We sometimes squish that down to a single array_type DIE with multiple
21653 subscripts in the Dwarf debugging info. The draft DWARF specification
21654 says that we are allowed to do this kind of compression in C, because
21655 there is no difference between an array of arrays and a multidimensional
21656 array. We don't do this for Ada, so as to remain as close as possible to
21657 the actual representation, which is especially important given the
21658 language's flexibility with respect to arrays of variable size. */
21659
21660 bool collapse_nested_arrays = !is_ada ();
21661
21662 if (fill_variable_array_bounds (type))
21663 return;
21664
21665 dw_die_ref scope_die = scope_die_for (type, context_die);
21666 tree element_type;
21667
21668 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21669 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21670 if (TYPE_STRING_FLAG (type)
21671 && TREE_CODE (type) == ARRAY_TYPE
21672 && is_fortran ()
21673 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21674 {
21675 HOST_WIDE_INT size;
21676
21677 array_die = new_die (DW_TAG_string_type, scope_die, type);
21678 add_name_attribute (array_die, type_tag (type));
21679 equate_type_number_to_die (type, array_die);
21680 size = int_size_in_bytes (type);
21681 if (size >= 0)
21682 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21683 /* ??? We can't annotate types late, but for LTO we may not
21684 generate a location early either (gfortran.dg/save_6.f90). */
21685 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21686 && TYPE_DOMAIN (type) != NULL_TREE
21687 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21688 {
21689 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21690 tree rszdecl = szdecl;
21691
21692 size = int_size_in_bytes (TREE_TYPE (szdecl));
21693 if (!DECL_P (szdecl))
21694 {
21695 if (TREE_CODE (szdecl) == INDIRECT_REF
21696 && DECL_P (TREE_OPERAND (szdecl, 0)))
21697 {
21698 rszdecl = TREE_OPERAND (szdecl, 0);
21699 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21700 != DWARF2_ADDR_SIZE)
21701 size = 0;
21702 }
21703 else
21704 size = 0;
21705 }
21706 if (size > 0)
21707 {
21708 dw_loc_list_ref loc
21709 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21710 NULL);
21711 if (loc)
21712 {
21713 add_AT_location_description (array_die, DW_AT_string_length,
21714 loc);
21715 if (size != DWARF2_ADDR_SIZE)
21716 add_AT_unsigned (array_die, dwarf_version >= 5
21717 ? DW_AT_string_length_byte_size
21718 : DW_AT_byte_size, size);
21719 }
21720 }
21721 }
21722 return;
21723 }
21724
21725 array_die = new_die (DW_TAG_array_type, scope_die, type);
21726 add_name_attribute (array_die, type_tag (type));
21727 equate_type_number_to_die (type, array_die);
21728
21729 if (TREE_CODE (type) == VECTOR_TYPE)
21730 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21731
21732 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21733 if (is_fortran ()
21734 && TREE_CODE (type) == ARRAY_TYPE
21735 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21736 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21737 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21738
21739 #if 0
21740 /* We default the array ordering. Debuggers will probably do the right
21741 things even if DW_AT_ordering is not present. It's not even an issue
21742 until we start to get into multidimensional arrays anyway. If a debugger
21743 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21744 then we'll have to put the DW_AT_ordering attribute back in. (But if
21745 and when we find out that we need to put these in, we will only do so
21746 for multidimensional arrays.) */
21747 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21748 #endif
21749
21750 if (TREE_CODE (type) == VECTOR_TYPE)
21751 {
21752 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21753 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21754 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21755 add_bound_info (subrange_die, DW_AT_upper_bound,
21756 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21757 }
21758 else
21759 add_subscript_info (array_die, type, collapse_nested_arrays);
21760
21761 /* Add representation of the type of the elements of this array type and
21762 emit the corresponding DIE if we haven't done it already. */
21763 element_type = TREE_TYPE (type);
21764 if (collapse_nested_arrays)
21765 while (TREE_CODE (element_type) == ARRAY_TYPE)
21766 {
21767 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21768 break;
21769 element_type = TREE_TYPE (element_type);
21770 }
21771
21772 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21773 TREE_CODE (type) == ARRAY_TYPE
21774 && TYPE_REVERSE_STORAGE_ORDER (type),
21775 context_die);
21776
21777 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21778 if (TYPE_ARTIFICIAL (type))
21779 add_AT_flag (array_die, DW_AT_artificial, 1);
21780
21781 if (get_AT (array_die, DW_AT_name))
21782 add_pubtype (type, array_die);
21783
21784 add_alignment_attribute (array_die, type);
21785 }
21786
21787 /* This routine generates a DIE for an array with a hidden descriptor; the
21788 details are filled into *info by a langhook. */
21789
21790 static void
21791 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21792 dw_die_ref context_die)
21793 {
21794 const dw_die_ref scope_die = scope_die_for (type, context_die);
21795 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21796 struct loc_descr_context context = { type, info->base_decl, NULL,
21797 false, false };
21798 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21799 int dim;
21800
21801 add_name_attribute (array_die, type_tag (type));
21802 equate_type_number_to_die (type, array_die);
21803
21804 if (info->ndimensions > 1)
21805 switch (info->ordering)
21806 {
21807 case array_descr_ordering_row_major:
21808 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21809 break;
21810 case array_descr_ordering_column_major:
21811 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21812 break;
21813 default:
21814 break;
21815 }
21816
21817 if (dwarf_version >= 3 || !dwarf_strict)
21818 {
21819 if (info->data_location)
21820 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21821 dw_scalar_form_exprloc, &context);
21822 if (info->associated)
21823 add_scalar_info (array_die, DW_AT_associated, info->associated,
21824 dw_scalar_form_constant
21825 | dw_scalar_form_exprloc
21826 | dw_scalar_form_reference, &context);
21827 if (info->allocated)
21828 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21829 dw_scalar_form_constant
21830 | dw_scalar_form_exprloc
21831 | dw_scalar_form_reference, &context);
21832 if (info->stride)
21833 {
21834 const enum dwarf_attribute attr
21835 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21836 const int forms
21837 = (info->stride_in_bits)
21838 ? dw_scalar_form_constant
21839 : (dw_scalar_form_constant
21840 | dw_scalar_form_exprloc
21841 | dw_scalar_form_reference);
21842
21843 add_scalar_info (array_die, attr, info->stride, forms, &context);
21844 }
21845 }
21846 if (dwarf_version >= 5)
21847 {
21848 if (info->rank)
21849 {
21850 add_scalar_info (array_die, DW_AT_rank, info->rank,
21851 dw_scalar_form_constant
21852 | dw_scalar_form_exprloc, &context);
21853 subrange_tag = DW_TAG_generic_subrange;
21854 context.placeholder_arg = true;
21855 }
21856 }
21857
21858 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21859
21860 for (dim = 0; dim < info->ndimensions; dim++)
21861 {
21862 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21863
21864 if (info->dimen[dim].bounds_type)
21865 add_type_attribute (subrange_die,
21866 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21867 false, context_die);
21868 if (info->dimen[dim].lower_bound)
21869 add_bound_info (subrange_die, DW_AT_lower_bound,
21870 info->dimen[dim].lower_bound, &context);
21871 if (info->dimen[dim].upper_bound)
21872 add_bound_info (subrange_die, DW_AT_upper_bound,
21873 info->dimen[dim].upper_bound, &context);
21874 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21875 add_scalar_info (subrange_die, DW_AT_byte_stride,
21876 info->dimen[dim].stride,
21877 dw_scalar_form_constant
21878 | dw_scalar_form_exprloc
21879 | dw_scalar_form_reference,
21880 &context);
21881 }
21882
21883 gen_type_die (info->element_type, context_die);
21884 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21885 TREE_CODE (type) == ARRAY_TYPE
21886 && TYPE_REVERSE_STORAGE_ORDER (type),
21887 context_die);
21888
21889 if (get_AT (array_die, DW_AT_name))
21890 add_pubtype (type, array_die);
21891
21892 add_alignment_attribute (array_die, type);
21893 }
21894
21895 #if 0
21896 static void
21897 gen_entry_point_die (tree decl, dw_die_ref context_die)
21898 {
21899 tree origin = decl_ultimate_origin (decl);
21900 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21901
21902 if (origin != NULL)
21903 add_abstract_origin_attribute (decl_die, origin);
21904 else
21905 {
21906 add_name_and_src_coords_attributes (decl_die, decl);
21907 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21908 TYPE_UNQUALIFIED, false, context_die);
21909 }
21910
21911 if (DECL_ABSTRACT_P (decl))
21912 equate_decl_number_to_die (decl, decl_die);
21913 else
21914 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21915 }
21916 #endif
21917
21918 /* Walk through the list of incomplete types again, trying once more to
21919 emit full debugging info for them. */
21920
21921 static void
21922 retry_incomplete_types (void)
21923 {
21924 set_early_dwarf s;
21925 int i;
21926
21927 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21928 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21929 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21930 vec_safe_truncate (incomplete_types, 0);
21931 }
21932
21933 /* Determine what tag to use for a record type. */
21934
21935 static enum dwarf_tag
21936 record_type_tag (tree type)
21937 {
21938 if (! lang_hooks.types.classify_record)
21939 return DW_TAG_structure_type;
21940
21941 switch (lang_hooks.types.classify_record (type))
21942 {
21943 case RECORD_IS_STRUCT:
21944 return DW_TAG_structure_type;
21945
21946 case RECORD_IS_CLASS:
21947 return DW_TAG_class_type;
21948
21949 case RECORD_IS_INTERFACE:
21950 if (dwarf_version >= 3 || !dwarf_strict)
21951 return DW_TAG_interface_type;
21952 return DW_TAG_structure_type;
21953
21954 default:
21955 gcc_unreachable ();
21956 }
21957 }
21958
21959 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21960 include all of the information about the enumeration values also. Each
21961 enumerated type name/value is listed as a child of the enumerated type
21962 DIE. */
21963
21964 static dw_die_ref
21965 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21966 {
21967 dw_die_ref type_die = lookup_type_die (type);
21968 dw_die_ref orig_type_die = type_die;
21969
21970 if (type_die == NULL)
21971 {
21972 type_die = new_die (DW_TAG_enumeration_type,
21973 scope_die_for (type, context_die), type);
21974 equate_type_number_to_die (type, type_die);
21975 add_name_attribute (type_die, type_tag (type));
21976 if ((dwarf_version >= 4 || !dwarf_strict)
21977 && ENUM_IS_SCOPED (type))
21978 add_AT_flag (type_die, DW_AT_enum_class, 1);
21979 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21980 add_AT_flag (type_die, DW_AT_declaration, 1);
21981 if (!dwarf_strict)
21982 add_AT_unsigned (type_die, DW_AT_encoding,
21983 TYPE_UNSIGNED (type)
21984 ? DW_ATE_unsigned
21985 : DW_ATE_signed);
21986 }
21987 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21988 return type_die;
21989 else
21990 remove_AT (type_die, DW_AT_declaration);
21991
21992 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
21993 given enum type is incomplete, do not generate the DW_AT_byte_size
21994 attribute or the DW_AT_element_list attribute. */
21995 if (TYPE_SIZE (type))
21996 {
21997 tree link;
21998
21999 if (!ENUM_IS_OPAQUE (type))
22000 TREE_ASM_WRITTEN (type) = 1;
22001 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22002 add_byte_size_attribute (type_die, type);
22003 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22004 add_alignment_attribute (type_die, type);
22005 if ((dwarf_version >= 3 || !dwarf_strict)
22006 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22007 {
22008 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22009 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22010 context_die);
22011 }
22012 if (TYPE_STUB_DECL (type) != NULL_TREE)
22013 {
22014 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22015 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22016 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22017 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22018 }
22019
22020 /* If the first reference to this type was as the return type of an
22021 inline function, then it may not have a parent. Fix this now. */
22022 if (type_die->die_parent == NULL)
22023 add_child_die (scope_die_for (type, context_die), type_die);
22024
22025 for (link = TYPE_VALUES (type);
22026 link != NULL; link = TREE_CHAIN (link))
22027 {
22028 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22029 tree value = TREE_VALUE (link);
22030
22031 gcc_assert (!ENUM_IS_OPAQUE (type));
22032 add_name_attribute (enum_die,
22033 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22034
22035 if (TREE_CODE (value) == CONST_DECL)
22036 value = DECL_INITIAL (value);
22037
22038 if (simple_type_size_in_bits (TREE_TYPE (value))
22039 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22040 {
22041 /* For constant forms created by add_AT_unsigned DWARF
22042 consumers (GDB, elfutils, etc.) always zero extend
22043 the value. Only when the actual value is negative
22044 do we need to use add_AT_int to generate a constant
22045 form that can represent negative values. */
22046 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22047 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22048 add_AT_unsigned (enum_die, DW_AT_const_value,
22049 (unsigned HOST_WIDE_INT) val);
22050 else
22051 add_AT_int (enum_die, DW_AT_const_value, val);
22052 }
22053 else
22054 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22055 that here. TODO: This should be re-worked to use correct
22056 signed/unsigned double tags for all cases. */
22057 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22058 }
22059
22060 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22061 if (TYPE_ARTIFICIAL (type)
22062 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22063 add_AT_flag (type_die, DW_AT_artificial, 1);
22064 }
22065 else
22066 add_AT_flag (type_die, DW_AT_declaration, 1);
22067
22068 add_pubtype (type, type_die);
22069
22070 return type_die;
22071 }
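
/* Sketch of the generated structure, for illustration: given

       enum color { RED = 1, GREEN = 2 };

   this function produces a DW_TAG_enumeration_type DIE with DW_AT_name
   "color" and DW_AT_byte_size, plus one DW_TAG_enumerator child per value
   carrying DW_AT_name ("RED", "GREEN") and DW_AT_const_value (1, 2).  */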
22072
22073 /* Generate a DIE to represent either a real live formal parameter decl or to
22074 represent just the type of some formal parameter position in some function
22075 type.
22076
22077 Note that this routine is a bit unusual because its argument may be a
22078 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22079 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22080 node. If it's the former then this function is being called to output a
22081 DIE to represent a formal parameter object (or some inlining thereof). If
22082 it's the latter, then this function is only being called to output a
22083 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22084 argument type of some subprogram type.
22085 If EMIT_NAME_P is true, name and source coordinate attributes
22086 are emitted. */
22087
22088 static dw_die_ref
22089 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22090 dw_die_ref context_die)
22091 {
22092 tree node_or_origin = node ? node : origin;
22093 tree ultimate_origin;
22094 dw_die_ref parm_die = NULL;
22095
22096 if (DECL_P (node_or_origin))
22097 {
22098 parm_die = lookup_decl_die (node);
22099
22100 /* If the contexts differ, we may not be talking about the same
22101 thing.
22102 ??? When in LTO the DIE parent is the "abstract" copy and the
22103 context_die is the specification "copy". But this whole block
22104 should eventually no longer be needed. */
22105 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22106 {
22107 if (!DECL_ABSTRACT_P (node))
22108 {
22109 /* This can happen when creating an inlined instance, in
22110 which case we need to create a new DIE that will get
22111 annotated with DW_AT_abstract_origin. */
22112 parm_die = NULL;
22113 }
22114 else
22115 gcc_unreachable ();
22116 }
22117
22118 if (parm_die && parm_die->die_parent == NULL)
22119 {
22120 /* Check that parm_die already has the right attributes that
22121 we would have added below. If any attributes are
22122 missing, fall through to add them. */
22123 if (! DECL_ABSTRACT_P (node_or_origin)
22124 && !get_AT (parm_die, DW_AT_location)
22125 && !get_AT (parm_die, DW_AT_const_value))
22126 /* We are missing location info, and are about to add it. */
22127 ;
22128 else
22129 {
22130 add_child_die (context_die, parm_die);
22131 return parm_die;
22132 }
22133 }
22134 }
22135
22136 /* If we have a previously generated DIE, use it, unless this is a
22137 concrete instance (origin != NULL), in which case we need a new
22138 DIE with a corresponding DW_AT_abstract_origin. */
22139 bool reusing_die;
22140 if (parm_die && origin == NULL)
22141 reusing_die = true;
22142 else
22143 {
22144 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22145 reusing_die = false;
22146 }
22147
22148 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22149 {
22150 case tcc_declaration:
22151 ultimate_origin = decl_ultimate_origin (node_or_origin);
22152 if (node || ultimate_origin)
22153 origin = ultimate_origin;
22154
22155 if (reusing_die)
22156 goto add_location;
22157
22158 if (origin != NULL)
22159 add_abstract_origin_attribute (parm_die, origin);
22160 else if (emit_name_p)
22161 add_name_and_src_coords_attributes (parm_die, node);
22162 if (origin == NULL
22163 || (! DECL_ABSTRACT_P (node_or_origin)
22164 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22165 decl_function_context
22166 (node_or_origin))))
22167 {
22168 tree type = TREE_TYPE (node_or_origin);
22169 if (decl_by_reference_p (node_or_origin))
22170 add_type_attribute (parm_die, TREE_TYPE (type),
22171 TYPE_UNQUALIFIED,
22172 false, context_die);
22173 else
22174 add_type_attribute (parm_die, type,
22175 decl_quals (node_or_origin),
22176 false, context_die);
22177 }
22178 if (origin == NULL && DECL_ARTIFICIAL (node))
22179 add_AT_flag (parm_die, DW_AT_artificial, 1);
22180 add_location:
22181 if (node && node != origin)
22182 equate_decl_number_to_die (node, parm_die);
22183 if (! DECL_ABSTRACT_P (node_or_origin))
22184 add_location_or_const_value_attribute (parm_die, node_or_origin,
22185 node == NULL);
22186
22187 break;
22188
22189 case tcc_type:
22190 /* We were called with some kind of a ..._TYPE node. */
22191 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22192 context_die);
22193 break;
22194
22195 default:
22196 gcc_unreachable ();
22197 }
22198
22199 return parm_die;
22200 }
22201
22202 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22203 child DW_TAG_formal_parameter DIEs representing the arguments of the
22204 parameter pack.
22205
22206 PARM_PACK must be a function parameter pack.
22207 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22208 must point to the subsequent arguments of the function PACK_ARG belongs to.
22209 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22210 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22211 following the last one for which a DIE was generated. */
22212
22213 static dw_die_ref
22214 gen_formal_parameter_pack_die (tree parm_pack,
22215 tree pack_arg,
22216 dw_die_ref subr_die,
22217 tree *next_arg)
22218 {
22219 tree arg;
22220 dw_die_ref parm_pack_die;
22221
22222 gcc_assert (parm_pack
22223 && lang_hooks.function_parameter_pack_p (parm_pack)
22224 && subr_die);
22225
22226 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22227 add_src_coords_attributes (parm_pack_die, parm_pack);
22228
22229 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22230 {
22231 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22232 parm_pack))
22233 break;
22234 gen_formal_parameter_die (arg, NULL,
22235 false /* Don't emit name attribute. */,
22236 parm_pack_die);
22237 }
22238 if (next_arg)
22239 *next_arg = arg;
22240 return parm_pack_die;
22241 }
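
/* Illustrative example (hypothetical C++ input): for an instantiation of

       template <typename... T> void f (T... args);

   called as f (1, 2.0), the subprogram DIE gets a
   DW_TAG_GNU_formal_parameter_pack child whose own children are the
   DW_TAG_formal_parameter DIEs for the int and double arguments expanded
   from the pack.  */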
22242
22243 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22244 at the end of an (ANSI prototyped) formal parameters list. */
22245
22246 static void
22247 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22248 {
22249 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22250 }
22251
22252 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22253 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22254 parameters as specified in some function type specification (except for
22255 those which appear as part of a function *definition*). */
22256
22257 static void
22258 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22259 {
22260 tree link;
22261 tree formal_type = NULL;
22262 tree first_parm_type;
22263 tree arg;
22264
22265 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22266 {
22267 arg = DECL_ARGUMENTS (function_or_method_type);
22268 function_or_method_type = TREE_TYPE (function_or_method_type);
22269 }
22270 else
22271 arg = NULL_TREE;
22272
22273 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22274
22275 /* Make our first pass over the list of formal parameter types and output a
22276 DW_TAG_formal_parameter DIE for each one. */
22277 for (link = first_parm_type; link; )
22278 {
22279 dw_die_ref parm_die;
22280
22281 formal_type = TREE_VALUE (link);
22282 if (formal_type == void_type_node)
22283 break;
22284
22285 /* Output a (nameless) DIE to represent the formal parameter itself. */
22286 parm_die = gen_formal_parameter_die (formal_type, NULL,
22287 true /* Emit name attribute. */,
22288 context_die);
22289 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22290 && link == first_parm_type)
22291 {
22292 add_AT_flag (parm_die, DW_AT_artificial, 1);
22293 if (dwarf_version >= 3 || !dwarf_strict)
22294 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22295 }
22296 else if (arg && DECL_ARTIFICIAL (arg))
22297 add_AT_flag (parm_die, DW_AT_artificial, 1);
22298
22299 link = TREE_CHAIN (link);
22300 if (arg)
22301 arg = DECL_CHAIN (arg);
22302 }
22303
22304 /* If this function type has an ellipsis, add a
22305 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22306 if (formal_type != void_type_node)
22307 gen_unspecified_parameters_die (function_or_method_type, context_die);
22308
22309 /* Make our second (and final) pass over the list of formal parameter types
22310 and output DIEs to represent those types (as necessary). */
22311 for (link = TYPE_ARG_TYPES (function_or_method_type);
22312 link && TREE_VALUE (link);
22313 link = TREE_CHAIN (link))
22314 gen_type_die (TREE_VALUE (link), context_die);
22315 }
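
/* Example for illustration: for the function type "int (int, ...)" this
   emits one nameless DW_TAG_formal_parameter DIE whose DW_AT_type is int,
   followed by a DW_TAG_unspecified_parameters DIE for the ellipsis.  For a
   METHOD_TYPE the first parameter is additionally marked DW_AT_artificial
   and referenced by DW_AT_object_pointer.  */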
22316
22317 /* We want to generate the DIE for TYPE so that we can generate the
22318 die for MEMBER, which has been defined; we will need to refer back
22319 to the member declaration nested within TYPE. If we're trying to
22320 generate minimal debug info for TYPE, processing TYPE won't do the
22321 trick; we need to attach the member declaration by hand. */
22322
22323 static void
22324 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22325 {
22326 gen_type_die (type, context_die);
22327
22328 /* If we're trying to avoid duplicate debug info, we may not have
22329 emitted the member decl for this function. Emit it now. */
22330 if (TYPE_STUB_DECL (type)
22331 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22332 && ! lookup_decl_die (member))
22333 {
22334 dw_die_ref type_die;
22335 gcc_assert (!decl_ultimate_origin (member));
22336
22337 type_die = lookup_type_die_strip_naming_typedef (type);
22338 if (TREE_CODE (member) == FUNCTION_DECL)
22339 gen_subprogram_die (member, type_die);
22340 else if (TREE_CODE (member) == FIELD_DECL)
22341 {
22342 /* Ignore the nameless fields that are used to skip bits but handle
22343 C++ anonymous unions and structs. */
22344 if (DECL_NAME (member) != NULL_TREE
22345 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22346 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22347 {
22348 struct vlr_context vlr_ctx = {
22349 DECL_CONTEXT (member), /* struct_type */
22350 NULL_TREE /* variant_part_offset */
22351 };
22352 gen_type_die (member_declared_type (member), type_die);
22353 gen_field_die (member, &vlr_ctx, type_die);
22354 }
22355 }
22356 else
22357 gen_variable_die (member, NULL_TREE, type_die);
22358 }
22359 }
22360 \f
22361 /* Forward declare these functions, because they are mutually recursive
22362 with their set_block_* pairing functions. */
22363 static void set_decl_origin_self (tree);
22364
22365 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22366 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22367 that it points to the node itself, thus indicating that the node is its
22368 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22369 the given node is NULL, recursively descend the decl/block tree which
22370 it is the root of, and for each other ..._DECL or BLOCK node contained
22371 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22372 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22373 values to point to themselves. */
22374
22375 static void
22376 set_block_origin_self (tree stmt)
22377 {
22378 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22379 {
22380 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22381
22382 {
22383 tree local_decl;
22384
22385 for (local_decl = BLOCK_VARS (stmt);
22386 local_decl != NULL_TREE;
22387 local_decl = DECL_CHAIN (local_decl))
22388 /* Do not recurse on nested functions since the inlining status
22389 of parent and child can be different as per the DWARF spec. */
22390 if (TREE_CODE (local_decl) != FUNCTION_DECL
22391 && !DECL_EXTERNAL (local_decl))
22392 set_decl_origin_self (local_decl);
22393 }
22394
22395 {
22396 tree subblock;
22397
22398 for (subblock = BLOCK_SUBBLOCKS (stmt);
22399 subblock != NULL_TREE;
22400 subblock = BLOCK_CHAIN (subblock))
22401 set_block_origin_self (subblock); /* Recurse. */
22402 }
22403 }
22404 }
22405
22406 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22407 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22408 node so that it points to the node itself, thus indicating that the
22409 node represents its own (abstract) origin. Additionally, if the
22410 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22411 the decl/block tree of which the given node is the root, and for
22412 each other ..._DECL or BLOCK node contained therein whose
22413 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22414 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22415 point to themselves. */
22416
22417 static void
22418 set_decl_origin_self (tree decl)
22419 {
22420 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22421 {
22422 DECL_ABSTRACT_ORIGIN (decl) = decl;
22423 if (TREE_CODE (decl) == FUNCTION_DECL)
22424 {
22425 tree arg;
22426
22427 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22428 DECL_ABSTRACT_ORIGIN (arg) = arg;
22429 if (DECL_INITIAL (decl) != NULL_TREE
22430 && DECL_INITIAL (decl) != error_mark_node)
22431 set_block_origin_self (DECL_INITIAL (decl));
22432 }
22433 }
22434 }
22435 \f
22436 /* Mark the early DIE for DECL as the abstract instance. */
22437
22438 static void
22439 dwarf2out_abstract_function (tree decl)
22440 {
22441 dw_die_ref old_die;
22442
22443 /* Make sure we have the actual abstract inline, not a clone. */
22444 decl = DECL_ORIGIN (decl);
22445
22446 if (DECL_IGNORED_P (decl))
22447 return;
22448
22449 old_die = lookup_decl_die (decl);
22450 /* With early debug we always have an old DIE unless we are in LTO
22451      and the user did not compile with debug info but only linked with it.  */
22452 if (in_lto_p && ! old_die)
22453 return;
22454 gcc_assert (old_die != NULL);
22455 if (get_AT (old_die, DW_AT_inline)
22456 || get_AT (old_die, DW_AT_abstract_origin))
22457 /* We've already generated the abstract instance. */
22458 return;
22459
22460 /* Go ahead and put DW_AT_inline on the DIE. */
22461 if (DECL_DECLARED_INLINE_P (decl))
22462 {
22463 if (cgraph_function_possibly_inlined_p (decl))
22464 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22465 else
22466 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22467 }
22468 else
22469 {
22470 if (cgraph_function_possibly_inlined_p (decl))
22471 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22472 else
22473 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22474 }
22475
22476 if (DECL_DECLARED_INLINE_P (decl)
22477 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22478 add_AT_flag (old_die, DW_AT_artificial, 1);
22479
22480 set_decl_origin_self (decl);
22481 }
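
/* A worked example (the function name is illustrative): for

     static inline int square (int i) { return i * i; }

   dwarf2out_abstract_function marks square's early DIE with
   DW_AT_inline = DW_INL_declared_inlined if cgraph says a call to it may
   actually have been inlined, or DW_INL_declared_not_inlined otherwise;
   a function not declared inline gets DW_INL_inlined or
   DW_INL_not_inlined instead.  */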
22482
22483 /* Helper function of premark_used_types() which gets called through
22484 htab_traverse.
22485
22486    Marks the DIE of the given TYPE as perennial, so it never gets
22487 marked as unused by prune_unused_types. */
22488
22489 bool
22490 premark_used_types_helper (tree const &type, void *)
22491 {
22492 dw_die_ref die;
22493
22494 die = lookup_type_die (type);
22495 if (die != NULL)
22496 die->die_perennial_p = 1;
22497 return true;
22498 }
22499
22500 /* Helper function of premark_types_used_by_global_vars which gets called
22501 through htab_traverse.
22502
22503 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22504 marked as unused by prune_unused_types. The DIE of the type is marked
22505 only if the global variable using the type will actually be emitted. */
22506
22507 int
22508 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22509 void *)
22510 {
22511 struct types_used_by_vars_entry *entry;
22512 dw_die_ref die;
22513
22514 entry = (struct types_used_by_vars_entry *) *slot;
22515 gcc_assert (entry->type != NULL
22516 && entry->var_decl != NULL);
22517 die = lookup_type_die (entry->type);
22518 if (die)
22519 {
22520 /* Ask cgraph if the global variable really is to be emitted.
22521 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22522 varpool_node *node = varpool_node::get (entry->var_decl);
22523 if (node && node->definition)
22524 {
22525 die->die_perennial_p = 1;
22526 /* Keep the parent DIEs as well. */
22527 while ((die = die->die_parent) && die->die_perennial_p == 0)
22528 die->die_perennial_p = 1;
22529 }
22530 }
22531 return 1;
22532 }
22533
22534 /* Mark all members of used_types_hash as perennial. */
22535
22536 static void
22537 premark_used_types (struct function *fun)
22538 {
22539 if (fun && fun->used_types_hash)
22540 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22541 }
22542
22543 /* Mark all members of types_used_by_vars_hash as perennial.  */
22544
22545 static void
22546 premark_types_used_by_global_vars (void)
22547 {
22548 if (types_used_by_vars_hash)
22549 types_used_by_vars_hash
22550 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22551 }
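
/* Sketch of the effect (the declarations are illustrative): given

     struct point { int x, y; };
     struct point origin_pt;

   if the varpool says the definition of origin_pt is really emitted, the
   DIE for struct point (and its parent DIEs) is marked perennial above,
   so prune_unused_types keeps it even when no emitted function mentions
   the type.  */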
22552
22553 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22554 for CA_LOC call arg loc node. */
22555
22556 static dw_die_ref
22557 gen_call_site_die (tree decl, dw_die_ref subr_die,
22558 struct call_arg_loc_node *ca_loc)
22559 {
22560 dw_die_ref stmt_die = NULL, die;
22561 tree block = ca_loc->block;
22562
22563 while (block
22564 && block != DECL_INITIAL (decl)
22565 && TREE_CODE (block) == BLOCK)
22566 {
22567 stmt_die = BLOCK_DIE (block);
22568 if (stmt_die)
22569 break;
22570 block = BLOCK_SUPERCONTEXT (block);
22571 }
22572 if (stmt_die == NULL)
22573 stmt_die = subr_die;
22574 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22575 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22576 if (ca_loc->tail_call_p)
22577 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22578 if (ca_loc->symbol_ref)
22579 {
22580 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22581 if (tdie)
22582 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22583 else
22584 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22585 false);
22586 }
22587 return die;
22588 }
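
/* Sketch (the caller/callee names are made up): for

     extern int callee (int);
     int caller (int i) { return callee (i + 1); }

   this creates, under caller's subprogram DIE (or under the DIE of the
   innermost BLOCK that already has one), a DW_TAG_call_site DIE whose
   DW_AT_call_return_pc labels the point after the call and whose
   DW_AT_call_origin refers to callee's DIE when it is known; for DWARF
   versions before 5, dwarf_TAG/dwarf_AT map these to the GNU_
   equivalents.  */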
22589
22590 /* Generate a DIE to represent a declared function (either file-scope or
22591 block-local). */
22592
22593 static void
22594 gen_subprogram_die (tree decl, dw_die_ref context_die)
22595 {
22596 tree origin = decl_ultimate_origin (decl);
22597 dw_die_ref subr_die;
22598 dw_die_ref old_die = lookup_decl_die (decl);
22599
22600 /* This function gets called multiple times for different stages of
22601 the debug process. For example, for func() in this code:
22602
22603 namespace S
22604 {
22605 void func() { ... }
22606 }
22607
22608 ...we get called 4 times. Twice in early debug and twice in
22609 late debug:
22610
22611 Early debug
22612 -----------
22613
22614 1. Once while generating func() within the namespace. This is
22615 the declaration. The declaration bit below is set, as the
22616 context is the namespace.
22617
22618 A new DIE will be generated with DW_AT_declaration set.
22619
22620 2. Once for func() itself. This is the specification. The
22621 declaration bit below is clear as the context is the CU.
22622
22623 We will use the cached DIE from (1) to create a new DIE with
22624 DW_AT_specification pointing to the declaration in (1).
22625
22626 Late debug via rest_of_handle_final()
22627 -------------------------------------
22628
22629      3. Once while generating func() within the namespace.  This is also the
22630 declaration, as in (1), but this time we will early exit below
22631 as we have a cached DIE and a declaration needs no additional
22632 annotations (no locations), as the source declaration line
22633 info is enough.
22634
22635 4. Once for func() itself. As in (2), this is the specification,
22636 but this time we will re-use the cached DIE, and just annotate
22637 it with the location information that should now be available.
22638
22639 For something without namespaces, but with abstract instances, we
22640      are also called multiple times:
22641
22642 class Base
22643 {
22644 public:
22645 Base (); // constructor declaration (1)
22646 };
22647
22648 Base::Base () { } // constructor specification (2)
22649
22650 Early debug
22651 -----------
22652
22653 1. Once for the Base() constructor by virtue of it being a
22654 member of the Base class. This is done via
22655 rest_of_type_compilation.
22656
22657 This is a declaration, so a new DIE will be created with
22658 DW_AT_declaration.
22659
22660 2. Once for the Base() constructor definition, but this time
22661 while generating the abstract instance of the base
22662 constructor (__base_ctor) which is being generated via early
22663 debug of reachable functions.
22664
22665 Even though we have a cached version of the declaration (1),
22666 we will create a DW_AT_specification of the declaration DIE
22667 in (1).
22668
22669 3. Once for the __base_ctor itself, but this time, we generate
22670      a DW_AT_abstract_origin version of the DW_AT_specification in
22671 (2).
22672
22673 Late debug via rest_of_handle_final
22674 -----------------------------------
22675
22676 4. One final time for the __base_ctor (which will have a cached
22677      DIE with DW_AT_abstract_origin created in (3)). This time,
22678 we will just annotate the location information now
22679 available.
22680 */
22681 int declaration = (current_function_decl != decl
22682 || class_or_namespace_scope_p (context_die));
22683
22684 /* A declaration that has been previously dumped needs no
22685 additional information. */
22686 if (old_die && declaration)
22687 return;
22688
22689 /* Now that the C++ front end lazily declares artificial member fns, we
22690 might need to retrofit the declaration into its class. */
22691 if (!declaration && !origin && !old_die
22692 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22693 && !class_or_namespace_scope_p (context_die)
22694 && debug_info_level > DINFO_LEVEL_TERSE)
22695 old_die = force_decl_die (decl);
22696
22697 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22698 if (origin != NULL)
22699 {
22700 gcc_assert (!declaration || local_scope_p (context_die));
22701
22702 /* Fixup die_parent for the abstract instance of a nested
22703 inline function. */
22704 if (old_die && old_die->die_parent == NULL)
22705 add_child_die (context_die, old_die);
22706
22707 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22708 {
22709 /* If we have a DW_AT_abstract_origin we have a working
22710 cached version. */
22711 subr_die = old_die;
22712 }
22713 else
22714 {
22715 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22716 add_abstract_origin_attribute (subr_die, origin);
22717 /* This is where the actual code for a cloned function is.
22718 Let's emit linkage name attribute for it. This helps
22719          debuggers to, e.g., set breakpoints into
22720 constructors/destructors when the user asks "break
22721 K::K". */
22722 add_linkage_name (subr_die, decl);
22723 }
22724 }
22725 /* A cached copy, possibly from early dwarf generation. Reuse as
22726 much as possible. */
22727 else if (old_die)
22728 {
22729 if (!get_AT_flag (old_die, DW_AT_declaration)
22730 /* We can have a normal definition following an inline one in the
22731 case of redefinition of GNU C extern inlines.
22732 It seems reasonable to use AT_specification in this case. */
22733 && !get_AT (old_die, DW_AT_inline))
22734 {
22735 /* Detect and ignore this case, where we are trying to output
22736 something we have already output. */
22737 if (get_AT (old_die, DW_AT_low_pc)
22738 || get_AT (old_die, DW_AT_ranges))
22739 return;
22740
22741 /* If we have no location information, this must be a
22742 partially generated DIE from early dwarf generation.
22743 Fall through and generate it. */
22744 }
22745
22746 /* If the definition comes from the same place as the declaration,
22747 maybe use the old DIE. We always want the DIE for this function
22748 that has the *_pc attributes to be under comp_unit_die so the
22749 debugger can find it. We also need to do this for abstract
22750 instances of inlines, since the spec requires the out-of-line copy
22751 to have the same parent. For local class methods, this doesn't
22752 apply; we just use the old DIE. */
22753 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22754 struct dwarf_file_data * file_index = lookup_filename (s.file);
22755 if (((is_unit_die (old_die->die_parent)
22756 /* This condition fixes the inconsistency/ICE with the
22757 following Fortran test (or some derivative thereof) while
22758 building libgfortran:
22759
22760 module some_m
22761 contains
22762 logical function funky (FLAG)
22763 funky = .true.
22764 end function
22765 end module
22766 */
22767 || (old_die->die_parent
22768 && old_die->die_parent->die_tag == DW_TAG_module)
22769 || context_die == NULL)
22770 && (DECL_ARTIFICIAL (decl)
22771 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22772 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22773 == (unsigned) s.line)
22774 && (!debug_column_info
22775 || s.column == 0
22776 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22777 == (unsigned) s.column)))))
22778 /* With LTO if there's an abstract instance for
22779 the old DIE, this is a concrete instance and
22780 thus re-use the DIE. */
22781 || get_AT (old_die, DW_AT_abstract_origin))
22782 {
22783 subr_die = old_die;
22784
22785 /* Clear out the declaration attribute, but leave the
22786 parameters so they can be augmented with location
22787 information later. Unless this was a declaration, in
22788 which case, wipe out the nameless parameters and recreate
22789 them further down. */
22790 if (remove_AT (subr_die, DW_AT_declaration))
22791 {
22792
22793 remove_AT (subr_die, DW_AT_object_pointer);
22794 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22795 }
22796 }
22797 /* Make a specification pointing to the previously built
22798 declaration. */
22799 else
22800 {
22801 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22802 add_AT_specification (subr_die, old_die);
22803 add_pubname (decl, subr_die);
22804 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22805 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22806 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22807 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22808 if (debug_column_info
22809 && s.column
22810 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22811 != (unsigned) s.column))
22812 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22813
22814 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22815 emit the real type on the definition die. */
22816 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22817 {
22818 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22819 if (die == auto_die || die == decltype_auto_die)
22820 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22821 TYPE_UNQUALIFIED, false, context_die);
22822 }
22823
22824 /* When we process the method declaration, we haven't seen
22825 the out-of-class defaulted definition yet, so we have to
22826 recheck now. */
22827 if ((dwarf_version >= 5 || ! dwarf_strict)
22828 && !get_AT (subr_die, DW_AT_defaulted))
22829 {
22830 int defaulted
22831 = lang_hooks.decls.decl_dwarf_attribute (decl,
22832 DW_AT_defaulted);
22833 if (defaulted != -1)
22834 {
22835 /* Other values must have been handled before. */
22836 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22837 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22838 }
22839 }
22840 }
22841 }
22842 /* Create a fresh DIE for anything else. */
22843 else
22844 {
22845 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22846
22847 if (TREE_PUBLIC (decl))
22848 add_AT_flag (subr_die, DW_AT_external, 1);
22849
22850 add_name_and_src_coords_attributes (subr_die, decl);
22851 add_pubname (decl, subr_die);
22852 if (debug_info_level > DINFO_LEVEL_TERSE)
22853 {
22854 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22855 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22856 TYPE_UNQUALIFIED, false, context_die);
22857 }
22858
22859 add_pure_or_virtual_attribute (subr_die, decl);
22860 if (DECL_ARTIFICIAL (decl))
22861 add_AT_flag (subr_die, DW_AT_artificial, 1);
22862
22863 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22864 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22865
22866 add_alignment_attribute (subr_die, decl);
22867
22868 add_accessibility_attribute (subr_die, decl);
22869 }
22870
22871 /* Unless we have an existing non-declaration DIE, equate the new
22872 DIE. */
22873 if (!old_die || is_declaration_die (old_die))
22874 equate_decl_number_to_die (decl, subr_die);
22875
22876 if (declaration)
22877 {
22878 if (!old_die || !get_AT (old_die, DW_AT_inline))
22879 {
22880 add_AT_flag (subr_die, DW_AT_declaration, 1);
22881
22882 /* If this is an explicit function declaration then generate
22883 a DW_AT_explicit attribute. */
22884 if ((dwarf_version >= 3 || !dwarf_strict)
22885 && lang_hooks.decls.decl_dwarf_attribute (decl,
22886 DW_AT_explicit) == 1)
22887 add_AT_flag (subr_die, DW_AT_explicit, 1);
22888
22889 /* If this is a C++11 deleted special function member then generate
22890 a DW_AT_deleted attribute. */
22891 if ((dwarf_version >= 5 || !dwarf_strict)
22892 && lang_hooks.decls.decl_dwarf_attribute (decl,
22893 DW_AT_deleted) == 1)
22894 add_AT_flag (subr_die, DW_AT_deleted, 1);
22895
22896 /* If this is a C++11 defaulted special function member then
22897 generate a DW_AT_defaulted attribute. */
22898 if (dwarf_version >= 5 || !dwarf_strict)
22899 {
22900 int defaulted
22901 = lang_hooks.decls.decl_dwarf_attribute (decl,
22902 DW_AT_defaulted);
22903 if (defaulted != -1)
22904 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22905 }
22906
22907 /* If this is a C++11 non-static member function with & ref-qualifier
22908 then generate a DW_AT_reference attribute. */
22909 if ((dwarf_version >= 5 || !dwarf_strict)
22910 && lang_hooks.decls.decl_dwarf_attribute (decl,
22911 DW_AT_reference) == 1)
22912 add_AT_flag (subr_die, DW_AT_reference, 1);
22913
22914 /* If this is a C++11 non-static member function with &&
22915          ref-qualifier then generate a DW_AT_rvalue_reference attribute.  */
22916 if ((dwarf_version >= 5 || !dwarf_strict)
22917 && lang_hooks.decls.decl_dwarf_attribute (decl,
22918 DW_AT_rvalue_reference)
22919 == 1)
22920 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22921 }
22922 }
22923 /* For non DECL_EXTERNALs, if range information is available, fill
22924 the DIE with it. */
22925 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22926 {
22927 HOST_WIDE_INT cfa_fb_offset;
22928
22929 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22930
22931 if (!crtl->has_bb_partition)
22932 {
22933 dw_fde_ref fde = fun->fde;
22934 if (fde->dw_fde_begin)
22935 {
22936 /* We have already generated the labels. */
22937 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22938 fde->dw_fde_end, false);
22939 }
22940 else
22941 {
22942 /* Create start/end labels and add the range. */
22943 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22944 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22945 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22946 current_function_funcdef_no);
22947 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22948 current_function_funcdef_no);
22949 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22950 false);
22951 }
22952
22953 #if VMS_DEBUGGING_INFO
22954 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22955 Section 2.3 Prologue and Epilogue Attributes:
22956 When a breakpoint is set on entry to a function, it is generally
22957 desirable for execution to be suspended, not on the very first
22958 instruction of the function, but rather at a point after the
22959 function's frame has been set up, after any language defined local
22960 declaration processing has been completed, and before execution of
22961 the first statement of the function begins. Debuggers generally
22962 cannot properly determine where this point is. Similarly for a
22963 breakpoint set on exit from a function. The prologue and epilogue
22964 attributes allow a compiler to communicate the location(s) to use. */
22965
22966 {
22967 if (fde->dw_fde_vms_end_prologue)
22968 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22969 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22970
22971 if (fde->dw_fde_vms_begin_epilogue)
22972 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22973 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22974 }
22975 #endif
22976
22977 }
22978 else
22979 {
22980 /* Generate pubnames entries for the split function code ranges. */
22981 dw_fde_ref fde = fun->fde;
22982
22983 if (fde->dw_fde_second_begin)
22984 {
22985 if (dwarf_version >= 3 || !dwarf_strict)
22986 {
22987 /* We should use ranges for non-contiguous code section
22988 addresses. Use the actual code range for the initial
22989 section, since the HOT/COLD labels might precede an
22990 alignment offset. */
22991 bool range_list_added = false;
22992 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
22993 fde->dw_fde_end, &range_list_added,
22994 false);
22995 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
22996 fde->dw_fde_second_end,
22997 &range_list_added, false);
22998 if (range_list_added)
22999 add_ranges (NULL);
23000 }
23001 else
23002 {
23003              /* There is no real support in DWARF 2 for this, so we make
23004 a work-around. First, emit the pub name for the segment
23005 containing the function label. Then make and emit a
23006 simplified subprogram DIE for the second segment with the
23007                 name prefixed by __second_sect_of_.  We use the same
23008 linkage name for the second die so that gdb will find both
23009 sections when given "b foo". */
23010 const char *name = NULL;
23011 tree decl_name = DECL_NAME (decl);
23012 dw_die_ref seg_die;
23013
23014 /* Do the 'primary' section. */
23015 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23016 fde->dw_fde_end, false);
23017
23018 /* Build a minimal DIE for the secondary section. */
23019 seg_die = new_die (DW_TAG_subprogram,
23020 subr_die->die_parent, decl);
23021
23022 if (TREE_PUBLIC (decl))
23023 add_AT_flag (seg_die, DW_AT_external, 1);
23024
23025 if (decl_name != NULL
23026 && IDENTIFIER_POINTER (decl_name) != NULL)
23027 {
23028 name = dwarf2_name (decl, 1);
23029 if (! DECL_ARTIFICIAL (decl))
23030 add_src_coords_attributes (seg_die, decl);
23031
23032 add_linkage_name (seg_die, decl);
23033 }
23034 gcc_assert (name != NULL);
23035 add_pure_or_virtual_attribute (seg_die, decl);
23036 if (DECL_ARTIFICIAL (decl))
23037 add_AT_flag (seg_die, DW_AT_artificial, 1);
23038
23039 name = concat ("__second_sect_of_", name, NULL);
23040 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23041 fde->dw_fde_second_end, false);
23042 add_name_attribute (seg_die, name);
23043 if (want_pubnames ())
23044 add_pubname_string (name, seg_die);
23045 }
23046 }
23047 else
23048 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23049 false);
23050 }
23051
23052 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23053
23054 /* We define the "frame base" as the function's CFA. This is more
23055 convenient for several reasons: (1) It's stable across the prologue
23056 and epilogue, which makes it better than just a frame pointer,
23057 (2) With dwarf3, there exists a one-byte encoding that allows us
23058 to reference the .debug_frame data by proxy, but failing that,
23059 (3) We can at least reuse the code inspection and interpretation
23060 code that determines the CFA position at various points in the
23061 function. */
23062 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23063 {
23064 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23065 add_AT_loc (subr_die, DW_AT_frame_base, op);
23066 }
23067 else
23068 {
23069 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23070 if (list->dw_loc_next)
23071 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23072 else
23073 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23074 }
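
      /* Concretely (register names are illustrative), the DWARF 3 form above
	 is the single-byte expression

	   DW_AT_frame_base: DW_OP_call_frame_cfa

	 whereas the fallback emits either one expression or a location list
	 of register-relative expressions (e.g. DW_OP_breg6/DW_OP_breg7 on
	 x86_64) that track the CFA across the prologue and epilogue.  */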
23075
23076 /* Compute a displacement from the "steady-state frame pointer" to
23077 the CFA. The former is what all stack slots and argument slots
23078 will reference in the rtl; the latter is what we've told the
23079 debugger about. We'll need to adjust all frame_base references
23080 by this displacement. */
23081 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23082
23083 if (fun->static_chain_decl)
23084 {
23085 /* DWARF requires here a location expression that computes the
23086 address of the enclosing subprogram's frame base. The machinery
23087 in tree-nested.c is supposed to store this specific address in the
23088 last field of the FRAME record. */
23089 const tree frame_type
23090 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23091 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23092
23093 tree fb_expr
23094 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23095 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23096 fb_expr, fb_decl, NULL_TREE);
23097
23098 add_AT_location_description (subr_die, DW_AT_static_link,
23099 loc_list_from_tree (fb_expr, 0, NULL));
23100 }
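
      /* Sketch of the case handled above, using a GNU C nested function
	 (names are illustrative):

	   int outer (int i)
	   {
	     int inner (void) { return i + 1; }
	     return inner ();
	   }

	 tree-nested.c gives inner a static chain pointing at outer's FRAME
	 record and stores outer's frame base address in that record's last
	 field, so the DW_AT_static_link expression built above lets a
	 debugger walk from inner back to outer's frame.  */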
23101
23102 resolve_variable_values ();
23103 }
23104
23105   /* Generate child DIEs for template parameters.  */
23106 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23107 gen_generic_params_dies (decl);
23108
23109 /* Now output descriptions of the arguments for this function. This gets
23110      (unnecessarily?) complex because the DECL_ARGUMENTS list
23111 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23112 `...' at the end of the formal parameter list. In order to find out if
23113 there was a trailing ellipsis or not, we must instead look at the type
23114 associated with the FUNCTION_DECL. This will be a node of type
23115 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23116 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23117 an ellipsis at the end. */
23118
23119 /* In the case where we are describing a mere function declaration, all we
23120 need to do here (and all we *can* do here) is to describe the *types* of
23121 its formal parameters. */
23122 if (debug_info_level <= DINFO_LEVEL_TERSE)
23123 ;
23124 else if (declaration)
23125 gen_formal_types_die (decl, subr_die);
23126 else
23127 {
23128 /* Generate DIEs to represent all known formal parameters. */
23129 tree parm = DECL_ARGUMENTS (decl);
23130 tree generic_decl = early_dwarf
23131 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23132 tree generic_decl_parm = generic_decl
23133 ? DECL_ARGUMENTS (generic_decl)
23134 : NULL;
23135
23136 /* Now we want to walk the list of parameters of the function and
23137 emit their relevant DIEs.
23138
23139 We consider the case of DECL being an instance of a generic function
23140 as well as it being a normal function.
23141
23142 If DECL is an instance of a generic function we walk the
23143 parameters of the generic function declaration _and_ the parameters of
23144 DECL itself. This is useful because we want to emit specific DIEs for
23145 function parameter packs and those are declared as part of the
23146 generic function declaration. In that particular case,
23147 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23148          That DIE has child DIEs representing the set of arguments
23149 of the pack. Note that the set of pack arguments can be empty.
23150 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23151          child DIEs.
23152
23153 Otherwise, we just consider the parameters of DECL. */
23154 while (generic_decl_parm || parm)
23155 {
23156 if (generic_decl_parm
23157 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23158 gen_formal_parameter_pack_die (generic_decl_parm,
23159 parm, subr_die,
23160 &parm);
23161 else if (parm)
23162 {
23163 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23164
23165 if (early_dwarf
23166 && parm == DECL_ARGUMENTS (decl)
23167 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23168 && parm_die
23169 && (dwarf_version >= 3 || !dwarf_strict))
23170 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23171
23172 parm = DECL_CHAIN (parm);
23173 }
23174 else if (parm)
23175 parm = DECL_CHAIN (parm);
23176
23177 if (generic_decl_parm)
23178 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23179 }
23180
23181 /* Decide whether we need an unspecified_parameters DIE at the end.
23182          There are two more cases to do this for: 1) the ANSI `...' declaration -
23183          this is detectable when the end of the arg list is not a
23184          void_type_node; 2) an unprototyped function declaration (not a
23185          definition), which just means that we have no info about the
23186 parameters at all. */
23187 if (early_dwarf)
23188 {
23189 if (prototype_p (TREE_TYPE (decl)))
23190 {
23191              /* This is the prototyped case, check for a trailing ellipsis.  */
23192 if (stdarg_p (TREE_TYPE (decl)))
23193 gen_unspecified_parameters_die (decl, subr_die);
23194 }
23195 else if (DECL_INITIAL (decl) == NULL_TREE)
23196 gen_unspecified_parameters_die (decl, subr_die);
23197 }
23198 }
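
  /* Illustration of the two cases named above (the declarations are
     made up):

       int fmt (const char *f, ...);   - prototype_p and stdarg_p, so a
					 DW_TAG_unspecified_parameters DIE is
					 added after the formal parameters;
       int old_style ();               - not prototype_p and no DECL_INITIAL,
					 so DW_TAG_unspecified_parameters is
					 added because nothing is known about
					 the parameters.  */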
23199
23200 if (subr_die != old_die)
23201 /* Add the calling convention attribute if requested. */
23202 add_calling_convention_attribute (subr_die, decl);
23203
23204 /* Output Dwarf info for all of the stuff within the body of the function
23205 (if it has one - it may be just a declaration).
23206
23207 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23208 a function. This BLOCK actually represents the outermost binding contour
23209 for the function, i.e. the contour in which the function's formal
23210 parameters and labels get declared. Curiously, it appears that the front
23211 end doesn't actually put the PARM_DECL nodes for the current function onto
23212      the BLOCK_VARS list for this outer scope; they are strung off of the
23213 DECL_ARGUMENTS list for the function instead.
23214
23215      The BLOCK_VARS list for the `outer_scope' does, however, provide us with
23216      a list of the LABEL_DECL nodes for the function, and we output DWARF info
23217 for those in decls_for_scope. Just within the `outer_scope' there will be
23218 a BLOCK node representing the function's outermost pair of curly braces,
23219 and any blocks used for the base and member initializers of a C++
23220 constructor function. */
23221 tree outer_scope = DECL_INITIAL (decl);
23222 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23223 {
23224 int call_site_note_count = 0;
23225 int tail_call_site_note_count = 0;
23226
23227 /* Emit a DW_TAG_variable DIE for a named return value. */
23228 if (DECL_NAME (DECL_RESULT (decl)))
23229 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23230
23231 /* The first time through decls_for_scope we will generate the
23232 DIEs for the locals. The second time, we fill in the
23233 location info. */
23234 decls_for_scope (outer_scope, subr_die);
23235
23236 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23237 {
23238 struct call_arg_loc_node *ca_loc;
23239 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23240 {
23241 dw_die_ref die = NULL;
23242 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23243 rtx arg, next_arg;
23244
23245 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23246 ? XEXP (ca_loc->call_arg_loc_note, 0)
23247 : NULL_RTX);
23248 arg; arg = next_arg)
23249 {
23250 dw_loc_descr_ref reg, val;
23251 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23252 dw_die_ref cdie, tdie = NULL;
23253
23254 next_arg = XEXP (arg, 1);
23255 if (REG_P (XEXP (XEXP (arg, 0), 0))
23256 && next_arg
23257 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23258 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23259 && REGNO (XEXP (XEXP (arg, 0), 0))
23260 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23261 next_arg = XEXP (next_arg, 1);
23262 if (mode == VOIDmode)
23263 {
23264 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23265 if (mode == VOIDmode)
23266 mode = GET_MODE (XEXP (arg, 0));
23267 }
23268 if (mode == VOIDmode || mode == BLKmode)
23269 continue;
23270 /* Get dynamic information about call target only if we
23271 have no static information: we cannot generate both
23272 DW_AT_call_origin and DW_AT_call_target
23273 attributes. */
23274 if (ca_loc->symbol_ref == NULL_RTX)
23275 {
23276 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23277 {
23278 tloc = XEXP (XEXP (arg, 0), 1);
23279 continue;
23280 }
23281 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23282 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23283 {
23284 tlocc = XEXP (XEXP (arg, 0), 1);
23285 continue;
23286 }
23287 }
23288 reg = NULL;
23289 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23290 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23291 VAR_INIT_STATUS_INITIALIZED);
23292 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23293 {
23294 rtx mem = XEXP (XEXP (arg, 0), 0);
23295 reg = mem_loc_descriptor (XEXP (mem, 0),
23296 get_address_mode (mem),
23297 GET_MODE (mem),
23298 VAR_INIT_STATUS_INITIALIZED);
23299 }
23300 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23301 == DEBUG_PARAMETER_REF)
23302 {
23303 tree tdecl
23304 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23305 tdie = lookup_decl_die (tdecl);
23306 if (tdie == NULL)
23307 continue;
23308 }
23309 else
23310 continue;
23311 if (reg == NULL
23312 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23313 != DEBUG_PARAMETER_REF)
23314 continue;
23315 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23316 VOIDmode,
23317 VAR_INIT_STATUS_INITIALIZED);
23318 if (val == NULL)
23319 continue;
23320 if (die == NULL)
23321 die = gen_call_site_die (decl, subr_die, ca_loc);
23322 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23323 NULL_TREE);
23324 if (reg != NULL)
23325 add_AT_loc (cdie, DW_AT_location, reg);
23326 else if (tdie != NULL)
23327 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23328 tdie);
23329 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23330 if (next_arg != XEXP (arg, 1))
23331 {
23332 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23333 if (mode == VOIDmode)
23334 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23335 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23336 0), 1),
23337 mode, VOIDmode,
23338 VAR_INIT_STATUS_INITIALIZED);
23339 if (val != NULL)
23340 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23341 val);
23342 }
23343 }
23344 if (die == NULL
23345 && (ca_loc->symbol_ref || tloc))
23346 die = gen_call_site_die (decl, subr_die, ca_loc);
23347 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23348 {
23349 dw_loc_descr_ref tval = NULL;
23350
23351 if (tloc != NULL_RTX)
23352 tval = mem_loc_descriptor (tloc,
23353 GET_MODE (tloc) == VOIDmode
23354 ? Pmode : GET_MODE (tloc),
23355 VOIDmode,
23356 VAR_INIT_STATUS_INITIALIZED);
23357 if (tval)
23358 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23359 else if (tlocc != NULL_RTX)
23360 {
23361 tval = mem_loc_descriptor (tlocc,
23362 GET_MODE (tlocc) == VOIDmode
23363 ? Pmode : GET_MODE (tlocc),
23364 VOIDmode,
23365 VAR_INIT_STATUS_INITIALIZED);
23366 if (tval)
23367 add_AT_loc (die,
23368 dwarf_AT (DW_AT_call_target_clobbered),
23369 tval);
23370 }
23371 }
23372 if (die != NULL)
23373 {
23374 call_site_note_count++;
23375 if (ca_loc->tail_call_p)
23376 tail_call_site_note_count++;
23377 }
23378 }
23379 }
23380 call_arg_locations = NULL;
23381 call_arg_loc_last = NULL;
23382 if (tail_call_site_count >= 0
23383 && tail_call_site_count == tail_call_site_note_count
23384 && (!dwarf_strict || dwarf_version >= 5))
23385 {
23386 if (call_site_count >= 0
23387 && call_site_count == call_site_note_count)
23388 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23389 else
23390 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23391 }
23392 call_site_count = -1;
23393 tail_call_site_count = -1;
23394 }
23395
23396 /* Mark used types after we have created DIEs for the functions scopes. */
23397 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23398 }
23399
23400 /* Returns a hash value for X (which really is a die_struct). */
23401
23402 hashval_t
23403 block_die_hasher::hash (die_struct *d)
23404 {
23405 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23406 }
23407
23408 /* Return nonzero if decl_id and die_parent of die_struct X is the same
23409 as decl_id and die_parent of die_struct Y. */
23410
23411 bool
23412 block_die_hasher::equal (die_struct *x, die_struct *y)
23413 {
23414 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23415 }
23416
23417 /* Hold information about markers for inlined entry points. */
23418 struct GTY ((for_user)) inline_entry_data
23419 {
23420 /* The block that's the inlined_function_outer_scope for an inlined
23421 function. */
23422 tree block;
23423
23424 /* The label at the inlined entry point. */
23425 const char *label_pfx;
23426 unsigned int label_num;
23427
23428 /* The view number to be used as the inlined entry point. */
23429 var_loc_view view;
23430 };
23431
23432 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23433 {
23434 typedef tree compare_type;
23435 static inline hashval_t hash (const inline_entry_data *);
23436 static inline bool equal (const inline_entry_data *, const_tree);
23437 };
23438
23439 /* Hash table routines for inline_entry_data. */
23440
23441 inline hashval_t
23442 inline_entry_data_hasher::hash (const inline_entry_data *data)
23443 {
23444 return htab_hash_pointer (data->block);
23445 }
23446
23447 inline bool
23448 inline_entry_data_hasher::equal (const inline_entry_data *data,
23449 const_tree block)
23450 {
23451 return data->block == block;
23452 }
23453
23454 /* Inlined entry points pending DIE creation in this compilation unit. */
23455
23456 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23457
23458
23459 /* Return TRUE if DECL, which may have been previously generated as
23460 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23461 true if decl (or its origin) is either an extern declaration or a
23462 class/namespace scoped declaration.
23463
23464 The declare_in_namespace support causes us to get two DIEs for one
23465 variable, both of which are declarations. We want to avoid
23466 considering one to be a specification, so we must test for
23467 DECLARATION and DW_AT_declaration. */
23468 static inline bool
23469 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23470 {
23471 return (old_die && TREE_STATIC (decl) && !declaration
23472 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23473 }
23474
23475 /* Return true if DECL is a local static. */
23476
23477 static inline bool
23478 local_function_static (tree decl)
23479 {
23480 gcc_assert (VAR_P (decl));
23481 return TREE_STATIC (decl)
23482 && DECL_CONTEXT (decl)
23483 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23484 }
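
/* For example (illustrative only):

     void count_calls (void) { static int ncalls; }

   ncalls is a local function static: it is TREE_STATIC and its
   DECL_CONTEXT is the enclosing FUNCTION_DECL.  A file-scope
   `static int ncalls;' is not, since its context is the translation
   unit.  */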
23485
23486 /* Generate a DIE to represent a declared data object.
23487 Either DECL or ORIGIN must be non-null. */
23488
23489 static void
23490 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23491 {
23492 HOST_WIDE_INT off = 0;
23493 tree com_decl;
23494 tree decl_or_origin = decl ? decl : origin;
23495 tree ultimate_origin;
23496 dw_die_ref var_die;
23497 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23498 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23499 || class_or_namespace_scope_p (context_die));
23500 bool specialization_p = false;
23501 bool no_linkage_name = false;
23502
23503 /* While C++ inline static data members have definitions inside of the
23504 class, force the first DIE to be a declaration, then let gen_member_die
23505 reparent it to the class context and call gen_variable_die again
23506 to create the outside of the class DIE for the definition. */
23507 if (!declaration
23508 && old_die == NULL
23509 && decl
23510 && DECL_CONTEXT (decl)
23511 && TYPE_P (DECL_CONTEXT (decl))
23512 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23513 {
23514 declaration = true;
23515 if (dwarf_version < 5)
23516 no_linkage_name = true;
23517 }
23518
23519 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23520 if (decl || ultimate_origin)
23521 origin = ultimate_origin;
23522 com_decl = fortran_common (decl_or_origin, &off);
23523
23524 /* Symbol in common gets emitted as a child of the common block, in the form
23525 of a data member. */
23526 if (com_decl)
23527 {
23528 dw_die_ref com_die;
23529 dw_loc_list_ref loc = NULL;
23530 die_node com_die_arg;
23531
23532 var_die = lookup_decl_die (decl_or_origin);
23533 if (var_die)
23534 {
23535 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23536 {
23537 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23538 if (loc)
23539 {
23540 if (off)
23541 {
23542 /* Optimize the common case. */
23543 if (single_element_loc_list_p (loc)
23544 && loc->expr->dw_loc_opc == DW_OP_addr
23545 && loc->expr->dw_loc_next == NULL
23546 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23547 == SYMBOL_REF)
23548 {
23549 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23550 loc->expr->dw_loc_oprnd1.v.val_addr
23551 = plus_constant (GET_MODE (x), x , off);
23552 }
23553 else
23554 loc_list_plus_const (loc, off);
23555 }
23556 add_AT_location_description (var_die, DW_AT_location, loc);
23557 remove_AT (var_die, DW_AT_declaration);
23558 }
23559 }
23560 return;
23561 }
23562
23563 if (common_block_die_table == NULL)
23564 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23565
23566 com_die_arg.decl_id = DECL_UID (com_decl);
23567 com_die_arg.die_parent = context_die;
23568 com_die = common_block_die_table->find (&com_die_arg);
23569 if (! early_dwarf)
23570 loc = loc_list_from_tree (com_decl, 2, NULL);
23571 if (com_die == NULL)
23572 {
23573 const char *cnam
23574 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23575 die_node **slot;
23576
23577 com_die = new_die (DW_TAG_common_block, context_die, decl);
23578 add_name_and_src_coords_attributes (com_die, com_decl);
23579 if (loc)
23580 {
23581 add_AT_location_description (com_die, DW_AT_location, loc);
23582 /* Avoid sharing the same loc descriptor between
23583 DW_TAG_common_block and DW_TAG_variable. */
23584 loc = loc_list_from_tree (com_decl, 2, NULL);
23585 }
23586 else if (DECL_EXTERNAL (decl_or_origin))
23587 add_AT_flag (com_die, DW_AT_declaration, 1);
23588 if (want_pubnames ())
23589 add_pubname_string (cnam, com_die); /* ??? needed? */
23590 com_die->decl_id = DECL_UID (com_decl);
23591 slot = common_block_die_table->find_slot (com_die, INSERT);
23592 *slot = com_die;
23593 }
23594 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23595 {
23596 add_AT_location_description (com_die, DW_AT_location, loc);
23597 loc = loc_list_from_tree (com_decl, 2, NULL);
23598 remove_AT (com_die, DW_AT_declaration);
23599 }
23600 var_die = new_die (DW_TAG_variable, com_die, decl);
23601 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23602 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23603 decl_quals (decl_or_origin), false,
23604 context_die);
23605 add_alignment_attribute (var_die, decl);
23606 add_AT_flag (var_die, DW_AT_external, 1);
23607 if (loc)
23608 {
23609 if (off)
23610 {
23611 /* Optimize the common case. */
23612 if (single_element_loc_list_p (loc)
23613 && loc->expr->dw_loc_opc == DW_OP_addr
23614 && loc->expr->dw_loc_next == NULL
23615 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23616 {
23617 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23618 loc->expr->dw_loc_oprnd1.v.val_addr
23619 = plus_constant (GET_MODE (x), x, off);
23620 }
23621 else
23622 loc_list_plus_const (loc, off);
23623 }
23624 add_AT_location_description (var_die, DW_AT_location, loc);
23625 }
23626 else if (DECL_EXTERNAL (decl_or_origin))
23627 add_AT_flag (var_die, DW_AT_declaration, 1);
23628 if (decl)
23629 equate_decl_number_to_die (decl, var_die);
23630 return;
23631 }
23632
23633 if (old_die)
23634 {
23635 if (declaration)
23636 {
23637          /* A declaration that has been previously dumped needs no
23638 further annotations, since it doesn't need location on
23639 the second pass. */
23640 return;
23641 }
23642 else if (decl_will_get_specification_p (old_die, decl, declaration)
23643 && !get_AT (old_die, DW_AT_specification))
23644 {
23645 /* Fall-thru so we can make a new variable die along with a
23646 DW_AT_specification. */
23647 }
23648 else if (origin && old_die->die_parent != context_die)
23649 {
23650 /* If we will be creating an inlined instance, we need a
23651 new DIE that will get annotated with
23652 DW_AT_abstract_origin. */
23653 gcc_assert (!DECL_ABSTRACT_P (decl));
23654 }
23655 else
23656 {
23657 /* If a DIE was dumped early, it still needs location info.
23658 Skip to where we fill the location bits. */
23659 var_die = old_die;
23660
23661 /* ??? In LTRANS we cannot annotate early created variably
23662 modified type DIEs without copying them and adjusting all
23663 references to them. Thus we dumped them again. Also add a
23664             reference to them, but beware of a -g0 compile and -g link,
23665             in which case the reference will already be present.  */
23666 tree type = TREE_TYPE (decl_or_origin);
23667 if (in_lto_p
23668 && ! get_AT (var_die, DW_AT_type)
23669 && variably_modified_type_p
23670 (type, decl_function_context (decl_or_origin)))
23671 {
23672 if (decl_by_reference_p (decl_or_origin))
23673 add_type_attribute (var_die, TREE_TYPE (type),
23674 TYPE_UNQUALIFIED, false, context_die);
23675 else
23676 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23677 false, context_die);
23678 }
23679
23680 goto gen_variable_die_location;
23681 }
23682 }
23683
23684 /* For static data members, the declaration in the class is supposed
23685 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23686 also in DWARF2; the specification should still be DW_TAG_variable
23687 referencing the DW_TAG_member DIE. */
23688 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23689 var_die = new_die (DW_TAG_member, context_die, decl);
23690 else
23691 var_die = new_die (DW_TAG_variable, context_die, decl);
23692
23693 if (origin != NULL)
23694 add_abstract_origin_attribute (var_die, origin);
23695
23696 /* Loop unrolling can create multiple blocks that refer to the same
23697 static variable, so we must test for the DW_AT_declaration flag.
23698
23699 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23700 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23701 sharing them.
23702
23703 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23704 else if (decl_will_get_specification_p (old_die, decl, declaration))
23705 {
23706 /* This is a definition of a C++ class level static. */
23707 add_AT_specification (var_die, old_die);
23708 specialization_p = true;
23709 if (DECL_NAME (decl))
23710 {
23711 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23712 struct dwarf_file_data * file_index = lookup_filename (s.file);
23713
23714 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23715 add_AT_file (var_die, DW_AT_decl_file, file_index);
23716
23717 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23718 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23719
23720 if (debug_column_info
23721 && s.column
23722 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23723 != (unsigned) s.column))
23724 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23725
23726 if (old_die->die_tag == DW_TAG_member)
23727 add_linkage_name (var_die, decl);
23728 }
23729 }
23730 else
23731 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23732
23733 if ((origin == NULL && !specialization_p)
23734 || (origin != NULL
23735 && !DECL_ABSTRACT_P (decl_or_origin)
23736 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23737 decl_function_context
23738 (decl_or_origin))))
23739 {
23740 tree type = TREE_TYPE (decl_or_origin);
23741
23742 if (decl_by_reference_p (decl_or_origin))
23743 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23744 context_die);
23745 else
23746 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23747 context_die);
23748 }
23749
23750 if (origin == NULL && !specialization_p)
23751 {
23752 if (TREE_PUBLIC (decl))
23753 add_AT_flag (var_die, DW_AT_external, 1);
23754
23755 if (DECL_ARTIFICIAL (decl))
23756 add_AT_flag (var_die, DW_AT_artificial, 1);
23757
23758 add_alignment_attribute (var_die, decl);
23759
23760 add_accessibility_attribute (var_die, decl);
23761 }
23762
23763 if (declaration)
23764 add_AT_flag (var_die, DW_AT_declaration, 1);
23765
23766 if (decl && (DECL_ABSTRACT_P (decl)
23767 || !old_die || is_declaration_die (old_die)))
23768 equate_decl_number_to_die (decl, var_die);
23769
23770 gen_variable_die_location:
23771 if (! declaration
23772 && (! DECL_ABSTRACT_P (decl_or_origin)
23773 /* Local static vars are shared between all clones/inlines,
23774 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23775 already set. */
23776 || (VAR_P (decl_or_origin)
23777 && TREE_STATIC (decl_or_origin)
23778 && DECL_RTL_SET_P (decl_or_origin))))
23779 {
23780 if (early_dwarf)
23781 add_pubname (decl_or_origin, var_die);
23782 else
23783 add_location_or_const_value_attribute (var_die, decl_or_origin,
23784 decl == NULL);
23785 }
23786 else
23787 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23788
23789 if ((dwarf_version >= 4 || !dwarf_strict)
23790 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23791 DW_AT_const_expr) == 1
23792 && !get_AT (var_die, DW_AT_const_expr)
23793 && !specialization_p)
23794 add_AT_flag (var_die, DW_AT_const_expr, 1);
23795
23796 if (!dwarf_strict)
23797 {
23798 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23799 DW_AT_inline);
23800 if (inl != -1
23801 && !get_AT (var_die, DW_AT_inline)
23802 && !specialization_p)
23803 add_AT_unsigned (var_die, DW_AT_inline, inl);
23804 }
23805 }
23806
23807 /* Generate a DIE to represent a named constant. */
23808
23809 static void
23810 gen_const_die (tree decl, dw_die_ref context_die)
23811 {
23812 dw_die_ref const_die;
23813 tree type = TREE_TYPE (decl);
23814
23815 const_die = lookup_decl_die (decl);
23816 if (const_die)
23817 return;
23818
23819 const_die = new_die (DW_TAG_constant, context_die, decl);
23820 equate_decl_number_to_die (decl, const_die);
23821 add_name_and_src_coords_attributes (const_die, decl);
23822 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23823 if (TREE_PUBLIC (decl))
23824 add_AT_flag (const_die, DW_AT_external, 1);
23825 if (DECL_ARTIFICIAL (decl))
23826 add_AT_flag (const_die, DW_AT_artificial, 1);
23827 tree_add_const_value_attribute_for_decl (const_die, decl);
23828 }
23829
23830 /* Generate a DIE to represent a label identifier. */
23831
23832 static void
23833 gen_label_die (tree decl, dw_die_ref context_die)
23834 {
23835 tree origin = decl_ultimate_origin (decl);
23836 dw_die_ref lbl_die = lookup_decl_die (decl);
23837 rtx insn;
23838 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23839
23840 if (!lbl_die)
23841 {
23842 lbl_die = new_die (DW_TAG_label, context_die, decl);
23843 equate_decl_number_to_die (decl, lbl_die);
23844
23845 if (origin != NULL)
23846 add_abstract_origin_attribute (lbl_die, origin);
23847 else
23848 add_name_and_src_coords_attributes (lbl_die, decl);
23849 }
23850
23851 if (DECL_ABSTRACT_P (decl))
23852 equate_decl_number_to_die (decl, lbl_die);
23853 else if (! early_dwarf)
23854 {
23855 insn = DECL_RTL_IF_SET (decl);
23856
23857       /* Deleted labels are programmer-specified labels which have been
23858 eliminated because of various optimizations. We still emit them
23859 here so that it is possible to put breakpoints on them. */
23860 if (insn
23861 && (LABEL_P (insn)
23862 || ((NOTE_P (insn)
23863 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23864 {
23865 /* When optimization is enabled (via -O) some parts of the compiler
23866 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23867 represent source-level labels which were explicitly declared by
23868 the user. This really shouldn't be happening though, so catch
23869 it if it ever does happen. */
23870 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23871
23872 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23873 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23874 }
23875 else if (insn
23876 && NOTE_P (insn)
23877 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23878 && CODE_LABEL_NUMBER (insn) != -1)
23879 {
23880 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23881 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23882 }
23883 }
23884 }
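
/* Sketch (the label and function names are made up): in

     void f (int i)
     {
       if (i) goto done;
       done:;
     }

   the user label `done' gets a DW_TAG_label DIE; if optimization later
   deletes the CODE_LABEL, the NOTE_INSN_DELETED_LABEL left in the insn
   stream still lets the code above emit DW_AT_low_pc, so a breakpoint on
   `done' keeps working.  */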
23885
23886 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23887 attributes to the DIE for a block STMT, to describe where the inlined
23888 function was called from. This is similar to add_src_coords_attributes. */
23889
23890 static inline void
23891 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23892 {
23893 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23894
23895 if (dwarf_version >= 3 || !dwarf_strict)
23896 {
23897 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23898 add_AT_unsigned (die, DW_AT_call_line, s.line);
23899 if (debug_column_info && s.column)
23900 add_AT_unsigned (die, DW_AT_call_column, s.column);
23901 }
23902 }
23903
23904
23905 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23906 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23907
23908 static inline void
23909 add_high_low_attributes (tree stmt, dw_die_ref die)
23910 {
23911 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23912
23913 if (inline_entry_data **iedp
23914 = !inline_entry_data_table ? NULL
23915 : inline_entry_data_table->find_slot_with_hash (stmt,
23916 htab_hash_pointer (stmt),
23917 NO_INSERT))
23918 {
23919 inline_entry_data *ied = *iedp;
23920 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23921 gcc_assert (debug_inline_points);
23922 gcc_assert (inlined_function_outer_scope_p (stmt));
23923
23924 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23925 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23926
23927 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23928 && !dwarf_strict)
23929 {
23930 if (!output_asm_line_debug_info ())
23931 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23932 else
23933 {
23934 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23935 /* FIXME: this will resolve to a small number. Could we
23936 possibly emit smaller data? Ideally we'd emit a
23937 uleb128, but that would make the size of DIEs
23938 impossible for the compiler to compute, since it's
23939 the assembler that computes the value of the view
23940 label in this case. Ideally, we'd have a single form
23941 encompassing both the address and the view, and
23942 indirecting them through a table might make things
23943 easier, but even that would be more wasteful,
23944 space-wise, than what we have now. */
23945 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23946 }
23947 }
23948
23949 inline_entry_data_table->clear_slot (iedp);
23950 }
23951
23952 if (BLOCK_FRAGMENT_CHAIN (stmt)
23953 && (dwarf_version >= 3 || !dwarf_strict))
23954 {
23955 tree chain, superblock = NULL_TREE;
23956 dw_die_ref pdie;
23957 dw_attr_node *attr = NULL;
23958
23959 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23960 {
23961 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23962 BLOCK_NUMBER (stmt));
23963 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23964 }
23965
23966 /* Optimize duplicate .debug_ranges lists or even tails of
23967          lists.  If this BLOCK has the same ranges as its supercontext,
23968          look up the DW_AT_ranges attribute in the supercontext (and
23969          recursively so), verify that the ranges_table contains the
23970          right values and use it instead of adding a new .debug_ranges entry.  */
23971 for (chain = stmt, pdie = die;
23972 BLOCK_SAME_RANGE (chain);
23973 chain = BLOCK_SUPERCONTEXT (chain))
23974 {
23975 dw_attr_node *new_attr;
23976
23977 pdie = pdie->die_parent;
23978 if (pdie == NULL)
23979 break;
23980 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23981 break;
23982 new_attr = get_AT (pdie, DW_AT_ranges);
23983 if (new_attr == NULL
23984 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23985 break;
23986 attr = new_attr;
23987 superblock = BLOCK_SUPERCONTEXT (chain);
23988 }
23989 if (attr != NULL
23990 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
23991 == BLOCK_NUMBER (superblock))
23992 && BLOCK_FRAGMENT_CHAIN (superblock))
23993 {
23994 unsigned long off = attr->dw_attr_val.v.val_offset;
23995 unsigned long supercnt = 0, thiscnt = 0;
23996 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
23997 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
23998 {
23999 ++supercnt;
24000 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24001 == BLOCK_NUMBER (chain));
24002 }
24003 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24004 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24005 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24006 ++thiscnt;
24007 gcc_assert (supercnt >= thiscnt);
24008 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24009 false);
24010 note_rnglist_head (off + supercnt - thiscnt);
24011 return;
24012 }
24013
24014 unsigned int offset = add_ranges (stmt, true);
24015 add_AT_range_list (die, DW_AT_ranges, offset, false);
24016 note_rnglist_head (offset);
24017
24018 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24019 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24020 do
24021 {
24022 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24023 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24024 chain = BLOCK_FRAGMENT_CHAIN (chain);
24025 }
24026 while (chain);
24027 add_ranges (NULL);
24028 }
24029 else
24030 {
24031 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24032 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24033 BLOCK_NUMBER (stmt));
24034 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24035 BLOCK_NUMBER (stmt));
24036 add_AT_low_high_pc (die, label, label_high, false);
24037 }
24038 }
24039
24040 /* Generate a DIE for a lexical block. */
24041
24042 static void
24043 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24044 {
24045 dw_die_ref old_die = BLOCK_DIE (stmt);
24046 dw_die_ref stmt_die = NULL;
24047 if (!old_die)
24048 {
24049 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24050 BLOCK_DIE (stmt) = stmt_die;
24051 }
24052
24053 if (BLOCK_ABSTRACT (stmt))
24054 {
24055 if (old_die)
24056 {
24057 /* This must have been generated early, and it won't even
24058 need location information since it is part of a
24059 DW_AT_inline function. */
24060 if (flag_checking)
24061 for (dw_die_ref c = context_die; c; c = c->die_parent)
24062 if (c->die_tag == DW_TAG_inlined_subroutine
24063 || c->die_tag == DW_TAG_subprogram)
24064 {
24065 gcc_assert (get_AT (c, DW_AT_inline));
24066 break;
24067 }
24068 return;
24069 }
24070 }
24071 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24072 {
24073 /* If this is an inlined instance, create a new lexical die for
24074 anything below to attach DW_AT_abstract_origin to. */
24075 if (old_die)
24076 {
24077 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24078 BLOCK_DIE (stmt) = stmt_die;
24079 old_die = NULL;
24080 }
24081
24082 tree origin = block_ultimate_origin (stmt);
24083 if (origin != NULL_TREE && origin != stmt)
24084 add_abstract_origin_attribute (stmt_die, origin);
24085 }
24086
24087 if (old_die)
24088 stmt_die = old_die;
24089
24090 /* A non-abstract block whose blocks have already been reordered
24091 should have the instruction range for this block. If so, set the
24092 high/low attributes. */
24093 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24094 {
24095 gcc_assert (stmt_die);
24096 add_high_low_attributes (stmt, stmt_die);
24097 }
24098
24099 decls_for_scope (stmt, stmt_die);
24100 }
24101
24102 /* Generate a DIE for an inlined subprogram. */
24103
24104 static void
24105 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24106 {
24107 tree decl;
24108
24109 /* The instance of the function that is effectively being inlined shall not
24110 be abstract. */
24111 gcc_assert (! BLOCK_ABSTRACT (stmt));
24112
24113 decl = block_ultimate_origin (stmt);
24114
24115 /* Make sure any inlined functions are known to be inlineable. */
24116 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24117 || cgraph_function_possibly_inlined_p (decl));
24118
24119 if (! BLOCK_ABSTRACT (stmt))
24120 {
24121 dw_die_ref subr_die
24122 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24123
24124 if (call_arg_locations || debug_inline_points)
24125 BLOCK_DIE (stmt) = subr_die;
24126 add_abstract_origin_attribute (subr_die, decl);
24127 if (TREE_ASM_WRITTEN (stmt))
24128 add_high_low_attributes (stmt, subr_die);
24129 add_call_src_coords_attributes (stmt, subr_die);
24130
24131 decls_for_scope (stmt, subr_die);
24132 }
24133 }
24134
24135 /* Generate a DIE for a field in a record or structure. CTX is required: see
24136 the comment for VLR_CONTEXT. */
24137
24138 static void
24139 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24140 {
24141 dw_die_ref decl_die;
24142
24143 if (TREE_TYPE (decl) == error_mark_node)
24144 return;
24145
24146 decl_die = new_die (DW_TAG_member, context_die, decl);
24147 add_name_and_src_coords_attributes (decl_die, decl);
24148 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24149 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24150 context_die);
24151
24152 if (DECL_BIT_FIELD_TYPE (decl))
24153 {
24154 add_byte_size_attribute (decl_die, decl);
24155 add_bit_size_attribute (decl_die, decl);
24156 add_bit_offset_attribute (decl_die, decl, ctx);
24157 }
24158
24159 add_alignment_attribute (decl_die, decl);
24160
24161 /* If we have a variant part offset, then we are supposed to process a member
24162 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24163 trees. */
24164 gcc_assert (ctx->variant_part_offset == NULL_TREE
24165 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24166 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24167 add_data_member_location_attribute (decl_die, decl, ctx);
24168
24169 if (DECL_ARTIFICIAL (decl))
24170 add_AT_flag (decl_die, DW_AT_artificial, 1);
24171
24172 add_accessibility_attribute (decl_die, decl);
24173
24174 /* Equate decl number to die, so that we can look up this decl later on. */
24175 equate_decl_number_to_die (decl, decl_die);
24176 }
24177
24178 /* Generate a DIE for a pointer to a member type. TYPE can be an
24179 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24180 pointer to member function. */
24181
24182 static void
24183 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24184 {
24185 if (lookup_type_die (type))
24186 return;
24187
24188 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24189 scope_die_for (type, context_die), type);
24190
24191 equate_type_number_to_die (type, ptr_die);
24192 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24193 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24194 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24195 context_die);
24196 add_alignment_attribute (ptr_die, type);
24197
24198 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24199 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24200 {
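/* Explanatory note: per DWARF, DW_AT_use_location is evaluated with the
   member-pointer value and the base address of the containing object
   pushed on the expression stack.  Since data-member pointers are
   typically represented as byte offsets (as in the Itanium C++ ABI), a
   single DW_OP_plus is enough to compute the member's address.  */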
24201 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24202 add_AT_loc (ptr_die, DW_AT_use_location, op);
24203 }
24204 }
24205
24206 static char *producer_string;
24207
24208 /* Return a heap-allocated producer string, including command-line options
24209 if -grecord-gcc-switches is in effect. */
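/* For example (hypothetical version and switches), the resulting string
   might look like "GNU C11 8.2.0 -march=x86-64 -g -O2": the language name
   and version string, followed by each recorded switch separated by a
   space.  */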
24210
24211 static char *
24212 gen_producer_string (void)
24213 {
24214 size_t j;
24215 auto_vec<const char *> switches;
24216 const char *language_string = lang_hooks.name;
24217 char *producer, *tail;
24218 const char *p;
24219 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24220 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24221
24222 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24223 switch (save_decoded_options[j].opt_index)
24224 {
24225 case OPT_o:
24226 case OPT_d:
24227 case OPT_dumpbase:
24228 case OPT_dumpdir:
24229 case OPT_auxbase:
24230 case OPT_auxbase_strip:
24231 case OPT_quiet:
24232 case OPT_version:
24233 case OPT_v:
24234 case OPT_w:
24235 case OPT_L:
24236 case OPT_D:
24237 case OPT_I:
24238 case OPT_U:
24239 case OPT_SPECIAL_unknown:
24240 case OPT_SPECIAL_ignore:
24241 case OPT_SPECIAL_program_name:
24242 case OPT_SPECIAL_input_file:
24243 case OPT_grecord_gcc_switches:
24244 case OPT__output_pch_:
24245 case OPT_fdiagnostics_show_location_:
24246 case OPT_fdiagnostics_show_option:
24247 case OPT_fdiagnostics_show_caret:
24248 case OPT_fdiagnostics_color_:
24249 case OPT_fverbose_asm:
24250 case OPT____:
24251 case OPT__sysroot_:
24252 case OPT_nostdinc:
24253 case OPT_nostdinc__:
24254 case OPT_fpreprocessed:
24255 case OPT_fltrans_output_list_:
24256 case OPT_fresolution_:
24257 case OPT_fdebug_prefix_map_:
24258 case OPT_fmacro_prefix_map_:
24259 case OPT_ffile_prefix_map_:
24260 case OPT_fcompare_debug:
24261 case OPT_fchecking:
24262 case OPT_fchecking_:
24263 /* Ignore these. */
24264 continue;
24265 default:
24266 if (cl_options[save_decoded_options[j].opt_index].flags
24267 & CL_NO_DWARF_RECORD)
24268 continue;
24269 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24270 == '-');
24271 switch (save_decoded_options[j].canonical_option[0][1])
24272 {
24273 case 'M':
24274 case 'i':
24275 case 'W':
24276 continue;
24277 case 'f':
24278 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24279 "dump", 4) == 0)
24280 continue;
24281 break;
24282 default:
24283 break;
24284 }
24285 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24286 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24287 break;
24288 }
24289
24290 producer = XNEWVEC (char, plen + 1 + len + 1);
24291 tail = producer;
24292 sprintf (tail, "%s %s", language_string, version_string);
24293 tail += plen;
24294
24295 FOR_EACH_VEC_ELT (switches, j, p)
24296 {
24297 len = strlen (p);
24298 *tail = ' ';
24299 memcpy (tail + 1, p, len);
24300 tail += len + 1;
24301 }
24302
24303 *tail = '\0';
24304 return producer;
24305 }
24306
24307 /* Given a C and/or C++ language/version string, return the "highest".
24308 C++ is assumed to be "higher" than C in this case. Used for merging
24309 LTO translation unit languages. */
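/* For example, given "GNU C11" and "GNU C++14" this returns "GNU C++14". */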
24310 static const char *
24311 highest_c_language (const char *lang1, const char *lang2)
24312 {
24313 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24314 return "GNU C++17";
24315 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24316 return "GNU C++14";
24317 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24318 return "GNU C++11";
24319 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24320 return "GNU C++98";
24321
24322 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24323 return "GNU C17";
24324 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24325 return "GNU C11";
24326 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24327 return "GNU C99";
24328 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24329 return "GNU C89";
24330
24331 gcc_unreachable ();
24332 }
24333
24334
24335 /* Generate the DIE for the compilation unit. */
24336
24337 static dw_die_ref
24338 gen_compile_unit_die (const char *filename)
24339 {
24340 dw_die_ref die;
24341 const char *language_string = lang_hooks.name;
24342 int language;
24343
24344 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24345
24346 if (filename)
24347 {
24348 add_name_attribute (die, filename);
24349 /* Don't add cwd for <built-in>. */
24350 if (filename[0] != '<')
24351 add_comp_dir_attribute (die);
24352 }
24353
24354 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24355
24356 /* If our producer is LTO, try to figure out a common language to use
24357 from the global list of translation units. */
24358 if (strcmp (language_string, "GNU GIMPLE") == 0)
24359 {
24360 unsigned i;
24361 tree t;
24362 const char *common_lang = NULL;
24363
24364 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24365 {
24366 if (!TRANSLATION_UNIT_LANGUAGE (t))
24367 continue;
24368 if (!common_lang)
24369 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24370 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24371 ;
24372 else if (strncmp (common_lang, "GNU C", 5) == 0
24373 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24374 /* Mixing C and C++ is ok, use C++ in that case. */
24375 common_lang = highest_c_language (common_lang,
24376 TRANSLATION_UNIT_LANGUAGE (t));
24377 else
24378 {
24379 /* Fall back to C. */
24380 common_lang = NULL;
24381 break;
24382 }
24383 }
24384
24385 if (common_lang)
24386 language_string = common_lang;
24387 }
24388
24389 language = DW_LANG_C;
24390 if (strncmp (language_string, "GNU C", 5) == 0
24391 && ISDIGIT (language_string[5]))
24392 {
24393 language = DW_LANG_C89;
24394 if (dwarf_version >= 3 || !dwarf_strict)
24395 {
24396 if (strcmp (language_string, "GNU C89") != 0)
24397 language = DW_LANG_C99;
24398
24399 if (dwarf_version >= 5 /* || !dwarf_strict */)
24400 if (strcmp (language_string, "GNU C11") == 0
24401 || strcmp (language_string, "GNU C17") == 0)
24402 language = DW_LANG_C11;
24403 }
24404 }
24405 else if (strncmp (language_string, "GNU C++", 7) == 0)
24406 {
24407 language = DW_LANG_C_plus_plus;
24408 if (dwarf_version >= 5 /* || !dwarf_strict */)
24409 {
24410 if (strcmp (language_string, "GNU C++11") == 0)
24411 language = DW_LANG_C_plus_plus_11;
24412 else if (strcmp (language_string, "GNU C++14") == 0)
24413 language = DW_LANG_C_plus_plus_14;
24414 else if (strcmp (language_string, "GNU C++17") == 0)
24415 /* For now. */
24416 language = DW_LANG_C_plus_plus_14;
24417 }
24418 }
24419 else if (strcmp (language_string, "GNU F77") == 0)
24420 language = DW_LANG_Fortran77;
24421 else if (dwarf_version >= 3 || !dwarf_strict)
24422 {
24423 if (strcmp (language_string, "GNU Ada") == 0)
24424 language = DW_LANG_Ada95;
24425 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24426 {
24427 language = DW_LANG_Fortran95;
24428 if (dwarf_version >= 5 /* || !dwarf_strict */)
24429 {
24430 if (strcmp (language_string, "GNU Fortran2003") == 0)
24431 language = DW_LANG_Fortran03;
24432 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24433 language = DW_LANG_Fortran08;
24434 }
24435 }
24436 else if (strcmp (language_string, "GNU Objective-C") == 0)
24437 language = DW_LANG_ObjC;
24438 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24439 language = DW_LANG_ObjC_plus_plus;
24440 else if (dwarf_version >= 5 || !dwarf_strict)
24441 {
24442 if (strcmp (language_string, "GNU Go") == 0)
24443 language = DW_LANG_Go;
24444 }
24445 }
24446 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24447 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24448 language = DW_LANG_Fortran90;
24449 /* Likewise for Ada. */
24450 else if (strcmp (language_string, "GNU Ada") == 0)
24451 language = DW_LANG_Ada83;
24452
24453 add_AT_unsigned (die, DW_AT_language, language);
24454
24455 switch (language)
24456 {
24457 case DW_LANG_Fortran77:
24458 case DW_LANG_Fortran90:
24459 case DW_LANG_Fortran95:
24460 case DW_LANG_Fortran03:
24461 case DW_LANG_Fortran08:
24462 /* Fortran has case-insensitive identifiers and the front-end
24463 lowercases everything. */
24464 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24465 break;
24466 default:
24467 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24468 break;
24469 }
24470 return die;
24471 }
24472
24473 /* Generate the DIE for a base class. */
24474
24475 static void
24476 gen_inheritance_die (tree binfo, tree access, tree type,
24477 dw_die_ref context_die)
24478 {
24479 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24480 struct vlr_context ctx = { type, NULL };
24481
24482 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24483 context_die);
24484 add_data_member_location_attribute (die, binfo, &ctx);
24485
24486 if (BINFO_VIRTUAL_P (binfo))
24487 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24488
24489 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24490 children, otherwise the default is DW_ACCESS_public. In DWARF2
24491 the default has always been DW_ACCESS_private. */
24492 if (access == access_public_node)
24493 {
24494 if (dwarf_version == 2
24495 || context_die->die_tag == DW_TAG_class_type)
24496 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24497 }
24498 else if (access == access_protected_node)
24499 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24500 else if (dwarf_version > 2
24501 && context_die->die_tag != DW_TAG_class_type)
24502 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24503 }
24504
24505 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24506 structure. */
24507 static bool
24508 is_variant_part (tree decl)
24509 {
24510 return (TREE_CODE (decl) == FIELD_DECL
24511 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24512 }
24513
24514 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24515 return the FIELD_DECL. Return NULL_TREE otherwise. */
24516
24517 static tree
24518 analyze_discr_in_predicate (tree operand, tree struct_type)
24519 {
24520 bool continue_stripping = true;
24521 while (continue_stripping)
24522 switch (TREE_CODE (operand))
24523 {
24524 CASE_CONVERT:
24525 operand = TREE_OPERAND (operand, 0);
24526 break;
24527 default:
24528 continue_stripping = false;
24529 break;
24530 }
24531
24532 /* Match field access to members of struct_type only. */
24533 if (TREE_CODE (operand) == COMPONENT_REF
24534 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24535 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24536 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24537 return TREE_OPERAND (operand, 1);
24538 else
24539 return NULL_TREE;
24540 }
24541
24542 /* Check that SRC is a constant integer that can be represented as a native
24543 integer constant (either signed or unsigned). If so, store it into DEST and
24544 return true. Return false otherwise. */
24545
24546 static bool
24547 get_discr_value (tree src, dw_discr_value *dest)
24548 {
24549 tree discr_type = TREE_TYPE (src);
24550
24551 if (lang_hooks.types.get_debug_type)
24552 {
24553 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24554 if (debug_type != NULL)
24555 discr_type = debug_type;
24556 }
24557
24558 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24559 return false;
24560
24561 /* Signedness can vary between the original type and the debug type. This
24562 can happen for character types in Ada for instance: the character type
24563 used for code generation can be signed, to be compatible with the C one,
24564 but from a debugger point of view, it must be unsigned. */
24565 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24566 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24567
24568 if (is_orig_unsigned != is_debug_unsigned)
24569 src = fold_convert (discr_type, src);
24570
24571 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24572 return false;
24573
24574 dest->pos = is_debug_unsigned;
24575 if (is_debug_unsigned)
24576 dest->v.uval = tree_to_uhwi (src);
24577 else
24578 dest->v.sval = tree_to_shwi (src);
24579
24580 return true;
24581 }
24582
24583 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24584 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24585 store NULL_TREE in DISCR_DECL. Otherwise:
24586
24587 - store the discriminant field in STRUCT_TYPE that controls the variant
24588 part to *DISCR_DECL
24589
24590 - put in *DISCR_LISTS_P an array where for each variant, the item
24591 represents the corresponding matching list of discriminant values.
24592
24593 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24594 the above array.
24595
24596 Note that when the array is allocated (i.e. when the analysis is
24597 successful), it is up to the caller to free the array. */
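/* Illustrative sketch: for a hypothetical Ada record such as

     type Rec (Kind : Integer) is record
        case Kind is
           when 1 .. 3 | 7 => I : Integer;
           when others     => F : Float;
        end case;
     end record;

   the expectation is that *DISCR_DECL ends up as the FIELD_DECL for Kind
   and *DISCR_LISTS_P gets two entries: one describing the range 1 .. 3
   plus the single value 7, and a NULL one for the default ("others")
   variant.  */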
24598
24599 static void
24600 analyze_variants_discr (tree variant_part_decl,
24601 tree struct_type,
24602 tree *discr_decl,
24603 dw_discr_list_ref **discr_lists_p,
24604 unsigned *discr_lists_length)
24605 {
24606 tree variant_part_type = TREE_TYPE (variant_part_decl);
24607 tree variant;
24608 dw_discr_list_ref *discr_lists;
24609 unsigned i;
24610
24611 /* Compute how many variants there are in this variant part. */
24612 *discr_lists_length = 0;
24613 for (variant = TYPE_FIELDS (variant_part_type);
24614 variant != NULL_TREE;
24615 variant = DECL_CHAIN (variant))
24616 ++*discr_lists_length;
24617
24618 *discr_decl = NULL_TREE;
24619 *discr_lists_p
24620 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24621 sizeof (**discr_lists_p));
24622 discr_lists = *discr_lists_p;
24623
24624 /* And then analyze all variants to extract discriminant information for all
24625 of them. This analysis is conservative: as soon as we detect something we
24626 do not support, abort everything and pretend we found nothing. */
24627 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24628 variant != NULL_TREE;
24629 variant = DECL_CHAIN (variant), ++i)
24630 {
24631 tree match_expr = DECL_QUALIFIER (variant);
24632
24633 /* Now, try to analyze the predicate and deduce a discriminant for
24634 it. */
24635 if (match_expr == boolean_true_node)
24636 /* Typically happens for the default variant: it matches all cases that
24637 previous variants rejected. Don't output any matching value for
24638 this one. */
24639 continue;
24640
24641 /* The following loop tries to iterate over each discriminant
24642 possibility: single values or ranges. */
24643 while (match_expr != NULL_TREE)
24644 {
24645 tree next_round_match_expr;
24646 tree candidate_discr = NULL_TREE;
24647 dw_discr_list_ref new_node = NULL;
24648
24649 /* Possibilities are matched one after the other by nested
24650 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24651 continue with the rest at next iteration. */
24652 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24653 {
24654 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24655 match_expr = TREE_OPERAND (match_expr, 1);
24656 }
24657 else
24658 next_round_match_expr = NULL_TREE;
24659
24660 if (match_expr == boolean_false_node)
24661 /* This sub-expression matches nothing: just wait for the next
24662 one. */
24663 ;
24664
24665 else if (TREE_CODE (match_expr) == EQ_EXPR)
24666 {
24667 /* We are matching: <discr_field> == <integer_cst>
24668 This sub-expression matches a single value. */
24669 tree integer_cst = TREE_OPERAND (match_expr, 1);
24670
24671 candidate_discr
24672 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24673 struct_type);
24674
24675 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24676 if (!get_discr_value (integer_cst,
24677 &new_node->dw_discr_lower_bound))
24678 goto abort;
24679 new_node->dw_discr_range = false;
24680 }
24681
24682 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24683 {
24684 /* We are matching:
24685 <discr_field> > <integer_cst>
24686 && <discr_field> < <integer_cst>.
24687 This sub-expression matches the range of values between the
24688 two matched integer constants. Note that comparisons can be
24689 inclusive or exclusive. */
24690 tree candidate_discr_1, candidate_discr_2;
24691 tree lower_cst, upper_cst;
24692 bool lower_cst_included, upper_cst_included;
24693 tree lower_op = TREE_OPERAND (match_expr, 0);
24694 tree upper_op = TREE_OPERAND (match_expr, 1);
24695
24696 /* When the comparison is exclusive, the integer constant is not
24697 the discriminant range bound we are looking for: we will have
24698 to increment or decrement it. */
24699 if (TREE_CODE (lower_op) == GE_EXPR)
24700 lower_cst_included = true;
24701 else if (TREE_CODE (lower_op) == GT_EXPR)
24702 lower_cst_included = false;
24703 else
24704 goto abort;
24705
24706 if (TREE_CODE (upper_op) == LE_EXPR)
24707 upper_cst_included = true;
24708 else if (TREE_CODE (upper_op) == LT_EXPR)
24709 upper_cst_included = false;
24710 else
24711 goto abort;
24712
24713 /* Extract the discriminant from the first operand and check it
24714 is consistent with the same analysis in the second
24715 operand. */
24716 candidate_discr_1
24717 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24718 struct_type);
24719 candidate_discr_2
24720 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24721 struct_type);
24722 if (candidate_discr_1 == candidate_discr_2)
24723 candidate_discr = candidate_discr_1;
24724 else
24725 goto abort;
24726
24727 /* Extract bounds from both. */
24728 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24729 lower_cst = TREE_OPERAND (lower_op, 1);
24730 upper_cst = TREE_OPERAND (upper_op, 1);
24731
24732 if (!lower_cst_included)
24733 lower_cst
24734 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24735 build_int_cst (TREE_TYPE (lower_cst), 1));
24736 if (!upper_cst_included)
24737 upper_cst
24738 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24739 build_int_cst (TREE_TYPE (upper_cst), 1));
24740
24741 if (!get_discr_value (lower_cst,
24742 &new_node->dw_discr_lower_bound)
24743 || !get_discr_value (upper_cst,
24744 &new_node->dw_discr_upper_bound))
24745 goto abort;
24746
24747 new_node->dw_discr_range = true;
24748 }
24749
24750 else
24751 /* Unsupported sub-expression: we cannot determine the set of
24752 matching discriminant values. Abort everything. */
24753 goto abort;
24754
24755 /* If the discriminant info is not consistent with what we saw so
24756 far, consider the analysis failed and abort everything. */
24757 if (candidate_discr == NULL_TREE
24758 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24759 goto abort;
24760 else
24761 *discr_decl = candidate_discr;
24762
24763 if (new_node != NULL)
24764 {
24765 new_node->dw_discr_next = discr_lists[i];
24766 discr_lists[i] = new_node;
24767 }
24768 match_expr = next_round_match_expr;
24769 }
24770 }
24771
24772 /* If we reach this point, we could match everything we were interested
24773 in. */
24774 return;
24775
24776 abort:
24777 /* Clean up all data structures and return no result. */
24778 free (*discr_lists_p);
24779 *discr_lists_p = NULL;
24780 *discr_decl = NULL_TREE;
24781 }
24782
24783 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24784 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24785 under CONTEXT_DIE.
24786
24787 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24788 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24789 this type, which are record types, represent the available variants and each
24790 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24791 values are inferred from these attributes.
24792
24793 In trees, the offsets for the fields inside these sub-records are relative
24794 to the variant part itself, whereas the corresponding DIEs should have
24795 offset attributes that are relative to the embedding record base address.
24796 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24797 must be an expression that computes the offset of the variant part to
24798 describe in DWARF. */
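/* Roughly, the DWARF this produces has the following shape (most
   attributes elided):

     DW_TAG_variant_part
       DW_AT_discr -> DIE of the discriminant member
       DW_TAG_variant
         DW_AT_discr_value or DW_AT_discr_list
         DW_TAG_member ...  (fields of this variant)
       DW_TAG_variant       (default variant: no discr value/list)
         DW_TAG_member ...

   This is only an outline; see the code below for the details.  */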
24799
24800 static void
24801 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24802 dw_die_ref context_die)
24803 {
24804 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24805 tree variant_part_offset = vlr_ctx->variant_part_offset;
24806 struct loc_descr_context ctx = {
24807 vlr_ctx->struct_type, /* context_type */
24808 NULL_TREE, /* base_decl */
24809 NULL, /* dpi */
24810 false, /* placeholder_arg */
24811 false /* placeholder_seen */
24812 };
24813
24814 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24815 NULL_TREE if there is no such field. */
24816 tree discr_decl = NULL_TREE;
24817 dw_discr_list_ref *discr_lists;
24818 unsigned discr_lists_length = 0;
24819 unsigned i;
24820
24821 dw_die_ref dwarf_proc_die = NULL;
24822 dw_die_ref variant_part_die
24823 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24824
24825 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24826
24827 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24828 &discr_decl, &discr_lists, &discr_lists_length);
24829
24830 if (discr_decl != NULL_TREE)
24831 {
24832 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24833
24834 if (discr_die)
24835 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24836 else
24837 /* We have no DIE for the discriminant, so just discard all
24838 discriminant information in the output. */
24839 discr_decl = NULL_TREE;
24840 }
24841
24842 /* If the offset for this variant part is more complex than a constant,
24843 create a DWARF procedure for it so that we will not have to generate DWARF
24844 expressions for it for each member. */
24845 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24846 && (dwarf_version >= 3 || !dwarf_strict))
24847 {
24848 const tree dwarf_proc_fndecl
24849 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24850 build_function_type (TREE_TYPE (variant_part_offset),
24851 NULL_TREE));
24852 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24853 const dw_loc_descr_ref dwarf_proc_body
24854 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24855
24856 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24857 dwarf_proc_fndecl, context_die);
24858 if (dwarf_proc_die != NULL)
24859 variant_part_offset = dwarf_proc_call;
24860 }
24861
24862 /* Output DIEs for all variants. */
24863 i = 0;
24864 for (tree variant = TYPE_FIELDS (variant_part_type);
24865 variant != NULL_TREE;
24866 variant = DECL_CHAIN (variant), ++i)
24867 {
24868 tree variant_type = TREE_TYPE (variant);
24869 dw_die_ref variant_die;
24870
24871 /* All variants (i.e. members of a variant part) are supposed to be
24872 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24873 under these records. */
24874 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24875
24876 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24877 equate_decl_number_to_die (variant, variant_die);
24878
24879 /* Output discriminant values this variant matches, if any. */
24880 if (discr_decl == NULL || discr_lists[i] == NULL)
24881 /* If we have no discriminant information at all, or if this variant
24882 matches no specific value, this is probably the default variant: as
24883 the standard says, don't output any discriminant value/list attribute. */
24884 ;
24885 else if (discr_lists[i]->dw_discr_next == NULL
24886 && !discr_lists[i]->dw_discr_range)
24887 /* If there is only one accepted value, don't bother outputting a
24888 list. */
24889 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24890 else
24891 add_discr_list (variant_die, discr_lists[i]);
24892
24893 for (tree member = TYPE_FIELDS (variant_type);
24894 member != NULL_TREE;
24895 member = DECL_CHAIN (member))
24896 {
24897 struct vlr_context vlr_sub_ctx = {
24898 vlr_ctx->struct_type, /* struct_type */
24899 NULL /* variant_part_offset */
24900 };
24901 if (is_variant_part (member))
24902 {
24903 /* All offsets for fields inside variant parts are relative to
24904 the top-level embedding RECORD_TYPE's base address. On the
24905 other hand, offsets in GCC's types are relative to the
24906 nested-most variant part. So we have to sum offsets each time
24907 we recurse. */
24908
24909 vlr_sub_ctx.variant_part_offset
24910 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24911 variant_part_offset, byte_position (member));
24912 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24913 }
24914 else
24915 {
24916 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24917 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24918 }
24919 }
24920 }
24921
24922 free (discr_lists);
24923 }
24924
24925 /* Generate a DIE for a class member. */
24926
24927 static void
24928 gen_member_die (tree type, dw_die_ref context_die)
24929 {
24930 tree member;
24931 tree binfo = TYPE_BINFO (type);
24932
24933 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24934
24935 /* If this is not an incomplete type, output descriptions of each of its
24936 members. Note that as we output the DIEs necessary to represent the
24937 members of this record or union type, we will also be trying to output
24938 DIEs to represent the *types* of those members. However the `type'
24939 function (above) will specifically avoid generating type DIEs for member
24940 types *within* the list of member DIEs for this (containing) type except
24941 for those types (of members) which are explicitly marked as also being
24942 members of this (containing) type themselves. The g++ front end can
24943 force any given type to be treated as a member of some other (containing)
24944 type by setting the TYPE_CONTEXT of the given (member) type to point to
24945 the TREE node representing the appropriate (containing) type. */
24946
24947 /* First output info about the base classes. */
24948 if (binfo)
24949 {
24950 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24951 int i;
24952 tree base;
24953
24954 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24955 gen_inheritance_die (base,
24956 (accesses ? (*accesses)[i] : access_public_node),
24957 type,
24958 context_die);
24959 }
24960
24961 /* Now output info about the data members and type members. */
24962 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24963 {
24964 struct vlr_context vlr_ctx = { type, NULL_TREE };
24965 bool static_inline_p
24966 = (TREE_STATIC (member)
24967 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24968 != -1));
24969
24970 /* Ignore clones. */
24971 if (DECL_ABSTRACT_ORIGIN (member))
24972 continue;
24973
24974 /* If we thought we were generating minimal debug info for TYPE
24975 and then changed our minds, some of the member declarations
24976 may have already been defined. Don't define them again, but
24977 do put them in the right order. */
24978
24979 if (dw_die_ref child = lookup_decl_die (member))
24980 {
24981 /* Handle inline static data members, which only have in-class
24982 declarations. */
24983 dw_die_ref ref = NULL;
24984 if (child->die_tag == DW_TAG_variable
24985 && child->die_parent == comp_unit_die ())
24986 {
24987 ref = get_AT_ref (child, DW_AT_specification);
24988 /* For C++17 inline static data members followed by redundant
24989 out of class redeclaration, we might get here with
24990 child being the DIE created for the out of class
24991 redeclaration and with its DW_AT_specification being
24992 the DIE created for in-class definition. We want to
24993 reparent the latter, and don't want to create another
24994 DIE with DW_AT_specification in that case, because
24995 we already have one. */
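/* Illustrative C++17 source for that situation:
     struct S { static constexpr int x = 1; };
     constexpr int S::x;    // redundant out-of-class redeclaration
   (constexpr static data members are implicitly inline in C++17).  */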
24996 if (ref
24997 && static_inline_p
24998 && ref->die_tag == DW_TAG_variable
24999 && ref->die_parent == comp_unit_die ()
25000 && get_AT (ref, DW_AT_specification) == NULL)
25001 {
25002 child = ref;
25003 ref = NULL;
25004 static_inline_p = false;
25005 }
25006 }
25007
25008 if (child->die_tag == DW_TAG_variable
25009 && child->die_parent == comp_unit_die ()
25010 && ref == NULL)
25011 {
25012 reparent_child (child, context_die);
25013 if (dwarf_version < 5)
25014 child->die_tag = DW_TAG_member;
25015 }
25016 else
25017 splice_child_die (context_die, child);
25018 }
25019
25020 /* Do not generate standard DWARF for variant parts if we are generating
25021 the corresponding GNAT encodings: DIEs generated for both would
25022 conflict in our mappings. */
25023 else if (is_variant_part (member)
25024 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25025 {
25026 vlr_ctx.variant_part_offset = byte_position (member);
25027 gen_variant_part (member, &vlr_ctx, context_die);
25028 }
25029 else
25030 {
25031 vlr_ctx.variant_part_offset = NULL_TREE;
25032 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25033 }
25034
25035 /* For C++ inline static data members, immediately emit a DW_TAG_variable
25036 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25037 DW_AT_specification. */
25038 if (static_inline_p)
25039 {
25040 int old_extern = DECL_EXTERNAL (member);
25041 DECL_EXTERNAL (member) = 0;
25042 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25043 DECL_EXTERNAL (member) = old_extern;
25044 }
25045 }
25046 }
25047
25048 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25049 is set, we pretend that the type was never defined, so we only get the
25050 member DIEs needed by later specification DIEs. */
25051
25052 static void
25053 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25054 enum debug_info_usage usage)
25055 {
25056 if (TREE_ASM_WRITTEN (type))
25057 {
25058 /* Fill in the bounds of variable-length fields in late DWARF if
25059 still incomplete. */
25060 if (!early_dwarf && variably_modified_type_p (type, NULL))
25061 for (tree member = TYPE_FIELDS (type);
25062 member;
25063 member = DECL_CHAIN (member))
25064 fill_variable_array_bounds (TREE_TYPE (member));
25065 return;
25066 }
25067
25068 dw_die_ref type_die = lookup_type_die (type);
25069 dw_die_ref scope_die = 0;
25070 int nested = 0;
25071 int complete = (TYPE_SIZE (type)
25072 && (! TYPE_STUB_DECL (type)
25073 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25074 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25075 complete = complete && should_emit_struct_debug (type, usage);
25076
25077 if (type_die && ! complete)
25078 return;
25079
25080 if (TYPE_CONTEXT (type) != NULL_TREE
25081 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25082 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25083 nested = 1;
25084
25085 scope_die = scope_die_for (type, context_die);
25086
25087 /* Generate child DIEs for template parameters. */
25088 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25089 schedule_generic_params_dies_gen (type);
25090
25091 if (! type_die || (nested && is_cu_die (scope_die)))
25092 /* First occurrence of type or toplevel definition of nested class. */
25093 {
25094 dw_die_ref old_die = type_die;
25095
25096 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25097 ? record_type_tag (type) : DW_TAG_union_type,
25098 scope_die, type);
25099 equate_type_number_to_die (type, type_die);
25100 if (old_die)
25101 add_AT_specification (type_die, old_die);
25102 else
25103 add_name_attribute (type_die, type_tag (type));
25104 }
25105 else
25106 remove_AT (type_die, DW_AT_declaration);
25107
25108 /* If this type has been completed, then give it a byte_size attribute and
25109 then give a list of members. */
25110 if (complete && !ns_decl)
25111 {
25112 /* Prevent infinite recursion in cases where the type of some member of
25113 this type is expressed in terms of this type itself. */
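/* E.g. "struct node { struct node *next; };": emitting the DIE for the
   member's pointer type would otherwise recurse back into this type.  */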
25114 TREE_ASM_WRITTEN (type) = 1;
25115 add_byte_size_attribute (type_die, type);
25116 add_alignment_attribute (type_die, type);
25117 if (TYPE_STUB_DECL (type) != NULL_TREE)
25118 {
25119 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25120 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25121 }
25122
25123 /* If the first reference to this type was as the return type of an
25124 inline function, then it may not have a parent. Fix this now. */
25125 if (type_die->die_parent == NULL)
25126 add_child_die (scope_die, type_die);
25127
25128 gen_member_die (type, type_die);
25129
25130 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25131 if (TYPE_ARTIFICIAL (type))
25132 add_AT_flag (type_die, DW_AT_artificial, 1);
25133
25134 /* GNU extension: Record what type our vtable lives in. */
25135 if (TYPE_VFIELD (type))
25136 {
25137 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25138
25139 gen_type_die (vtype, context_die);
25140 add_AT_die_ref (type_die, DW_AT_containing_type,
25141 lookup_type_die (vtype));
25142 }
25143 }
25144 else
25145 {
25146 add_AT_flag (type_die, DW_AT_declaration, 1);
25147
25148 /* We don't need to do this for function-local types. */
25149 if (TYPE_STUB_DECL (type)
25150 && ! decl_function_context (TYPE_STUB_DECL (type)))
25151 vec_safe_push (incomplete_types, type);
25152 }
25153
25154 if (get_AT (type_die, DW_AT_name))
25155 add_pubtype (type, type_die);
25156 }
25157
25158 /* Generate a DIE for a subroutine _type_. */
25159
25160 static void
25161 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25162 {
25163 tree return_type = TREE_TYPE (type);
25164 dw_die_ref subr_die
25165 = new_die (DW_TAG_subroutine_type,
25166 scope_die_for (type, context_die), type);
25167
25168 equate_type_number_to_die (type, subr_die);
25169 add_prototyped_attribute (subr_die, type);
25170 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25171 context_die);
25172 add_alignment_attribute (subr_die, type);
25173 gen_formal_types_die (type, subr_die);
25174
25175 if (get_AT (subr_die, DW_AT_name))
25176 add_pubtype (type, subr_die);
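/* For C++, ref-qualified function/method types are flagged below: e.g.
   (illustrative) a member function type declared with a trailing '&'
   should get DW_AT_reference, and one declared with '&&' should get
   DW_AT_rvalue_reference, as reported by the lang hook.  */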
25177 if ((dwarf_version >= 5 || !dwarf_strict)
25178 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25179 add_AT_flag (subr_die, DW_AT_reference, 1);
25180 if ((dwarf_version >= 5 || !dwarf_strict)
25181 && lang_hooks.types.type_dwarf_attribute (type,
25182 DW_AT_rvalue_reference) != -1)
25183 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25184 }
25185
25186 /* Generate a DIE for a type definition. */
25187
25188 static void
25189 gen_typedef_die (tree decl, dw_die_ref context_die)
25190 {
25191 dw_die_ref type_die;
25192 tree type;
25193
25194 if (TREE_ASM_WRITTEN (decl))
25195 {
25196 if (DECL_ORIGINAL_TYPE (decl))
25197 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25198 return;
25199 }
25200
25201 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25202 checks in process_scope_var and modified_type_die), this should be called
25203 only for original types. */
25204 gcc_assert (decl_ultimate_origin (decl) == NULL
25205 || decl_ultimate_origin (decl) == decl);
25206
25207 TREE_ASM_WRITTEN (decl) = 1;
25208 type_die = new_die (DW_TAG_typedef, context_die, decl);
25209
25210 add_name_and_src_coords_attributes (type_die, decl);
25211 if (DECL_ORIGINAL_TYPE (decl))
25212 {
25213 type = DECL_ORIGINAL_TYPE (decl);
25214 if (type == error_mark_node)
25215 return;
25216
25217 gcc_assert (type != TREE_TYPE (decl));
25218 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25219 }
25220 else
25221 {
25222 type = TREE_TYPE (decl);
25223 if (type == error_mark_node)
25224 return;
25225
25226 if (is_naming_typedef_decl (TYPE_NAME (type)))
25227 {
25228 /* Here, we are in the case of decl being a typedef naming
25229 an anonymous type, e.g.:
25230 typedef struct {...} foo;
25231 In that case TREE_TYPE (decl) is not a typedef variant
25232 type and TYPE_NAME of the anonymous type is set to the
25233 TYPE_DECL of the typedef. This construct is emitted by
25234 the C++ FE.
25235
25236 TYPE is the anonymous struct named by the typedef
25237 DECL. As we need the DW_AT_type attribute of the
25238 DW_TAG_typedef to point to the DIE of TYPE, let's
25239 generate that DIE right away. add_type_attribute
25240 called below will then pick (via lookup_type_die) that
25241 anonymous struct DIE. */
25242 if (!TREE_ASM_WRITTEN (type))
25243 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25244
25245 /* This is a GNU Extension. We are adding a
25246 DW_AT_linkage_name attribute to the DIE of the
25247 anonymous struct TYPE. The value of that attribute
25248 is the name of the typedef decl naming the anonymous
25249 struct. This greatly eases the work of consumers of
25250 this debug info. */
25251 add_linkage_name_raw (lookup_type_die (type), decl);
25252 }
25253 }
25254
25255 add_type_attribute (type_die, type, decl_quals (decl), false,
25256 context_die);
25257
25258 if (is_naming_typedef_decl (decl))
25259 /* We want that all subsequent calls to lookup_type_die with
25260 TYPE in argument yield the DW_TAG_typedef we have just
25261 created. */
25262 equate_type_number_to_die (type, type_die);
25263
25264 add_alignment_attribute (type_die, TREE_TYPE (decl));
25265
25266 add_accessibility_attribute (type_die, decl);
25267
25268 if (DECL_ABSTRACT_P (decl))
25269 equate_decl_number_to_die (decl, type_die);
25270
25271 if (get_AT (type_die, DW_AT_name))
25272 add_pubtype (decl, type_die);
25273 }
25274
25275 /* Generate a DIE for a struct, class, enum or union type. */
25276
25277 static void
25278 gen_tagged_type_die (tree type,
25279 dw_die_ref context_die,
25280 enum debug_info_usage usage)
25281 {
25282 if (type == NULL_TREE
25283 || !is_tagged_type (type))
25284 return;
25285
25286 if (TREE_ASM_WRITTEN (type))
25287 ;
25288 /* If this is a nested type whose containing class hasn't been written
25289 out yet, writing it out will cover this one, too. This does not apply
25290 to instantiations of member class templates; they need to be added to
25291 the containing class as they are generated. FIXME: This hurts the
25292 idea of combining type decls from multiple TUs, since we can't predict
25293 what set of template instantiations we'll get. */
25294 else if (TYPE_CONTEXT (type)
25295 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25296 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25297 {
25298 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25299
25300 if (TREE_ASM_WRITTEN (type))
25301 return;
25302
25303 /* If that failed, attach ourselves to the stub. */
25304 context_die = lookup_type_die (TYPE_CONTEXT (type));
25305 }
25306 else if (TYPE_CONTEXT (type) != NULL_TREE
25307 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25308 {
25309 /* If this type is local to a function that hasn't been written
25310 out yet, use a NULL context for now; it will be fixed up in
25311 decls_for_scope. */
25312 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25313 /* A declaration DIE doesn't count; nested types need to go in the
25314 specification. */
25315 if (context_die && is_declaration_die (context_die))
25316 context_die = NULL;
25317 }
25318 else
25319 context_die = declare_in_namespace (type, context_die);
25320
25321 if (TREE_CODE (type) == ENUMERAL_TYPE)
25322 {
25323 /* This might have been written out by the call to
25324 declare_in_namespace. */
25325 if (!TREE_ASM_WRITTEN (type))
25326 gen_enumeration_type_die (type, context_die);
25327 }
25328 else
25329 gen_struct_or_union_type_die (type, context_die, usage);
25330
25331 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25332 it up if it is ever completed. gen_*_type_die will set it for us
25333 when appropriate. */
25334 }
25335
25336 /* Generate a type description DIE. */
25337
25338 static void
25339 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25340 enum debug_info_usage usage)
25341 {
25342 struct array_descr_info info;
25343
25344 if (type == NULL_TREE || type == error_mark_node)
25345 return;
25346
25347 if (flag_checking && type)
25348 verify_type (type);
25349
25350 if (TYPE_NAME (type) != NULL_TREE
25351 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25352 && is_redundant_typedef (TYPE_NAME (type))
25353 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25354 /* The DECL of this type is a typedef we don't want to emit debug
25355 info for but we want debug info for its underlying typedef.
25356 This can happen, e.g., for the injected-class-name of a C++
25357 type. */
25358 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25359
25360 /* If TYPE is a typedef type variant, let's generate debug info
25361 for the parent typedef which TYPE is a type of. */
25362 if (typedef_variant_p (type))
25363 {
25364 if (TREE_ASM_WRITTEN (type))
25365 return;
25366
25367 tree name = TYPE_NAME (type);
25368 tree origin = decl_ultimate_origin (name);
25369 if (origin != NULL && origin != name)
25370 {
25371 gen_decl_die (origin, NULL, NULL, context_die);
25372 return;
25373 }
25374
25375 /* Prevent broken recursion; we can't hand off to the same type. */
25376 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25377
25378 /* Give typedefs the right scope. */
25379 context_die = scope_die_for (type, context_die);
25380
25381 TREE_ASM_WRITTEN (type) = 1;
25382
25383 gen_decl_die (name, NULL, NULL, context_die);
25384 return;
25385 }
25386
25387 /* If type is an anonymous tagged type named by a typedef, let's
25388 generate debug info for the typedef. */
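/* E.g. for "typedef struct { int i; } foo;" in C++, TYPE is the anonymous
   struct and TYPE_NAME (type) is the TYPE_DECL for "foo".  */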
25389 if (is_naming_typedef_decl (TYPE_NAME (type)))
25390 {
25391 /* Use the DIE of the containing namespace as the parent DIE of
25392 the type description DIE we want to generate. */
25393 if (DECL_CONTEXT (TYPE_NAME (type))
25394 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25395 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25396
25397 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25398 return;
25399 }
25400
25401 if (lang_hooks.types.get_debug_type)
25402 {
25403 tree debug_type = lang_hooks.types.get_debug_type (type);
25404
25405 if (debug_type != NULL_TREE && debug_type != type)
25406 {
25407 gen_type_die_with_usage (debug_type, context_die, usage);
25408 return;
25409 }
25410 }
25411
25412 /* We are going to output a DIE to represent the unqualified version
25413 of this type (i.e. without any const or volatile qualifiers) so
25414 get the main variant (i.e. the unqualified version) of this type
25415 now. (Vectors and arrays are special because the debugging info is in the
25416 cloned type itself. Similarly function/method types can contain extra
25417 ref-qualification). */
25418 if (TREE_CODE (type) == FUNCTION_TYPE
25419 || TREE_CODE (type) == METHOD_TYPE)
25420 {
25421 /* For function/method types, can't use type_main_variant here,
25422 because that can have different ref-qualifiers for C++,
25423 but try to canonicalize. */
25424 tree main = TYPE_MAIN_VARIANT (type);
25425 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25426 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25427 && check_base_type (t, main)
25428 && check_lang_type (t, type))
25429 {
25430 type = t;
25431 break;
25432 }
25433 }
25434 else if (TREE_CODE (type) != VECTOR_TYPE
25435 && TREE_CODE (type) != ARRAY_TYPE)
25436 type = type_main_variant (type);
25437
25438 /* If this is an array type with hidden descriptor, handle it first. */
25439 if (!TREE_ASM_WRITTEN (type)
25440 && lang_hooks.types.get_array_descr_info)
25441 {
25442 memset (&info, 0, sizeof (info));
25443 if (lang_hooks.types.get_array_descr_info (type, &info))
25444 {
25445 /* Fortran sometimes emits array types with no dimension. */
25446 gcc_assert (info.ndimensions >= 0
25447 && (info.ndimensions
25448 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25449 gen_descr_array_type_die (type, &info, context_die);
25450 TREE_ASM_WRITTEN (type) = 1;
25451 return;
25452 }
25453 }
25454
25455 if (TREE_ASM_WRITTEN (type))
25456 {
25457 /* Variable-length types may be incomplete even if
25458 TREE_ASM_WRITTEN. For such types, fall through to
25459 gen_array_type_die() and possibly fill in
25460 DW_AT_{upper,lower}_bound attributes. */
25461 if ((TREE_CODE (type) != ARRAY_TYPE
25462 && TREE_CODE (type) != RECORD_TYPE
25463 && TREE_CODE (type) != UNION_TYPE
25464 && TREE_CODE (type) != QUAL_UNION_TYPE)
25465 || !variably_modified_type_p (type, NULL))
25466 return;
25467 }
25468
25469 switch (TREE_CODE (type))
25470 {
25471 case ERROR_MARK:
25472 break;
25473
25474 case POINTER_TYPE:
25475 case REFERENCE_TYPE:
25476 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25477 ensures that the gen_type_die recursion will terminate even if the
25478 type is recursive. Recursive types are possible in Ada. */
25479 /* ??? We could perhaps do this for all types before the switch
25480 statement. */
25481 TREE_ASM_WRITTEN (type) = 1;
25482
25483 /* For these types, all that is required is that we output a DIE (or a
25484 set of DIEs) to represent the "basis" type. */
25485 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25486 DINFO_USAGE_IND_USE);
25487 break;
25488
25489 case OFFSET_TYPE:
25490 /* This code is used for C++ pointer-to-data-member types.
25491 Output a description of the relevant class type. */
25492 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25493 DINFO_USAGE_IND_USE);
25494
25495 /* Output a description of the type of the object pointed to. */
25496 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25497 DINFO_USAGE_IND_USE);
25498
25499 /* Now output a DIE to represent this pointer-to-data-member type
25500 itself. */
25501 gen_ptr_to_mbr_type_die (type, context_die);
25502 break;
25503
25504 case FUNCTION_TYPE:
25505 /* Force out return type (in case it wasn't forced out already). */
25506 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25507 DINFO_USAGE_DIR_USE);
25508 gen_subroutine_type_die (type, context_die);
25509 break;
25510
25511 case METHOD_TYPE:
25512 /* Force out return type (in case it wasn't forced out already). */
25513 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25514 DINFO_USAGE_DIR_USE);
25515 gen_subroutine_type_die (type, context_die);
25516 break;
25517
25518 case ARRAY_TYPE:
25519 case VECTOR_TYPE:
25520 gen_array_type_die (type, context_die);
25521 break;
25522
25523 case ENUMERAL_TYPE:
25524 case RECORD_TYPE:
25525 case UNION_TYPE:
25526 case QUAL_UNION_TYPE:
25527 gen_tagged_type_die (type, context_die, usage);
25528 return;
25529
25530 case VOID_TYPE:
25531 case INTEGER_TYPE:
25532 case REAL_TYPE:
25533 case FIXED_POINT_TYPE:
25534 case COMPLEX_TYPE:
25535 case BOOLEAN_TYPE:
25536 /* No DIEs needed for fundamental types. */
25537 break;
25538
25539 case NULLPTR_TYPE:
25540 case LANG_TYPE:
25541 /* Just use DW_TAG_unspecified_type. */
25542 {
25543 dw_die_ref type_die = lookup_type_die (type);
25544 if (type_die == NULL)
25545 {
25546 tree name = TYPE_IDENTIFIER (type);
25547 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25548 type);
25549 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25550 equate_type_number_to_die (type, type_die);
25551 }
25552 }
25553 break;
25554
25555 default:
25556 if (is_cxx_auto (type))
25557 {
25558 tree name = TYPE_IDENTIFIER (type);
25559 dw_die_ref *die = (name == get_identifier ("auto")
25560 ? &auto_die : &decltype_auto_die);
25561 if (!*die)
25562 {
25563 *die = new_die (DW_TAG_unspecified_type,
25564 comp_unit_die (), NULL_TREE);
25565 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25566 }
25567 equate_type_number_to_die (type, *die);
25568 break;
25569 }
25570 gcc_unreachable ();
25571 }
25572
25573 TREE_ASM_WRITTEN (type) = 1;
25574 }
25575
25576 static void
25577 gen_type_die (tree type, dw_die_ref context_die)
25578 {
25579 if (type != error_mark_node)
25580 {
25581 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25582 if (flag_checking)
25583 {
25584 dw_die_ref die = lookup_type_die (type);
25585 if (die)
25586 check_die (die);
25587 }
25588 }
25589 }
25590
25591 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25592 things which are local to the given block. */
25593
25594 static void
25595 gen_block_die (tree stmt, dw_die_ref context_die)
25596 {
25597 int must_output_die = 0;
25598 bool inlined_func;
25599
25600 /* Ignore blocks that are NULL. */
25601 if (stmt == NULL_TREE)
25602 return;
25603
25604 inlined_func = inlined_function_outer_scope_p (stmt);
25605
25606 /* If the block is one fragment of a non-contiguous block, do not
25607 process the variables, since they will have been done by the
25608 origin block. Do process subblocks. */
25609 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25610 {
25611 tree sub;
25612
25613 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25614 gen_block_die (sub, context_die);
25615
25616 return;
25617 }
25618
25619 /* Determine if we need to output any Dwarf DIEs at all to represent this
25620 block. */
25621 if (inlined_func)
25622 /* The outer scopes for inlinings *must* always be represented. We
25623 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25624 must_output_die = 1;
25625 else if (BLOCK_DIE (stmt))
25626 /* If we already have a DIE, then it was filled early. Meanwhile
25627 we might have pruned all BLOCK_VARS as optimized out, but we
25628 still want to generate high/low PC attributes, so output it. */
25629 must_output_die = 1;
25630 else if (TREE_USED (stmt)
25631 || TREE_ASM_WRITTEN (stmt)
25632 || BLOCK_ABSTRACT (stmt))
25633 {
25634 /* Determine if this block directly contains any "significant"
25635 local declarations which we will need to output DIEs for. */
25636 if (debug_info_level > DINFO_LEVEL_TERSE)
25637 {
25638 /* We are not in terse mode so any local declaration that
25639 is not ignored for debug purposes counts as being a
25640 "significant" one. */
25641 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25642 must_output_die = 1;
25643 else
25644 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25645 if (!DECL_IGNORED_P (var))
25646 {
25647 must_output_die = 1;
25648 break;
25649 }
25650 }
25651 else if (!dwarf2out_ignore_block (stmt))
25652 must_output_die = 1;
25653 }
25654
25655 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25656 DIE for any block which contains no significant local declarations at
25657 all. Rather, in such cases we just call `decls_for_scope' so that any
25658 needed Dwarf info for any sub-blocks will get properly generated. Note
25659 that in terse mode, our definition of what constitutes a "significant"
25660 local declaration gets restricted to include only inlined function
25661 instances and local (nested) function definitions. */
25662 if (must_output_die)
25663 {
25664 if (inlined_func)
25665 {
25666 /* If STMT block is abstract, that means we have been called
25667 indirectly from dwarf2out_abstract_function.
25668 That function rightfully marks the descendant blocks (of
25669 the abstract function it is dealing with) as being abstract,
25670 precisely to prevent us from emitting any
25671 DW_TAG_inlined_subroutine DIE as a descendant
25672 of an abstract function instance. So in that case, we should
25673 not call gen_inlined_subroutine_die.
25674
25675 Later though, when cgraph asks dwarf2out to emit info
25676 for the concrete instance of the function decl into which
25677 the concrete instance of STMT got inlined, the latter will lead
25678 to the generation of a DW_TAG_inlined_subroutine DIE. */
25679 if (! BLOCK_ABSTRACT (stmt))
25680 gen_inlined_subroutine_die (stmt, context_die);
25681 }
25682 else
25683 gen_lexical_block_die (stmt, context_die);
25684 }
25685 else
25686 decls_for_scope (stmt, context_die);
25687 }
25688
25689 /* Process variable DECL (or variable with origin ORIGIN) within
25690 block STMT and add it to CONTEXT_DIE. */
25691 static void
25692 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25693 {
25694 dw_die_ref die;
25695 tree decl_or_origin = decl ? decl : origin;
25696
25697 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25698 die = lookup_decl_die (decl_or_origin);
25699 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25700 {
25701 if (TYPE_DECL_IS_STUB (decl_or_origin))
25702 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25703 else
25704 die = lookup_decl_die (decl_or_origin);
25705 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25706 if (! die && ! early_dwarf)
25707 return;
25708 }
25709 else
25710 die = NULL;
25711
25712 /* Avoid creating DIEs for local typedefs and concrete static variables that
25713 will only be pruned later. */
25714 if ((origin || decl_ultimate_origin (decl))
25715 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25716 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25717 {
25718 origin = decl_ultimate_origin (decl_or_origin);
25719 if (decl && VAR_P (decl) && die != NULL)
25720 {
25721 die = lookup_decl_die (origin);
25722 if (die != NULL)
25723 equate_decl_number_to_die (decl, die);
25724 }
25725 return;
25726 }
25727
25728 if (die != NULL && die->die_parent == NULL)
25729 add_child_die (context_die, die);
25730 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25731 {
25732 if (early_dwarf)
25733 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25734 stmt, context_die);
25735 }
25736 else
25737 {
25738 if (decl && DECL_P (decl))
25739 {
25740 die = lookup_decl_die (decl);
25741
25742 /* Early-created DIEs do not have a parent, as the decls refer
25743 to the function as DECL_CONTEXT rather than the BLOCK. */
25744 if (die && die->die_parent == NULL)
25745 {
25746 gcc_assert (in_lto_p);
25747 add_child_die (context_die, die);
25748 }
25749 }
25750
25751 gen_decl_die (decl, origin, NULL, context_die);
25752 }
25753 }
25754
25755 /* Generate all of the decls declared within a given scope and (recursively)
25756 all of its sub-blocks. */
25757
25758 static void
25759 decls_for_scope (tree stmt, dw_die_ref context_die)
25760 {
25761 tree decl;
25762 unsigned int i;
25763 tree subblocks;
25764
25765 /* Ignore NULL blocks. */
25766 if (stmt == NULL_TREE)
25767 return;
25768
25769 /* Output the DIEs to represent all of the data objects and typedefs
25770 declared directly within this block but not within any nested
25771 sub-blocks. Also, nested function and tag DIEs have been
25772 generated with a parent of NULL; fix that up now. We don't
25773 have to do this if we're at -g1. */
25774 if (debug_info_level > DINFO_LEVEL_TERSE)
25775 {
25776 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25777 process_scope_var (stmt, decl, NULL_TREE, context_die);
25778 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25779 origin; avoid doing this twice, as we have no good way to see
25780 if we've done it once already. */
25781 if (! early_dwarf)
25782 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25783 {
25784 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25785 if (decl == current_function_decl)
25786 /* Ignore declarations of the current function: while they
25787 are declarations, gen_subprogram_die would treat them
25788 as definitions again because they are equal to
25789 current_function_decl, and would endlessly recurse. */;
25790 else if (TREE_CODE (decl) == FUNCTION_DECL)
25791 process_scope_var (stmt, decl, NULL_TREE, context_die);
25792 else
25793 process_scope_var (stmt, NULL_TREE, decl, context_die);
25794 }
25795 }
25796
25797 /* Even if we're at -g1, we need to process the subblocks in order to get
25798 inlined call information. */
25799
25800 /* Output the DIEs to represent all sub-blocks (and the items declared
25801 therein) of this block. */
25802 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25803 subblocks != NULL;
25804 subblocks = BLOCK_CHAIN (subblocks))
25805 gen_block_die (subblocks, context_die);
25806 }
25807
25808 /* Is this a typedef we can avoid emitting? */
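/* For example (C++ specific, illustrative only): a class such as
   `struct S { };' behaves as if it declared a member type name S for
   itself (the injected-class-name); emitting a DIE for that artificial
   member typedef would add nothing beyond the DIE for S itself.  */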
25809
25810 bool
25811 is_redundant_typedef (const_tree decl)
25812 {
25813 if (TYPE_DECL_IS_STUB (decl))
25814 return true;
25815
25816 if (DECL_ARTIFICIAL (decl)
25817 && DECL_CONTEXT (decl)
25818 && is_tagged_type (DECL_CONTEXT (decl))
25819 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25820 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25821 /* Also ignore the artificial member typedef for the class name. */
25822 return true;
25823
25824 return false;
25825 }
25826
25827 /* Return TRUE if TYPE is a typedef that names a type for linkage
25828 purposes. This kind of typedef is produced by the C++ FE for
25829 constructs like:
25830
25831 typedef struct {...} foo;
25832
25833 In that case, there is no typedef variant type produced for foo.
25834 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25835 struct type. */
25836
25837 static bool
25838 is_naming_typedef_decl (const_tree decl)
25839 {
25840 if (decl == NULL_TREE
25841 || TREE_CODE (decl) != TYPE_DECL
25842 || DECL_NAMELESS (decl)
25843 || !is_tagged_type (TREE_TYPE (decl))
25844 || DECL_IS_BUILTIN (decl)
25845 || is_redundant_typedef (decl)
25846 /* It looks like Ada produces TYPE_DECLs that are very similar
25847 to C++ naming typedefs but that have different
25848 semantics. Let's be specific to C++ for now. */
25849 || !is_cxx (decl))
25850 return FALSE;
25851
25852 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25853 && TYPE_NAME (TREE_TYPE (decl)) == decl
25854 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25855 != TYPE_NAME (TREE_TYPE (decl))));
25856 }
25857
25858 /* Looks up the DIE for a context. */
25859
25860 static inline dw_die_ref
25861 lookup_context_die (tree context)
25862 {
25863 if (context)
25864 {
25865 /* Find die that represents this context. */
25866 if (TYPE_P (context))
25867 {
25868 context = TYPE_MAIN_VARIANT (context);
25869 dw_die_ref ctx = lookup_type_die (context);
25870 if (!ctx)
25871 return NULL;
25872 return strip_naming_typedef (context, ctx);
25873 }
25874 else
25875 return lookup_decl_die (context);
25876 }
25877 return comp_unit_die ();
25878 }
25879
25880 /* Returns the DIE for a context. */
25881
25882 static inline dw_die_ref
25883 get_context_die (tree context)
25884 {
25885 if (context)
25886 {
25887 /* Find die that represents this context. */
25888 if (TYPE_P (context))
25889 {
25890 context = TYPE_MAIN_VARIANT (context);
25891 return strip_naming_typedef (context, force_type_die (context));
25892 }
25893 else
25894 return force_decl_die (context);
25895 }
25896 return comp_unit_die ();
25897 }
25898
25899 /* Returns the DIE for decl. A DIE will always be returned. */
25900
25901 static dw_die_ref
25902 force_decl_die (tree decl)
25903 {
25904 dw_die_ref decl_die;
25905 unsigned saved_external_flag;
25906 tree save_fn = NULL_TREE;
25907 decl_die = lookup_decl_die (decl);
25908 if (!decl_die)
25909 {
25910 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25911
25912 decl_die = lookup_decl_die (decl);
25913 if (decl_die)
25914 return decl_die;
25915
25916 switch (TREE_CODE (decl))
25917 {
25918 case FUNCTION_DECL:
25919 /* Clear current_function_decl, so that gen_subprogram_die thinks
25920 that this is a declaration. At this point, we just want to force
25921 declaration die. */
25922 save_fn = current_function_decl;
25923 current_function_decl = NULL_TREE;
25924 gen_subprogram_die (decl, context_die);
25925 current_function_decl = save_fn;
25926 break;
25927
25928 case VAR_DECL:
25929 /* Set external flag to force declaration die. Restore it after
25930 gen_decl_die() call. */
25931 saved_external_flag = DECL_EXTERNAL (decl);
25932 DECL_EXTERNAL (decl) = 1;
25933 gen_decl_die (decl, NULL, NULL, context_die);
25934 DECL_EXTERNAL (decl) = saved_external_flag;
25935 break;
25936
25937 case NAMESPACE_DECL:
25938 if (dwarf_version >= 3 || !dwarf_strict)
25939 dwarf2out_decl (decl);
25940 else
25941 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25942 decl_die = comp_unit_die ();
25943 break;
25944
25945 case TRANSLATION_UNIT_DECL:
25946 decl_die = comp_unit_die ();
25947 break;
25948
25949 default:
25950 gcc_unreachable ();
25951 }
25952
25953 /* We should be able to find the DIE now. */
25954 if (!decl_die)
25955 decl_die = lookup_decl_die (decl);
25956 gcc_assert (decl_die);
25957 }
25958
25959 return decl_die;
25960 }
25961
25962 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25963 always returned. */
25964
25965 static dw_die_ref
25966 force_type_die (tree type)
25967 {
25968 dw_die_ref type_die;
25969
25970 type_die = lookup_type_die (type);
25971 if (!type_die)
25972 {
25973 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25974
25975 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25976 false, context_die);
25977 gcc_assert (type_die);
25978 }
25979 return type_die;
25980 }
25981
25982 /* Force out any required namespaces to be able to output DECL,
25983 and return the new context_die for it, if it's changed. */
25984
25985 static dw_die_ref
25986 setup_namespace_context (tree thing, dw_die_ref context_die)
25987 {
25988 tree context = (DECL_P (thing)
25989 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25990 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25991 /* Force out the namespace. */
25992 context_die = force_decl_die (context);
25993
25994 return context_die;
25995 }
25996
25997 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25998 type) within its namespace, if appropriate.
25999
26000 For compatibility with older debuggers, namespace DIEs only contain
26001 declarations; all definitions are emitted at CU scope, with
26002 DW_AT_specification pointing to the declaration (like with class
26003 members). */
26004
26005 static dw_die_ref
26006 declare_in_namespace (tree thing, dw_die_ref context_die)
26007 {
26008 dw_die_ref ns_context;
26009
26010 if (debug_info_level <= DINFO_LEVEL_TERSE)
26011 return context_die;
26012
26013 /* External declarations in the local scope only need to be emitted
26014 once, not once in the namespace and once in the scope.
26015
26016 This avoids declaring the `extern' below in the
26017 namespace DIE as well as in the innermost scope:
26018
26019 namespace S
26020 {
26021 int i=5;
26022 int foo()
26023 {
26024 int i=8;
26025 extern int i;
26026 return i;
26027 }
26028 }
26029 */
26030 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26031 return context_die;
26032
26033 /* If this decl is from an inlined function, then don't try to emit it in its
26034 namespace, as we will get confused. It would have already been emitted
26035 when the abstract instance of the inline function was emitted anyway. */
26036 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26037 return context_die;
26038
26039 ns_context = setup_namespace_context (thing, context_die);
26040
26041 if (ns_context != context_die)
26042 {
26043 if (is_fortran ())
26044 return ns_context;
26045 if (DECL_P (thing))
26046 gen_decl_die (thing, NULL, NULL, ns_context);
26047 else
26048 gen_type_die (thing, ns_context);
26049 }
26050 return context_die;
26051 }
26052
26053 /* Generate a DIE for a namespace or namespace alias. */
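/* For instance, `namespace N { }' is expected to yield a DW_TAG_namespace
   (or DW_TAG_module for Fortran) DIE, while a C++ namespace alias such as
   `namespace M = N;' is represented below by a DW_TAG_imported_declaration
   whose DW_AT_import refers to the DIE for N.  */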
26054
26055 static void
26056 gen_namespace_die (tree decl, dw_die_ref context_die)
26057 {
26058 dw_die_ref namespace_die;
26059
26060 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26061 they are an alias of. */
26062 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26063 {
26064 /* Output a real namespace or module. */
26065 context_die = setup_namespace_context (decl, comp_unit_die ());
26066 namespace_die = new_die (is_fortran ()
26067 ? DW_TAG_module : DW_TAG_namespace,
26068 context_die, decl);
26069 /* For Fortran modules defined in a different CU, don't add src coords. */
26070 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26071 {
26072 const char *name = dwarf2_name (decl, 0);
26073 if (name)
26074 add_name_attribute (namespace_die, name);
26075 }
26076 else
26077 add_name_and_src_coords_attributes (namespace_die, decl);
26078 if (DECL_EXTERNAL (decl))
26079 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26080 equate_decl_number_to_die (decl, namespace_die);
26081 }
26082 else
26083 {
26084 /* Output a namespace alias. */
26085
26086 /* Force out the namespace we are an alias of, if necessary. */
26087 dw_die_ref origin_die
26088 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26089
26090 if (DECL_FILE_SCOPE_P (decl)
26091 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26092 context_die = setup_namespace_context (decl, comp_unit_die ());
26093 /* Now create the namespace alias DIE. */
26094 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26095 add_name_and_src_coords_attributes (namespace_die, decl);
26096 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26097 equate_decl_number_to_die (decl, namespace_die);
26098 }
26099 if ((dwarf_version >= 5 || !dwarf_strict)
26100 && lang_hooks.decls.decl_dwarf_attribute (decl,
26101 DW_AT_export_symbols) == 1)
26102 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26103
26104 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26105 if (want_pubnames ())
26106 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26107 }
26108
26109 /* Generate Dwarf debug information for a decl described by DECL.
26110 The return value is currently only meaningful for PARM_DECLs,
26111 for all other decls it returns NULL.
26112
26113 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26114 It can be NULL otherwise. */
26115
26116 static dw_die_ref
26117 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26118 dw_die_ref context_die)
26119 {
26120 tree decl_or_origin = decl ? decl : origin;
26121 tree class_origin = NULL, ultimate_origin;
26122
26123 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26124 return NULL;
26125
26126 switch (TREE_CODE (decl_or_origin))
26127 {
26128 case ERROR_MARK:
26129 break;
26130
26131 case CONST_DECL:
26132 if (!is_fortran () && !is_ada ())
26133 {
26134 /* The individual enumerators of an enum type get output when we output
26135 the Dwarf representation of the relevant enum type itself. */
26136 break;
26137 }
26138
26139 /* Emit its type. */
26140 gen_type_die (TREE_TYPE (decl), context_die);
26141
26142 /* And its containing namespace. */
26143 context_die = declare_in_namespace (decl, context_die);
26144
26145 gen_const_die (decl, context_die);
26146 break;
26147
26148 case FUNCTION_DECL:
26149 #if 0
26150 /* FIXME */
26151 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26152 on local redeclarations of global functions. That seems broken. */
26153 if (current_function_decl != decl)
26154 /* This is only a declaration. */;
26155 #endif
26156
26157 /* We should have abstract copies already and should not generate
26158 stray type DIEs in late LTO dumping. */
26159 if (! early_dwarf)
26160 ;
26161
26162 /* If we're emitting a clone, emit info for the abstract instance. */
26163 else if (origin || DECL_ORIGIN (decl) != decl)
26164 dwarf2out_abstract_function (origin
26165 ? DECL_ORIGIN (origin)
26166 : DECL_ABSTRACT_ORIGIN (decl));
26167
26168 /* If we're emitting a possibly inlined function emit it as
26169 abstract instance. */
26170 else if (cgraph_function_possibly_inlined_p (decl)
26171 && ! DECL_ABSTRACT_P (decl)
26172 && ! class_or_namespace_scope_p (context_die)
26173 /* dwarf2out_abstract_function won't emit a die if this is just
26174 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26175 that case, because that works only if we have a die. */
26176 && DECL_INITIAL (decl) != NULL_TREE)
26177 dwarf2out_abstract_function (decl);
26178
26179 /* Otherwise we're emitting the primary DIE for this decl. */
26180 else if (debug_info_level > DINFO_LEVEL_TERSE)
26181 {
26182 /* Before we describe the FUNCTION_DECL itself, make sure that we
26183 have its containing type. */
26184 if (!origin)
26185 origin = decl_class_context (decl);
26186 if (origin != NULL_TREE)
26187 gen_type_die (origin, context_die);
26188
26189 /* And its return type. */
26190 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26191
26192 /* And its virtual context. */
26193 if (DECL_VINDEX (decl) != NULL_TREE)
26194 gen_type_die (DECL_CONTEXT (decl), context_die);
26195
26196 /* Make sure we have a member DIE for decl. */
26197 if (origin != NULL_TREE)
26198 gen_type_die_for_member (origin, decl, context_die);
26199
26200 /* And its containing namespace. */
26201 context_die = declare_in_namespace (decl, context_die);
26202 }
26203
26204 /* Now output a DIE to represent the function itself. */
26205 if (decl)
26206 gen_subprogram_die (decl, context_die);
26207 break;
26208
26209 case TYPE_DECL:
26210 /* If we are in terse mode, don't generate any DIEs to represent any
26211 actual typedefs. */
26212 if (debug_info_level <= DINFO_LEVEL_TERSE)
26213 break;
26214
26215 /* In the special case of a TYPE_DECL node representing the declaration
26216 of some type tag, if the given TYPE_DECL is marked as having been
26217 instantiated from some other (original) TYPE_DECL node (e.g. one which
26218 was generated within the original definition of an inline function) we
26219 used to generate a special (abbreviated) DW_TAG_structure_type,
26220 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26221 should actually be referencing those DIEs, as variable DIEs with that
26222 type would already be emitted in the abstract origin, so it was always
26223 removed during unused type pruning. Don't add anything in this
26224 case. */
26225 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26226 break;
26227
26228 if (is_redundant_typedef (decl))
26229 gen_type_die (TREE_TYPE (decl), context_die);
26230 else
26231 /* Output a DIE to represent the typedef itself. */
26232 gen_typedef_die (decl, context_die);
26233 break;
26234
26235 case LABEL_DECL:
26236 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26237 gen_label_die (decl, context_die);
26238 break;
26239
26240 case VAR_DECL:
26241 case RESULT_DECL:
26242 /* If we are in terse mode, don't generate any DIEs to represent any
26243 variable declarations or definitions. */
26244 if (debug_info_level <= DINFO_LEVEL_TERSE)
26245 break;
26246
26247 /* Avoid generating stray type DIEs during late dwarf dumping.
26248 All types have been dumped early. */
26249 if (early_dwarf
26250 /* ??? But in LTRANS we cannot annotate early created variably
26251 modified type DIEs without copying them and adjusting all
26252 references to them. Dump them again as happens for inlining
26253 which copies both the decl and the types. */
26254 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26255 in VLA bound information for example. */
26256 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26257 current_function_decl)))
26258 {
26259 /* Output any DIEs that are needed to specify the type of this data
26260 object. */
26261 if (decl_by_reference_p (decl_or_origin))
26262 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26263 else
26264 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26265 }
26266
26267 if (early_dwarf)
26268 {
26269 /* And its containing type. */
26270 class_origin = decl_class_context (decl_or_origin);
26271 if (class_origin != NULL_TREE)
26272 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26273
26274 /* And its containing namespace. */
26275 context_die = declare_in_namespace (decl_or_origin, context_die);
26276 }
26277
26278 /* Now output the DIE to represent the data object itself. This gets
26279 complicated because of the possibility that the VAR_DECL really
26280 represents an inlined instance of a formal parameter for an inline
26281 function. */
26282 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26283 if (ultimate_origin != NULL_TREE
26284 && TREE_CODE (ultimate_origin) == PARM_DECL)
26285 gen_formal_parameter_die (decl, origin,
26286 true /* Emit name attribute. */,
26287 context_die);
26288 else
26289 gen_variable_die (decl, origin, context_die);
26290 break;
26291
26292 case FIELD_DECL:
26293 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26294 /* Ignore the nameless fields that are used to skip bits but handle C++
26295 anonymous unions and structs. */
26296 if (DECL_NAME (decl) != NULL_TREE
26297 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26298 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26299 {
26300 gen_type_die (member_declared_type (decl), context_die);
26301 gen_field_die (decl, ctx, context_die);
26302 }
26303 break;
26304
26305 case PARM_DECL:
26306 /* Avoid generating stray type DIEs during late dwarf dumping.
26307 All types have been dumped early. */
26308 if (early_dwarf
26309 /* ??? But in LTRANS we cannot annotate early created variably
26310 modified type DIEs without copying them and adjusting all
26311 references to them. Dump them again as happens for inlining
26312 which copies both the decl and the types. */
26313 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26314 in VLA bound information for example. */
26315 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26316 current_function_decl)))
26317 {
26318 if (DECL_BY_REFERENCE (decl_or_origin))
26319 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26320 else
26321 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26322 }
26323 return gen_formal_parameter_die (decl, origin,
26324 true /* Emit name attribute. */,
26325 context_die);
26326
26327 case NAMESPACE_DECL:
26328 if (dwarf_version >= 3 || !dwarf_strict)
26329 gen_namespace_die (decl, context_die);
26330 break;
26331
26332 case IMPORTED_DECL:
26333 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26334 DECL_CONTEXT (decl), context_die);
26335 break;
26336
26337 case NAMELIST_DECL:
26338 gen_namelist_decl (DECL_NAME (decl), context_die,
26339 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26340 break;
26341
26342 default:
26343 /* Probably some frontend-internal decl. Assume we don't care. */
26344 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26345 break;
26346 }
26347
26348 return NULL;
26349 }
26350 \f
26351 /* Output initial debug information for global DECL. Called at the
26352 end of the parsing process.
26353
26354 This is the initial debug generation process. As such, the DIEs
26355 generated may be incomplete. A later debug generation pass
26356 (dwarf2out_late_global_decl) will augment the information generated
26357 in this pass (e.g., with complete location info). */
26358
26359 static void
26360 dwarf2out_early_global_decl (tree decl)
26361 {
26362 set_early_dwarf s;
26363
26364 /* gen_decl_die() will set DECL_ABSTRACT because
26365 cgraph_function_possibly_inlined_p() returns true. This in
26366 turn will cause DW_AT_inline attributes to be set.
26367
26368 This happens because at early dwarf generation, there is no
26369 cgraph information, causing cgraph_function_possibly_inlined_p()
26370 to return true. Trick cgraph_function_possibly_inlined_p()
26371 while we generate dwarf early. */
26372 bool save = symtab->global_info_ready;
26373 symtab->global_info_ready = true;
26374
26375 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26376 other DECLs and they can point to template types or other things
26377 that dwarf2out can't handle when done via dwarf2out_decl. */
26378 if (TREE_CODE (decl) != TYPE_DECL
26379 && TREE_CODE (decl) != PARM_DECL)
26380 {
26381 if (TREE_CODE (decl) == FUNCTION_DECL)
26382 {
26383 tree save_fndecl = current_function_decl;
26384
26385 /* For nested functions, make sure we have DIEs for the parents first
26386 so that all nested DIEs are generated at the proper scope in the
26387 first shot. */
26388 tree context = decl_function_context (decl);
26389 if (context != NULL)
26390 {
26391 dw_die_ref context_die = lookup_decl_die (context);
26392 current_function_decl = context;
26393
26394 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26395 enough so that it lands in its own context. This avoids type
26396 pruning issues later on. */
26397 if (context_die == NULL || is_declaration_die (context_die))
26398 dwarf2out_decl (context);
26399 }
26400
26401 /* Emit an abstract origin of a function first. This happens
26402 with C++ constructor clones for example and makes
26403 dwarf2out_abstract_function happy which requires the early
26404 DIE of the abstract instance to be present. */
26405 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26406 dw_die_ref origin_die;
26407 if (origin != NULL
26408 /* Do not emit the DIE multiple times but make sure to
26409 process it fully here in case we just saw a declaration. */
26410 && ((origin_die = lookup_decl_die (origin)) == NULL
26411 || is_declaration_die (origin_die)))
26412 {
26413 current_function_decl = origin;
26414 dwarf2out_decl (origin);
26415 }
26416
26417 /* Emit the DIE for decl but avoid doing that multiple times. */
26418 dw_die_ref old_die;
26419 if ((old_die = lookup_decl_die (decl)) == NULL
26420 || is_declaration_die (old_die))
26421 {
26422 current_function_decl = decl;
26423 dwarf2out_decl (decl);
26424 }
26425
26426 current_function_decl = save_fndecl;
26427 }
26428 else
26429 dwarf2out_decl (decl);
26430 }
26431 symtab->global_info_ready = save;
26432 }
26433
26434 /* Return whether EXPR is an expression with the following pattern:
26435 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
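/* A tree of that shape typically comes from a dereference of a constant
   address, e.g. a DECL_VALUE_EXPR like `*(int *) 0x1234' (the type and
   address here are only illustrative).  */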
26436
26437 static bool
26438 is_trivial_indirect_ref (tree expr)
26439 {
26440 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26441 return false;
26442
26443 tree nop = TREE_OPERAND (expr, 0);
26444 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26445 return false;
26446
26447 tree int_cst = TREE_OPERAND (nop, 0);
26448 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26449 }
26450
26451 /* Output debug information for global decl DECL. Called from
26452 toplev.c after compilation proper has finished. */
26453
26454 static void
26455 dwarf2out_late_global_decl (tree decl)
26456 {
26457 /* Fill-in any location information we were unable to determine
26458 on the first pass. */
26459 if (VAR_P (decl))
26460 {
26461 dw_die_ref die = lookup_decl_die (decl);
26462
26463 /* We may have to generate early debug late for LTO in case debug
26464 was not enabled at compile-time or the target doesn't support
26465 the LTO early debug scheme. */
26466 if (! die && in_lto_p)
26467 {
26468 dwarf2out_decl (decl);
26469 die = lookup_decl_die (decl);
26470 }
26471
26472 if (die)
26473 {
26474 /* We get called via the symtab code invoking late_global_decl
26475 for symbols that are optimized out.
26476
26477 Do not add locations for those, except if they have a
26478 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26479 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26480 INDIRECT_REF expression, as this could generate relocations to
26481 text symbols in LTO object files, which is invalid. */
26482 varpool_node *node = varpool_node::get (decl);
26483 if ((! node || ! node->definition)
26484 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26485 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26486 tree_add_const_value_attribute_for_decl (die, decl);
26487 else
26488 add_location_or_const_value_attribute (die, decl, false);
26489 }
26490 }
26491 }
26492
26493 /* Output debug information for type decl DECL. Called from toplev.c
26494 and from language front ends (to record built-in types). */
26495 static void
26496 dwarf2out_type_decl (tree decl, int local)
26497 {
26498 if (!local)
26499 {
26500 set_early_dwarf s;
26501 dwarf2out_decl (decl);
26502 }
26503 }
26504
26505 /* Output debug information for imported module or decl DECL.
26506 NAME is non-NULL name in the lexical block if the decl has been renamed.
26507 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26508 that DECL belongs to.
26509 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26510 static void
26511 dwarf2out_imported_module_or_decl_1 (tree decl,
26512 tree name,
26513 tree lexical_block,
26514 dw_die_ref lexical_block_die)
26515 {
26516 expanded_location xloc;
26517 dw_die_ref imported_die = NULL;
26518 dw_die_ref at_import_die;
26519
26520 if (TREE_CODE (decl) == IMPORTED_DECL)
26521 {
26522 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26523 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26524 gcc_assert (decl);
26525 }
26526 else
26527 xloc = expand_location (input_location);
26528
26529 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26530 {
26531 at_import_die = force_type_die (TREE_TYPE (decl));
26532 /* For namespace N { typedef void T; } using N::T; base_type_die
26533 returns NULL, but DW_TAG_imported_declaration requires
26534 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26535 if (!at_import_die)
26536 {
26537 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26538 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26539 at_import_die = lookup_type_die (TREE_TYPE (decl));
26540 gcc_assert (at_import_die);
26541 }
26542 }
26543 else
26544 {
26545 at_import_die = lookup_decl_die (decl);
26546 if (!at_import_die)
26547 {
26548 /* If we're trying to avoid duplicate debug info, we may not have
26549 emitted the member decl for this field. Emit it now. */
26550 if (TREE_CODE (decl) == FIELD_DECL)
26551 {
26552 tree type = DECL_CONTEXT (decl);
26553
26554 if (TYPE_CONTEXT (type)
26555 && TYPE_P (TYPE_CONTEXT (type))
26556 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26557 DINFO_USAGE_DIR_USE))
26558 return;
26559 gen_type_die_for_member (type, decl,
26560 get_context_die (TYPE_CONTEXT (type)));
26561 }
26562 if (TREE_CODE (decl) == NAMELIST_DECL)
26563 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26564 get_context_die (DECL_CONTEXT (decl)),
26565 NULL_TREE);
26566 else
26567 at_import_die = force_decl_die (decl);
26568 }
26569 }
26570
26571 if (TREE_CODE (decl) == NAMESPACE_DECL)
26572 {
26573 if (dwarf_version >= 3 || !dwarf_strict)
26574 imported_die = new_die (DW_TAG_imported_module,
26575 lexical_block_die,
26576 lexical_block);
26577 else
26578 return;
26579 }
26580 else
26581 imported_die = new_die (DW_TAG_imported_declaration,
26582 lexical_block_die,
26583 lexical_block);
26584
26585 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26586 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26587 if (debug_column_info && xloc.column)
26588 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26589 if (name)
26590 add_AT_string (imported_die, DW_AT_name,
26591 IDENTIFIER_POINTER (name));
26592 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26593 }
26594
26595 /* Output debug information for imported module or decl DECL.
26596 NAME is non-NULL name in context if the decl has been renamed.
26597 CHILD is true if decl is one of the renamed decls as part of
26598 importing whole module.
26599 IMPLICIT is set if this hook is called for an implicit import
26600 such as inline namespace. */
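/* As a rough illustration: a C++ `using namespace N;' is expected to
   produce a DW_TAG_imported_module DIE in the current scope, whereas
   `using N::x;' produces a DW_TAG_imported_declaration whose
   DW_AT_import refers to the DIE for x (see
   dwarf2out_imported_module_or_decl_1 above).  */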
26601
26602 static void
26603 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26604 bool child, bool implicit)
26605 {
26606 /* dw_die_ref at_import_die; */
26607 dw_die_ref scope_die;
26608
26609 if (debug_info_level <= DINFO_LEVEL_TERSE)
26610 return;
26611
26612 gcc_assert (decl);
26613
26614 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26615 should be enough; for DWARF4 and older, even if we emit
26616 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26617 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26618 if (implicit
26619 && dwarf_version >= 5
26620 && lang_hooks.decls.decl_dwarf_attribute (decl,
26621 DW_AT_export_symbols) == 1)
26622 return;
26623
26624 set_early_dwarf s;
26625
26626 /* To emit DW_TAG_imported_module or DW_TAG_imported_decl, we need two DIEs.
26627 We need the decl DIE for the reference and a scope DIE. First, get the
26628 DIE for the decl itself. */
26629
26630 /* Get the scope die for decl context. Use comp_unit_die for global module
26631 or decl. If a die is not found for non-globals, force a new die. */
26632 if (context
26633 && TYPE_P (context)
26634 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26635 return;
26636
26637 scope_die = get_context_die (context);
26638
26639 if (child)
26640 {
26641 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26642 there is nothing we can do here. */
26643 if (dwarf_version < 3 && dwarf_strict)
26644 return;
26645
26646 gcc_assert (scope_die->die_child);
26647 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26648 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26649 scope_die = scope_die->die_child;
26650 }
26651
26652 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26653 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26654 }
26655
26656 /* Output debug information for namelists. */
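/* For a Fortran namelist like

     NAMELIST /nml/ a, b

   this emits a DW_TAG_namelist DIE named "nml" whose children are
   DW_TAG_namelist_item DIEs, each referring via DW_AT_namelist_items to
   the DIE of the corresponding member (illustrative sketch).  */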
26657
26658 static dw_die_ref
26659 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26660 {
26661 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26662 tree value;
26663 unsigned i;
26664
26665 if (debug_info_level <= DINFO_LEVEL_TERSE)
26666 return NULL;
26667
26668 gcc_assert (scope_die != NULL);
26669 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26670 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26671
26672 /* If there are no item_decls, we have a nondefining namelist, e.g.
26673 with USE association; hence, set DW_AT_declaration. */
26674 if (item_decls == NULL_TREE)
26675 {
26676 add_AT_flag (nml_die, DW_AT_declaration, 1);
26677 return nml_die;
26678 }
26679
26680 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26681 {
26682 nml_item_ref_die = lookup_decl_die (value);
26683 if (!nml_item_ref_die)
26684 nml_item_ref_die = force_decl_die (value);
26685
26686 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26687 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26688 }
26689 return nml_die;
26690 }
26691
26692
26693 /* Write the debugging output for DECL. */
26694
26695 static void
26696 dwarf2out_decl (tree decl)
26697 {
26698 dw_die_ref context_die = comp_unit_die ();
26699
26700 switch (TREE_CODE (decl))
26701 {
26702 case ERROR_MARK:
26703 return;
26704
26705 case FUNCTION_DECL:
26706 /* If we're a nested function, initially use a parent of NULL; if we're
26707 a plain function, this will be fixed up in decls_for_scope. If
26708 we're a method, it will be ignored, since we already have a DIE. */
26709 if (decl_function_context (decl)
26710 /* But if we're in terse mode, we don't care about scope. */
26711 && debug_info_level > DINFO_LEVEL_TERSE)
26712 context_die = NULL;
26713 break;
26714
26715 case VAR_DECL:
26716 /* For local statics lookup proper context die. */
26717 if (local_function_static (decl))
26718 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26719
26720 /* If we are in terse mode, don't generate any DIEs to represent any
26721 variable declarations or definitions. */
26722 if (debug_info_level <= DINFO_LEVEL_TERSE)
26723 return;
26724 break;
26725
26726 case CONST_DECL:
26727 if (debug_info_level <= DINFO_LEVEL_TERSE)
26728 return;
26729 if (!is_fortran () && !is_ada ())
26730 return;
26731 if (TREE_STATIC (decl) && decl_function_context (decl))
26732 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26733 break;
26734
26735 case NAMESPACE_DECL:
26736 case IMPORTED_DECL:
26737 if (debug_info_level <= DINFO_LEVEL_TERSE)
26738 return;
26739 if (lookup_decl_die (decl) != NULL)
26740 return;
26741 break;
26742
26743 case TYPE_DECL:
26744 /* Don't emit stubs for types unless they are needed by other DIEs. */
26745 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26746 return;
26747
26748 /* Don't bother trying to generate any DIEs to represent any of the
26749 normal built-in types for the language we are compiling. */
26750 if (DECL_IS_BUILTIN (decl))
26751 return;
26752
26753 /* If we are in terse mode, don't generate any DIEs for types. */
26754 if (debug_info_level <= DINFO_LEVEL_TERSE)
26755 return;
26756
26757 /* If we're a function-scope tag, initially use a parent of NULL;
26758 this will be fixed up in decls_for_scope. */
26759 if (decl_function_context (decl))
26760 context_die = NULL;
26761
26762 break;
26763
26764 case NAMELIST_DECL:
26765 break;
26766
26767 default:
26768 return;
26769 }
26770
26771 gen_decl_die (decl, NULL, NULL, context_die);
26772
26773 if (flag_checking)
26774 {
26775 dw_die_ref die = lookup_decl_die (decl);
26776 if (die)
26777 check_die (die);
26778 }
26779 }
26780
26781 /* Write the debugging output for DECL. */
26782
26783 static void
26784 dwarf2out_function_decl (tree decl)
26785 {
26786 dwarf2out_decl (decl);
26787 call_arg_locations = NULL;
26788 call_arg_loc_last = NULL;
26789 call_site_count = -1;
26790 tail_call_site_count = -1;
26791 decl_loc_table->empty ();
26792 cached_dw_loc_list_table->empty ();
26793 }
26794
26795 /* Output a marker (i.e. a label) for the beginning of the generated code for
26796 a lexical block. */
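/* On typical ELF targets this emits labels such as .LBBn (with .LBEn
   from dwarf2out_end_block below); the corresponding DW_TAG_lexical_block
   DIE uses them for its code range attributes.  */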
26797
26798 static void
26799 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26800 unsigned int blocknum)
26801 {
26802 switch_to_section (current_function_section ());
26803 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26804 }
26805
26806 /* Output a marker (i.e. a label) for the end of the generated code for a
26807 lexical block. */
26808
26809 static void
26810 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26811 {
26812 switch_to_section (current_function_section ());
26813 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26814 }
26815
26816 /* Returns nonzero if it is appropriate not to emit any debugging
26817 information for BLOCK, because it doesn't contain any instructions.
26818
26819 Don't allow this for blocks with nested functions or local classes
26820 as we would end up with orphans, and in the presence of scheduling
26821 we may end up calling them anyway. */
26822
26823 static bool
26824 dwarf2out_ignore_block (const_tree block)
26825 {
26826 tree decl;
26827 unsigned int i;
26828
26829 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26830 if (TREE_CODE (decl) == FUNCTION_DECL
26831 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26832 return 0;
26833 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26834 {
26835 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26836 if (TREE_CODE (decl) == FUNCTION_DECL
26837 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26838 return 0;
26839 }
26840
26841 return 1;
26842 }
26843
26844 /* Hash table routines for file_hash. */
26845
26846 bool
26847 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26848 {
26849 return filename_cmp (p1->filename, p2) == 0;
26850 }
26851
26852 hashval_t
26853 dwarf_file_hasher::hash (dwarf_file_data *p)
26854 {
26855 return htab_hash_string (p->filename);
26856 }
26857
26858 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26859 dwarf2out.c) and return its "index". The index of each (known) filename is
26860 just a unique number which is associated with only that one filename. We
26861 need such numbers for the sake of generating labels (in the .debug_sfnames
26862 section) and references to those file numbers (in the .debug_srcinfo
26863 and .debug_macinfo sections). If the filename given as an argument is not
26864 found in our current list, add it to the list and assign it the next
26865 available unique index number. */
26866
26867 static struct dwarf_file_data *
26868 lookup_filename (const char *file_name)
26869 {
26870 struct dwarf_file_data * created;
26871
26872 if (!file_name)
26873 return NULL;
26874
26875 dwarf_file_data **slot
26876 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26877 INSERT);
26878 if (*slot)
26879 return *slot;
26880
26881 created = ggc_alloc<dwarf_file_data> ();
26882 created->filename = file_name;
26883 created->emitted_number = 0;
26884 *slot = created;
26885 return created;
26886 }
26887
26888 /* If the assembler will construct the file table, then translate the compiler
26889 internal file table number into the assembler file table number, and emit
26890 a .file directive if we haven't already emitted one. The file table
26891 numbers are different because we prune debug info for unused variables and
26892 types, which may include filenames. */
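/* For example, the first filename emitted through this function is
   expected to produce assembler output roughly like

	.file 1 "foo.c"

   where "foo.c" stands for the (possibly remapped) source name; the
   actual number depends on the order in which files are first used.  */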
26893
26894 static int
26895 maybe_emit_file (struct dwarf_file_data * fd)
26896 {
26897 if (! fd->emitted_number)
26898 {
26899 if (last_emitted_file)
26900 fd->emitted_number = last_emitted_file->emitted_number + 1;
26901 else
26902 fd->emitted_number = 1;
26903 last_emitted_file = fd;
26904
26905 if (output_asm_line_debug_info ())
26906 {
26907 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26908 output_quoted_string (asm_out_file,
26909 remap_debug_filename (fd->filename));
26910 fputc ('\n', asm_out_file);
26911 }
26912 }
26913
26914 return fd->emitted_number;
26915 }
26916
26917 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26918 That generation should happen after function debug info has been
26919 generated. The value of the attribute is the constant value of ARG. */
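/* Sketch only (the exact DIE layout is decided elsewhere): for a C++
   instantiation such as A<3> of `template <int N> struct A;', the DIE
   scheduled here would typically be the DW_TAG_template_value_parameter
   for N, and the attribute added later would be DW_AT_const_value 3.  */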
26920
26921 static void
26922 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26923 {
26924 die_arg_entry entry;
26925
26926 if (!die || !arg)
26927 return;
26928
26929 gcc_assert (early_dwarf);
26930
26931 if (!tmpl_value_parm_die_table)
26932 vec_alloc (tmpl_value_parm_die_table, 32);
26933
26934 entry.die = die;
26935 entry.arg = arg;
26936 vec_safe_push (tmpl_value_parm_die_table, entry);
26937 }
26938
26939 /* Return TRUE if T is an instance of a generic type, FALSE
26940 otherwise. */
26941
26942 static bool
26943 generic_type_p (tree t)
26944 {
26945 if (t == NULL_TREE || !TYPE_P (t))
26946 return false;
26947 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26948 }
26949
26950 /* Schedule the generation of the generic parameter dies for the
26951 instance of generic type T. The proper generation itself is later
26952 done by gen_scheduled_generic_parms_dies. */
26953
26954 static void
26955 schedule_generic_params_dies_gen (tree t)
26956 {
26957 if (!generic_type_p (t))
26958 return;
26959
26960 gcc_assert (early_dwarf);
26961
26962 if (!generic_type_instances)
26963 vec_alloc (generic_type_instances, 256);
26964
26965 vec_safe_push (generic_type_instances, t);
26966 }
26967
26968 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26969 by append_entry_to_tmpl_value_parm_die_table. This function must
26970 be called after function DIEs have been generated. */
26971
26972 static void
26973 gen_remaining_tmpl_value_param_die_attribute (void)
26974 {
26975 if (tmpl_value_parm_die_table)
26976 {
26977 unsigned i, j;
26978 die_arg_entry *e;
26979
26980 /* We do this in two phases - first get the cases we can
26981 handle during early-finish, preserving those we cannot
26982 (containing symbolic constants where we don't yet know
26983 whether we are going to output the referenced symbols).
26984 For those we try again at late-finish. */
26985 j = 0;
26986 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26987 {
26988 if (!e->die->removed
26989 && !tree_add_const_value_attribute (e->die, e->arg))
26990 {
26991 dw_loc_descr_ref loc = NULL;
26992 if (! early_dwarf
26993 && (dwarf_version >= 5 || !dwarf_strict))
26994 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26995 if (loc)
26996 add_AT_loc (e->die, DW_AT_location, loc);
26997 else
26998 (*tmpl_value_parm_die_table)[j++] = *e;
26999 }
27000 }
27001 tmpl_value_parm_die_table->truncate (j);
27002 }
27003 }
27004
27005 /* Generate generic parameters DIEs for instances of generic types
27006 that have been previously scheduled by
27007 schedule_generic_params_dies_gen. This function must be called
27008 after all the types of the CU have been laid out. */
27009
27010 static void
27011 gen_scheduled_generic_parms_dies (void)
27012 {
27013 unsigned i;
27014 tree t;
27015
27016 if (!generic_type_instances)
27017 return;
27018
27019 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27020 if (COMPLETE_TYPE_P (t))
27021 gen_generic_params_dies (t);
27022
27023 generic_type_instances = NULL;
27024 }
27025
27026
27027 /* Replace DW_AT_name for the decl with name. */
27028
27029 static void
27030 dwarf2out_set_name (tree decl, tree name)
27031 {
27032 dw_die_ref die;
27033 dw_attr_node *attr;
27034 const char *dname;
27035
27036 die = TYPE_SYMTAB_DIE (decl);
27037 if (!die)
27038 return;
27039
27040 dname = dwarf2_name (name, 0);
27041 if (!dname)
27042 return;
27043
27044 attr = get_AT (die, DW_AT_name);
27045 if (attr)
27046 {
27047 struct indirect_string_node *node;
27048
27049 node = find_AT_string (dname);
27050 /* Replace the string. */
27051 attr->dw_attr_val.v.val_str = node;
27052 }
27053
27054 else
27055 add_name_attribute (die, dname);
27056 }
27057
27058 /* True if before or during processing of the first function being emitted. */
27059 static bool in_first_function_p = true;
27060 /* True if loc_note during dwarf2out_var_location call might still be
27061 before first real instruction at address equal to .Ltext0. */
27062 static bool maybe_at_text_label_p = true;
27063 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27064 static unsigned int first_loclabel_num_not_at_text_label;
27065
27066 /* Look ahead for a real insn, or for a begin stmt marker. */
27067
27068 static rtx_insn *
27069 dwarf2out_next_real_insn (rtx_insn *loc_note)
27070 {
27071 rtx_insn *next_real = NEXT_INSN (loc_note);
27072
27073 while (next_real)
27074 if (INSN_P (next_real))
27075 break;
27076 else
27077 next_real = NEXT_INSN (next_real);
27078
27079 return next_real;
27080 }
27081
27082 /* Called by the final INSN scan whenever we see a var location. We
27083 use it to drop labels in the right places, and throw the location in
27084 our lookup table. */
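/* The labels dropped here use the "LVL" prefix below, i.e. .LVLn on
   typical ELF targets; location list and call site entries later refer
   back to these labels.  */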
27085
27086 static void
27087 dwarf2out_var_location (rtx_insn *loc_note)
27088 {
27089 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27090 struct var_loc_node *newloc;
27091 rtx_insn *next_real, *next_note;
27092 rtx_insn *call_insn = NULL;
27093 static const char *last_label;
27094 static const char *last_postcall_label;
27095 static bool last_in_cold_section_p;
27096 static rtx_insn *expected_next_loc_note;
27097 tree decl;
27098 bool var_loc_p;
27099 var_loc_view view = 0;
27100
27101 if (!NOTE_P (loc_note))
27102 {
27103 if (CALL_P (loc_note))
27104 {
27105 maybe_reset_location_view (loc_note, cur_line_info_table);
27106 call_site_count++;
27107 if (SIBLING_CALL_P (loc_note))
27108 tail_call_site_count++;
27109 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27110 {
27111 call_insn = loc_note;
27112 loc_note = NULL;
27113 var_loc_p = false;
27114
27115 next_real = dwarf2out_next_real_insn (call_insn);
27116 next_note = NULL;
27117 cached_next_real_insn = NULL;
27118 goto create_label;
27119 }
27120 if (optimize == 0 && !flag_var_tracking)
27121 {
27122 /* When the var-tracking pass is not running, there is no note
27123 for indirect calls whose target is compile-time known. In this
27124 case, process such calls specifically so that we generate call
27125 sites for them anyway. */
27126 rtx x = PATTERN (loc_note);
27127 if (GET_CODE (x) == PARALLEL)
27128 x = XVECEXP (x, 0, 0);
27129 if (GET_CODE (x) == SET)
27130 x = SET_SRC (x);
27131 if (GET_CODE (x) == CALL)
27132 x = XEXP (x, 0);
27133 if (!MEM_P (x)
27134 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27135 || !SYMBOL_REF_DECL (XEXP (x, 0))
27136 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27137 != FUNCTION_DECL))
27138 {
27139 call_insn = loc_note;
27140 loc_note = NULL;
27141 var_loc_p = false;
27142
27143 next_real = dwarf2out_next_real_insn (call_insn);
27144 next_note = NULL;
27145 cached_next_real_insn = NULL;
27146 goto create_label;
27147 }
27148 }
27149 }
27150 else if (!debug_variable_location_views)
27151 gcc_unreachable ();
27152 else
27153 maybe_reset_location_view (loc_note, cur_line_info_table);
27154
27155 return;
27156 }
27157
27158 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27159 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27160 return;
27161
27162 /* Optimize processing a large consecutive sequence of location
27163 notes so we don't spend too much time in next_real_insn. If the
27164 next insn is another location note, remember the next_real_insn
27165 calculation for next time. */
27166 next_real = cached_next_real_insn;
27167 if (next_real)
27168 {
27169 if (expected_next_loc_note != loc_note)
27170 next_real = NULL;
27171 }
27172
27173 next_note = NEXT_INSN (loc_note);
27174 if (! next_note
27175 || next_note->deleted ()
27176 || ! NOTE_P (next_note)
27177 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27178 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27179 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27180 next_note = NULL;
27181
27182 if (! next_real)
27183 next_real = dwarf2out_next_real_insn (loc_note);
27184
27185 if (next_note)
27186 {
27187 expected_next_loc_note = next_note;
27188 cached_next_real_insn = next_real;
27189 }
27190 else
27191 cached_next_real_insn = NULL;
27192
27193 /* If there are no instructions which would be affected by this note,
27194 don't do anything. */
27195 if (var_loc_p
27196 && next_real == NULL_RTX
27197 && !NOTE_DURING_CALL_P (loc_note))
27198 return;
27199
27200 create_label:
27201
27202 if (next_real == NULL_RTX)
27203 next_real = get_last_insn ();
27204
27205 /* If there were any real insns between the note we processed last time
27206 and this note (or if it is the first note), clear
27207 last_{,postcall_}label so that they are not reused this time. */
27208 if (last_var_location_insn == NULL_RTX
27209 || last_var_location_insn != next_real
27210 || last_in_cold_section_p != in_cold_section_p)
27211 {
27212 last_label = NULL;
27213 last_postcall_label = NULL;
27214 }
27215
27216 if (var_loc_p)
27217 {
27218 const char *label
27219 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27220 view = cur_line_info_table->view;
27221 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27222 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27223 if (newloc == NULL)
27224 return;
27225 }
27226 else
27227 {
27228 decl = NULL_TREE;
27229 newloc = NULL;
27230 }
27231
27232 /* If there were no real insns between the note we processed last time
27233 and this note, use the label we emitted last time. Otherwise
27234 create a new label and emit it. */
27235 if (last_label == NULL)
27236 {
27237 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27238 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27239 loclabel_num++;
27240 last_label = ggc_strdup (loclabel);
27241 /* See if loclabel might be equal to .Ltext0. If yes,
27242 bump first_loclabel_num_not_at_text_label. */
27243 if (!have_multiple_function_sections
27244 && in_first_function_p
27245 && maybe_at_text_label_p)
27246 {
27247 static rtx_insn *last_start;
27248 rtx_insn *insn;
27249 for (insn = loc_note; insn; insn = previous_insn (insn))
27250 if (insn == last_start)
27251 break;
27252 else if (!NONDEBUG_INSN_P (insn))
27253 continue;
27254 else
27255 {
27256 rtx body = PATTERN (insn);
27257 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27258 continue;
27259 /* Inline asm could occupy zero bytes. */
27260 else if (GET_CODE (body) == ASM_INPUT
27261 || asm_noperands (body) >= 0)
27262 continue;
27263 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27264 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27265 continue;
27266 #endif
27267 else
27268 {
27269 /* Assume insn has non-zero length. */
27270 maybe_at_text_label_p = false;
27271 break;
27272 }
27273 }
27274 if (maybe_at_text_label_p)
27275 {
27276 last_start = loc_note;
27277 first_loclabel_num_not_at_text_label = loclabel_num;
27278 }
27279 }
27280 }
27281
27282 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27283 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27284
27285 if (!var_loc_p)
27286 {
27287 struct call_arg_loc_node *ca_loc
27288 = ggc_cleared_alloc<call_arg_loc_node> ();
27289 rtx_insn *prev = call_insn;
27290
27291 ca_loc->call_arg_loc_note
27292 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27293 ca_loc->next = NULL;
27294 ca_loc->label = last_label;
27295 gcc_assert (prev
27296 && (CALL_P (prev)
27297 || (NONJUMP_INSN_P (prev)
27298 && GET_CODE (PATTERN (prev)) == SEQUENCE
27299 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27300 if (!CALL_P (prev))
27301 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27302 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27303
27304 /* Look for a SYMBOL_REF in the "prev" instruction. */
27305 rtx x = get_call_rtx_from (PATTERN (prev));
27306 if (x)
27307 {
27308 /* Try to get the call symbol, if any. */
27309 if (MEM_P (XEXP (x, 0)))
27310 x = XEXP (x, 0);
27311 /* First, look for a memory access to a symbol_ref. */
27312 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27313 && SYMBOL_REF_DECL (XEXP (x, 0))
27314 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27315 ca_loc->symbol_ref = XEXP (x, 0);
27316 /* Otherwise, look at a compile-time known user-level function
27317 declaration. */
27318 else if (MEM_P (x)
27319 && MEM_EXPR (x)
27320 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27321 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27322 }
27323
27324 ca_loc->block = insn_scope (prev);
27325 if (call_arg_locations)
27326 call_arg_loc_last->next = ca_loc;
27327 else
27328 call_arg_locations = ca_loc;
27329 call_arg_loc_last = ca_loc;
27330 }
27331 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27332 {
27333 newloc->label = last_label;
27334 newloc->view = view;
27335 }
27336 else
27337 {
27338 if (!last_postcall_label)
27339 {
27340 sprintf (loclabel, "%s-1", last_label);
27341 last_postcall_label = ggc_strdup (loclabel);
27342 }
27343 newloc->label = last_postcall_label;
27344 /* ??? This view is at last_label, not last_label-1, but we
27345 could only assume view at last_label-1 is zero if we could
27346 assume calls always have length greater than one. This is
27347 probably true in general, though there might be a rare
27348 exception to this rule, e.g. if a call insn is optimized out
27349 by target magic. Then, even the -1 in the label will be
27350 wrong, which might invalidate the range. Anyway, using view,
27351 though technically possibly incorrect, will work as far as
27352 ranges go: since L-1 is in the middle of the call insn,
27353 (L-1).0 and (L-1).V shouldn't make any difference, and having
27354 the loclist entry refer to the .loc entry might be useful, so
27355 leave it like this. */
27356 newloc->view = view;
27357 }
27358
27359 if (var_loc_p && flag_debug_asm)
27360 {
27361 const char *name, *sep, *patstr;
27362 if (decl && DECL_NAME (decl))
27363 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27364 else
27365 name = "";
27366 if (NOTE_VAR_LOCATION_LOC (loc_note))
27367 {
27368 sep = " => ";
27369 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27370 }
27371 else
27372 {
27373 sep = " ";
27374 patstr = "RESET";
27375 }
27376 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27377 name, sep, patstr);
27378 }
27379
27380 last_var_location_insn = next_real;
27381 last_in_cold_section_p = in_cold_section_p;
27382 }
27383
27384 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27385 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27386 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27387 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27388 BLOCK_FRAGMENT_ORIGIN links. */
27389 static bool
27390 block_within_block_p (tree block, tree outer, bool bothways)
27391 {
27392 if (block == outer)
27393 return true;
27394
27395 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27396 for (tree context = BLOCK_SUPERCONTEXT (block);
27397 context != outer;
27398 context = BLOCK_SUPERCONTEXT (context))
27399 if (!context || TREE_CODE (context) != BLOCK)
27400 return false;
27401
27402 if (!bothways)
27403 return true;
27404
27405 /* Now check that each block is actually referenced by its
27406 parent. */
27407 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27408 context = BLOCK_SUPERCONTEXT (context))
27409 {
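/* A minimal explanatory note: block fragments carry no BLOCK_SUBBLOCKS
   of their own (asserted below), so BLOCK is looked for among the
   subblocks of the fragment origin instead.  */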
27410 if (BLOCK_FRAGMENT_ORIGIN (context))
27411 {
27412 gcc_assert (!BLOCK_SUBBLOCKS (context));
27413 context = BLOCK_FRAGMENT_ORIGIN (context);
27414 }
27415 for (tree sub = BLOCK_SUBBLOCKS (context);
27416 sub != block;
27417 sub = BLOCK_CHAIN (sub))
27418 if (!sub)
27419 return false;
27420 if (context == outer)
27421 return true;
27422 else
27423 block = context;
27424 }
27425 }
27426
27427 /* Called during final while assembling the marker of the entry point
27428 for an inlined function. */
27429
27430 static void
27431 dwarf2out_inline_entry (tree block)
27432 {
27433 gcc_assert (debug_inline_points);
27434
27435 /* If we can't represent it, don't bother. */
27436 if (!(dwarf_version >= 3 || !dwarf_strict))
27437 return;
27438
27439 gcc_assert (DECL_P (block_ultimate_origin (block)));
27440
27441 /* Sanity check the block tree. This would catch a case in which
27442 BLOCK got removed from the tree reachable from the outermost
27443 lexical block, but got retained in markers. It would still link
27444 back to its parents, but some ancestor would be missing a link
27445 down the path to the sub BLOCK. If the block got removed, its
27446 BLOCK_NUMBER will not be a usable value. */
27447 if (flag_checking)
27448 gcc_assert (block_within_block_p (block,
27449 DECL_INITIAL (current_function_decl),
27450 true));
27451
27452 gcc_assert (inlined_function_outer_scope_p (block));
27453 gcc_assert (!BLOCK_DIE (block));
27454
27455 if (BLOCK_FRAGMENT_ORIGIN (block))
27456 block = BLOCK_FRAGMENT_ORIGIN (block);
27457 /* Can the entry point ever not be at the beginning of an
27458 unfragmented lexical block? */
27459 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27460 || (cur_line_info_table
27461 && !ZERO_VIEW_P (cur_line_info_table->view))))
27462 return;
27463
27464 if (!inline_entry_data_table)
27465 inline_entry_data_table
27466 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27467
27468
27469 inline_entry_data **iedp
27470 = inline_entry_data_table->find_slot_with_hash (block,
27471 htab_hash_pointer (block),
27472 INSERT);
27473 if (*iedp)
27474 /* ??? Ideally, we'd record all entry points for the same inlined
27475 function (some may have been duplicated by e.g. unrolling), but
27476 we have no way to represent that ATM. */
27477 return;
27478
27479 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27480 ied->block = block;
27481 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27482 ied->label_num = BLOCK_NUMBER (block);
27483 if (cur_line_info_table)
27484 ied->view = cur_line_info_table->view;
27485
27486 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27487
27488 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27489 BLOCK_NUMBER (block));
27490 ASM_OUTPUT_LABEL (asm_out_file, label);
27491 }
27492
27493 /* Called from finalize_size_functions for size functions so that their body
27494 can be encoded in the debug info to describe the layout of variable-length
27495 structures. */
27496
27497 static void
27498 dwarf2out_size_function (tree decl)
27499 {
27500 function_to_dwarf_procedure (decl);
27501 }
27502
27503 /* Note in one location list that the text section has changed. */
27504
27505 int
27506 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27507 {
27508 var_loc_list *list = *slot;
27509 if (list->first)
27510 list->last_before_switch
27511 = list->last->next ? list->last->next : list->last;
27512 return 1;
27513 }
27514
27515 /* Note in all location lists that the text section has changed. */
27516
27517 static void
27518 var_location_switch_text_section (void)
27519 {
27520 if (decl_loc_table == NULL)
27521 return;
27522
27523 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27524 }
27525
27526 /* Create a new line number table. */
27527
27528 static dw_line_info_table *
27529 new_line_info_table (void)
27530 {
27531 dw_line_info_table *table;
27532
27533 table = ggc_cleared_alloc<dw_line_info_table> ();
27534 table->file_num = 1;
27535 table->line_num = 1;
27536 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27537 FORCE_RESET_NEXT_VIEW (table->view);
27538 table->symviews_since_reset = 0;
27539
27540 return table;
27541 }
27542
27543 /* Look up the "current" table into which we emit line info, so
27544 that we don't have to do it for every source line. */
27545
27546 static void
27547 set_cur_line_info_table (section *sec)
27548 {
27549 dw_line_info_table *table;
27550
27551 if (sec == text_section)
27552 table = text_section_line_info;
27553 else if (sec == cold_text_section)
27554 {
27555 table = cold_text_section_line_info;
27556 if (!table)
27557 {
27558 cold_text_section_line_info = table = new_line_info_table ();
27559 table->end_label = cold_end_label;
27560 }
27561 }
27562 else
27563 {
27564 const char *end_label;
27565
27566 if (crtl->has_bb_partition)
27567 {
27568 if (in_cold_section_p)
27569 end_label = crtl->subsections.cold_section_end_label;
27570 else
27571 end_label = crtl->subsections.hot_section_end_label;
27572 }
27573 else
27574 {
27575 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27576 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27577 current_function_funcdef_no);
27578 end_label = ggc_strdup (label);
27579 }
27580
27581 table = new_line_info_table ();
27582 table->end_label = end_label;
27583
27584 vec_safe_push (separate_line_info, table);
27585 }
27586
27587 if (output_asm_line_debug_info ())
27588 table->is_stmt = (cur_line_info_table
27589 ? cur_line_info_table->is_stmt
27590 : DWARF_LINE_DEFAULT_IS_STMT_START);
27591 cur_line_info_table = table;
27592 }
27593
27594
27595 /* We need to reset the locations at the beginning of each
27596 function. We can't do this in the end_function hook, because the
27597 declarations that use the locations won't have been output when
27598 that hook is called. Also compute have_multiple_function_sections here. */
27599
27600 static void
27601 dwarf2out_begin_function (tree fun)
27602 {
27603 section *sec = function_section (fun);
27604
27605 if (sec != text_section)
27606 have_multiple_function_sections = true;
27607
27608 if (crtl->has_bb_partition && !cold_text_section)
27609 {
27610 gcc_assert (current_function_decl == fun);
27611 cold_text_section = unlikely_text_section ();
27612 switch_to_section (cold_text_section);
27613 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27614 switch_to_section (sec);
27615 }
27616
27617 dwarf2out_note_section_used ();
27618 call_site_count = 0;
27619 tail_call_site_count = 0;
27620
27621 set_cur_line_info_table (sec);
27622 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27623 }
27624
27625 /* Helper function of dwarf2out_end_function, called only after emitting
27626 the very first function into assembly. Check if some .debug_loc range
27627 might end with a .LVL* label that could be equal to .Ltext0.
27628 In that case we must force using absolute addresses in .debug_loc ranges,
27629 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27630 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27631 list terminator.
27632 Set have_multiple_function_sections to true in that case and
27633 terminate htab traversal. */
27634
27635 int
27636 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27637 {
27638 var_loc_list *entry = *slot;
27639 struct var_loc_node *node;
27640
27641 node = entry->first;
27642 if (node && node->next && node->next->label)
27643 {
27644 unsigned int i;
27645 const char *label = node->next->label;
27646 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27647
27648 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27649 {
27650 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27651 if (strcmp (label, loclabel) == 0)
27652 {
27653 have_multiple_function_sections = true;
27654 return 0;
27655 }
27656 }
27657 }
27658 return 1;
27659 }
27660
27661 /* Hook called after emitting a function into assembly.
27662 This does something only for the very first function emitted. */
27663
27664 static void
27665 dwarf2out_end_function (unsigned int)
27666 {
27667 if (in_first_function_p
27668 && !have_multiple_function_sections
27669 && first_loclabel_num_not_at_text_label
27670 && decl_loc_table)
27671 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27672 in_first_function_p = false;
27673 maybe_at_text_label_p = false;
27674 }
27675
27676 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27677 front-ends register a translation unit even before dwarf2out_init is
27678 called. */
27679 static tree main_translation_unit = NULL_TREE;
27680
27681 /* Hook called by front-ends after they have built their main translation
27682 unit. Associate comp_unit_die () with UNIT. */
27683
27684 static void
27685 dwarf2out_register_main_translation_unit (tree unit)
27686 {
27687 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27688 && main_translation_unit == NULL_TREE);
27689 main_translation_unit = unit;
27690 /* If dwarf2out_init has not been called yet, it will perform the association
27691 itself looking at main_translation_unit. */
27692 if (decl_die_table != NULL)
27693 equate_decl_number_to_die (unit, comp_unit_die ());
27694 }
27695
27696 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27697
27698 static void
27699 push_dw_line_info_entry (dw_line_info_table *table,
27700 enum dw_line_info_opcode opcode, unsigned int val)
27701 {
27702 dw_line_info_entry e;
27703 e.opcode = opcode;
27704 e.val = val;
27705 vec_safe_push (table->entries, e);
27706 }
27707
27708 /* Output a label to mark the beginning of a source code line entry
27709 and record information relating to this source line, in
27710 'line_info_table' for later output of the .debug_line section. */
27711 /* ??? The discriminator parameter ought to be unsigned. */
27712
27713 static void
27714 dwarf2out_source_line (unsigned int line, unsigned int column,
27715 const char *filename,
27716 int discriminator, bool is_stmt)
27717 {
27718 unsigned int file_num;
27719 dw_line_info_table *table;
27720 static var_loc_view lvugid;
27721
27722 if (debug_info_level < DINFO_LEVEL_TERSE)
27723 return;
27724
27725 table = cur_line_info_table;
27726
27727 if (line == 0)
27728 {
27729 if (debug_variable_location_views
27730 && output_asm_line_debug_info ()
27731 && table && !RESETTING_VIEW_P (table->view))
27732 {
27733 /* If we're using the assembler to compute view numbers, we
27734 can't issue a .loc directive for line zero, so we can't
27735 get a view number at this point. We might attempt to
27736 compute it from the previous view, or equate it to a
27737 subsequent view (though it might not be there!), but
27738 since we're omitting the line number entry, we might as
27739 well omit the view number as well. That means pretending
27740 it's a view number zero, which might very well turn out
27741 to be correct. ??? Extend the assembler so that the
27742 compiler could emit e.g. ".locview .LVU#", to output a
27743 view without changing line number information. We'd then
27744 have to count it in symviews_since_reset; when it's omitted,
27745 it doesn't count. */
27746 if (!zero_view_p)
27747 zero_view_p = BITMAP_GGC_ALLOC ();
27748 bitmap_set_bit (zero_view_p, table->view);
27749 if (flag_debug_asm)
27750 {
27751 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27752 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27753 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27754 ASM_COMMENT_START);
27755 assemble_name (asm_out_file, label);
27756 putc ('\n', asm_out_file);
27757 }
27758 table->view = ++lvugid;
27759 }
27760 return;
27761 }
27762
27763 /* The discriminator column was added in dwarf4. Simplify the code below
27764 by zeroing the discriminator out if we're not supposed to output it. */
27765 if (dwarf_version < 4 && dwarf_strict)
27766 discriminator = 0;
27767
27768 if (!debug_column_info)
27769 column = 0;
27770
27771 file_num = maybe_emit_file (lookup_filename (filename));
27772
27773 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27774 the debugger has used the second (possibly duplicate) line number
27775 at the beginning of the function to mark the end of the prologue.
27776 We could eliminate any other duplicates within the function. For
27777 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27778 that second line number entry. */
27779 /* Recall that this end-of-prologue indication is *not* the same thing
27780 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27781 to which the hook corresponds, follows the last insn that was
27782 emitted by gen_prologue. What we need is to precede the first insn
27783 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27784 insn that corresponds to something the user wrote. These may be
27785 very different locations once scheduling is enabled. */
27786
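/* The duplicate-elision check below is disabled (note the leading 0 in
   the condition), presumably pending the prologue-end questions raised
   in the ??? comments above; it is kept to document what such a check
   would compare.  */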
27787 if (0 && file_num == table->file_num
27788 && line == table->line_num
27789 && column == table->column_num
27790 && discriminator == table->discrim_num
27791 && is_stmt == table->is_stmt)
27792 return;
27793
27794 switch_to_section (current_function_section ());
27795
27796 /* If requested, emit something human-readable. */
27797 if (flag_debug_asm)
27798 {
27799 if (debug_column_info)
27800 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27801 filename, line, column);
27802 else
27803 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27804 filename, line);
27805 }
27806
27807 if (output_asm_line_debug_info ())
27808 {
27809 /* Emit the .loc directive understood by GNU as. */
27810 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27811 file_num, line, is_stmt, discriminator */
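/* For example (illustrative only), a directive emitted here might read:
   .loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5
   where the view operand appears only when variable location views are
   enabled.  */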
27812 fputs ("\t.loc ", asm_out_file);
27813 fprint_ul (asm_out_file, file_num);
27814 putc (' ', asm_out_file);
27815 fprint_ul (asm_out_file, line);
27816 putc (' ', asm_out_file);
27817 fprint_ul (asm_out_file, column);
27818
27819 if (is_stmt != table->is_stmt)
27820 {
27821 fputs (" is_stmt ", asm_out_file);
27822 putc (is_stmt ? '1' : '0', asm_out_file);
27823 }
27824 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27825 {
27826 gcc_assert (discriminator > 0);
27827 fputs (" discriminator ", asm_out_file);
27828 fprint_ul (asm_out_file, (unsigned long) discriminator);
27829 }
27830 if (debug_variable_location_views)
27831 {
27832 if (!RESETTING_VIEW_P (table->view))
27833 {
27834 table->symviews_since_reset++;
27835 if (table->symviews_since_reset > symview_upper_bound)
27836 symview_upper_bound = table->symviews_since_reset;
27837 /* When we're using the assembler to compute view
27838 numbers, we output symbolic labels after "view" in
27839 .loc directives, and the assembler will set them for
27840 us, so that we can refer to the view numbers in
27841 location lists. The only exceptions are when we know
27842 a view will be zero: "-0" is a forced reset, used
27843 e.g. in the beginning of functions, whereas "0" tells
27844 the assembler to check that there was a PC change
27845 since the previous view, in a way that implicitly
27846 resets the next view. */
27847 fputs (" view ", asm_out_file);
27848 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27849 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27850 assemble_name (asm_out_file, label);
27851 table->view = ++lvugid;
27852 }
27853 else
27854 {
27855 table->symviews_since_reset = 0;
27856 if (FORCE_RESETTING_VIEW_P (table->view))
27857 fputs (" view -0", asm_out_file);
27858 else
27859 fputs (" view 0", asm_out_file);
27860 /* Mark the present view as a zero view. Earlier debug
27861 binds may have already added its id to loclists to be
27862 emitted later, so we can't reuse the id for something
27863 else. However, it's good to know whether a view is
27864 known to be zero, because then we may be able to
27865 optimize out locviews that are all zeros, so take
27866 note of it in zero_view_p. */
27867 if (!zero_view_p)
27868 zero_view_p = BITMAP_GGC_ALLOC ();
27869 bitmap_set_bit (zero_view_p, lvugid);
27870 table->view = ++lvugid;
27871 }
27872 }
27873 putc ('\n', asm_out_file);
27874 }
27875 else
27876 {
27877 unsigned int label_num = ++line_info_label_num;
27878
27879 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27880
27881 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27882 push_dw_line_info_entry (table, LI_adv_address, label_num);
27883 else
27884 push_dw_line_info_entry (table, LI_set_address, label_num);
27885 if (debug_variable_location_views)
27886 {
27887 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27888 if (resetting)
27889 table->view = 0;
27890
27891 if (flag_debug_asm)
27892 fprintf (asm_out_file, "\t%s view %s%d\n",
27893 ASM_COMMENT_START,
27894 resetting ? "-" : "",
27895 table->view);
27896
27897 table->view++;
27898 }
27899 if (file_num != table->file_num)
27900 push_dw_line_info_entry (table, LI_set_file, file_num);
27901 if (discriminator != table->discrim_num)
27902 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27903 if (is_stmt != table->is_stmt)
27904 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27905 push_dw_line_info_entry (table, LI_set_line, line);
27906 if (debug_column_info)
27907 push_dw_line_info_entry (table, LI_set_column, column);
27908 }
27909
27910 table->file_num = file_num;
27911 table->line_num = line;
27912 table->column_num = column;
27913 table->discrim_num = discriminator;
27914 table->is_stmt = is_stmt;
27915 table->in_use = true;
27916 }
27917
27918 /* Record the beginning of a new source file. */
27919
27920 static void
27921 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27922 {
27923 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27924 {
27925 macinfo_entry e;
27926 e.code = DW_MACINFO_start_file;
27927 e.lineno = lineno;
27928 e.info = ggc_strdup (filename);
27929 vec_safe_push (macinfo_table, e);
27930 }
27931 }
27932
27933 /* Record the end of a source file. */
27934
27935 static void
27936 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27937 {
27938 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27939 {
27940 macinfo_entry e;
27941 e.code = DW_MACINFO_end_file;
27942 e.lineno = lineno;
27943 e.info = NULL;
27944 vec_safe_push (macinfo_table, e);
27945 }
27946 }
27947
27948 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27949 the tail part of the directive line, i.e. the part which is past the
27950 initial whitespace, #, whitespace, directive-name, whitespace part. */
27951
27952 static void
27953 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27954 const char *buffer ATTRIBUTE_UNUSED)
27955 {
27956 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27957 {
27958 macinfo_entry e;
27959 /* Insert a dummy first entry to be able to optimize the whole
27960 predefined macro block using DW_MACRO_import. */
27961 if (macinfo_table->is_empty () && lineno <= 1)
27962 {
27963 e.code = 0;
27964 e.lineno = 0;
27965 e.info = NULL;
27966 vec_safe_push (macinfo_table, e);
27967 }
27968 e.code = DW_MACINFO_define;
27969 e.lineno = lineno;
27970 e.info = ggc_strdup (buffer);
27971 vec_safe_push (macinfo_table, e);
27972 }
27973 }
27974
27975 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27976 the tail part of the directive line, i.e. the part which is past the
27977 initial whitespace, #, whitespace, directive-name, whitespace part. */
27978
27979 static void
27980 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27981 const char *buffer ATTRIBUTE_UNUSED)
27982 {
27983 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27984 {
27985 macinfo_entry e;
27986 /* Insert a dummy first entry to be able to optimize the whole
27987 predefined macro block using DW_MACRO_import. */
27988 if (macinfo_table->is_empty () && lineno <= 1)
27989 {
27990 e.code = 0;
27991 e.lineno = 0;
27992 e.info = NULL;
27993 vec_safe_push (macinfo_table, e);
27994 }
27995 e.code = DW_MACINFO_undef;
27996 e.lineno = lineno;
27997 e.info = ggc_strdup (buffer);
27998 vec_safe_push (macinfo_table, e);
27999 }
28000 }
28001
28002 /* Helpers to manipulate the hash table of macinfo entries. */
28003
28004 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28005 {
28006 static inline hashval_t hash (const macinfo_entry *);
28007 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28008 };
28009
28010 inline hashval_t
28011 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28012 {
28013 return htab_hash_string (entry->info);
28014 }
28015
28016 inline bool
28017 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28018 const macinfo_entry *entry2)
28019 {
28020 return !strcmp (entry1->info, entry2->info);
28021 }
28022
28023 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28024
28025 /* Output a single .debug_macinfo entry. */
28026
28027 static void
28028 output_macinfo_op (macinfo_entry *ref)
28029 {
28030 int file_num;
28031 size_t len;
28032 struct indirect_string_node *node;
28033 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28034 struct dwarf_file_data *fd;
28035
28036 switch (ref->code)
28037 {
28038 case DW_MACINFO_start_file:
28039 fd = lookup_filename (ref->info);
28040 file_num = maybe_emit_file (fd);
28041 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28042 dw2_asm_output_data_uleb128 (ref->lineno,
28043 "Included from line number %lu",
28044 (unsigned long) ref->lineno);
28045 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28046 break;
28047 case DW_MACINFO_end_file:
28048 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28049 break;
28050 case DW_MACINFO_define:
28051 case DW_MACINFO_undef:
28052 len = strlen (ref->info) + 1;
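/* Use the indirect *_strp forms only when that is likely a win: the
   string must be longer than the offset that would replace it, the
   target must support indirect strings in debug sections, and
   .debug_str must be mergeable so identical strings can be shared.  */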
28053 if (!dwarf_strict
28054 && len > DWARF_OFFSET_SIZE
28055 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28056 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28057 {
28058 ref->code = ref->code == DW_MACINFO_define
28059 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28060 output_macinfo_op (ref);
28061 return;
28062 }
28063 dw2_asm_output_data (1, ref->code,
28064 ref->code == DW_MACINFO_define
28065 ? "Define macro" : "Undefine macro");
28066 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28067 (unsigned long) ref->lineno);
28068 dw2_asm_output_nstring (ref->info, -1, "The macro");
28069 break;
28070 case DW_MACRO_define_strp:
28071 case DW_MACRO_undef_strp:
28072 node = find_AT_string (ref->info);
28073 gcc_assert (node
28074 && (node->form == DW_FORM_strp
28075 || node->form == dwarf_FORM (DW_FORM_strx)));
28076 dw2_asm_output_data (1, ref->code,
28077 ref->code == DW_MACRO_define_strp
28078 ? "Define macro strp"
28079 : "Undefine macro strp");
28080 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28081 (unsigned long) ref->lineno);
28082 if (node->form == DW_FORM_strp)
28083 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28084 debug_str_section, "The macro: \"%s\"",
28085 ref->info);
28086 else
28087 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28088 ref->info);
28089 break;
28090 case DW_MACRO_import:
28091 dw2_asm_output_data (1, ref->code, "Import");
28092 ASM_GENERATE_INTERNAL_LABEL (label,
28093 DEBUG_MACRO_SECTION_LABEL,
28094 ref->lineno + macinfo_label_base);
28095 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28096 break;
28097 default:
28098 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28099 ASM_COMMENT_START, (unsigned long) ref->code);
28100 break;
28101 }
28102 }
28103
28104 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28105 other compilation unit .debug_macinfo sections. IDX is the index of the
28106 first define/undef op. If the ops can be shared, emit a DW_MACRO_import
28107 entry referencing them and return the number of ops that should be
28108 emitted in a comdat .debug_macinfo section instead.
28109 If the define/undef entry should be emitted normally, return 0. */
28110
28111 static unsigned
28112 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28113 macinfo_hash_type **macinfo_htab)
28114 {
28115 macinfo_entry *first, *second, *cur, *inc;
28116 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28117 unsigned char checksum[16];
28118 struct md5_ctx ctx;
28119 char *grp_name, *tail;
28120 const char *base;
28121 unsigned int i, count, encoded_filename_len, linebuf_len;
28122 macinfo_entry **slot;
28123
28124 first = &(*macinfo_table)[idx];
28125 second = &(*macinfo_table)[idx + 1];
28126
28127 /* Optimize only if there are at least two consecutive define/undef ops,
28128 and either all of them are before first DW_MACINFO_start_file
28129 with lineno {0,1} (i.e. predefined macro block), or all of them are
28130 in some included header file. */
28131 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28132 return 0;
28133 if (vec_safe_is_empty (files))
28134 {
28135 if (first->lineno > 1 || second->lineno > 1)
28136 return 0;
28137 }
28138 else if (first->lineno == 0)
28139 return 0;
28140
28141 /* Find the last define/undef entry that can be grouped together
28142 with first and at the same time compute md5 checksum of their
28143 codes, linenumbers and strings. */
28144 md5_init_ctx (&ctx);
28145 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28146 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28147 break;
28148 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28149 break;
28150 else
28151 {
28152 unsigned char code = cur->code;
28153 md5_process_bytes (&code, 1, &ctx);
28154 checksum_uleb128 (cur->lineno, &ctx);
28155 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28156 }
28157 md5_finish_ctx (&ctx, checksum);
28158 count = i - idx;
28159
28160 /* From the containing include filename (if any) pick up just
28161 usable characters from its basename. */
28162 if (vec_safe_is_empty (files))
28163 base = "";
28164 else
28165 base = lbasename (files->last ().info);
28166 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28167 if (ISIDNUM (base[i]) || base[i] == '.')
28168 encoded_filename_len++;
28169 /* Count the '.' appended after the filename. */
28170 if (encoded_filename_len)
28171 encoded_filename_len++;
28172
28173 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28174 linebuf_len = strlen (linebuf);
28175
28176 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
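/* For example (hypothetical), a run of defines starting at line 12 of
   an included stdint.h might yield a group name such as
   "wm4.stdint.h.12.0123456789abcdef0123456789abcdef".  */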
28177 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28178 + 16 * 2 + 1);
28179 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28180 tail = grp_name + 4;
28181 if (encoded_filename_len)
28182 {
28183 for (i = 0; base[i]; i++)
28184 if (ISIDNUM (base[i]) || base[i] == '.')
28185 *tail++ = base[i];
28186 *tail++ = '.';
28187 }
28188 memcpy (tail, linebuf, linebuf_len);
28189 tail += linebuf_len;
28190 *tail++ = '.';
28191 for (i = 0; i < 16; i++)
28192 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28193
28194 /* Construct a macinfo_entry for DW_MACRO_import
28195 in the empty vector entry before the first define/undef. */
28196 inc = &(*macinfo_table)[idx - 1];
28197 inc->code = DW_MACRO_import;
28198 inc->lineno = 0;
28199 inc->info = ggc_strdup (grp_name);
28200 if (!*macinfo_htab)
28201 *macinfo_htab = new macinfo_hash_type (10);
28202 /* Avoid emitting duplicates. */
28203 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28204 if (*slot != NULL)
28205 {
28206 inc->code = 0;
28207 inc->info = NULL;
28208 /* If such an entry has been used before, just emit
28209 a DW_MACRO_import op. */
28210 inc = *slot;
28211 output_macinfo_op (inc);
28212 /* And clear all macinfo_entry in the range to avoid emitting them
28213 in the second pass. */
28214 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28215 {
28216 cur->code = 0;
28217 cur->info = NULL;
28218 }
28219 }
28220 else
28221 {
28222 *slot = inc;
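/* Reuse the lineno field as a sequential label number for the new
   comdat group; output_macinfo_op adds macinfo_label_base to it when
   generating the DEBUG_MACRO_SECTION_LABEL reference.  */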
28223 inc->lineno = (*macinfo_htab)->elements ();
28224 output_macinfo_op (inc);
28225 }
28226 return count;
28227 }
28228
28229 /* Save any strings needed by the macinfo table in the debug str
28230 table. All strings must be collected into the table by the time
28231 index_string is called. */
28232
28233 static void
28234 save_macinfo_strings (void)
28235 {
28236 unsigned len;
28237 unsigned i;
28238 macinfo_entry *ref;
28239
28240 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28241 {
28242 switch (ref->code)
28243 {
28244 /* Match the logic in output_macinfo_op to decide on
28245 indirect strings. */
28246 case DW_MACINFO_define:
28247 case DW_MACINFO_undef:
28248 len = strlen (ref->info) + 1;
28249 if (!dwarf_strict
28250 && len > DWARF_OFFSET_SIZE
28251 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28252 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28253 set_indirect_string (find_AT_string (ref->info));
28254 break;
28255 case DW_MACRO_define_strp:
28256 case DW_MACRO_undef_strp:
28257 set_indirect_string (find_AT_string (ref->info));
28258 break;
28259 default:
28260 break;
28261 }
28262 }
28263 }
28264
28265 /* Output macinfo section(s). */
28266
28267 static void
28268 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28269 {
28270 unsigned i;
28271 unsigned long length = vec_safe_length (macinfo_table);
28272 macinfo_entry *ref;
28273 vec<macinfo_entry, va_gc> *files = NULL;
28274 macinfo_hash_type *macinfo_htab = NULL;
28275 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28276
28277 if (! length)
28278 return;
28279
28280 /* output_macinfo* uses these interchangeably. */
28281 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28282 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28283 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28284 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28285
28286 /* AIX Assembler inserts the length, so adjust the reference to match the
28287 offset expected by debuggers. */
28288 strcpy (dl_section_ref, debug_line_label);
28289 if (XCOFF_DEBUGGING_INFO)
28290 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28291
28292 /* For .debug_macro emit the section header. */
28293 if (!dwarf_strict || dwarf_version >= 5)
28294 {
28295 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28296 "DWARF macro version number");
28297 if (DWARF_OFFSET_SIZE == 8)
28298 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28299 else
28300 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28301 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28302 debug_line_section, NULL);
28303 }
28304
28305 /* The first loop emits the primary .debug_macinfo section and clears
28306 each macinfo_entry after it has been emitted.
28307 If a longer range of define/undef ops can be optimized using
28308 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in the
28309 vector entry before the first define/undef of the range, while the
28310 define/undef ops themselves are kept, unemitted, for the second loop. */
28311 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28312 {
28313 switch (ref->code)
28314 {
28315 case DW_MACINFO_start_file:
28316 vec_safe_push (files, *ref);
28317 break;
28318 case DW_MACINFO_end_file:
28319 if (!vec_safe_is_empty (files))
28320 files->pop ();
28321 break;
28322 case DW_MACINFO_define:
28323 case DW_MACINFO_undef:
28324 if ((!dwarf_strict || dwarf_version >= 5)
28325 && HAVE_COMDAT_GROUP
28326 && vec_safe_length (files) != 1
28327 && i > 0
28328 && i + 1 < length
28329 && (*macinfo_table)[i - 1].code == 0)
28330 {
28331 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28332 if (count)
28333 {
28334 i += count - 1;
28335 continue;
28336 }
28337 }
28338 break;
28339 case 0:
28340 /* A dummy entry may be inserted at the beginning to be able
28341 to optimize the whole block of predefined macros. */
28342 if (i == 0)
28343 continue;
28344 default:
28345 break;
28346 }
28347 output_macinfo_op (ref);
28348 ref->info = NULL;
28349 ref->code = 0;
28350 }
28351
28352 if (!macinfo_htab)
28353 return;
28354
28355 /* Save the number of transparent includes so we can adjust the
28356 label number for the fat LTO object DWARF. */
28357 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28358
28359 delete macinfo_htab;
28360 macinfo_htab = NULL;
28361
28362 /* If any DW_MACRO_import entries were used, then at each of them
28363 terminate the current chain, switch to a new comdat .debug_macinfo
28364 section and emit the define/undef entries within it. */
28365 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28366 switch (ref->code)
28367 {
28368 case 0:
28369 continue;
28370 case DW_MACRO_import:
28371 {
28372 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28373 tree comdat_key = get_identifier (ref->info);
28374 /* Terminate the previous .debug_macinfo section. */
28375 dw2_asm_output_data (1, 0, "End compilation unit");
28376 targetm.asm_out.named_section (debug_macinfo_section_name,
28377 SECTION_DEBUG
28378 | SECTION_LINKONCE
28379 | (early_lto_debug
28380 ? SECTION_EXCLUDE : 0),
28381 comdat_key);
28382 ASM_GENERATE_INTERNAL_LABEL (label,
28383 DEBUG_MACRO_SECTION_LABEL,
28384 ref->lineno + macinfo_label_base);
28385 ASM_OUTPUT_LABEL (asm_out_file, label);
28386 ref->code = 0;
28387 ref->info = NULL;
28388 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28389 "DWARF macro version number");
28390 if (DWARF_OFFSET_SIZE == 8)
28391 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28392 else
28393 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28394 }
28395 break;
28396 case DW_MACINFO_define:
28397 case DW_MACINFO_undef:
28398 output_macinfo_op (ref);
28399 ref->code = 0;
28400 ref->info = NULL;
28401 break;
28402 default:
28403 gcc_unreachable ();
28404 }
28405
28406 macinfo_label_base += macinfo_label_base_adj;
28407 }
28408
28409 /* Initialize the various sections and labels for dwarf output, using
28410 the early LTO debug sections if EARLY_LTO_DEBUG. Returns the generation
28411 (zero-based count of how many times the function has been called). */
28412
28413 static unsigned
28414 init_sections_and_labels (bool early_lto_debug)
28415 {
28416 /* As we may get called multiple times have a generation count for
28417 labels. */
28418 static unsigned generation = 0;
28419
28420 if (early_lto_debug)
28421 {
28422 if (!dwarf_split_debug_info)
28423 {
28424 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28425 SECTION_DEBUG | SECTION_EXCLUDE,
28426 NULL);
28427 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28428 SECTION_DEBUG | SECTION_EXCLUDE,
28429 NULL);
28430 debug_macinfo_section_name
28431 = ((dwarf_strict && dwarf_version < 5)
28432 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28433 debug_macinfo_section = get_section (debug_macinfo_section_name,
28434 SECTION_DEBUG
28435 | SECTION_EXCLUDE, NULL);
28436 }
28437 else
28438 {
28439 /* ??? Which of the following do we need early? */
28440 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28441 SECTION_DEBUG | SECTION_EXCLUDE,
28442 NULL);
28443 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28444 SECTION_DEBUG | SECTION_EXCLUDE,
28445 NULL);
28446 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28447 SECTION_DEBUG
28448 | SECTION_EXCLUDE, NULL);
28449 debug_skeleton_abbrev_section
28450 = get_section (DEBUG_LTO_ABBREV_SECTION,
28451 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28452 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28453 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28454 generation);
28455
28456 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28457 stay in the main .o, but the skeleton_line goes into the split
28458 off dwo. */
28459 debug_skeleton_line_section
28460 = get_section (DEBUG_LTO_LINE_SECTION,
28461 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28462 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28463 DEBUG_SKELETON_LINE_SECTION_LABEL,
28464 generation);
28465 debug_str_offsets_section
28466 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28467 SECTION_DEBUG | SECTION_EXCLUDE,
28468 NULL);
28469 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28470 DEBUG_SKELETON_INFO_SECTION_LABEL,
28471 generation);
28472 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28473 DEBUG_STR_DWO_SECTION_FLAGS,
28474 NULL);
28475 debug_macinfo_section_name
28476 = ((dwarf_strict && dwarf_version < 5)
28477 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28478 debug_macinfo_section = get_section (debug_macinfo_section_name,
28479 SECTION_DEBUG | SECTION_EXCLUDE,
28480 NULL);
28481 }
28482 /* For macro info and the file table we have to refer to a
28483 debug_line section. */
28484 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28485 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28486 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28487 DEBUG_LINE_SECTION_LABEL, generation);
28488
28489 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28490 DEBUG_STR_SECTION_FLAGS
28491 | SECTION_EXCLUDE, NULL);
28492 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28493 debug_line_str_section
28494 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28495 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28496 }
28497 else
28498 {
28499 if (!dwarf_split_debug_info)
28500 {
28501 debug_info_section = get_section (DEBUG_INFO_SECTION,
28502 SECTION_DEBUG, NULL);
28503 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28504 SECTION_DEBUG, NULL);
28505 debug_loc_section = get_section (dwarf_version >= 5
28506 ? DEBUG_LOCLISTS_SECTION
28507 : DEBUG_LOC_SECTION,
28508 SECTION_DEBUG, NULL);
28509 debug_macinfo_section_name
28510 = ((dwarf_strict && dwarf_version < 5)
28511 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28512 debug_macinfo_section = get_section (debug_macinfo_section_name,
28513 SECTION_DEBUG, NULL);
28514 }
28515 else
28516 {
28517 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28518 SECTION_DEBUG | SECTION_EXCLUDE,
28519 NULL);
28520 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28521 SECTION_DEBUG | SECTION_EXCLUDE,
28522 NULL);
28523 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28524 SECTION_DEBUG, NULL);
28525 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28526 SECTION_DEBUG, NULL);
28527 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28528 SECTION_DEBUG, NULL);
28529 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28530 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28531 generation);
28532
28533 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28534 stay in the main .o, but the skeleton_line goes into the
28535 split off dwo. */
28536 debug_skeleton_line_section
28537 = get_section (DEBUG_DWO_LINE_SECTION,
28538 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28539 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28540 DEBUG_SKELETON_LINE_SECTION_LABEL,
28541 generation);
28542 debug_str_offsets_section
28543 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28544 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28545 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28546 DEBUG_SKELETON_INFO_SECTION_LABEL,
28547 generation);
28548 debug_loc_section = get_section (dwarf_version >= 5
28549 ? DEBUG_DWO_LOCLISTS_SECTION
28550 : DEBUG_DWO_LOC_SECTION,
28551 SECTION_DEBUG | SECTION_EXCLUDE,
28552 NULL);
28553 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28554 DEBUG_STR_DWO_SECTION_FLAGS,
28555 NULL);
28556 debug_macinfo_section_name
28557 = ((dwarf_strict && dwarf_version < 5)
28558 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28559 debug_macinfo_section = get_section (debug_macinfo_section_name,
28560 SECTION_DEBUG | SECTION_EXCLUDE,
28561 NULL);
28562 }
28563 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28564 SECTION_DEBUG, NULL);
28565 debug_line_section = get_section (DEBUG_LINE_SECTION,
28566 SECTION_DEBUG, NULL);
28567 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28568 SECTION_DEBUG, NULL);
28569 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28570 SECTION_DEBUG, NULL);
28571 debug_str_section = get_section (DEBUG_STR_SECTION,
28572 DEBUG_STR_SECTION_FLAGS, NULL);
28573 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28574 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28575 DEBUG_STR_SECTION_FLAGS, NULL);
28576
28577 debug_ranges_section = get_section (dwarf_version >= 5
28578 ? DEBUG_RNGLISTS_SECTION
28579 : DEBUG_RANGES_SECTION,
28580 SECTION_DEBUG, NULL);
28581 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28582 SECTION_DEBUG, NULL);
28583 }
28584
28585 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28586 DEBUG_ABBREV_SECTION_LABEL, generation);
28587 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28588 DEBUG_INFO_SECTION_LABEL, generation);
28589 info_section_emitted = false;
28590 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28591 DEBUG_LINE_SECTION_LABEL, generation);
28592 /* There are up to 4 unique ranges labels per generation.
28593 See also output_rnglists. */
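/* Generation G thus reserves label numbers 4*G .. 4*G + 3: the ranges
   section label uses 4*G and, for split DWARF 5, the ranges base label
   below uses 4*G + 1 (presumably the remaining two are emitted by
   output_rnglists).  */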
28594 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28595 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28596 if (dwarf_version >= 5 && dwarf_split_debug_info)
28597 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28598 DEBUG_RANGES_SECTION_LABEL,
28599 1 + generation * 4);
28600 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28601 DEBUG_ADDR_SECTION_LABEL, generation);
28602 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28603 (dwarf_strict && dwarf_version < 5)
28604 ? DEBUG_MACINFO_SECTION_LABEL
28605 : DEBUG_MACRO_SECTION_LABEL, generation);
28606 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28607 generation);
28608
28609 ++generation;
28610 return generation - 1;
28611 }
28612
28613 /* Set up for Dwarf output at the start of compilation. */
28614
28615 static void
28616 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28617 {
28618 /* Allocate the file_table. */
28619 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28620
28621 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28622 /* Allocate the decl_die_table. */
28623 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28624
28625 /* Allocate the decl_loc_table. */
28626 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28627
28628 /* Allocate the cached_dw_loc_list_table. */
28629 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28630
28631 /* Allocate the initial hunk of the abbrev_die_table. */
28632 vec_alloc (abbrev_die_table, 256);
28633 /* Zero-th entry is allocated, but unused. */
28634 abbrev_die_table->quick_push (NULL);
28635
28636 /* Allocate the dwarf_proc_stack_usage_map. */
28637 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28638
28639 /* Allocate the pubtypes and pubnames vectors. */
28640 vec_alloc (pubname_table, 32);
28641 vec_alloc (pubtype_table, 32);
28642
28643 vec_alloc (incomplete_types, 64);
28644
28645 vec_alloc (used_rtx_array, 32);
28646
28647 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28648 vec_alloc (macinfo_table, 64);
28649 #endif
28650
28651 /* If front-ends already registered a main translation unit but we were not
28652 ready to perform the association, do this now. */
28653 if (main_translation_unit != NULL_TREE)
28654 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28655 }
28656
28657 /* Called before compile () starts outputting functions, variables
28658 and toplevel asms into assembly. */
28659
28660 static void
28661 dwarf2out_assembly_start (void)
28662 {
28663 if (text_section_line_info)
28664 return;
28665
28666 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28667 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28668 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28669 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28670 COLD_TEXT_SECTION_LABEL, 0);
28671 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28672
28673 switch_to_section (text_section);
28674 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28675 #endif
28676
28677 /* Make sure the line number table for .text always exists. */
28678 text_section_line_info = new_line_info_table ();
28679 text_section_line_info->end_label = text_end_label;
28680
28681 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28682 cur_line_info_table = text_section_line_info;
28683 #endif
28684
28685 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28686 && dwarf2out_do_cfi_asm ()
28687 && !dwarf2out_do_eh_frame ())
28688 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28689 }
28690
28691 /* A helper function for dwarf2out_finish called through
28692 htab_traverse. Assign a string its index. All strings must be
28693 collected into the table by the time index_string is called,
28694 because the indexing code relies on htab_traverse to traverse nodes
28695 in the same order for each run. */
28696
28697 int
28698 index_string (indirect_string_node **h, unsigned int *index)
28699 {
28700 indirect_string_node *node = *h;
28701
28702 find_string_form (node);
28703 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28704 {
28705 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28706 node->index = *index;
28707 *index += 1;
28708 }
28709 return 1;
28710 }
28711
28712 /* A helper function for output_indirect_strings called through
28713 htab_traverse. Output the offset to a string and update the
28714 current offset. */
28715
28716 int
28717 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28718 {
28719 indirect_string_node *node = *h;
28720
28721 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28722 {
28723 /* Assert that this node has been assigned an index. */
28724 gcc_assert (node->index != NO_INDEX_ASSIGNED
28725 && node->index != NOT_INDEXED);
28726 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28727 "indexed string 0x%x: %s", node->index, node->str);
28728 *offset += strlen (node->str) + 1;
28729 }
28730 return 1;
28731 }
28732
28733 /* A helper function for dwarf2out_finish called through
28734 htab_traverse. Output the indexed string. */
28735
28736 int
28737 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28738 {
28739 struct indirect_string_node *node = *h;
28740
28741 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28742 {
28743 /* Assert that the strings are output in the same order as their
28744 indexes were assigned. */
28745 gcc_assert (*cur_idx == node->index);
28746 assemble_string (node->str, strlen (node->str) + 1);
28747 *cur_idx += 1;
28748 }
28749 return 1;
28750 }
28751
28752 /* A helper function for output_indirect_strings. Counts the number
28753 of indexed string offsets. Must match the logic of the functions
28754 output_index_string[_offset] above. */
28755 int
28756 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28757 {
28758 struct indirect_string_node *node = *h;
28759
28760 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28761 *last_idx += 1;
28762 return 1;
28763 }
28764
28765 /* A helper function for dwarf2out_finish called through
28766 htab_traverse. Emit one queued .debug_str string. */
28767
28768 int
28769 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28770 {
28771 struct indirect_string_node *node = *h;
28772
28773 node->form = find_string_form (node);
28774 if (node->form == form && node->refcount > 0)
28775 {
28776 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28777 assemble_string (node->str, strlen (node->str) + 1);
28778 }
28779
28780 return 1;
28781 }
28782
28783 /* Output the indexed string table. */
28784
28785 static void
28786 output_indirect_strings (void)
28787 {
28788 switch_to_section (debug_str_section);
28789 if (!dwarf_split_debug_info)
28790 debug_str_hash->traverse<enum dwarf_form,
28791 output_indirect_string> (DW_FORM_strp);
28792 else
28793 {
28794 unsigned int offset = 0;
28795 unsigned int cur_idx = 0;
28796
28797 if (skeleton_debug_str_hash)
28798 skeleton_debug_str_hash->traverse<enum dwarf_form,
28799 output_indirect_string> (DW_FORM_strp);
28800
28801 switch_to_section (debug_str_offsets_section);
28802 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28803 header. Note that we don't need to generate a label for the
28804 actual index table following the header here, because this is
28805 for the split dwarf case only. In a .dwo file there is only
28806 one string offsets table (and one debug info section). But
28807 if we were to start using string offset tables for the main (or
28808 skeleton) unit, we would have to add a DW_AT_str_offsets_base
28809 attribute pointing to the actual index after the header. Split
28810 dwarf units will never have a string offsets base attribute. When
28811 a split unit is moved into a .dwp file, the string offsets can
28812 be found through the .debug_cu_index section table. */
28813 if (dwarf_version >= 5)
28814 {
28815 unsigned int last_idx = 0;
28816 unsigned long str_offsets_length;
28817
28818 debug_str_hash->traverse_noresize
28819 <unsigned int *, count_index_strings> (&last_idx);
28820 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
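/* The unit length covers one offset per indexed string plus the 2-byte
   version and 2-byte padding emitted below; e.g. (illustrative) ten
   strings with 4-byte offsets give 10 * 4 + 4 = 44.  */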
28821 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28822 dw2_asm_output_data (4, 0xffffffff,
28823 "Escape value for 64-bit DWARF extension");
28824 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28825 "Length of string offsets unit");
28826 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28827 dw2_asm_output_data (2, 0, "Header zero padding");
28828 }
28829 debug_str_hash->traverse_noresize
28830 <unsigned int *, output_index_string_offset> (&offset);
28831 switch_to_section (debug_str_dwo_section);
28832 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28833 (&cur_idx);
28834 }
28835 }
28836
28837 /* Callback for htab_traverse to write an entry of the address table to
28838 the .debug_addr section, checking that entries come in index order. */
28839
28840 int
28841 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28842 {
28843 addr_table_entry *entry = *slot;
28844
28845 if (entry->refcount == 0)
28846 {
28847 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28848 || entry->index == NOT_INDEXED);
28849 return 1;
28850 }
28851
28852 gcc_assert (entry->index == *cur_index);
28853 (*cur_index)++;
28854
28855 switch (entry->kind)
28856 {
28857 case ate_kind_rtx:
28858 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28859 "0x%x", entry->index);
28860 break;
28861 case ate_kind_rtx_dtprel:
28862 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28863 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28864 DWARF2_ADDR_SIZE,
28865 entry->addr.rtl);
28866 fputc ('\n', asm_out_file);
28867 break;
28868 case ate_kind_label:
28869 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28870 "0x%x", entry->index);
28871 break;
28872 default:
28873 gcc_unreachable ();
28874 }
28875 return 1;
28876 }
28877
28878 /* A helper function for dwarf2out_finish. Counts the number
28879 of indexed addresses. Must match the logic of the function
28880 output_addr_table_entry above. */
28881 int
28882 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28883 {
28884 addr_table_entry *entry = *slot;
28885
28886 if (entry->refcount > 0)
28887 *last_idx += 1;
28888 return 1;
28889 }
28890
28891 /* Produce the .debug_addr section. */
28892
28893 static void
28894 output_addr_table (void)
28895 {
28896 unsigned int index = 0;
28897 if (addr_index_table == NULL || addr_index_table->size () == 0)
28898 return;
28899
28900 switch_to_section (debug_addr_section);
28901 addr_index_table
28902 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28903 }
28904
28905 #if ENABLE_ASSERT_CHECKING
28906 /* Verify that all marks are clear. */
28907
28908 static void
28909 verify_marks_clear (dw_die_ref die)
28910 {
28911 dw_die_ref c;
28912
28913 gcc_assert (! die->die_mark);
28914 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28915 }
28916 #endif /* ENABLE_ASSERT_CHECKING */
28917
28918 /* Clear the marks for a die and its children.
28919 Be cool if the mark isn't set. */
28920
28921 static void
28922 prune_unmark_dies (dw_die_ref die)
28923 {
28924 dw_die_ref c;
28925
28926 if (die->die_mark)
28927 die->die_mark = 0;
28928 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28929 }
28930
28931 /* Given LOC that is referenced by a DIE we're marking as used, find all
28932 the DWARF procedures it references and mark them as used as well. */
28933
28934 static void
28935 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28936 {
28937 for (; loc != NULL; loc = loc->dw_loc_next)
28938 switch (loc->dw_loc_opc)
28939 {
28940 case DW_OP_implicit_pointer:
28941 case DW_OP_convert:
28942 case DW_OP_reinterpret:
28943 case DW_OP_GNU_implicit_pointer:
28944 case DW_OP_GNU_convert:
28945 case DW_OP_GNU_reinterpret:
28946 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28947 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28948 break;
28949 case DW_OP_GNU_variable_value:
28950 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28951 {
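/* Resolve the decl reference to its DIE now, so the operand can be
   marked like any other die_ref operand below; if no DIE exists for
   the decl yet, leave the operand unchanged.  */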
28952 dw_die_ref ref
28953 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28954 if (ref == NULL)
28955 break;
28956 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28957 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28958 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28959 }
28960 /* FALLTHRU */
28961 case DW_OP_call2:
28962 case DW_OP_call4:
28963 case DW_OP_call_ref:
28964 case DW_OP_const_type:
28965 case DW_OP_GNU_const_type:
28966 case DW_OP_GNU_parameter_ref:
28967 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28968 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28969 break;
28970 case DW_OP_regval_type:
28971 case DW_OP_deref_type:
28972 case DW_OP_GNU_regval_type:
28973 case DW_OP_GNU_deref_type:
28974 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28975 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28976 break;
28977 case DW_OP_entry_value:
28978 case DW_OP_GNU_entry_value:
28979 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28980 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28981 break;
28982 default:
28983 break;
28984 }
28985 }
28986
28987 /* Given DIE that we're marking as used, find any other dies
28988 it references as attributes and mark them as used. */
28989
28990 static void
28991 prune_unused_types_walk_attribs (dw_die_ref die)
28992 {
28993 dw_attr_node *a;
28994 unsigned ix;
28995
28996 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28997 {
28998 switch (AT_class (a))
28999 {
29000 /* Make sure DWARF procedures referenced by location descriptions will
29001 get emitted. */
29002 case dw_val_class_loc:
29003 prune_unused_types_walk_loc_descr (AT_loc (a));
29004 break;
29005 case dw_val_class_loc_list:
29006 for (dw_loc_list_ref list = AT_loc_list (a);
29007 list != NULL;
29008 list = list->dw_loc_next)
29009 prune_unused_types_walk_loc_descr (list->expr);
29010 break;
29011
29012 case dw_val_class_view_list:
29013 /* This points to a loc_list in another attribute, so it's
29014 already covered. */
29015 break;
29016
29017 case dw_val_class_die_ref:
29018 /* A reference to another DIE.
29019 Make sure that it will get emitted.
29020 If it was broken out into a comdat group, don't follow it. */
29021 if (! AT_ref (a)->comdat_type_p
29022 || a->dw_attr == DW_AT_specification)
29023 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29024 break;
29025
29026 case dw_val_class_str:
29027 /* Set the string's refcount to 0 so that prune_unused_types_update_strings
29028    can account properly for it later. */
29029 a->dw_attr_val.v.val_str->refcount = 0;
29030 break;
29031
29032 default:
29033 break;
29034 }
29035 }
29036 }
29037
29038 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29039
29040 static void
29041 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29042 {
29043 dw_die_ref c;
29044
29045 if (die == NULL || die->die_child == NULL)
29046 return;
29047 c = die->die_child;
29048 do
29049 {
29050 if (is_template_parameter (c))
29051 prune_unused_types_mark (c, 1);
29052 c = c->die_sib;
29053 } while (c && c != die->die_child);
29054 }
29055
29056 /* Mark DIE as being used. If DOKIDS is true, then walk down
29057 to DIE's children. */
29058
29059 static void
29060 prune_unused_types_mark (dw_die_ref die, int dokids)
29061 {
29062 dw_die_ref c;
29063
29064 if (die->die_mark == 0)
29065 {
29066 /* We haven't done this node yet. Mark it as used. */
29067 die->die_mark = 1;
29068 /* If this is the DIE of a generic type instantiation,
29069 mark the children DIEs that describe its generic parms and
29070 args. */
29071 prune_unused_types_mark_generic_parms_dies (die);
29072
29073 /* We also have to mark its parents as used.
29074 (But we don't want to mark our parent's kids due to this,
29075 unless it is a class.) */
29076 if (die->die_parent)
29077 prune_unused_types_mark (die->die_parent,
29078 class_scope_p (die->die_parent));
29079
29080 /* Mark any referenced nodes. */
29081 prune_unused_types_walk_attribs (die);
29082
29083 /* If this node is a declaration,
29084    also mark its definition, if it exists. */
29085 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29086 prune_unused_types_mark (die->die_definition, 1);
29087 }
29088
29089 if (dokids && die->die_mark != 2)
29090 {
29091 /* We need to walk the children, but haven't done so yet.
29092 Remember that we've walked the kids. */
29093 die->die_mark = 2;
29094
29095 /* If this is an array type, we need to make sure our
29096 kids get marked, even if they're types. If we're
29097 breaking out types into comdat sections, do this
29098 for all type definitions. */
29099 if (die->die_tag == DW_TAG_array_type
29100 || (use_debug_types
29101 && is_type_die (die) && ! is_declaration_die (die)))
29102 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29103 else
29104 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29105 }
29106 }
29107
29108 /* For local classes, check whether any static member functions were
29109    emitted and, if so, mark them. */
29110
29111 static void
29112 prune_unused_types_walk_local_classes (dw_die_ref die)
29113 {
29114 dw_die_ref c;
29115
29116 if (die->die_mark == 2)
29117 return;
29118
29119 switch (die->die_tag)
29120 {
29121 case DW_TAG_structure_type:
29122 case DW_TAG_union_type:
29123 case DW_TAG_class_type:
29124 break;
29125
29126 case DW_TAG_subprogram:
29127 if (!get_AT_flag (die, DW_AT_declaration)
29128 || die->die_definition != NULL)
29129 prune_unused_types_mark (die, 1);
29130 return;
29131
29132 default:
29133 return;
29134 }
29135
29136 /* Mark children. */
29137 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29138 }
29139
29140 /* Walk the tree DIE and mark types that we actually use. */
29141
29142 static void
29143 prune_unused_types_walk (dw_die_ref die)
29144 {
29145 dw_die_ref c;
29146
29147 /* Don't do anything if this node is already marked and
29148 children have been marked as well. */
29149 if (die->die_mark == 2)
29150 return;
29151
29152 switch (die->die_tag)
29153 {
29154 case DW_TAG_structure_type:
29155 case DW_TAG_union_type:
29156 case DW_TAG_class_type:
29157 if (die->die_perennial_p)
29158 break;
29159
29160 for (c = die->die_parent; c; c = c->die_parent)
29161 if (c->die_tag == DW_TAG_subprogram)
29162 break;
29163
29164 /* Finding used static member functions inside of classes
29165 is needed just for local classes, because for other classes
29166 static member function DIEs with DW_AT_specification
29167 are emitted outside of the DW_TAG_*_type. If we ever change
29168 it, we'd need to call this even for non-local classes. */
29169 if (c)
29170 prune_unused_types_walk_local_classes (die);
29171
29172 /* It's a type node --- don't mark it. */
29173 return;
29174
29175 case DW_TAG_const_type:
29176 case DW_TAG_packed_type:
29177 case DW_TAG_pointer_type:
29178 case DW_TAG_reference_type:
29179 case DW_TAG_rvalue_reference_type:
29180 case DW_TAG_volatile_type:
29181 case DW_TAG_typedef:
29182 case DW_TAG_array_type:
29183 case DW_TAG_interface_type:
29184 case DW_TAG_friend:
29185 case DW_TAG_enumeration_type:
29186 case DW_TAG_subroutine_type:
29187 case DW_TAG_string_type:
29188 case DW_TAG_set_type:
29189 case DW_TAG_subrange_type:
29190 case DW_TAG_ptr_to_member_type:
29191 case DW_TAG_file_type:
29192 /* Type nodes are useful only when other DIEs reference them --- don't
29193 mark them. */
29194 /* FALLTHROUGH */
29195
29196 case DW_TAG_dwarf_procedure:
29197 /* Likewise for DWARF procedures. */
29198
29199 if (die->die_perennial_p)
29200 break;
29201
29202 return;
29203
29204 default:
29205 /* Mark everything else. */
29206 break;
29207 }
29208
29209 if (die->die_mark == 0)
29210 {
29211 die->die_mark = 1;
29212
29213 /* Now, mark any dies referenced from here. */
29214 prune_unused_types_walk_attribs (die);
29215 }
29216
29217 die->die_mark = 2;
29218
29219 /* Mark children. */
29220 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29221 }
29222
29223 /* Increment the string counts on strings referred to from DIE's
29224 attributes. */
29225
29226 static void
29227 prune_unused_types_update_strings (dw_die_ref die)
29228 {
29229 dw_attr_node *a;
29230 unsigned ix;
29231
29232 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29233 if (AT_class (a) == dw_val_class_str)
29234 {
29235 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29236 s->refcount++;
29237 /* Avoid unnecessarily putting strings that are used less than
29238 twice in the hash table. */
29239 if (s->refcount
29240 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29241 {
29242 indirect_string_node **slot
29243 = debug_str_hash->find_slot_with_hash (s->str,
29244 htab_hash_string (s->str),
29245 INSERT);
29246 gcc_assert (*slot == NULL);
29247 *slot = s;
29248 }
29249 }
29250 }
29251
29252 /* Mark DIE and its children as removed. */
29253
29254 static void
29255 mark_removed (dw_die_ref die)
29256 {
29257 dw_die_ref c;
29258 die->removed = true;
29259 FOR_EACH_CHILD (die, c, mark_removed (c));
29260 }
29261
29262 /* Remove from the tree DIE any dies that aren't marked. */
29263
29264 static void
29265 prune_unused_types_prune (dw_die_ref die)
29266 {
29267 dw_die_ref c;
29268
29269 gcc_assert (die->die_mark);
29270 prune_unused_types_update_strings (die);
29271
29272 if (! die->die_child)
29273 return;
29274
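   /* DIE's children are kept on a circular singly-linked list:
      DIE->die_child points to the last child, whose die_sib points back
      to the first.  Walk that ring, splicing out runs of unmarked
      children and recursing into the marked ones.  */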
29275 c = die->die_child;
29276 do {
29277 dw_die_ref prev = c, next;
29278 for (c = c->die_sib; ! c->die_mark; c = next)
29279 if (c == die->die_child)
29280 {
29281 /* No marked children between 'prev' and the end of the list. */
29282 if (prev == c)
29283 /* No marked children at all. */
29284 die->die_child = NULL;
29285 else
29286 {
29287 prev->die_sib = c->die_sib;
29288 die->die_child = prev;
29289 }
29290 c->die_sib = NULL;
29291 mark_removed (c);
29292 return;
29293 }
29294 else
29295 {
29296 next = c->die_sib;
29297 c->die_sib = NULL;
29298 mark_removed (c);
29299 }
29300
29301 if (c != prev->die_sib)
29302 prev->die_sib = c;
29303 prune_unused_types_prune (c);
29304 } while (c != die->die_child);
29305 }
29306
29307 /* Remove dies representing declarations that we never use. */
29308
29309 static void
29310 prune_unused_types (void)
29311 {
29312 unsigned int i;
29313 limbo_die_node *node;
29314 comdat_type_node *ctnode;
29315 pubname_entry *pub;
29316 dw_die_ref base_type;
29317
29318 #if ENABLE_ASSERT_CHECKING
29319 /* All the marks should already be clear. */
29320 verify_marks_clear (comp_unit_die ());
29321 for (node = limbo_die_list; node; node = node->next)
29322 verify_marks_clear (node->die);
29323 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29324 verify_marks_clear (ctnode->root_die);
29325 #endif /* ENABLE_ASSERT_CHECKING */
29326
29327 /* Mark types that are used in global variables. */
29328 premark_types_used_by_global_vars ();
29329
29330 /* Set the mark on nodes that are actually used. */
29331 prune_unused_types_walk (comp_unit_die ());
29332 for (node = limbo_die_list; node; node = node->next)
29333 prune_unused_types_walk (node->die);
29334 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29335 {
29336 prune_unused_types_walk (ctnode->root_die);
29337 prune_unused_types_mark (ctnode->type_die, 1);
29338 }
29339
29340 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29341 are unusual in that they are pubnames that are the children of pubtypes.
29342 They should only be marked via their parent DW_TAG_enumeration_type die,
29343 not as roots in themselves. */
29344 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29345 if (pub->die->die_tag != DW_TAG_enumerator)
29346 prune_unused_types_mark (pub->die, 1);
29347 for (i = 0; base_types.iterate (i, &base_type); i++)
29348 prune_unused_types_mark (base_type, 1);
29349
29350 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29351 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29352 callees). */
29353 cgraph_node *cnode;
29354 FOR_EACH_FUNCTION (cnode)
29355 if (cnode->referred_to_p (false))
29356 {
29357 dw_die_ref die = lookup_decl_die (cnode->decl);
29358 if (die == NULL || die->die_mark)
29359 continue;
29360 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29361 if (e->caller != cnode
29362 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29363 {
29364 prune_unused_types_mark (die, 1);
29365 break;
29366 }
29367 }
29368
29369 if (debug_str_hash)
29370 debug_str_hash->empty ();
29371 if (skeleton_debug_str_hash)
29372 skeleton_debug_str_hash->empty ();
29373 prune_unused_types_prune (comp_unit_die ());
29374 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29375 {
29376 node = *pnode;
29377 if (!node->die->die_mark)
29378 *pnode = node->next;
29379 else
29380 {
29381 prune_unused_types_prune (node->die);
29382 pnode = &node->next;
29383 }
29384 }
29385 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29386 prune_unused_types_prune (ctnode->root_die);
29387
29388 /* Leave the marks clear. */
29389 prune_unmark_dies (comp_unit_die ());
29390 for (node = limbo_die_list; node; node = node->next)
29391 prune_unmark_dies (node->die);
29392 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29393 prune_unmark_dies (ctnode->root_die);
29394 }
29395
29396 /* Helpers to manipulate hash table of comdat type units. */
29397
29398 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29399 {
29400 static inline hashval_t hash (const comdat_type_node *);
29401 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29402 };
29403
29404 inline hashval_t
29405 comdat_type_hasher::hash (const comdat_type_node *type_node)
29406 {
29407 hashval_t h;
29408 memcpy (&h, type_node->signature, sizeof (h));
29409 return h;
29410 }
29411
29412 inline bool
29413 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29414 const comdat_type_node *type_node_2)
29415 {
29416 return (! memcmp (type_node_1->signature, type_node_2->signature,
29417 DWARF_TYPE_SIGNATURE_SIZE));
29418 }
29419
29420 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29421    to the location where it would have been added had we known its
29422    DECL_ASSEMBLER_NAME when the other attributes were added. This will
29423    probably improve the compactness of the debug info by removing
29424    equivalent abbrevs, and hide any differences caused by deferring the
29425    computation of the assembler name, triggered e.g. by PCH. */
29426
29427 static inline void
29428 move_linkage_attr (dw_die_ref die)
29429 {
29430 unsigned ix = vec_safe_length (die->die_attr);
29431 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29432
29433 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29434 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29435
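   /* Scan backwards for the insertion point: the linkage name belongs
      right after DW_AT_name or the decl coordinates, if present.  */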
29436 while (--ix > 0)
29437 {
29438 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29439
29440 if (prev->dw_attr == DW_AT_decl_line
29441 || prev->dw_attr == DW_AT_decl_column
29442 || prev->dw_attr == DW_AT_name)
29443 break;
29444 }
29445
29446 if (ix != vec_safe_length (die->die_attr) - 1)
29447 {
29448 die->die_attr->pop ();
29449 die->die_attr->quick_insert (ix, linkage);
29450 }
29451 }
29452
29453 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29454 referenced from typed stack ops and count how often they are used. */
29455
29456 static void
29457 mark_base_types (dw_loc_descr_ref loc)
29458 {
29459 dw_die_ref base_type = NULL;
29460
29461 for (; loc; loc = loc->dw_loc_next)
29462 {
29463 switch (loc->dw_loc_opc)
29464 {
29465 case DW_OP_regval_type:
29466 case DW_OP_deref_type:
29467 case DW_OP_GNU_regval_type:
29468 case DW_OP_GNU_deref_type:
29469 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29470 break;
29471 case DW_OP_convert:
29472 case DW_OP_reinterpret:
29473 case DW_OP_GNU_convert:
29474 case DW_OP_GNU_reinterpret:
29475 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29476 continue;
29477 /* FALLTHRU */
29478 case DW_OP_const_type:
29479 case DW_OP_GNU_const_type:
29480 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29481 break;
29482 case DW_OP_entry_value:
29483 case DW_OP_GNU_entry_value:
29484 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29485 continue;
29486 default:
29487 continue;
29488 }
29489 gcc_assert (base_type->die_parent == comp_unit_die ());
29490 if (base_type->die_mark)
29491 base_type->die_mark++;
29492 else
29493 {
29494 base_types.safe_push (base_type);
29495 base_type->die_mark = 1;
29496 }
29497 }
29498 }
29499
29500 /* Comparison function for sorting marked base types. */
29501
29502 static int
29503 base_type_cmp (const void *x, const void *y)
29504 {
29505 dw_die_ref dx = *(const dw_die_ref *) x;
29506 dw_die_ref dy = *(const dw_die_ref *) y;
29507 unsigned int byte_size1, byte_size2;
29508 unsigned int encoding1, encoding2;
29509 unsigned int align1, align2;
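   /* More frequently used base types (higher die_mark counts) sort first
      so that their DIE references get the smallest uleb128 encodings;
      ties are broken by byte size, encoding and alignment to keep the
      order deterministic.  */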
29510 if (dx->die_mark > dy->die_mark)
29511 return -1;
29512 if (dx->die_mark < dy->die_mark)
29513 return 1;
29514 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29515 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29516 if (byte_size1 < byte_size2)
29517 return 1;
29518 if (byte_size1 > byte_size2)
29519 return -1;
29520 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29521 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29522 if (encoding1 < encoding2)
29523 return 1;
29524 if (encoding1 > encoding2)
29525 return -1;
29526 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29527 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29528 if (align1 < align2)
29529 return 1;
29530 if (align1 > align2)
29531 return -1;
29532 return 0;
29533 }
29534
29535 /* Move base types marked by mark_base_types as early as possible
29536 in the CU, sorted by decreasing usage count both to make the
29537 uleb128 references as small as possible and to make sure they
29538 will have die_offset already computed by calc_die_sizes when
29539    sizes of typed stack loc ops are computed. */
29540
29541 static void
29542 move_marked_base_types (void)
29543 {
29544 unsigned int i;
29545 dw_die_ref base_type, die, c;
29546
29547 if (base_types.is_empty ())
29548 return;
29549
29550 /* Sort by decreasing usage count, they will be added again in that
29551 order later on. */
29552 base_types.qsort (base_type_cmp);
29553 die = comp_unit_die ();
29554 c = die->die_child;
29555 do
29556 {
29557 dw_die_ref prev = c;
29558 c = c->die_sib;
29559 while (c->die_mark)
29560 {
29561 remove_child_with_prev (c, prev);
29562 /* As base types got marked, there must be at least
29563 one node other than DW_TAG_base_type. */
29564 gcc_assert (die->die_child != NULL);
29565 c = prev->die_sib;
29566 }
29567 }
29568 while (c != die->die_child);
29569 gcc_assert (die->die_child);
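   /* Re-insert the sorted base types at the front of the CU DIE's child
      list; DIE->die_child points to the last child, so linking each base
      type right after it makes the base types the first children, in
      sorted order.  */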
29570 c = die->die_child;
29571 for (i = 0; base_types.iterate (i, &base_type); i++)
29572 {
29573 base_type->die_mark = 0;
29574 base_type->die_sib = c->die_sib;
29575 c->die_sib = base_type;
29576 c = base_type;
29577 }
29578 }
29579
29580 /* Helper function for resolve_addr: attempt to resolve
29581    one CONST_STRING and return true if successful. Similarly verify that
29582    SYMBOL_REFs refer to variables emitted in the current CU. */
29583
29584 static bool
29585 resolve_one_addr (rtx *addr)
29586 {
29587 rtx rtl = *addr;
29588
29589 if (GET_CODE (rtl) == CONST_STRING)
29590 {
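      /* Rebuild the STRING_CST and look up the SYMBOL_REF of its
	 constant pool entry; fail if the literal wasn't actually
	 emitted.  */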
29591 size_t len = strlen (XSTR (rtl, 0)) + 1;
29592 tree t = build_string (len, XSTR (rtl, 0));
29593 tree tlen = size_int (len - 1);
29594 TREE_TYPE (t)
29595 = build_array_type (char_type_node, build_index_type (tlen));
29596 rtl = lookup_constant_def (t);
29597 if (!rtl || !MEM_P (rtl))
29598 return false;
29599 rtl = XEXP (rtl, 0);
29600 if (GET_CODE (rtl) == SYMBOL_REF
29601 && SYMBOL_REF_DECL (rtl)
29602 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29603 return false;
29604 vec_safe_push (used_rtx_array, rtl);
29605 *addr = rtl;
29606 return true;
29607 }
29608
29609 if (GET_CODE (rtl) == SYMBOL_REF
29610 && SYMBOL_REF_DECL (rtl))
29611 {
29612 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29613 {
29614 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29615 return false;
29616 }
29617 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29618 return false;
29619 }
29620
29621 if (GET_CODE (rtl) == CONST)
29622 {
29623 subrtx_ptr_iterator::array_type array;
29624 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29625 if (!resolve_one_addr (*iter))
29626 return false;
29627 }
29628
29629 return true;
29630 }
29631
29632 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29633 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29634 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29635
29636 static rtx
29637 string_cst_pool_decl (tree t)
29638 {
29639 rtx rtl = output_constant_def (t, 1);
29640 unsigned char *array;
29641 dw_loc_descr_ref l;
29642 tree decl;
29643 size_t len;
29644 dw_die_ref ref;
29645
29646 if (!rtl || !MEM_P (rtl))
29647 return NULL_RTX;
29648 rtl = XEXP (rtl, 0);
29649 if (GET_CODE (rtl) != SYMBOL_REF
29650 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29651 return NULL_RTX;
29652
29653 decl = SYMBOL_REF_DECL (rtl);
29654 if (!lookup_decl_die (decl))
29655 {
29656 len = TREE_STRING_LENGTH (t);
29657 vec_safe_push (used_rtx_array, rtl);
29658 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29659 array = ggc_vec_alloc<unsigned char> (len);
29660 memcpy (array, TREE_STRING_POINTER (t), len);
29661 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29662 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29663 l->dw_loc_oprnd2.v.val_vec.length = len;
29664 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29665 l->dw_loc_oprnd2.v.val_vec.array = array;
29666 add_AT_loc (ref, DW_AT_location, l);
29667 equate_decl_number_to_die (decl, ref);
29668 }
29669 return rtl;
29670 }
29671
29672 /* Helper function of resolve_addr_in_expr. LOC is
29673 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29674 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29675 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29676 with DW_OP_implicit_pointer if possible
29677    and return true; if unsuccessful, return false. */
29678
29679 static bool
29680 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29681 {
29682 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29683 HOST_WIDE_INT offset = 0;
29684 dw_die_ref ref = NULL;
29685 tree decl;
29686
29687 if (GET_CODE (rtl) == CONST
29688 && GET_CODE (XEXP (rtl, 0)) == PLUS
29689 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29690 {
29691 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29692 rtl = XEXP (XEXP (rtl, 0), 0);
29693 }
29694 if (GET_CODE (rtl) == CONST_STRING)
29695 {
29696 size_t len = strlen (XSTR (rtl, 0)) + 1;
29697 tree t = build_string (len, XSTR (rtl, 0));
29698 tree tlen = size_int (len - 1);
29699
29700 TREE_TYPE (t)
29701 = build_array_type (char_type_node, build_index_type (tlen));
29702 rtl = string_cst_pool_decl (t);
29703 if (!rtl)
29704 return false;
29705 }
29706 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29707 {
29708 decl = SYMBOL_REF_DECL (rtl);
29709 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29710 {
29711 ref = lookup_decl_die (decl);
29712 if (ref && (get_AT (ref, DW_AT_location)
29713 || get_AT (ref, DW_AT_const_value)))
29714 {
29715 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29716 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29717 loc->dw_loc_oprnd1.val_entry = NULL;
29718 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29719 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29720 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29721 loc->dw_loc_oprnd2.v.val_int = offset;
29722 return true;
29723 }
29724 }
29725 }
29726 return false;
29727 }
29728
29729 /* Helper function for resolve_addr. Handle one location
29730    expression and return false if at least one CONST_STRING or
29731    SYMBOL_REF in the location list couldn't be resolved. */
29732
29733 static bool
29734 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29735 {
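   /* KEEP, when non-NULL, marks a DW_OP_convert that follows a
      non-integral (e.g. floating point) typed stack entry and therefore
      must not be merged with a subsequent integral convert.  */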
29736 dw_loc_descr_ref keep = NULL;
29737 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29738 switch (loc->dw_loc_opc)
29739 {
29740 case DW_OP_addr:
29741 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29742 {
29743 if ((prev == NULL
29744 || prev->dw_loc_opc == DW_OP_piece
29745 || prev->dw_loc_opc == DW_OP_bit_piece)
29746 && loc->dw_loc_next
29747 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29748 && (!dwarf_strict || dwarf_version >= 5)
29749 && optimize_one_addr_into_implicit_ptr (loc))
29750 break;
29751 return false;
29752 }
29753 break;
29754 case DW_OP_GNU_addr_index:
29755 case DW_OP_addrx:
29756 case DW_OP_GNU_const_index:
29757 case DW_OP_constx:
29758 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29759 || loc->dw_loc_opc == DW_OP_addrx)
29760 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29761 || loc->dw_loc_opc == DW_OP_constx)
29762 && loc->dtprel))
29763 {
29764 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29765 if (!resolve_one_addr (&rtl))
29766 return false;
29767 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29768 loc->dw_loc_oprnd1.val_entry
29769 = add_addr_table_entry (rtl, ate_kind_rtx);
29770 }
29771 break;
29772 case DW_OP_const4u:
29773 case DW_OP_const8u:
29774 if (loc->dtprel
29775 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29776 return false;
29777 break;
29778 case DW_OP_plus_uconst:
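      /* If pushing the constant separately and adding it with DW_OP_plus
	 encodes smaller than this DW_OP_plus_uconst, rewrite the op in
	 place.  */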
29779 if (size_of_loc_descr (loc)
29780 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29781 + 1
29782 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29783 {
29784 dw_loc_descr_ref repl
29785 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29786 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29787 add_loc_descr (&repl, loc->dw_loc_next);
29788 *loc = *repl;
29789 }
29790 break;
29791 case DW_OP_implicit_value:
29792 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29793 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29794 return false;
29795 break;
29796 case DW_OP_implicit_pointer:
29797 case DW_OP_GNU_implicit_pointer:
29798 case DW_OP_GNU_parameter_ref:
29799 case DW_OP_GNU_variable_value:
29800 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29801 {
29802 dw_die_ref ref
29803 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29804 if (ref == NULL)
29805 return false;
29806 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29807 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29808 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29809 }
29810 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29811 {
29812 if (prev == NULL
29813 && loc->dw_loc_next == NULL
29814 && AT_class (a) == dw_val_class_loc)
29815 switch (a->dw_attr)
29816 {
29817 /* The following attributes allow both exprloc and reference
29818    classes, so if the whole expression is DW_OP_GNU_variable_value
29819    alone we can transform it into a reference. */
29820 case DW_AT_byte_size:
29821 case DW_AT_bit_size:
29822 case DW_AT_lower_bound:
29823 case DW_AT_upper_bound:
29824 case DW_AT_bit_stride:
29825 case DW_AT_count:
29826 case DW_AT_allocated:
29827 case DW_AT_associated:
29828 case DW_AT_byte_stride:
29829 a->dw_attr_val.val_class = dw_val_class_die_ref;
29830 a->dw_attr_val.val_entry = NULL;
29831 a->dw_attr_val.v.val_die_ref.die
29832 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29833 a->dw_attr_val.v.val_die_ref.external = 0;
29834 return true;
29835 default:
29836 break;
29837 }
29838 if (dwarf_strict)
29839 return false;
29840 }
29841 break;
29842 case DW_OP_const_type:
29843 case DW_OP_regval_type:
29844 case DW_OP_deref_type:
29845 case DW_OP_convert:
29846 case DW_OP_reinterpret:
29847 case DW_OP_GNU_const_type:
29848 case DW_OP_GNU_regval_type:
29849 case DW_OP_GNU_deref_type:
29850 case DW_OP_GNU_convert:
29851 case DW_OP_GNU_reinterpret:
29852 while (loc->dw_loc_next
29853 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29854 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29855 {
29856 dw_die_ref base1, base2;
29857 unsigned enc1, enc2, size1, size2;
29858 if (loc->dw_loc_opc == DW_OP_regval_type
29859 || loc->dw_loc_opc == DW_OP_deref_type
29860 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29861 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29862 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29863 else if (loc->dw_loc_oprnd1.val_class
29864 == dw_val_class_unsigned_const)
29865 break;
29866 else
29867 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29868 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29869 == dw_val_class_unsigned_const)
29870 break;
29871 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29872 gcc_assert (base1->die_tag == DW_TAG_base_type
29873 && base2->die_tag == DW_TAG_base_type);
29874 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29875 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29876 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29877 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29878 if (size1 == size2
29879 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29880 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29881 && loc != keep)
29882 || enc1 == enc2))
29883 {
29884 /* Optimize away next DW_OP_convert after
29885 adjusting LOC's base type die reference. */
29886 if (loc->dw_loc_opc == DW_OP_regval_type
29887 || loc->dw_loc_opc == DW_OP_deref_type
29888 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29889 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29890 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29891 else
29892 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29893 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29894 continue;
29895 }
29896 /* Don't change integer DW_OP_convert after e.g. floating
29897 point typed stack entry. */
29898 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29899 keep = loc->dw_loc_next;
29900 break;
29901 }
29902 break;
29903 default:
29904 break;
29905 }
29906 return true;
29907 }
29908
29909 /* Helper function of resolve_addr. DIE had a DW_AT_location of
29910    DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
29911    and that DW_OP_addr couldn't be resolved. resolve_addr has already
29912    removed the DW_AT_location attribute. This function attempts to
29913    add a new DW_AT_location attribute with DW_OP_implicit_pointer,
29914    or a DW_AT_const_value attribute, if possible. */
29915
29916 static void
29917 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29918 {
29919 if (!VAR_P (decl)
29920 || lookup_decl_die (decl) != die
29921 || DECL_EXTERNAL (decl)
29922 || !TREE_STATIC (decl)
29923 || DECL_INITIAL (decl) == NULL_TREE
29924 || DECL_P (DECL_INITIAL (decl))
29925 || get_AT (die, DW_AT_const_value))
29926 return;
29927
29928 tree init = DECL_INITIAL (decl);
29929 HOST_WIDE_INT offset = 0;
29930 /* For variables that have been optimized away and thus
29931 don't have a memory location, see if we can emit
29932 DW_AT_const_value instead. */
29933 if (tree_add_const_value_attribute (die, init))
29934 return;
29935 if (dwarf_strict && dwarf_version < 5)
29936 return;
29937 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29938 and ADDR_EXPR refers to a decl that has DW_AT_location or
29939 DW_AT_const_value (but isn't addressable, otherwise
29940 resolving the original DW_OP_addr wouldn't fail), see if
29941 we can add DW_OP_implicit_pointer. */
29942 STRIP_NOPS (init);
29943 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29944 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29945 {
29946 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29947 init = TREE_OPERAND (init, 0);
29948 STRIP_NOPS (init);
29949 }
29950 if (TREE_CODE (init) != ADDR_EXPR)
29951 return;
29952 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29953 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29954 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29955 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29956 && TREE_OPERAND (init, 0) != decl))
29957 {
29958 dw_die_ref ref;
29959 dw_loc_descr_ref l;
29960
29961 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29962 {
29963 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29964 if (!rtl)
29965 return;
29966 decl = SYMBOL_REF_DECL (rtl);
29967 }
29968 else
29969 decl = TREE_OPERAND (init, 0);
29970 ref = lookup_decl_die (decl);
29971 if (ref == NULL
29972 || (!get_AT (ref, DW_AT_location)
29973 && !get_AT (ref, DW_AT_const_value)))
29974 return;
29975 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29976 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29977 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29978 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29979 add_AT_loc (die, DW_AT_location, l);
29980 }
29981 }
29982
29983 /* Return NULL if L is a DWARF expression, or the first op that is
29984    not a valid DWARF expression. */
29985
29986 static dw_loc_descr_ref
29987 non_dwarf_expression (dw_loc_descr_ref l)
29988 {
29989 while (l)
29990 {
29991 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29992 return l;
29993 switch (l->dw_loc_opc)
29994 {
29995 case DW_OP_regx:
29996 case DW_OP_implicit_value:
29997 case DW_OP_stack_value:
29998 case DW_OP_implicit_pointer:
29999 case DW_OP_GNU_implicit_pointer:
30000 case DW_OP_GNU_parameter_ref:
30001 case DW_OP_piece:
30002 case DW_OP_bit_piece:
30003 return l;
30004 default:
30005 break;
30006 }
30007 l = l->dw_loc_next;
30008 }
30009 return NULL;
30010 }
30011
30012 /* Return an adjusted copy of EXPR:
30013    If it is an empty DWARF expression, return it.
30014    If it is a valid non-empty DWARF expression,
30015    return a copy of EXPR with DW_OP_deref appended to it.
30016    If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30017    copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30018    If it is a DWARF expression followed by DW_OP_stack_value, return a
30019    copy of the DWARF expression with nothing appended.
30020    Otherwise, return NULL. */
30021
30022 static dw_loc_descr_ref
30023 copy_deref_exprloc (dw_loc_descr_ref expr)
30024 {
30025 dw_loc_descr_ref tail = NULL;
30026
30027 if (expr == NULL)
30028 return NULL;
30029
30030 dw_loc_descr_ref l = non_dwarf_expression (expr);
30031 if (l && l->dw_loc_next)
30032 return NULL;
30033
30034 if (l)
30035 {
30036 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30037 tail = new_loc_descr ((enum dwarf_location_atom)
30038 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30039 0, 0);
30040 else
30041 switch (l->dw_loc_opc)
30042 {
30043 case DW_OP_regx:
30044 tail = new_loc_descr (DW_OP_bregx,
30045 l->dw_loc_oprnd1.v.val_unsigned, 0);
30046 break;
30047 case DW_OP_stack_value:
30048 break;
30049 default:
30050 return NULL;
30051 }
30052 }
30053 else
30054 tail = new_loc_descr (DW_OP_deref, 0, 0);
30055
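   /* Copy the expression up to (but not including) the non-expression op
      L, then append TAIL (DW_OP_deref, DW_OP_breg{N,x} <0>, or nothing).  */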
30056 dw_loc_descr_ref ret = NULL, *p = &ret;
30057 while (expr != l)
30058 {
30059 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30060 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30061 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30062 p = &(*p)->dw_loc_next;
30063 expr = expr->dw_loc_next;
30064 }
30065 *p = tail;
30066 return ret;
30067 }
30068
30069 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30070    reference to a variable or argument, adjust it if needed and return:
30071    -1 if the DW_AT_string_length attribute (and the
30072    DW_AT_{string_length_,}byte_size attribute, if present) should be removed,
30073    0 if the attribute should be kept, perhaps with minor modifications (no rescan needed),
30074    1 if the attribute has been successfully adjusted. */
30075
30076 static int
30077 optimize_string_length (dw_attr_node *a)
30078 {
30079 dw_loc_descr_ref l = AT_loc (a), lv;
30080 dw_die_ref die;
30081 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30082 {
30083 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30084 die = lookup_decl_die (decl);
30085 if (die)
30086 {
30087 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30088 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30089 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30090 }
30091 else
30092 return -1;
30093 }
30094 else
30095 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30096
30097 /* DWARF5 allows reference class, so we can then reference the DIE.
30098 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30099 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30100 {
30101 a->dw_attr_val.val_class = dw_val_class_die_ref;
30102 a->dw_attr_val.val_entry = NULL;
30103 a->dw_attr_val.v.val_die_ref.die = die;
30104 a->dw_attr_val.v.val_die_ref.external = 0;
30105 return 0;
30106 }
30107
30108 dw_attr_node *av = get_AT (die, DW_AT_location);
30109 dw_loc_list_ref d;
30110 bool non_dwarf_expr = false;
30111
30112 if (av == NULL)
30113 return dwarf_strict ? -1 : 0;
30114 switch (AT_class (av))
30115 {
30116 case dw_val_class_loc_list:
30117 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30118 if (d->expr && non_dwarf_expression (d->expr))
30119 non_dwarf_expr = true;
30120 break;
30121 case dw_val_class_view_list:
30122 gcc_unreachable ();
30123 case dw_val_class_loc:
30124 lv = AT_loc (av);
30125 if (lv == NULL)
30126 return dwarf_strict ? -1 : 0;
30127 if (non_dwarf_expression (lv))
30128 non_dwarf_expr = true;
30129 break;
30130 default:
30131 return dwarf_strict ? -1 : 0;
30132 }
30133
30134 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30135 into DW_OP_call4 or DW_OP_GNU_variable_value into
30136 DW_OP_call4 DW_OP_deref, do so. */
30137 if (!non_dwarf_expr
30138 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30139 {
30140 l->dw_loc_opc = DW_OP_call4;
30141 if (l->dw_loc_next)
30142 l->dw_loc_next = NULL;
30143 else
30144 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30145 return 0;
30146 }
30147
30148 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30149 copy over the DW_AT_location attribute from die to a. */
30150 if (l->dw_loc_next != NULL)
30151 {
30152 a->dw_attr_val = av->dw_attr_val;
30153 return 1;
30154 }
30155
30156 dw_loc_list_ref list, *p;
30157 switch (AT_class (av))
30158 {
30159 case dw_val_class_loc_list:
30160 p = &list;
30161 list = NULL;
30162 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30163 {
30164 lv = copy_deref_exprloc (d->expr);
30165 if (lv)
30166 {
30167 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30168 p = &(*p)->dw_loc_next;
30169 }
30170 else if (!dwarf_strict && d->expr)
30171 return 0;
30172 }
30173 if (list == NULL)
30174 return dwarf_strict ? -1 : 0;
30175 a->dw_attr_val.val_class = dw_val_class_loc_list;
30176 gen_llsym (list);
30177 *AT_loc_list_ptr (a) = list;
30178 return 1;
30179 case dw_val_class_loc:
30180 lv = copy_deref_exprloc (AT_loc (av));
30181 if (lv == NULL)
30182 return dwarf_strict ? -1 : 0;
30183 a->dw_attr_val.v.val_loc = lv;
30184 return 1;
30185 default:
30186 gcc_unreachable ();
30187 }
30188 }
30189
30190 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30191 an address in .rodata section if the string literal is emitted there,
30192 or remove the containing location list or replace DW_AT_const_value
30193 with DW_AT_location and empty location expression, if it isn't found
30194 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30195 to something that has been emitted in the current CU. */
30196
30197 static void
30198 resolve_addr (dw_die_ref die)
30199 {
30200 dw_die_ref c;
30201 dw_attr_node *a;
30202 dw_loc_list_ref *curr, *start, loc;
30203 unsigned ix;
30204 bool remove_AT_byte_size = false;
30205
30206 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30207 switch (AT_class (a))
30208 {
30209 case dw_val_class_loc_list:
30210 start = curr = AT_loc_list_ptr (a);
30211 loc = *curr;
30212 gcc_assert (loc);
30213 /* The same list can be referenced more than once. See if we have
30214 already recorded the result from a previous pass. */
30215 if (loc->replaced)
30216 *curr = loc->dw_loc_next;
30217 else if (!loc->resolved_addr)
30218 {
30219 /* As things stand, we do not expect or allow one die to
30220 reference a suffix of another die's location list chain.
30221 References must be identical or completely separate.
30222 There is therefore no need to cache the result of this
30223 pass on any list other than the first; doing so
30224 would lead to unnecessary writes. */
30225 while (*curr)
30226 {
30227 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30228 if (!resolve_addr_in_expr (a, (*curr)->expr))
30229 {
30230 dw_loc_list_ref next = (*curr)->dw_loc_next;
30231 dw_loc_descr_ref l = (*curr)->expr;
30232
30233 if (next && (*curr)->ll_symbol)
30234 {
30235 gcc_assert (!next->ll_symbol);
30236 next->ll_symbol = (*curr)->ll_symbol;
30237 next->vl_symbol = (*curr)->vl_symbol;
30238 }
30239 if (dwarf_split_debug_info)
30240 remove_loc_list_addr_table_entries (l);
30241 *curr = next;
30242 }
30243 else
30244 {
30245 mark_base_types ((*curr)->expr);
30246 curr = &(*curr)->dw_loc_next;
30247 }
30248 }
30249 if (loc == *start)
30250 loc->resolved_addr = 1;
30251 else
30252 {
30253 loc->replaced = 1;
30254 loc->dw_loc_next = *start;
30255 }
30256 }
30257 if (!*start)
30258 {
30259 remove_AT (die, a->dw_attr);
30260 ix--;
30261 }
30262 break;
30263 case dw_val_class_view_list:
30264 {
30265 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30266 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30267 dw_val_node *llnode
30268 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30269 /* If we no longer have a loclist, or it no longer needs
30270 views, drop this attribute. */
30271 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30272 {
30273 remove_AT (die, a->dw_attr);
30274 ix--;
30275 }
30276 break;
30277 }
30278 case dw_val_class_loc:
30279 {
30280 dw_loc_descr_ref l = AT_loc (a);
30281 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30282 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30283 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30284 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30285 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30286 with DW_FORM_ref referencing the same DIE as
30287 DW_OP_GNU_variable_value used to reference. */
30288 if (a->dw_attr == DW_AT_string_length
30289 && l
30290 && l->dw_loc_opc == DW_OP_GNU_variable_value
30291 && (l->dw_loc_next == NULL
30292 || (l->dw_loc_next->dw_loc_next == NULL
30293 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30294 {
30295 switch (optimize_string_length (a))
30296 {
30297 case -1:
30298 remove_AT (die, a->dw_attr);
30299 ix--;
30300 /* If we drop DW_AT_string_length, we need to drop also
30301 DW_AT_{string_length_,}byte_size. */
30302 remove_AT_byte_size = true;
30303 continue;
30304 default:
30305 break;
30306 case 1:
30307 /* Even if we keep the optimized DW_AT_string_length,
30308 it might have changed AT_class, so process it again. */
30309 ix--;
30310 continue;
30311 }
30312 }
30313 /* For -gdwarf-2 don't attempt to optimize
30314 DW_AT_data_member_location containing
30315 DW_OP_plus_uconst - older consumers might
30316 rely on it being that op instead of a more complex,
30317 but shorter, location description. */
30318 if ((dwarf_version > 2
30319 || a->dw_attr != DW_AT_data_member_location
30320 || l == NULL
30321 || l->dw_loc_opc != DW_OP_plus_uconst
30322 || l->dw_loc_next != NULL)
30323 && !resolve_addr_in_expr (a, l))
30324 {
30325 if (dwarf_split_debug_info)
30326 remove_loc_list_addr_table_entries (l);
30327 if (l != NULL
30328 && l->dw_loc_next == NULL
30329 && l->dw_loc_opc == DW_OP_addr
30330 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30331 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30332 && a->dw_attr == DW_AT_location)
30333 {
30334 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30335 remove_AT (die, a->dw_attr);
30336 ix--;
30337 optimize_location_into_implicit_ptr (die, decl);
30338 break;
30339 }
30340 if (a->dw_attr == DW_AT_string_length)
30341 /* If we drop DW_AT_string_length, we need to drop also
30342 DW_AT_{string_length_,}byte_size. */
30343 remove_AT_byte_size = true;
30344 remove_AT (die, a->dw_attr);
30345 ix--;
30346 }
30347 else
30348 mark_base_types (l);
30349 }
30350 break;
30351 case dw_val_class_addr:
30352 if (a->dw_attr == DW_AT_const_value
30353 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30354 {
30355 if (AT_index (a) != NOT_INDEXED)
30356 remove_addr_table_entry (a->dw_attr_val.val_entry);
30357 remove_AT (die, a->dw_attr);
30358 ix--;
30359 }
30360 if ((die->die_tag == DW_TAG_call_site
30361 && a->dw_attr == DW_AT_call_origin)
30362 || (die->die_tag == DW_TAG_GNU_call_site
30363 && a->dw_attr == DW_AT_abstract_origin))
30364 {
30365 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30366 dw_die_ref tdie = lookup_decl_die (tdecl);
30367 dw_die_ref cdie;
30368 if (tdie == NULL
30369 && DECL_EXTERNAL (tdecl)
30370 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30371 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30372 {
30373 dw_die_ref pdie = cdie;
30374 /* Make sure we don't add these DIEs into type units.
30375 We could emit skeleton DIEs for context (namespaces,
30376 outer structs/classes) and a skeleton DIE for the
30377 innermost context with DW_AT_signature pointing to the
30378 type unit. See PR78835. */
30379 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30380 pdie = pdie->die_parent;
30381 if (pdie == NULL)
30382 {
30383 /* Creating a full DIE for tdecl is overly expensive and
30384 at this point even wrong when in the LTO phase
30385 as it can end up generating new type DIEs we didn't
30386 output and thus optimize_external_refs will crash. */
30387 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30388 add_AT_flag (tdie, DW_AT_external, 1);
30389 add_AT_flag (tdie, DW_AT_declaration, 1);
30390 add_linkage_attr (tdie, tdecl);
30391 add_name_and_src_coords_attributes (tdie, tdecl, true);
30392 equate_decl_number_to_die (tdecl, tdie);
30393 }
30394 }
30395 if (tdie)
30396 {
30397 a->dw_attr_val.val_class = dw_val_class_die_ref;
30398 a->dw_attr_val.v.val_die_ref.die = tdie;
30399 a->dw_attr_val.v.val_die_ref.external = 0;
30400 }
30401 else
30402 {
30403 if (AT_index (a) != NOT_INDEXED)
30404 remove_addr_table_entry (a->dw_attr_val.val_entry);
30405 remove_AT (die, a->dw_attr);
30406 ix--;
30407 }
30408 }
30409 break;
30410 default:
30411 break;
30412 }
30413
30414 if (remove_AT_byte_size)
30415 remove_AT (die, dwarf_version >= 5
30416 ? DW_AT_string_length_byte_size
30417 : DW_AT_byte_size);
30418
30419 FOR_EACH_CHILD (die, c, resolve_addr (c));
30420 }
30421 \f
30422 /* Helper routines for optimize_location_lists.
30423    This pass tries to share identical location lists in the .debug_loc
30424 section. */
30425
30426 /* Iteratively hash operands of LOC opcode into HSTATE. */
30427
30428 static void
30429 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30430 {
30431 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30432 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30433
30434 switch (loc->dw_loc_opc)
30435 {
30436 case DW_OP_const4u:
30437 case DW_OP_const8u:
30438 if (loc->dtprel)
30439 goto hash_addr;
30440 /* FALLTHRU */
30441 case DW_OP_const1u:
30442 case DW_OP_const1s:
30443 case DW_OP_const2u:
30444 case DW_OP_const2s:
30445 case DW_OP_const4s:
30446 case DW_OP_const8s:
30447 case DW_OP_constu:
30448 case DW_OP_consts:
30449 case DW_OP_pick:
30450 case DW_OP_plus_uconst:
30451 case DW_OP_breg0:
30452 case DW_OP_breg1:
30453 case DW_OP_breg2:
30454 case DW_OP_breg3:
30455 case DW_OP_breg4:
30456 case DW_OP_breg5:
30457 case DW_OP_breg6:
30458 case DW_OP_breg7:
30459 case DW_OP_breg8:
30460 case DW_OP_breg9:
30461 case DW_OP_breg10:
30462 case DW_OP_breg11:
30463 case DW_OP_breg12:
30464 case DW_OP_breg13:
30465 case DW_OP_breg14:
30466 case DW_OP_breg15:
30467 case DW_OP_breg16:
30468 case DW_OP_breg17:
30469 case DW_OP_breg18:
30470 case DW_OP_breg19:
30471 case DW_OP_breg20:
30472 case DW_OP_breg21:
30473 case DW_OP_breg22:
30474 case DW_OP_breg23:
30475 case DW_OP_breg24:
30476 case DW_OP_breg25:
30477 case DW_OP_breg26:
30478 case DW_OP_breg27:
30479 case DW_OP_breg28:
30480 case DW_OP_breg29:
30481 case DW_OP_breg30:
30482 case DW_OP_breg31:
30483 case DW_OP_regx:
30484 case DW_OP_fbreg:
30485 case DW_OP_piece:
30486 case DW_OP_deref_size:
30487 case DW_OP_xderef_size:
30488 hstate.add_object (val1->v.val_int);
30489 break;
30490 case DW_OP_skip:
30491 case DW_OP_bra:
30492 {
30493 int offset;
30494
30495 gcc_assert (val1->val_class == dw_val_class_loc);
30496 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30497 hstate.add_object (offset);
30498 }
30499 break;
30500 case DW_OP_implicit_value:
30501 hstate.add_object (val1->v.val_unsigned);
30502 switch (val2->val_class)
30503 {
30504 case dw_val_class_const:
30505 hstate.add_object (val2->v.val_int);
30506 break;
30507 case dw_val_class_vec:
30508 {
30509 unsigned int elt_size = val2->v.val_vec.elt_size;
30510 unsigned int len = val2->v.val_vec.length;
30511
30512 hstate.add_int (elt_size);
30513 hstate.add_int (len);
30514 hstate.add (val2->v.val_vec.array, len * elt_size);
30515 }
30516 break;
30517 case dw_val_class_const_double:
30518 hstate.add_object (val2->v.val_double.low);
30519 hstate.add_object (val2->v.val_double.high);
30520 break;
30521 case dw_val_class_wide_int:
30522 hstate.add (val2->v.val_wide->get_val (),
30523 get_full_len (*val2->v.val_wide)
30524 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30525 break;
30526 case dw_val_class_addr:
30527 inchash::add_rtx (val2->v.val_addr, hstate);
30528 break;
30529 default:
30530 gcc_unreachable ();
30531 }
30532 break;
30533 case DW_OP_bregx:
30534 case DW_OP_bit_piece:
30535 hstate.add_object (val1->v.val_int);
30536 hstate.add_object (val2->v.val_int);
30537 break;
30538 case DW_OP_addr:
30539 hash_addr:
30540 if (loc->dtprel)
30541 {
30542 unsigned char dtprel = 0xd1;
30543 hstate.add_object (dtprel);
30544 }
30545 inchash::add_rtx (val1->v.val_addr, hstate);
30546 break;
30547 case DW_OP_GNU_addr_index:
30548 case DW_OP_addrx:
30549 case DW_OP_GNU_const_index:
30550 case DW_OP_constx:
30551 {
30552 if (loc->dtprel)
30553 {
30554 unsigned char dtprel = 0xd1;
30555 hstate.add_object (dtprel);
30556 }
30557 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30558 }
30559 break;
30560 case DW_OP_implicit_pointer:
30561 case DW_OP_GNU_implicit_pointer:
30562 hstate.add_int (val2->v.val_int);
30563 break;
30564 case DW_OP_entry_value:
30565 case DW_OP_GNU_entry_value:
30566 hstate.add_object (val1->v.val_loc);
30567 break;
30568 case DW_OP_regval_type:
30569 case DW_OP_deref_type:
30570 case DW_OP_GNU_regval_type:
30571 case DW_OP_GNU_deref_type:
30572 {
30573 unsigned int byte_size
30574 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30575 unsigned int encoding
30576 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30577 hstate.add_object (val1->v.val_int);
30578 hstate.add_object (byte_size);
30579 hstate.add_object (encoding);
30580 }
30581 break;
30582 case DW_OP_convert:
30583 case DW_OP_reinterpret:
30584 case DW_OP_GNU_convert:
30585 case DW_OP_GNU_reinterpret:
30586 if (val1->val_class == dw_val_class_unsigned_const)
30587 {
30588 hstate.add_object (val1->v.val_unsigned);
30589 break;
30590 }
30591 /* FALLTHRU */
30592 case DW_OP_const_type:
30593 case DW_OP_GNU_const_type:
30594 {
30595 unsigned int byte_size
30596 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30597 unsigned int encoding
30598 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30599 hstate.add_object (byte_size);
30600 hstate.add_object (encoding);
30601 if (loc->dw_loc_opc != DW_OP_const_type
30602 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30603 break;
30604 hstate.add_object (val2->val_class);
30605 switch (val2->val_class)
30606 {
30607 case dw_val_class_const:
30608 hstate.add_object (val2->v.val_int);
30609 break;
30610 case dw_val_class_vec:
30611 {
30612 unsigned int elt_size = val2->v.val_vec.elt_size;
30613 unsigned int len = val2->v.val_vec.length;
30614
30615 hstate.add_object (elt_size);
30616 hstate.add_object (len);
30617 hstate.add (val2->v.val_vec.array, len * elt_size);
30618 }
30619 break;
30620 case dw_val_class_const_double:
30621 hstate.add_object (val2->v.val_double.low);
30622 hstate.add_object (val2->v.val_double.high);
30623 break;
30624 case dw_val_class_wide_int:
30625 hstate.add (val2->v.val_wide->get_val (),
30626 get_full_len (*val2->v.val_wide)
30627 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30628 break;
30629 default:
30630 gcc_unreachable ();
30631 }
30632 }
30633 break;
30634
30635 default:
30636 /* Other codes have no operands. */
30637 break;
30638 }
30639 }
30640
30641 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30642
30643 static inline void
30644 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30645 {
30646 dw_loc_descr_ref l;
30647 bool sizes_computed = false;
30648 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30649 size_of_locs (loc);
30650
30651 for (l = loc; l != NULL; l = l->dw_loc_next)
30652 {
30653 enum dwarf_location_atom opc = l->dw_loc_opc;
30654 hstate.add_object (opc);
30655 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30656 {
30657 size_of_locs (loc);
30658 sizes_computed = true;
30659 }
30660 hash_loc_operands (l, hstate);
30661 }
30662 }
30663
30664 /* Compute hash of the whole location list LIST_HEAD. */
30665
30666 static inline void
30667 hash_loc_list (dw_loc_list_ref list_head)
30668 {
30669 dw_loc_list_ref curr = list_head;
30670 inchash::hash hstate;
30671
30672 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30673 {
30674 hstate.add (curr->begin, strlen (curr->begin) + 1);
30675 hstate.add (curr->end, strlen (curr->end) + 1);
30676 hstate.add_object (curr->vbegin);
30677 hstate.add_object (curr->vend);
30678 if (curr->section)
30679 hstate.add (curr->section, strlen (curr->section) + 1);
30680 hash_locs (curr->expr, hstate);
30681 }
30682 list_head->hash = hstate.end ();
30683 }
30684
30685 /* Return true if X and Y opcodes have the same operands. */
30686
30687 static inline bool
30688 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30689 {
30690 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30691 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30692 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30693 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30694
30695 switch (x->dw_loc_opc)
30696 {
30697 case DW_OP_const4u:
30698 case DW_OP_const8u:
30699 if (x->dtprel)
30700 goto hash_addr;
30701 /* FALLTHRU */
30702 case DW_OP_const1u:
30703 case DW_OP_const1s:
30704 case DW_OP_const2u:
30705 case DW_OP_const2s:
30706 case DW_OP_const4s:
30707 case DW_OP_const8s:
30708 case DW_OP_constu:
30709 case DW_OP_consts:
30710 case DW_OP_pick:
30711 case DW_OP_plus_uconst:
30712 case DW_OP_breg0:
30713 case DW_OP_breg1:
30714 case DW_OP_breg2:
30715 case DW_OP_breg3:
30716 case DW_OP_breg4:
30717 case DW_OP_breg5:
30718 case DW_OP_breg6:
30719 case DW_OP_breg7:
30720 case DW_OP_breg8:
30721 case DW_OP_breg9:
30722 case DW_OP_breg10:
30723 case DW_OP_breg11:
30724 case DW_OP_breg12:
30725 case DW_OP_breg13:
30726 case DW_OP_breg14:
30727 case DW_OP_breg15:
30728 case DW_OP_breg16:
30729 case DW_OP_breg17:
30730 case DW_OP_breg18:
30731 case DW_OP_breg19:
30732 case DW_OP_breg20:
30733 case DW_OP_breg21:
30734 case DW_OP_breg22:
30735 case DW_OP_breg23:
30736 case DW_OP_breg24:
30737 case DW_OP_breg25:
30738 case DW_OP_breg26:
30739 case DW_OP_breg27:
30740 case DW_OP_breg28:
30741 case DW_OP_breg29:
30742 case DW_OP_breg30:
30743 case DW_OP_breg31:
30744 case DW_OP_regx:
30745 case DW_OP_fbreg:
30746 case DW_OP_piece:
30747 case DW_OP_deref_size:
30748 case DW_OP_xderef_size:
30749 return valx1->v.val_int == valy1->v.val_int;
30750 case DW_OP_skip:
30751 case DW_OP_bra:
30752 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30753 can cause irrelevant differences in dw_loc_addr. */
30754 gcc_assert (valx1->val_class == dw_val_class_loc
30755 && valy1->val_class == dw_val_class_loc
30756 && (dwarf_split_debug_info
30757 || x->dw_loc_addr == y->dw_loc_addr));
30758 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30759 case DW_OP_implicit_value:
30760 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30761 || valx2->val_class != valy2->val_class)
30762 return false;
30763 switch (valx2->val_class)
30764 {
30765 case dw_val_class_const:
30766 return valx2->v.val_int == valy2->v.val_int;
30767 case dw_val_class_vec:
30768 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30769 && valx2->v.val_vec.length == valy2->v.val_vec.length
30770 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30771 valx2->v.val_vec.elt_size
30772 * valx2->v.val_vec.length) == 0;
30773 case dw_val_class_const_double:
30774 return valx2->v.val_double.low == valy2->v.val_double.low
30775 && valx2->v.val_double.high == valy2->v.val_double.high;
30776 case dw_val_class_wide_int:
30777 return *valx2->v.val_wide == *valy2->v.val_wide;
30778 case dw_val_class_addr:
30779 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30780 default:
30781 gcc_unreachable ();
30782 }
30783 case DW_OP_bregx:
30784 case DW_OP_bit_piece:
30785 return valx1->v.val_int == valy1->v.val_int
30786 && valx2->v.val_int == valy2->v.val_int;
30787 case DW_OP_addr:
30788 hash_addr:
30789 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30790 case DW_OP_GNU_addr_index:
30791 case DW_OP_addrx:
30792 case DW_OP_GNU_const_index:
30793 case DW_OP_constx:
30794 {
30795 rtx ax1 = valx1->val_entry->addr.rtl;
30796 rtx ay1 = valy1->val_entry->addr.rtl;
30797 return rtx_equal_p (ax1, ay1);
30798 }
30799 case DW_OP_implicit_pointer:
30800 case DW_OP_GNU_implicit_pointer:
30801 return valx1->val_class == dw_val_class_die_ref
30802 && valx1->val_class == valy1->val_class
30803 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30804 && valx2->v.val_int == valy2->v.val_int;
30805 case DW_OP_entry_value:
30806 case DW_OP_GNU_entry_value:
30807 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30808 case DW_OP_const_type:
30809 case DW_OP_GNU_const_type:
30810 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30811 || valx2->val_class != valy2->val_class)
30812 return false;
30813 switch (valx2->val_class)
30814 {
30815 case dw_val_class_const:
30816 return valx2->v.val_int == valy2->v.val_int;
30817 case dw_val_class_vec:
30818 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30819 && valx2->v.val_vec.length == valy2->v.val_vec.length
30820 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30821 valx2->v.val_vec.elt_size
30822 * valx2->v.val_vec.length) == 0;
30823 case dw_val_class_const_double:
30824 return valx2->v.val_double.low == valy2->v.val_double.low
30825 && valx2->v.val_double.high == valy2->v.val_double.high;
30826 case dw_val_class_wide_int:
30827 return *valx2->v.val_wide == *valy2->v.val_wide;
30828 default:
30829 gcc_unreachable ();
30830 }
30831 case DW_OP_regval_type:
30832 case DW_OP_deref_type:
30833 case DW_OP_GNU_regval_type:
30834 case DW_OP_GNU_deref_type:
30835 return valx1->v.val_int == valy1->v.val_int
30836 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30837 case DW_OP_convert:
30838 case DW_OP_reinterpret:
30839 case DW_OP_GNU_convert:
30840 case DW_OP_GNU_reinterpret:
30841 if (valx1->val_class != valy1->val_class)
30842 return false;
30843 if (valx1->val_class == dw_val_class_unsigned_const)
30844 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30845 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30846 case DW_OP_GNU_parameter_ref:
30847 return valx1->val_class == dw_val_class_die_ref
30848 && valx1->val_class == valy1->val_class
30849 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30850 default:
30851 /* Other codes have no operands. */
30852 return true;
30853 }
30854 }
30855
30856 /* Return true if DWARF location expressions X and Y are the same. */
30857
30858 static inline bool
30859 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30860 {
30861 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30862 if (x->dw_loc_opc != y->dw_loc_opc
30863 || x->dtprel != y->dtprel
30864 || !compare_loc_operands (x, y))
30865 break;
30866 return x == NULL && y == NULL;
30867 }
30868
30869 /* Hashtable helpers. */
30870
30871 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30872 {
30873 static inline hashval_t hash (const dw_loc_list_struct *);
30874 static inline bool equal (const dw_loc_list_struct *,
30875 const dw_loc_list_struct *);
30876 };
30877
30878 /* Return precomputed hash of location list X. */
30879
30880 inline hashval_t
30881 loc_list_hasher::hash (const dw_loc_list_struct *x)
30882 {
30883 return x->hash;
30884 }
30885
30886 /* Return true if location lists A and B are the same. */
30887
30888 inline bool
30889 loc_list_hasher::equal (const dw_loc_list_struct *a,
30890 const dw_loc_list_struct *b)
30891 {
30892 if (a == b)
30893 return true;
30894 if (a->hash != b->hash)
30895 return false;
30896 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30897 if (strcmp (a->begin, b->begin) != 0
30898 || strcmp (a->end, b->end) != 0
30899 || (a->section == NULL) != (b->section == NULL)
30900 || (a->section && strcmp (a->section, b->section) != 0)
30901 || a->vbegin != b->vbegin || a->vend != b->vend
30902 || !compare_locs (a->expr, b->expr))
30903 break;
30904 return a == NULL && b == NULL;
30905 }
30906
30907 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30908
30909
30910 /* Recursively optimize location lists referenced from DIE
30911 children and share them whenever possible. */
30912
30913 static void
30914 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30915 {
30916 dw_die_ref c;
30917 dw_attr_node *a;
30918 unsigned ix;
30919 dw_loc_list_struct **slot;
30920 bool drop_locviews = false;
30921 bool has_locviews = false;
30922
30923 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30924 if (AT_class (a) == dw_val_class_loc_list)
30925 {
30926 dw_loc_list_ref list = AT_loc_list (a);
30927 /* TODO: perform some optimizations here, before hashing
30928 it and storing into the hash table. */
30929 hash_loc_list (list);
30930 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30931 if (*slot == NULL)
30932 {
30933 *slot = list;
30934 if (loc_list_has_views (list))
30935 gcc_assert (list->vl_symbol);
30936 else if (list->vl_symbol)
30937 {
30938 drop_locviews = true;
30939 list->vl_symbol = NULL;
30940 }
30941 }
30942 else
30943 {
30944 if (list->vl_symbol && !(*slot)->vl_symbol)
30945 drop_locviews = true;
30946 a->dw_attr_val.v.val_loc_list = *slot;
30947 }
30948 }
30949 else if (AT_class (a) == dw_val_class_view_list)
30950 {
30951 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30952 has_locviews = true;
30953 }
30954
30955
30956 if (drop_locviews && has_locviews)
30957 remove_AT (die, DW_AT_GNU_locviews);
30958
30959 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30960 }
30961
30962
30963 /* Recursively assign each location list a unique index into the debug_addr
30964 section. */
30965
30966 static void
30967 index_location_lists (dw_die_ref die)
30968 {
30969 dw_die_ref c;
30970 dw_attr_node *a;
30971 unsigned ix;
30972
30973 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30974 if (AT_class (a) == dw_val_class_loc_list)
30975 {
30976 dw_loc_list_ref list = AT_loc_list (a);
30977 dw_loc_list_ref curr;
30978 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30979 {
30980 /* Don't index an entry that has already been indexed
30981 or won't be output. Make sure skip_loc_list_entry doesn't
30982 call size_of_locs, because that might cause a circular dependency:
30983 index_location_lists requires address table indexes to already be
30984 computed, yet it adds new indexes through add_addr_table_entry,
30985 and address table index computation requires no new additions
30986 to the hash table. In the rare case of a DWARF[234] location
30987 expression >= 64KB, we'll just waste an unused address table entry
30988 for it. */
30989 if (curr->begin_entry != NULL
30990 || skip_loc_list_entry (curr))
30991 continue;
30992
30993 curr->begin_entry
30994 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30995 }
30996 }
30997
30998 FOR_EACH_CHILD (die, c, index_location_lists (c));
30999 }
31000
31001 /* Optimize location lists referenced from DIE
31002 children and share them whenever possible. */
31003
31004 static void
31005 optimize_location_lists (dw_die_ref die)
31006 {
31007 loc_list_hash_type htab (500);
31008 optimize_location_lists_1 (die, &htab);
31009 }
31010 \f
31011 /* Traverse the limbo die list, and add parent/child links. The only
31012 dies without parents that should be here are concrete instances of
31013 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31014 For concrete instances, we can get the parent die from the abstract
31015 instance. */
31016
31017 static void
31018 flush_limbo_die_list (void)
31019 {
31020 limbo_die_node *node;
31021
31022 /* get_context_die calls force_decl_die, which can put new DIEs on the
31023 limbo list in LTO mode when nested functions are put in a different
31024 partition than that of their parent function. */
31025 while ((node = limbo_die_list))
31026 {
31027 dw_die_ref die = node->die;
31028 limbo_die_list = node->next;
31029
31030 if (die->die_parent == NULL)
31031 {
31032 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31033
31034 if (origin && origin->die_parent)
31035 add_child_die (origin->die_parent, die);
31036 else if (is_cu_die (die))
31037 ;
31038 else if (seen_error ())
31039 /* It's OK to be confused by errors in the input. */
31040 add_child_die (comp_unit_die (), die);
31041 else
31042 {
31043 /* In certain situations, the lexical block containing a
31044 nested function can be optimized away, which results
31045 in the nested function die being orphaned. Likewise
31046 with the return type of that nested function. Force
31047 this to be a child of the containing function.
31048
31049 It may happen that even the containing function got fully
31050 inlined and optimized out. In that case we are lost and
31051 assign the empty child. This should not be a big issue as
31052 the function is likely unreachable too. */
31053 gcc_assert (node->created_for);
31054
31055 if (DECL_P (node->created_for))
31056 origin = get_context_die (DECL_CONTEXT (node->created_for));
31057 else if (TYPE_P (node->created_for))
31058 origin = scope_die_for (node->created_for, comp_unit_die ());
31059 else
31060 origin = comp_unit_die ();
31061
31062 add_child_die (origin, die);
31063 }
31064 }
31065 }
31066 }
31067
31068 /* Reset DIEs so we can output them again. */
31069
31070 static void
31071 reset_dies (dw_die_ref die)
31072 {
31073 dw_die_ref c;
31074
31075 /* Remove stuff we re-generate. */
31076 die->die_mark = 0;
31077 die->die_offset = 0;
31078 die->die_abbrev = 0;
31079 remove_AT (die, DW_AT_sibling);
31080
31081 FOR_EACH_CHILD (die, c, reset_dies (c));
31082 }
31083
31084 /* Output stuff that dwarf requires at the end of every file,
31085 and generate the DWARF-2 debugging info. */
31086
31087 static void
31088 dwarf2out_finish (const char *)
31089 {
31090 comdat_type_node *ctnode;
31091 dw_die_ref main_comp_unit_die;
31092 unsigned char checksum[16];
31093 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31094
31095 /* Flush out any latecomers to the limbo party. */
31096 flush_limbo_die_list ();
31097
31098 if (inline_entry_data_table)
31099 gcc_assert (inline_entry_data_table->elements () == 0);
31100
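/* With checking enabled, verify the integrity of the main CU DIE tree
   and of all other CU DIE trees before emitting anything.  */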
31101 if (flag_checking)
31102 {
31103 verify_die (comp_unit_die ());
31104 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31105 verify_die (node->die);
31106 }
31107
31108 /* We shouldn't have any symbols with delayed asm names for
31109 DIEs generated after early finish. */
31110 gcc_assert (deferred_asm_name == NULL);
31111
31112 gen_remaining_tmpl_value_param_die_attribute ();
31113
31114 if (flag_generate_lto || flag_generate_offload)
31115 {
31116 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31117
31118 /* Prune stuff so that dwarf2out_finish runs successfully
31119 for the fat part of the object. */
31120 reset_dies (comp_unit_die ());
31121 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31122 reset_dies (node->die);
31123
31124 hash_table<comdat_type_hasher> comdat_type_table (100);
31125 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31126 {
31127 comdat_type_node **slot
31128 = comdat_type_table.find_slot (ctnode, INSERT);
31129
31130 /* Don't reset types twice. */
31131 if (*slot != HTAB_EMPTY_ENTRY)
31132 continue;
31133
31134 /* Remove the pointer to the line table. */
31135 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31136
31137 if (debug_info_level >= DINFO_LEVEL_TERSE)
31138 reset_dies (ctnode->root_die);
31139
31140 *slot = ctnode;
31141 }
31142
31143 /* Reset the CU DIE symbol so we don't output it twice. */
31144 comp_unit_die ()->die_id.die_symbol = NULL;
31145
31146 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31147 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31148 if (have_macinfo)
31149 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31150
31151 /* Remove indirect string decisions. */
31152 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31153 }
31154
31155 #if ENABLE_ASSERT_CHECKING
31156 {
31157 dw_die_ref die = comp_unit_die (), c;
31158 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31159 }
31160 #endif
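/* Resolve remaining addresses and location references in the DIEs, then
   relocate the base types marked during that pass (see resolve_addr and
   move_marked_base_types).  */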
31161 resolve_addr (comp_unit_die ());
31162 move_marked_base_types ();
31163
31164 /* Initialize sections and labels used for actual assembler output. */
31165 unsigned generation = init_sections_and_labels (false);
31166
31167 /* Traverse the DIE's and add sibling attributes to those DIE's that
31168 have children. */
31169 add_sibling_attributes (comp_unit_die ());
31170 limbo_die_node *node;
31171 for (node = cu_die_list; node; node = node->next)
31172 add_sibling_attributes (node->die);
31173 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31174 add_sibling_attributes (ctnode->root_die);
31175
31176 /* When splitting DWARF info, we put some attributes in the
31177 skeleton compile_unit DIE that remains in the .o, while
31178 most attributes go in the DWO compile_unit_die. */
31179 if (dwarf_split_debug_info)
31180 {
31181 limbo_die_node *cu;
31182 main_comp_unit_die = gen_compile_unit_die (NULL);
31183 if (dwarf_version >= 5)
31184 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31185 cu = limbo_die_list;
31186 gcc_assert (cu->die == main_comp_unit_die);
31187 limbo_die_list = limbo_die_list->next;
31188 cu->next = cu_die_list;
31189 cu_die_list = cu;
31190 }
31191 else
31192 main_comp_unit_die = comp_unit_die ();
31193
31194 /* Output a terminator label for the .text section. */
31195 switch_to_section (text_section);
31196 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31197 if (cold_text_section)
31198 {
31199 switch_to_section (cold_text_section);
31200 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31201 }
31202
31203 /* We can only use the low/high_pc attributes if all of the code was
31204 in .text. */
31205 if (!have_multiple_function_sections
31206 || (dwarf_version < 3 && dwarf_strict))
31207 {
31208 /* Don't add if the CU has no associated code. */
31209 if (text_section_used)
31210 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31211 text_end_label, true);
31212 }
31213 else
31214 {
31215 unsigned fde_idx;
31216 dw_fde_ref fde;
31217 bool range_list_added = false;
31218
31219 if (text_section_used)
31220 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31221 text_end_label, &range_list_added, true);
31222 if (cold_text_section_used)
31223 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31224 cold_end_label, &range_list_added, true);
31225
31226 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31227 {
31228 if (DECL_IGNORED_P (fde->decl))
31229 continue;
31230 if (!fde->in_std_section)
31231 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31232 fde->dw_fde_end, &range_list_added,
31233 true);
31234 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31235 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31236 fde->dw_fde_second_end, &range_list_added,
31237 true);
31238 }
31239
31240 if (range_list_added)
31241 {
31242 /* We need to give .debug_loc and .debug_ranges an appropriate
31243 "base address". Use zero so that these addresses become
31244 absolute. Historically, we've emitted the unexpected
31245 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31246 Emit both to give time for other tools to adapt. */
31247 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31248 if (! dwarf_strict && dwarf_version < 4)
31249 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31250
31251 add_ranges (NULL);
31252 }
31253 }
31254
31255 /* AIX Assembler inserts the length, so adjust the reference to match the
31256 offset expected by debuggers. */
31257 strcpy (dl_section_ref, debug_line_section_label);
31258 if (XCOFF_DEBUGGING_INFO)
31259 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31260
31261 if (debug_info_level >= DINFO_LEVEL_TERSE)
31262 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31263 dl_section_ref);
31264
31265 if (have_macinfo)
31266 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31267 macinfo_section_label);
31268
31269 if (dwarf_split_debug_info)
31270 {
31271 if (have_location_lists)
31272 {
31273 /* Since we generate the loclists in the split DWARF .dwo
31274 file itself, we don't need to generate a loclists_base
31275 attribute for the split compile unit DIE. That attribute
31276 (and using relocatable sec_offset FORMs) isn't allowed
31277 for a split compile unit. Only if the .debug_loclists
31278 section was in the main file, would we need to generate a
31279 loclists_base attribute here (for the full or skeleton
31280 unit DIE). */
31281
31282 /* optimize_location_lists calculates the size of the lists,
31283 so index them first, and assign indices to the entries.
31284 Although optimize_location_lists will remove entries from
31285 the table, it only does so for duplicates, and therefore
31286 only reduces ref_counts to 1. */
31287 index_location_lists (comp_unit_die ());
31288 }
31289
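/* Assign indexes to all entries collected in the address table.  */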
31290 if (addr_index_table != NULL)
31291 {
31292 unsigned int index = 0;
31293 addr_index_table
31294 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31295 (&index);
31296 }
31297 }
31298
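/* Reset the running location list index before optimizing the lists and,
   for DWARF 5 split debug info, assigning indexes to them below.  */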
31299 loc_list_idx = 0;
31300 if (have_location_lists)
31301 {
31302 optimize_location_lists (comp_unit_die ());
31303 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31304 if (dwarf_version >= 5 && dwarf_split_debug_info)
31305 assign_location_list_indexes (comp_unit_die ());
31306 }
31307
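/* Make sure strings needed by the macinfo section are entered into the
   string table before it is finalized (see save_macinfo_strings).  */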
31308 save_macinfo_strings ();
31309
31310 if (dwarf_split_debug_info)
31311 {
31312 unsigned int index = 0;
31313
31314 /* Add attributes common to skeleton compile_units and
31315 type_units. Because these attributes include strings, this
31316 must be done before freezing the string table. Top-level
31317 skeleton die attrs are added when the skeleton type unit is
31318 created, so ensure it is created by this point. */
31319 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31320 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31321 }
31322
31323 /* Output all of the compilation units. We put the main one last so that
31324 the offsets are available to output_pubnames. */
31325 for (node = cu_die_list; node; node = node->next)
31326 output_comp_unit (node->die, 0, NULL);
31327
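/* Output the COMDAT type units, using the hash table to make sure each
   one is emitted only once.  */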
31328 hash_table<comdat_type_hasher> comdat_type_table (100);
31329 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31330 {
31331 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31332
31333 /* Don't output duplicate types. */
31334 if (*slot != HTAB_EMPTY_ENTRY)
31335 continue;
31336
31337 /* Add a pointer to the line table for the main compilation unit
31338 so that the debugger can make sense of DW_AT_decl_file
31339 attributes. */
31340 if (debug_info_level >= DINFO_LEVEL_TERSE)
31341 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31342 (!dwarf_split_debug_info
31343 ? dl_section_ref
31344 : debug_skeleton_line_section_label));
31345
31346 output_comdat_type_unit (ctnode);
31347 *slot = ctnode;
31348 }
31349
31350 if (dwarf_split_debug_info)
31351 {
31352 int mark;
31353 struct md5_ctx ctx;
31354
31355 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31356 index_rnglists ();
31357
31358 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31359 md5_init_ctx (&ctx);
31360 mark = 0;
31361 die_checksum (comp_unit_die (), &ctx, &mark);
31362 unmark_all_dies (comp_unit_die ());
31363 md5_finish_ctx (&ctx, checksum);
31364
31365 if (dwarf_version < 5)
31366 {
31367 /* Use the first 8 bytes of the checksum as the dwo_id,
31368 and add it to both comp-unit DIEs. */
31369 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31370 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31371 }
31372
31373 /* Add the base offset of the ranges table to the skeleton
31374 comp-unit DIE. */
31375 if (!vec_safe_is_empty (ranges_table))
31376 {
31377 if (dwarf_version >= 5)
31378 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31379 ranges_base_label);
31380 else
31381 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31382 ranges_section_label);
31383 }
31384
31385 switch_to_section (debug_addr_section);
31386 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31387 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
31388 before DWARF5, didn't have a header for .debug_addr units.
31389 DWARF5 specifies a small header when address tables are used. */
31390 if (dwarf_version >= 5)
31391 {
31392 unsigned int last_idx = 0;
31393 unsigned long addrs_length;
31394
31395 addr_index_table->traverse_noresize
31396 <unsigned int *, count_index_addrs> (&last_idx);
31397 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31398
31399 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31400 dw2_asm_output_data (4, 0xffffffff,
31401 "Escape value for 64-bit DWARF extension");
31402 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31403 "Length of Address Unit");
31404 dw2_asm_output_data (2, 5, "DWARF addr version");
31405 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31406 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31407 }
31408 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31409 output_addr_table ();
31410 }
31411
31412 /* Output the main compilation unit if non-empty or if .debug_macinfo
31413 or .debug_macro will be emitted. */
31414 output_comp_unit (comp_unit_die (), have_macinfo,
31415 dwarf_split_debug_info ? checksum : NULL);
31416
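/* When splitting debug info, emit the skeleton debug sections that remain
   in the object file (see output_skeleton_debug_sections).  */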
31417 if (dwarf_split_debug_info && info_section_emitted)
31418 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31419
31420 /* Output the abbreviation table. */
31421 if (vec_safe_length (abbrev_die_table) != 1)
31422 {
31423 switch_to_section (debug_abbrev_section);
31424 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31425 output_abbrev_section ();
31426 }
31427
31428 /* Output location list section if necessary. */
31429 if (have_location_lists)
31430 {
31431 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31432 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31433 /* Output the location lists info. */
31434 switch_to_section (debug_loc_section);
31435 if (dwarf_version >= 5)
31436 {
31437 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31438 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31439 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31440 dw2_asm_output_data (4, 0xffffffff,
31441 "Initial length escape value indicating "
31442 "64-bit DWARF extension");
31443 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31444 "Length of Location Lists");
31445 ASM_OUTPUT_LABEL (asm_out_file, l1);
31446 output_dwarf_version ();
31447 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31448 dw2_asm_output_data (1, 0, "Segment Size");
31449 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31450 "Offset Entry Count");
31451 }
31452 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31453 if (dwarf_version >= 5 && dwarf_split_debug_info)
31454 {
31455 unsigned int save_loc_list_idx = loc_list_idx;
31456 loc_list_idx = 0;
31457 output_loclists_offsets (comp_unit_die ());
31458 gcc_assert (save_loc_list_idx == loc_list_idx);
31459 }
31460 output_location_lists (comp_unit_die ());
31461 if (dwarf_version >= 5)
31462 ASM_OUTPUT_LABEL (asm_out_file, l2);
31463 }
31464
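/* Emit the pubnames and pubtypes tables if they are wanted.  */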
31465 output_pubtables ();
31466
31467 /* Output the address range information if a CU (.debug_info section)
31468 was emitted. We output an empty table even if we had no functions
31469 to put in it. This is because the consumer has no way to tell the
31470 difference between an empty table that we omitted and failure to
31471 generate a table that would have contained data. */
31472 if (info_section_emitted)
31473 {
31474 switch_to_section (debug_aranges_section);
31475 output_aranges ();
31476 }
31477
31478 /* Output ranges section if necessary. */
31479 if (!vec_safe_is_empty (ranges_table))
31480 {
31481 if (dwarf_version >= 5)
31482 output_rnglists (generation);
31483 else
31484 output_ranges ();
31485 }
31486
31487 /* Have to end the macro section. */
31488 if (have_macinfo)
31489 {
31490 switch_to_section (debug_macinfo_section);
31491 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31492 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31493 : debug_skeleton_line_section_label, false);
31494 dw2_asm_output_data (1, 0, "End compilation unit");
31495 }
31496
31497 /* Output the source line correspondence table. We must do this
31498 even if there is no line information. Otherwise, on an empty
31499 translation unit, we will generate a present, but empty,
31500 .debug_info section. IRIX 6.5 `nm' will then complain when
31501 examining the file. This is done late so that any filenames
31502 used by the debug_info section are marked as 'used'. */
31503 switch_to_section (debug_line_section);
31504 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31505 if (! output_asm_line_debug_info ())
31506 output_line_info (false);
31507
31508 if (dwarf_split_debug_info && info_section_emitted)
31509 {
31510 switch_to_section (debug_skeleton_line_section);
31511 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31512 output_line_info (true);
31513 }
31514
31515 /* If we emitted any indirect strings, output the string table too. */
31516 if (debug_str_hash || skeleton_debug_str_hash)
31517 output_indirect_strings ();
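/* Likewise output any strings destined for the .debug_line_str section
   using DW_FORM_line_strp.  */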
31518 if (debug_line_str_hash)
31519 {
31520 switch_to_section (debug_line_str_section);
31521 const enum dwarf_form form = DW_FORM_line_strp;
31522 debug_line_str_hash->traverse<enum dwarf_form,
31523 output_indirect_string> (form);
31524 }
31525
31526 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31527 symview_upper_bound = 0;
31528 if (zero_view_p)
31529 bitmap_clear (zero_view_p);
31530 }
31531
31532 /* Returns a hash value for X (which really is a variable_value_struct). */
31533
31534 inline hashval_t
31535 variable_value_hasher::hash (variable_value_struct *x)
31536 {
31537 return (hashval_t) x->decl_id;
31538 }
31539
31540 /* Return nonzero if decl_id of variable_value_struct X is the same as
31541 UID of decl Y. */
31542
31543 inline bool
31544 variable_value_hasher::equal (variable_value_struct *x, tree y)
31545 {
31546 return x->decl_id == DECL_UID (y);
31547 }
31548
31549 /* Helper function for resolve_variable_value; handle
31550 DW_OP_GNU_variable_value in one location expression.
31551 Return true if the exprloc has been changed into a loclist. */
31552
31553 static bool
31554 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31555 {
31556 dw_loc_descr_ref next;
31557 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31558 {
31559 next = loc->dw_loc_next;
31560 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31561 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31562 continue;
31563
31564 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31565 if (DECL_CONTEXT (decl) != current_function_decl)
31566 continue;
31567
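/* If the variable already has a DIE, simply refer to it.  */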
31568 dw_die_ref ref = lookup_decl_die (decl);
31569 if (ref)
31570 {
31571 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31572 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31573 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31574 continue;
31575 }
31576 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31577 if (l == NULL)
31578 continue;
31579 if (l->dw_loc_next)
31580 {
31581 if (AT_class (a) != dw_val_class_loc)
31582 continue;
31583 switch (a->dw_attr)
31584 {
31585 /* The following attributes allow both exprloc and loclist
31586 classes, so we can change them into a loclist. */
31587 case DW_AT_location:
31588 case DW_AT_string_length:
31589 case DW_AT_return_addr:
31590 case DW_AT_data_member_location:
31591 case DW_AT_frame_base:
31592 case DW_AT_segment:
31593 case DW_AT_static_link:
31594 case DW_AT_use_location:
31595 case DW_AT_vtable_elem_location:
31596 if (prev)
31597 {
31598 prev->dw_loc_next = NULL;
31599 prepend_loc_descr_to_each (l, AT_loc (a));
31600 }
31601 if (next)
31602 add_loc_descr_to_each (l, next);
31603 a->dw_attr_val.val_class = dw_val_class_loc_list;
31604 a->dw_attr_val.val_entry = NULL;
31605 a->dw_attr_val.v.val_loc_list = l;
31606 have_location_lists = true;
31607 return true;
31608 /* The following attributes allow both exprloc and reference,
31609 so if the whole expression is DW_OP_GNU_variable_value alone
31610 we could transform it into a reference. */
31611 case DW_AT_byte_size:
31612 case DW_AT_bit_size:
31613 case DW_AT_lower_bound:
31614 case DW_AT_upper_bound:
31615 case DW_AT_bit_stride:
31616 case DW_AT_count:
31617 case DW_AT_allocated:
31618 case DW_AT_associated:
31619 case DW_AT_byte_stride:
31620 if (prev == NULL && next == NULL)
31621 break;
31622 /* FALLTHRU */
31623 default:
31624 if (dwarf_strict)
31625 continue;
31626 break;
31627 }
31628 /* Create a DW_TAG_variable DIE that we can refer to. */
31629 gen_decl_die (decl, NULL_TREE, NULL,
31630 lookup_decl_die (current_function_decl));
31631 ref = lookup_decl_die (decl);
31632 if (ref)
31633 {
31634 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31635 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31636 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31637 }
31638 continue;
31639 }
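/* A single-entry location list: splice its expression in place of the
   DW_OP_GNU_variable_value operation.  */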
31640 if (prev)
31641 {
31642 prev->dw_loc_next = l->expr;
31643 add_loc_descr (&prev->dw_loc_next, next);
31644 free_loc_descr (loc, NULL);
31645 next = prev->dw_loc_next;
31646 }
31647 else
31648 {
31649 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31650 add_loc_descr (&loc, next);
31651 next = loc;
31652 }
31653 loc = prev;
31654 }
31655 return false;
31656 }
31657
31658 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31659
31660 static void
31661 resolve_variable_value (dw_die_ref die)
31662 {
31663 dw_attr_node *a;
31664 dw_loc_list_ref loc;
31665 unsigned ix;
31666
31667 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31668 switch (AT_class (a))
31669 {
31670 case dw_val_class_loc:
31671 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31672 break;
31673 /* FALLTHRU */
31674 case dw_val_class_loc_list:
31675 loc = AT_loc_list (a);
31676 gcc_assert (loc);
31677 for (; loc; loc = loc->dw_loc_next)
31678 resolve_variable_value_in_expr (a, loc->expr);
31679 break;
31680 default:
31681 break;
31682 }
31683 }
31684
31685 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31686 temporaries in the current function. */
31687
31688 static void
31689 resolve_variable_values (void)
31690 {
31691 if (!variable_value_hash || !current_function_decl)
31692 return;
31693
31694 struct variable_value_struct *node
31695 = variable_value_hash->find_with_hash (current_function_decl,
31696 DECL_UID (current_function_decl));
31697
31698 if (node == NULL)
31699 return;
31700
31701 unsigned int i;
31702 dw_die_ref die;
31703 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31704 resolve_variable_value (die);
31705 }
31706
31707 /* Helper function for note_variable_value; handle one location
31708 expression. */
31709
31710 static void
31711 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31712 {
31713 for (; loc; loc = loc->dw_loc_next)
31714 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31715 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31716 {
31717 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31718 dw_die_ref ref = lookup_decl_die (decl);
31719 if (! ref && (flag_generate_lto || flag_generate_offload))
31720 {
31721 /* ??? This is somewhat of a hack because we do not create DIEs
31722 early for variables not in BLOCK trees, but when generating
31723 early LTO output we need the dw_val_class_decl_ref to be
31724 fully resolved. For fat LTO objects we'd also like to
31725 undo this after LTO dwarf output. */
31726 gcc_assert (DECL_CONTEXT (decl));
31727 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31728 gcc_assert (ctx != NULL);
31729 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31730 ref = lookup_decl_die (decl);
31731 gcc_assert (ref != NULL);
31732 }
31733 if (ref)
31734 {
31735 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31736 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31737 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31738 continue;
31739 }
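/* Otherwise remember the DIE so that resolve_variable_values can retry
   the lookup when compiling the function that contains DECL.  */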
31740 if (VAR_P (decl)
31741 && DECL_CONTEXT (decl)
31742 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31743 && lookup_decl_die (DECL_CONTEXT (decl)))
31744 {
31745 if (!variable_value_hash)
31746 variable_value_hash
31747 = hash_table<variable_value_hasher>::create_ggc (10);
31748
31749 tree fndecl = DECL_CONTEXT (decl);
31750 struct variable_value_struct *node;
31751 struct variable_value_struct **slot
31752 = variable_value_hash->find_slot_with_hash (fndecl,
31753 DECL_UID (fndecl),
31754 INSERT);
31755 if (*slot == NULL)
31756 {
31757 node = ggc_cleared_alloc<variable_value_struct> ();
31758 node->decl_id = DECL_UID (fndecl);
31759 *slot = node;
31760 }
31761 else
31762 node = *slot;
31763
31764 vec_safe_push (node->dies, die);
31765 }
31766 }
31767 }
31768
31769 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31770 with dw_val_class_decl_ref operand. */
31771
31772 static void
31773 note_variable_value (dw_die_ref die)
31774 {
31775 dw_die_ref c;
31776 dw_attr_node *a;
31777 dw_loc_list_ref loc;
31778 unsigned ix;
31779
31780 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31781 switch (AT_class (a))
31782 {
31783 case dw_val_class_loc_list:
31784 loc = AT_loc_list (a);
31785 gcc_assert (loc);
31786 if (!loc->noted_variable_value)
31787 {
31788 loc->noted_variable_value = 1;
31789 for (; loc; loc = loc->dw_loc_next)
31790 note_variable_value_in_expr (die, loc->expr);
31791 }
31792 break;
31793 case dw_val_class_loc:
31794 note_variable_value_in_expr (die, AT_loc (a));
31795 break;
31796 default:
31797 break;
31798 }
31799
31800 /* Mark children. */
31801 FOR_EACH_CHILD (die, c, note_variable_value (c));
31802 }
31803
31804 /* Perform any cleanups needed after the early debug generation pass
31805 has run. */
31806
31807 static void
31808 dwarf2out_early_finish (const char *filename)
31809 {
31810 set_early_dwarf s;
31811 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31812
31813 /* PCH might result in the DW_AT_producer string being restored from the
31814 header compilation, so always fill it with an empty string initially
31815 and overwrite it only here. */
31816 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31817 producer_string = gen_producer_string ();
31818 producer->dw_attr_val.v.val_str->refcount--;
31819 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31820
31821 /* Add the name for the main input file now. We delayed this from
31822 dwarf2out_init to avoid complications with PCH. */
31823 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31824 add_comp_dir_attribute (comp_unit_die ());
31825
31826 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31827 DW_AT_comp_dir into .debug_line_str section. */
31828 if (!output_asm_line_debug_info ()
31829 && dwarf_version >= 5
31830 && DWARF5_USE_DEBUG_LINE_STR)
31831 {
31832 for (int i = 0; i < 2; i++)
31833 {
31834 dw_attr_node *a = get_AT (comp_unit_die (),
31835 i ? DW_AT_comp_dir : DW_AT_name);
31836 if (a == NULL
31837 || AT_class (a) != dw_val_class_str
31838 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31839 continue;
31840
31841 if (! debug_line_str_hash)
31842 debug_line_str_hash
31843 = hash_table<indirect_string_hasher>::create_ggc (10);
31844
31845 struct indirect_string_node *node
31846 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31847 set_indirect_string (node);
31848 node->form = DW_FORM_line_strp;
31849 a->dw_attr_val.v.val_str->refcount--;
31850 a->dw_attr_val.v.val_str = node;
31851 }
31852 }
31853
31854 /* With LTO early dwarf was really finished at compile-time, so make
31855 sure to adjust the phase after annotating the LTRANS CU DIE. */
31856 if (in_lto_p)
31857 {
31858 early_dwarf_finished = true;
31859 return;
31860 }
31861
31862 /* Walk through the list of incomplete types again, trying once more to
31863 emit full debugging info for them. */
31864 retry_incomplete_types ();
31865
31866 /* The point here is to flush out the limbo list so that it is empty
31867 and we don't need to stream it for LTO. */
31868 flush_limbo_die_list ();
31869
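/* Generate DIEs for generic parameters that were scheduled earlier and
   for any remaining template value parameter attributes.  */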
31870 gen_scheduled_generic_parms_dies ();
31871 gen_remaining_tmpl_value_param_die_attribute ();
31872
31873 /* Add DW_AT_linkage_name for all deferred DIEs. */
31874 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31875 {
31876 tree decl = node->created_for;
31877 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31878 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31879 ended up in deferred_asm_name before we knew it was
31880 constant and never written to disk. */
31881 && DECL_ASSEMBLER_NAME (decl))
31882 {
31883 add_linkage_attr (node->die, decl);
31884 move_linkage_attr (node->die);
31885 }
31886 }
31887 deferred_asm_name = NULL;
31888
31889 if (flag_eliminate_unused_debug_types)
31890 prune_unused_types ();
31891
31892 /* Generate separate COMDAT sections for type DIEs. */
31893 if (use_debug_types)
31894 {
31895 break_out_comdat_types (comp_unit_die ());
31896
31897 /* Each new type_unit DIE was added to the limbo die list when created.
31898 Since these have all been added to comdat_type_list, clear the
31899 limbo die list. */
31900 limbo_die_list = NULL;
31901
31902 /* For each new comdat type unit, copy declarations for incomplete
31903 types to make the new unit self-contained (i.e., no direct
31904 references to the main compile unit). */
31905 for (comdat_type_node *ctnode = comdat_type_list;
31906 ctnode != NULL; ctnode = ctnode->next)
31907 copy_decls_for_unworthy_types (ctnode->root_die);
31908 copy_decls_for_unworthy_types (comp_unit_die ());
31909
31910 /* In the process of copying declarations from one unit to another,
31911 we may have left some declarations behind that are no longer
31912 referenced. Prune them. */
31913 prune_unused_types ();
31914 }
31915
31916 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31917 with dw_val_class_decl_ref operand. */
31918 note_variable_value (comp_unit_die ());
31919 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31920 note_variable_value (node->die);
31921 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31922 ctnode = ctnode->next)
31923 note_variable_value (ctnode->root_die);
31924 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31925 note_variable_value (node->die);
31926
31927 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31928 both the main_cu and all skeleton TUs. Making this call unconditional
31929 would end up either adding a second copy of the AT_pubnames attribute, or
31930 requiring a special case in add_top_level_skeleton_die_attrs. */
31931 if (!dwarf_split_debug_info)
31932 add_AT_pubnames (comp_unit_die ());
31933
31934 /* The early debug phase is now finished. */
31935 early_dwarf_finished = true;
31936
31937 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31938 if ((!flag_generate_lto && !flag_generate_offload)
31939 /* FIXME: Disable debug info generation for PE-COFF targets since the
31940 copy_lto_debug_sections operation of the simple object support in
31941 libiberty is not implemented for them yet. */
31942 || TARGET_PECOFF)
31943 return;
31944
31945 /* Now that we are going to output for LTO, initialize sections and labels
31946 to the LTO variants. We don't need a random-seed postfix like the other
31947 LTO sections, as linking the LTO debug sections into one in a partial
31948 link is fine. */
31949 init_sections_and_labels (true);
31950
31951 /* The output below is modeled after dwarf2out_finish with all
31952 location-related output removed and some LTO-specific changes.
31953 Some refactoring might make both smaller and easier to match up. */
31954
31955 /* Traverse the DIE's and add sibling attributes to those DIE's
31956 that have children. */
31957 add_sibling_attributes (comp_unit_die ());
31958 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31959 add_sibling_attributes (node->die);
31960 for (comdat_type_node *ctnode = comdat_type_list;
31961 ctnode != NULL; ctnode = ctnode->next)
31962 add_sibling_attributes (ctnode->root_die);
31963
31964 /* AIX Assembler inserts the length, so adjust the reference to match the
31965 offset expected by debuggers. */
31966 strcpy (dl_section_ref, debug_line_section_label);
31967 if (XCOFF_DEBUGGING_INFO)
31968 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31969
31970 if (debug_info_level >= DINFO_LEVEL_TERSE)
31971 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
31972
31973 if (have_macinfo)
31974 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31975 macinfo_section_label);
31976
31977 save_macinfo_strings ();
31978
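/* For split debug info, assign indexes to the strings collected so far.  */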
31979 if (dwarf_split_debug_info)
31980 {
31981 unsigned int index = 0;
31982 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31983 }
31984
31985 /* Output all of the compilation units. We put the main one last so that
31986 the offsets are available to output_pubnames. */
31987 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31988 output_comp_unit (node->die, 0, NULL);
31989
31990 hash_table<comdat_type_hasher> comdat_type_table (100);
31991 for (comdat_type_node *ctnode = comdat_type_list;
31992 ctnode != NULL; ctnode = ctnode->next)
31993 {
31994 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31995
31996 /* Don't output duplicate types. */
31997 if (*slot != HTAB_EMPTY_ENTRY)
31998 continue;
31999
32000 /* Add a pointer to the line table for the main compilation unit
32001 so that the debugger can make sense of DW_AT_decl_file
32002 attributes. */
32003 if (debug_info_level >= DINFO_LEVEL_TERSE)
32004 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32005 (!dwarf_split_debug_info
32006 ? debug_line_section_label
32007 : debug_skeleton_line_section_label));
32008
32009 output_comdat_type_unit (ctnode);
32010 *slot = ctnode;
32011 }
32012
32013 /* Attach a unique symbol to the main debuginfo section. */
32014 compute_comp_unit_symbol (comp_unit_die ());
32015
32016 /* Output the main compilation unit. We always need it if only for
32017 the CU symbol. */
32018 output_comp_unit (comp_unit_die (), true, NULL);
32019
32020 /* Output the abbreviation table. */
32021 if (vec_safe_length (abbrev_die_table) != 1)
32022 {
32023 switch_to_section (debug_abbrev_section);
32024 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32025 output_abbrev_section ();
32026 }
32027
32028 /* Have to end the macro section. */
32029 if (have_macinfo)
32030 {
32031 /* We have to save macinfo state if we need to output it again
32032 for the fat part of the object. */
32033 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32034 if (flag_fat_lto_objects)
32035 macinfo_table = macinfo_table->copy ();
32036
32037 switch_to_section (debug_macinfo_section);
32038 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32039 output_macinfo (debug_line_section_label, true);
32040 dw2_asm_output_data (1, 0, "End compilation unit");
32041
32042 if (flag_fat_lto_objects)
32043 {
32044 vec_free (macinfo_table);
32045 macinfo_table = saved_macinfo_table;
32046 }
32047 }
32048
32049 /* Emit a skeleton debug_line section. */
32050 switch_to_section (debug_line_section);
32051 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32052 output_line_info (true);
32053
32054 /* If we emitted any indirect strings, output the string table too. */
32055 if (debug_str_hash || skeleton_debug_str_hash)
32056 output_indirect_strings ();
32057
32058 /* Switch back to the text section. */
32059 switch_to_section (text_section);
32060 }
32061
32062 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32063 within the same process. For use by toplev::finalize. */
32064
32065 void
32066 dwarf2out_c_finalize (void)
32067 {
32068 last_var_location_insn = NULL;
32069 cached_next_real_insn = NULL;
32070 used_rtx_array = NULL;
32071 incomplete_types = NULL;
32072 debug_info_section = NULL;
32073 debug_skeleton_info_section = NULL;
32074 debug_abbrev_section = NULL;
32075 debug_skeleton_abbrev_section = NULL;
32076 debug_aranges_section = NULL;
32077 debug_addr_section = NULL;
32078 debug_macinfo_section = NULL;
32079 debug_line_section = NULL;
32080 debug_skeleton_line_section = NULL;
32081 debug_loc_section = NULL;
32082 debug_pubnames_section = NULL;
32083 debug_pubtypes_section = NULL;
32084 debug_str_section = NULL;
32085 debug_line_str_section = NULL;
32086 debug_str_dwo_section = NULL;
32087 debug_str_offsets_section = NULL;
32088 debug_ranges_section = NULL;
32089 debug_frame_section = NULL;
32090 fde_vec = NULL;
32091 debug_str_hash = NULL;
32092 debug_line_str_hash = NULL;
32093 skeleton_debug_str_hash = NULL;
32094 dw2_string_counter = 0;
32095 have_multiple_function_sections = false;
32096 text_section_used = false;
32097 cold_text_section_used = false;
32098 cold_text_section = NULL;
32099 current_unit_personality = NULL;
32100
32101 early_dwarf = false;
32102 early_dwarf_finished = false;
32103
32104 next_die_offset = 0;
32105 single_comp_unit_die = NULL;
32106 comdat_type_list = NULL;
32107 limbo_die_list = NULL;
32108 file_table = NULL;
32109 decl_die_table = NULL;
32110 common_block_die_table = NULL;
32111 decl_loc_table = NULL;
32112 call_arg_locations = NULL;
32113 call_arg_loc_last = NULL;
32114 call_site_count = -1;
32115 tail_call_site_count = -1;
32116 cached_dw_loc_list_table = NULL;
32117 abbrev_die_table = NULL;
32118 delete dwarf_proc_stack_usage_map;
32119 dwarf_proc_stack_usage_map = NULL;
32120 line_info_label_num = 0;
32121 cur_line_info_table = NULL;
32122 text_section_line_info = NULL;
32123 cold_text_section_line_info = NULL;
32124 separate_line_info = NULL;
32125 info_section_emitted = false;
32126 pubname_table = NULL;
32127 pubtype_table = NULL;
32128 macinfo_table = NULL;
32129 ranges_table = NULL;
32130 ranges_by_label = NULL;
32131 rnglist_idx = 0;
32132 have_location_lists = false;
32133 loclabel_num = 0;
32134 poc_label_num = 0;
32135 last_emitted_file = NULL;
32136 label_num = 0;
32137 tmpl_value_parm_die_table = NULL;
32138 generic_type_instances = NULL;
32139 frame_pointer_fb_offset = 0;
32140 frame_pointer_fb_offset_valid = false;
32141 base_types.release ();
32142 XDELETEVEC (producer_string);
32143 producer_string = NULL;
32144 }
32145
32146 #include "gt-dwarf2out.h"